hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 958k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 2
classes | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1c459cb9695ce51149e5eae19d31908ca788d5d5 | 6,562 | py | Python | seq2seq/tasks/decode_text.py | chunfengh/seq2seq | cc6e1a15f523c2ead809d48b1f6eebbeb94e3f0b | [
"Apache-2.0"
] | null | null | null | seq2seq/tasks/decode_text.py | chunfengh/seq2seq | cc6e1a15f523c2ead809d48b1f6eebbeb94e3f0b | [
"Apache-2.0"
] | null | null | null | seq2seq/tasks/decode_text.py | chunfengh/seq2seq | cc6e1a15f523c2ead809d48b1f6eebbeb94e3f0b | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Task where both the input and output sequence are plain text.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import functools
from pydoc import locate
import numpy as np
import tensorflow as tf
from tensorflow import gfile
from seq2seq.tasks.inference_task import InferenceTask, unbatch_dict
def _get_prediction_length(predictions_dict):
"""Returns the length of the prediction based on the index
of the first SEQUENCE_END token.
"""
tokens_iter = enumerate(predictions_dict["predicted_tokens"])
return next(((i + 1) for i, _ in tokens_iter if _ == "SEQUENCE_END"),
len(predictions_dict["predicted_tokens"]))
def _get_unk_mapping(filename):
  """Reads a file that specifies a mapping from source to target tokens.

  Each well-formed line has the form "<source>\\t<target>". Lines that do
  not contain a tab separator (e.g. blank lines) are now skipped instead
  of raising a ValueError during dict construction.

  Args:
    filename: path to the mapping file

  Returns:
    A dictionary that maps from source -> target tokens.
  """
  with gfile.GFile(filename, "r") as mapping_file:
    lines = mapping_file.readlines()
    # Keep only lines with a tab-separated pair; take the first two fields
    # so any trailing columns are ignored.
    pairs = [line.split("\t")[0:2] for line in lines if "\t" in line]
    mapping = {k.strip(): v.strip() for k, v in pairs}
  return mapping
def _unk_replace(source_tokens,
predicted_tokens,
attention_scores,
mapping=None):
"""Replaces UNK tokens with tokens from the source or a
provided mapping based on the attention scores.
Args:
source_tokens: A numpy array of strings.
predicted_tokens: A numpy array of strings.
attention_scores: A numeric numpy array
of shape `[prediction_length, source_length]` that contains
the attention scores.
mapping: If not provided, an UNK token is replaced with the
source token that has the highest attention score. If provided
the token is insead replaced with `mapping[chosen_source_token]`.
Returns:
A new `predicted_tokens` array.
"""
result = []
for token, scores in zip(predicted_tokens, attention_scores):
if token == "UNK":
max_score_index = np.argmax(scores)
chosen_source_token = source_tokens[max_score_index]
new_target = chosen_source_token
if mapping is not None and chosen_source_token in mapping:
new_target = mapping[chosen_source_token]
result.append(new_target)
else:
result.append(token)
return np.array(result)
class DecodeText(InferenceTask):
  """Defines inference for tasks where both the input and output sequences
  are plain text.

  Params:
    delimiter: Character by which tokens are delimited. Defaults to space.
    unk_replace: If true, enable unknown token replacement based on attention
      scores.
    unk_mapping: If `unk_replace` is true, this can be the path to a file
      defining a dictionary to improve UNK token replacement. Refer to the
      documentation for more details.
    dump_attention_dir: Save attention scores and plots to this directory.
    dump_attention_no_plot: If true, only save attention scores, not
      attention plots.
    dump_beams: Write beam search debugging information to this file.
  """
  def __init__(self, params):
    super(DecodeText, self).__init__(params)
    self._unk_mapping = None
    self._unk_replace_fn = None
    if self.params["unk_mapping"] is not None:
      self._unk_mapping = _get_unk_mapping(self.params["unk_mapping"])
    if self.params["unk_replace"]:
      self._unk_replace_fn = functools.partial(
          _unk_replace, mapping=self._unk_mapping)
    self._postproc_fn = None
    if self.params["postproc_fn"]:
      # Resolve the dotted path of the postprocessing function at init time
      # so that a bad configuration fails fast.
      self._postproc_fn = locate(self.params["postproc_fn"])
      if self._postproc_fn is None:
        raise ValueError("postproc_fn not found: {}".format(
            self.params["postproc_fn"]))

  @staticmethod
  def default_params():
    """Default parameter values for this task."""
    params = {}
    params.update({
        "delimiter": " ",
        "postproc_fn": "",
        "unk_replace": False,
        "unk_mapping": None,
    })
    return params

  def before_run(self, _run_context):
    """Request predicted tokens, source info and (when available)
    attention scores from the session for each step."""
    fetches = {}
    fetches["predicted_tokens"] = self._predictions["predicted_tokens"]
    fetches["features.source_len"] = self._predictions["features.source_len"]
    fetches["features.source_tokens"] = self._predictions[
        "features.source_tokens"]
    if "attention_scores" in self._predictions:
      fetches["attention_scores"] = self._predictions["attention_scores"]
    return tf.train.SessionRunArgs(fetches)

  def after_run(self, _run_context, run_values):
    """Decode the fetched batch and print one sentence per example."""
    fetches_batch = run_values.results
    # NOTE(review): leftover debug print — dumps the entire raw batch to
    # stdout on every step; consider removing.
    print (fetches_batch)
    for fetches in unbatch_dict(fetches_batch):
      # Convert to unicode
      fetches["predicted_tokens"] = np.char.decode(
          fetches["predicted_tokens"].astype("S"), "utf-8")
      predicted_tokens = fetches["predicted_tokens"]
      # If we're using beam search we take the first beam
      if np.ndim(predicted_tokens) > 1:
        predicted_tokens = predicted_tokens[:, 0]
      fetches["features.source_tokens"] = np.char.decode(
          fetches["features.source_tokens"].astype("S"), "utf-8")
      source_tokens = fetches["features.source_tokens"]
      source_len = fetches["features.source_len"]
      if self._unk_replace_fn is not None:
        # We slice the attention scores so that we do not
        # accidentially replace UNK with a SEQUENCE_END token
        attention_scores = fetches["attention_scores"]
        attention_scores = attention_scores[:, :source_len - 1]
        predicted_tokens = self._unk_replace_fn(
            source_tokens=source_tokens,
            predicted_tokens=predicted_tokens,
            attention_scores=attention_scores)
      # Everything up to the first SEQUENCE_END belongs to the sentence.
      sent = self.params["delimiter"].join(predicted_tokens).split(
          "SEQUENCE_END")[0]
      # Apply postproc
      if self._postproc_fn:
        sent = self._postproc_fn(sent)
      sent = sent.strip()
      print(sent.encode('utf-8'))
| 34.536842 | 77 | 0.704358 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import functools
from pydoc import locate
import numpy as np
import tensorflow as tf
from tensorflow import gfile
from seq2seq.tasks.inference_task import InferenceTask, unbatch_dict
def _get_prediction_length(predictions_dict):
tokens_iter = enumerate(predictions_dict["predicted_tokens"])
return next(((i + 1) for i, _ in tokens_iter if _ == "SEQUENCE_END"),
len(predictions_dict["predicted_tokens"]))
def _get_unk_mapping(filename):
with gfile.GFile(filename, "r") as mapping_file:
lines = mapping_file.readlines()
mapping = dict([_.split("\t")[0:2] for _ in lines])
mapping = {k.strip(): v.strip() for k, v in mapping.items()}
return mapping
def _unk_replace(source_tokens,
predicted_tokens,
attention_scores,
mapping=None):
result = []
for token, scores in zip(predicted_tokens, attention_scores):
if token == "UNK":
max_score_index = np.argmax(scores)
chosen_source_token = source_tokens[max_score_index]
new_target = chosen_source_token
if mapping is not None and chosen_source_token in mapping:
new_target = mapping[chosen_source_token]
result.append(new_target)
else:
result.append(token)
return np.array(result)
class DecodeText(InferenceTask):
def __init__(self, params):
super(DecodeText, self).__init__(params)
self._unk_mapping = None
self._unk_replace_fn = None
if self.params["unk_mapping"] is not None:
self._unk_mapping = _get_unk_mapping(self.params["unk_mapping"])
if self.params["unk_replace"]:
self._unk_replace_fn = functools.partial(
_unk_replace, mapping=self._unk_mapping)
self._postproc_fn = None
if self.params["postproc_fn"]:
self._postproc_fn = locate(self.params["postproc_fn"])
if self._postproc_fn is None:
raise ValueError("postproc_fn not found: {}".format(
self.params["postproc_fn"]))
@staticmethod
def default_params():
params = {}
params.update({
"delimiter": " ",
"postproc_fn": "",
"unk_replace": False,
"unk_mapping": None,
})
return params
def before_run(self, _run_context):
fetches = {}
fetches["predicted_tokens"] = self._predictions["predicted_tokens"]
fetches["features.source_len"] = self._predictions["features.source_len"]
fetches["features.source_tokens"] = self._predictions[
"features.source_tokens"]
if "attention_scores" in self._predictions:
fetches["attention_scores"] = self._predictions["attention_scores"]
return tf.train.SessionRunArgs(fetches)
def after_run(self, _run_context, run_values):
fetches_batch = run_values.results
print (fetches_batch)
for fetches in unbatch_dict(fetches_batch):
fetches["predicted_tokens"] = np.char.decode(
fetches["predicted_tokens"].astype("S"), "utf-8")
predicted_tokens = fetches["predicted_tokens"]
if np.ndim(predicted_tokens) > 1:
predicted_tokens = predicted_tokens[:, 0]
fetches["features.source_tokens"] = np.char.decode(
fetches["features.source_tokens"].astype("S"), "utf-8")
source_tokens = fetches["features.source_tokens"]
source_len = fetches["features.source_len"]
if self._unk_replace_fn is not None:
# We slice the attention scores so that we do not
# accidentially replace UNK with a SEQUENCE_END token
attention_scores = fetches["attention_scores"]
attention_scores = attention_scores[:, :source_len - 1]
predicted_tokens = self._unk_replace_fn(
source_tokens=source_tokens,
predicted_tokens=predicted_tokens,
attention_scores=attention_scores)
sent = self.params["delimiter"].join(predicted_tokens).split(
"SEQUENCE_END")[0]
# Apply postproc
if self._postproc_fn:
sent = self._postproc_fn(sent)
sent = sent.strip()
print(sent.encode('utf-8'))
| true | true |
1c459cbafa7959829a0eb6b44a0612c737b7663e | 782 | py | Python | jorldy/config/m_dqn/cartpole.py | Kyushik/JORLDY | 6a24a2195e5e87ade157ee53f631af2221f0a188 | [
"Apache-2.0"
] | 300 | 2021-11-03T07:06:34.000Z | 2022-03-24T02:23:56.000Z | jorldy/config/m_dqn/cartpole.py | Kyushik/JORLDY | 6a24a2195e5e87ade157ee53f631af2221f0a188 | [
"Apache-2.0"
] | 37 | 2021-11-04T04:31:07.000Z | 2022-03-30T01:40:49.000Z | jorldy/config/m_dqn/cartpole.py | Kyushik/JORLDY | 6a24a2195e5e87ade157ee53f631af2221f0a188 | [
"Apache-2.0"
] | 45 | 2021-11-03T08:05:56.000Z | 2022-03-24T08:35:05.000Z | ### Munchausen DQN CartPole Config ###
# Environment settings: discrete-action CartPole without rendering.
env = {
    "name": "cartpole",
    "action_type": "discrete",
    "render": False,
}
# Agent hyperparameters for Munchausen DQN (see the header comment above).
agent = {
    "name": "m_dqn",
    "network": "discrete_q_network",
    "gamma": 0.99,
    "epsilon_init": 1.0,
    "epsilon_min": 0.01,
    "explore_ratio": 0.2,
    "buffer_size": 50000,
    "batch_size": 32,
    "start_train_step": 2000,
    "target_update_period": 500,
    "lr_decay": True,
    # M-DQN Parameters
    "alpha": 0.9,
    "tau": 0.03,
    "l_0": -1,
}
# Optimizer settings.
optim = {
    "name": "adam",
    "lr": 0.0001,
}
# Training schedule (units: environment steps) and distributed settings.
train = {
    "training": True,
    "load_path": None,
    "run_step": 100000,
    "print_period": 1000,
    "save_period": 10000,
    "eval_iteration": 10,
    # distributed setting
    "update_period": 32,
    "num_workers": 8,
}
| 18.186047 | 38 | 0.553708 | False,
}
agent = {
"name": "m_dqn",
"network": "discrete_q_network",
"gamma": 0.99,
"epsilon_init": 1.0,
"epsilon_min": 0.01,
"explore_ratio": 0.2,
"buffer_size": 50000,
"batch_size": 32,
"start_train_step": 2000,
"target_update_period": 500,
"lr_decay": True,
"alpha": 0.9,
"tau": 0.03,
"l_0": -1,
}
optim = {
"name": "adam",
"lr": 0.0001,
}
train = {
"training": True,
"load_path": None,
"run_step": 100000,
"print_period": 1000,
"save_period": 10000,
"eval_iteration": 10,
"update_period": 32,
"num_workers": 8,
}
| true | true |
1c459d4fd01576a1d2a19cab06b15dcefae8bd24 | 336 | py | Python | setup.py | ippee/py_init | 0d997ec5ddaee95ef71562f14542e74f40e88646 | [
"CC0-1.0"
] | null | null | null | setup.py | ippee/py_init | 0d997ec5ddaee95ef71562f14542e74f40e88646 | [
"CC0-1.0"
] | null | null | null | setup.py | ippee/py_init | 0d997ec5ddaee95ef71562f14542e74f40e88646 | [
"CC0-1.0"
] | null | null | null | # coding: UTF-8
from setuptools import setup
# Template setup script: metadata fields are intentionally blank and the
# dependency/package lists are empty — fill them in before publishing.
install_requires = []
packages = []
setup(
    name='',
    version='0.1.0',
    license='',
    description='',
    author='you',
    author_email='',
    url='',
    packages=packages,
    install_requires=install_requires,
    # pytest-runner lets `python setup.py test` invoke pytest;
    # pytest-cov is pulled in for test runs as well.
    setup_requires=['pytest-runner'],
    tests_require=['pytest', "pytest-cov"]
)
| 15.272727 | 40 | 0.660714 |
from setuptools import setup
install_requires = []
packages = []
setup(
name='',
version='0.1.0',
license='',
description='',
author='you',
author_email='',
url='',
packages=packages,
install_requires=install_requires,
setup_requires=['pytest-runner'],
tests_require=['pytest', "pytest-cov"]
)
| true | true |
1c459d5ed4db13f7e8ef93008315c97790ecb9b7 | 5,618 | py | Python | commitizen/commands/init.py | christian-hawk/commitizen | 5c0dd546866f2bd2ab6b4ecd27035441b7b4692b | [
"MIT"
] | null | null | null | commitizen/commands/init.py | christian-hawk/commitizen | 5c0dd546866f2bd2ab6b4ecd27035441b7b4692b | [
"MIT"
] | null | null | null | commitizen/commands/init.py | christian-hawk/commitizen | 5c0dd546866f2bd2ab6b4ecd27035441b7b4692b | [
"MIT"
] | null | null | null | import os
import questionary
import yaml
from packaging.version import Version
from commitizen import cmd, factory, out
from commitizen.__version__ import __version__
from commitizen.config import BaseConfig, TomlConfig
from commitizen.cz import registry
from commitizen.defaults import config_files
from commitizen.exceptions import NoAnswersError
from commitizen.git import get_latest_tag_name, get_tag_names
class Init:
    """Interactive initialization of a commitizen configuration file.

    Asks the user for a config file location, commit rule, latest tag and
    tag format, persists the answers into the chosen config file, and
    optionally installs the commitizen pre-commit hook.
    """

    def __init__(self, config: BaseConfig, *args):
        self.config: BaseConfig = config
        self.cz = factory.commiter_factory(self.config)

    def __call__(self):
        """Run the interactive initialization flow."""
        values_to_add = {}

        # No config for commitizen exists yet: create one from the answers.
        if not self.config.path:
            config_path = self._ask_config_path()
            if "toml" in config_path:
                self.config = TomlConfig(data="", path=config_path)
            self.config.init_empty_config_content()

            values_to_add["name"] = self._ask_name()
            tag = self._ask_tag()
            values_to_add["version"] = Version(tag).public
            values_to_add["tag_format"] = self._ask_tag_format(tag)
            self._update_config_file(values_to_add)

            if questionary.confirm("Do you want to install pre-commit hook?").ask():
                self._install_pre_commit_hook()

            out.write("You can bump the version and create changelog running:\n")
            out.info("cz bump --changelog")
            out.success("The configuration are all set.")
        else:
            out.line(f"Config file {self.config.path} already exists")

    def _ask_config_path(self) -> str:
        """Ask which supported config file should hold the settings."""
        name = questionary.select(
            "Please choose a supported config file: (default: pyproject.toml)",
            choices=config_files,
            default="pyproject.toml",
            style=self.cz.style,
        ).ask()
        return name

    def _ask_name(self) -> str:
        """Ask which commit rule (cz plugin) to use."""
        name = questionary.select(
            "Please choose a cz (commit rule): (default: cz_conventional_commits)",
            choices=list(registry.keys()),
            default="cz_conventional_commits",
            style=self.cz.style,
        ).ask()
        return name

    def _ask_tag(self) -> str:
        """Determine the latest tag, defaulting to "0.0.1" when none exist.

        Raises:
            NoAnswersError: if the user aborts the manual tag selection.
        """
        latest_tag = get_latest_tag_name()
        if not latest_tag:
            out.error("No Existing Tag. Set tag to v0.0.1")
            return "0.0.1"

        is_correct_tag = questionary.confirm(
            f"Is {latest_tag} the latest tag?", style=self.cz.style, default=False
        ).ask()
        if not is_correct_tag:
            tags = get_tag_names()
            if not tags:
                out.error("No Existing Tag. Set tag to v0.0.1")
                return "0.0.1"

            # Reuse the tag list fetched above instead of querying git again.
            latest_tag = questionary.select(
                "Please choose the latest tag: ",
                choices=tags,
                style=self.cz.style,
            ).ask()

            if not latest_tag:
                raise NoAnswersError("Tag is required!")
        return latest_tag

    def _ask_tag_format(self, latest_tag) -> str:
        """Derive the tag format, suggesting "v$version" for v-prefixed tags."""
        is_correct_format = False
        if latest_tag.startswith("v"):
            tag_format = r"v$version"
            is_correct_format = questionary.confirm(
                f'Is "{tag_format}" the correct tag format?', style=self.cz.style
            ).ask()

        if not is_correct_format:
            tag_format = questionary.text(
                'Please enter the correct version format: (default: "$version")',
                style=self.cz.style,
            ).ask()

            if not tag_format:
                tag_format = "$version"
        return tag_format

    def _install_pre_commit_hook(self):
        """Add the commitizen hook to .pre-commit-config.yaml and install it."""
        pre_commit_config_filename = ".pre-commit-config.yaml"
        cz_hook_config = {
            "repo": "https://github.com/commitizen-tools/commitizen",
            "rev": f"v{__version__}",
            "hooks": [{"id": "commitizen", "stages": ["commit-msg"]}],
        }

        config_data = {}
        if not os.path.isfile(pre_commit_config_filename):
            # .pre-commit-config does not exist
            config_data["repos"] = [cz_hook_config]
        else:
            with open(pre_commit_config_filename) as config_file:
                yaml_data = yaml.safe_load(config_file)
                if yaml_data:
                    config_data = yaml_data

            if "repos" in config_data:
                for pre_commit_hook in config_data["repos"]:
                    if "commitizen" in pre_commit_hook["repo"]:
                        out.write("commitizen already in pre-commit config")
                        break
                else:
                    config_data["repos"].append(cz_hook_config)
            else:
                # .pre-commit-config exists but there's no "repos" key
                config_data["repos"] = [cz_hook_config]

        with open(pre_commit_config_filename, "w") as config_file:
            yaml.safe_dump(config_data, stream=config_file)

        c = cmd.run("pre-commit install --hook-type commit-msg")
        if c.return_code == 127:
            # Typo fixed: "environement" -> "environment".
            out.error(
                "pre-commit is not installed in current environment.\n"
                "Run 'pre-commit install --hook-type commit-msg' again after it's installed"
            )
        elif c.return_code != 0:
            out.error(c.err)
        else:
            out.write("commitizen pre-commit hook is now installed in your '.git'\n")

    def _update_config_file(self, values):
        """Persist each (key, value) answer into the config file."""
        for key, value in values.items():
            self.config.set_key(key, value)
| 36.245161 | 92 | 0.58455 | import os
import questionary
import yaml
from packaging.version import Version
from commitizen import cmd, factory, out
from commitizen.__version__ import __version__
from commitizen.config import BaseConfig, TomlConfig
from commitizen.cz import registry
from commitizen.defaults import config_files
from commitizen.exceptions import NoAnswersError
from commitizen.git import get_latest_tag_name, get_tag_names
class Init:
def __init__(self, config: BaseConfig, *args):
self.config: BaseConfig = config
self.cz = factory.commiter_factory(self.config)
def __call__(self):
values_to_add = {}
if not self.config.path:
config_path = self._ask_config_path()
if "toml" in config_path:
self.config = TomlConfig(data="", path=config_path)
self.config.init_empty_config_content()
values_to_add["name"] = self._ask_name()
tag = self._ask_tag()
values_to_add["version"] = Version(tag).public
values_to_add["tag_format"] = self._ask_tag_format(tag)
self._update_config_file(values_to_add)
if questionary.confirm("Do you want to install pre-commit hook?").ask():
self._install_pre_commit_hook()
out.write("You can bump the version and create changelog running:\n")
out.info("cz bump --changelog")
out.success("The configuration are all set.")
else:
out.line(f"Config file {self.config.path} already exists")
def _ask_config_path(self) -> str:
name = questionary.select(
"Please choose a supported config file: (default: pyproject.toml)",
choices=config_files,
default="pyproject.toml",
style=self.cz.style,
).ask()
return name
def _ask_name(self) -> str:
name = questionary.select(
"Please choose a cz (commit rule): (default: cz_conventional_commits)",
choices=list(registry.keys()),
default="cz_conventional_commits",
style=self.cz.style,
).ask()
return name
def _ask_tag(self) -> str:
latest_tag = get_latest_tag_name()
if not latest_tag:
out.error("No Existing Tag. Set tag to v0.0.1")
return "0.0.1"
is_correct_tag = questionary.confirm(
f"Is {latest_tag} the latest tag?", style=self.cz.style, default=False
).ask()
if not is_correct_tag:
tags = get_tag_names()
if not tags:
out.error("No Existing Tag. Set tag to v0.0.1")
return "0.0.1"
latest_tag = questionary.select(
"Please choose the latest tag: ",
choices=get_tag_names(),
style=self.cz.style,
).ask()
if not latest_tag:
raise NoAnswersError("Tag is required!")
return latest_tag
def _ask_tag_format(self, latest_tag) -> str:
is_correct_format = False
if latest_tag.startswith("v"):
tag_format = r"v$version"
is_correct_format = questionary.confirm(
f'Is "{tag_format}" the correct tag format?', style=self.cz.style
).ask()
if not is_correct_format:
tag_format = questionary.text(
'Please enter the correct version format: (default: "$version")',
style=self.cz.style,
).ask()
if not tag_format:
tag_format = "$version"
return tag_format
def _install_pre_commit_hook(self):
pre_commit_config_filename = ".pre-commit-config.yaml"
cz_hook_config = {
"repo": "https://github.com/commitizen-tools/commitizen",
"rev": f"v{__version__}",
"hooks": [{"id": "commitizen", "stages": ["commit-msg"]}],
}
config_data = {}
if not os.path.isfile(pre_commit_config_filename):
config_data["repos"] = [cz_hook_config]
else:
with open(pre_commit_config_filename) as config_file:
yaml_data = yaml.safe_load(config_file)
if yaml_data:
config_data = yaml_data
if "repos" in config_data:
for pre_commit_hook in config_data["repos"]:
if "commitizen" in pre_commit_hook["repo"]:
out.write("commitizen already in pre-commit config")
break
else:
config_data["repos"].append(cz_hook_config)
else:
config_data["repos"] = [cz_hook_config]
with open(pre_commit_config_filename, "w") as config_file:
yaml.safe_dump(config_data, stream=config_file)
c = cmd.run("pre-commit install --hook-type commit-msg")
if c.return_code == 127:
out.error(
"pre-commit is not installed in current environement.\n"
"Run 'pre-commit install --hook-type commit-msg' again after it's installed"
)
elif c.return_code != 0:
out.error(c.err)
else:
out.write("commitizen pre-commit hook is now installed in your '.git'\n")
def _update_config_file(self, values):
for key, value in values.items():
self.config.set_key(key, value)
| true | true |
1c459db6c393559c2cd965467577c6bdcb250d28 | 1,090 | py | Python | hpc-historias-clinicas/fojas_quirurgicas/migrations/0005_auto_20150505_0101.py | btenaglia/hpc-historias-clinicas | 649d8660381381b1c591667760c122d73071d5ec | [
"BSD-3-Clause"
] | null | null | null | hpc-historias-clinicas/fojas_quirurgicas/migrations/0005_auto_20150505_0101.py | btenaglia/hpc-historias-clinicas | 649d8660381381b1c591667760c122d73071d5ec | [
"BSD-3-Clause"
] | null | null | null | hpc-historias-clinicas/fojas_quirurgicas/migrations/0005_auto_20150505_0101.py | btenaglia/hpc-historias-clinicas | 649d8660381381b1c591667760c122d73071d5ec | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
class Migration(migrations.Migration):
    """Auto-generated Django migration altering fields of `fojasquirurgicas`.

    The hard-coded datetime defaults below were frozen at the moment
    `makemigrations` ran — presumably the model defaults are eagerly
    evaluated `datetime`/`now` expressions; verify against the model
    definition (a known artifact of non-callable defaults).
    """

    dependencies = [
        ('fojas_quirurgicas', '0004_auto_20150504_2120'),
    ]

    operations = [
        # New frozen default for the surgery date.
        migrations.AlterField(
            model_name='fojasquirurgicas',
            name='fecha',
            field=models.DateField(default=datetime.datetime(2015, 5, 5, 1, 1, 1, 702694)),
            preserve_default=True,
        ),
        # Start-of-operation time (Spanish verbose_name kept verbatim).
        migrations.AlterField(
            model_name='fojasquirurgicas',
            name='hora_comienzo',
            field=models.TimeField(default=datetime.datetime(2015, 5, 5, 1, 1, 1, 702739), verbose_name='Hora / Comienzo Operac\xf3n'),
            preserve_default=True,
        ),
        # End-of-operation time (Spanish verbose_name kept verbatim).
        migrations.AlterField(
            model_name='fojasquirurgicas',
            name='hora_fin',
            field=models.TimeField(default=datetime.datetime(2015, 5, 5, 1, 1, 1, 702778), verbose_name='Hora / Termin\xf3 Operac\xf3n'),
            preserve_default=True,
        ),
    ]
| 32.058824 | 137 | 0.612844 |
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
class Migration(migrations.Migration):
dependencies = [
('fojas_quirurgicas', '0004_auto_20150504_2120'),
]
operations = [
migrations.AlterField(
model_name='fojasquirurgicas',
name='fecha',
field=models.DateField(default=datetime.datetime(2015, 5, 5, 1, 1, 1, 702694)),
preserve_default=True,
),
migrations.AlterField(
model_name='fojasquirurgicas',
name='hora_comienzo',
field=models.TimeField(default=datetime.datetime(2015, 5, 5, 1, 1, 1, 702739), verbose_name='Hora / Comienzo Operac\xf3n'),
preserve_default=True,
),
migrations.AlterField(
model_name='fojasquirurgicas',
name='hora_fin',
field=models.TimeField(default=datetime.datetime(2015, 5, 5, 1, 1, 1, 702778), verbose_name='Hora / Termin\xf3 Operac\xf3n'),
preserve_default=True,
),
]
| true | true |
1c459dbc87ad166cc650a1298f694761c0c2d4ae | 14,998 | py | Python | utils.py | RachithP/rpg_public_dronet | 244b44c6d321e77cfe326071f8413ea1f7e438cb | [
"MIT"
] | null | null | null | utils.py | RachithP/rpg_public_dronet | 244b44c6d321e77cfe326071f8413ea1f7e438cb | [
"MIT"
] | null | null | null | utils.py | RachithP/rpg_public_dronet | 244b44c6d321e77cfe326071f8413ea1f7e438cb | [
"MIT"
] | 1 | 2019-12-10T02:48:20.000Z | 2019-12-10T02:48:20.000Z | import re
import os
import numpy as np
import tensorflow as tf
import json
import time
from keras import backend as K
from keras.preprocessing.image import Iterator
from keras.preprocessing.image import ImageDataGenerator
from keras.utils.generic_utils import Progbar
from keras.models import model_from_json
import img_utils
class DroneDataGenerator(ImageDataGenerator):
    """
    Generate minibatches of images and labels with real-time augmentation.

    The only function that changes w.r.t. the parent class is `flow_from_directory`,
    which needed adaptation for the experiment directory structure and labels.
    All the remaining functions remain unchanged.

    For an example usage, see the evaluate.py script.
    """
    def flow_from_directory(self, directory, target_size=(224,224),
            crop_size=(250,250), color_mode='grayscale', batch_size=32,
            shuffle=True, seed=None, follow_links=False):
        """Return a DroneDirectoryIterator over `directory`.

        The iterator understands the experiment layout described on
        DroneDirectoryIterator (images/ + sync_steering.txt or labels.txt).
        """
        return DroneDirectoryIterator(
                directory, self,
                target_size=target_size, crop_size=crop_size, color_mode=color_mode,
                batch_size=batch_size, shuffle=shuffle, seed=seed,
                follow_links=follow_links)
class DroneDirectoryIterator(Iterator):
    """
    Class for managing data loading of images and labels.

    We assume that the folder structure is:
    root_folder/
           folder_1/
                    images/
                    sync_steering.txt or labels.txt
           folder_2/
                    images/
                    sync_steering.txt or labels.txt
           .
           .
           folder_n/
                    images/
                    sync_steering.txt or labels.txt

    # Arguments
       directory: Path to the root directory to read data from.
       image_data_generator: Image Generator.
       target_size: tuple of integers, dimensions to resize input images to.
       crop_size: tuple of integers, dimensions to crop input images.
       color_mode: One of `"rgb"`, `"grayscale"`. Color mode to read images.
       batch_size: The desired batch size
       shuffle: Whether to shuffle data or not
       seed : numpy seed to shuffle data
       follow_links: Bool, whether to follow symbolic links or not

    # TODO: Add functionality to save images to have a look at the augmentation
    """
    def __init__(self, directory, image_data_generator,
            target_size=(224,224), crop_size = (250,250), color_mode='grayscale',
            batch_size=32, shuffle=True, seed=None, follow_links=False):
        self.directory = directory
        self.image_data_generator = image_data_generator
        self.target_size = tuple(target_size)
        self.crop_size = tuple(crop_size)
        self.follow_links = follow_links
        if color_mode not in {'rgb', 'grayscale'}:
            raise ValueError('Invalid color mode:', color_mode,
                             '; expected "rgb" or "grayscale".')
        self.color_mode = color_mode
        # Channel count depends on color mode: 3 for RGB, 1 for grayscale.
        if self.color_mode == 'rgb':
            self.image_shape = self.crop_size + (3,)
        else:
            self.image_shape = self.crop_size + (1,)
        # First count how many experiments are out there
        self.samples = 0
        experiments = []
        for subdir in sorted(os.listdir(directory)):
            if os.path.isdir(os.path.join(directory, subdir)):
                experiments.append(subdir)
        self.num_experiments = len(experiments)
        self.formats = {'png', 'jpg'}
        # Idea = associate each filename with a corresponding steering or label
        self.filenames = []
        self.ground_truth = []
        # Determine the type of experiment (steering or collision) to compute
        # the loss
        self.exp_type = []
        for subdir in experiments:
            subpath = os.path.join(directory, subdir)
            self._decode_experiment_dir(subpath)
        # Conversion of list into array
        self.ground_truth = np.array(self.ground_truth, dtype = K.floatx())
        assert self.samples > 0, "Did not find any data"
        print('Found {} images belonging to {} experiments.'.format(
                self.samples, self.num_experiments))
        super(DroneDirectoryIterator, self).__init__(self.samples,
                batch_size, shuffle, seed)
    def _recursive_list(self, subpath):
        # Deterministic walk (sorted by path) so filenames pair up with
        # ground-truth rows in a stable order.
        return sorted(os.walk(subpath, followlinks=self.follow_links),
                key=lambda tpl: tpl[0])
    def _decode_experiment_dir(self, dir_subpath):
        """Scan one experiment folder, registering images and ground truth.

        Loads `sync_steering.txt` (steering experiment, exp_type=1) or,
        failing that, `labels.txt` (collision experiment, exp_type=0), then
        appends each valid image in `images/` with its ground-truth row.

        Raises:
            IOError: if neither ground-truth file can be loaded.
        """
        # Load steerings or labels in the experiment dir
        steerings_filename = os.path.join(dir_subpath, "sync_steering.txt")
        labels_filename = os.path.join(dir_subpath, "labels.txt")
        # Try to load steerings first. Make sure that the steering angle or the
        # label file is in the first column. Note also that the first line are
        # comments so it should be skipped.
        try:
            ground_truth = np.loadtxt(steerings_filename, usecols=0,
                                  delimiter=',', skiprows=1)
            exp_type = 1
        except OSError as e:
            # Try load collision labels if there are no steerings
            try:
                ground_truth = np.loadtxt(labels_filename, usecols=0)
                exp_type = 0
            except OSError as e:
                print("Neither steerings nor labels found in dir {}".format(
                dir_subpath))
                raise IOError
        # Now fetch all images in the image subdir.
        # Files are sorted numerically by the first digit run in the name, so
        # frame_number indexes the corresponding ground-truth row.
        image_dir_path = os.path.join(dir_subpath, "images")
        for root, _, files in self._recursive_list(image_dir_path):
            sorted_files = sorted(files,
                    key = lambda fname: int(re.search(r'\d+',fname).group()))
            for frame_number, fname in enumerate(sorted_files):
                is_valid = False
                for extension in self.formats:
                    if fname.lower().endswith('.' + extension):
                        is_valid = True
                        break
                if is_valid:
                    absolute_path = os.path.join(root, fname)
                    self.filenames.append(os.path.relpath(absolute_path,
                            self.directory))
                    self.ground_truth.append(ground_truth[frame_number])
                    self.exp_type.append(exp_type)
                    self.samples += 1
    def next(self):
        """Fetch the next batch; index generation is thread-safe."""
        with self.lock:
            index_array = next(self.index_generator)
        # The transformation of images is not under thread lock
        # so it can be done in parallel
        return self._get_batches_of_transformed_samples(index_array)
    def _get_batches_of_transformed_samples(self, index_array) :
        """
        Public function to fetch next batch.

        # Returns
            The next batch of images and labels
            (batch_x, [batch_steer, batch_coll]).
        """
        current_batch_size = index_array.shape[0]
        # Image transformation is not under thread lock, so it can be done in
        # parallel
        batch_x = np.zeros((current_batch_size,) + self.image_shape,
                dtype=K.floatx())
        batch_steer = np.zeros((current_batch_size, 2,),
                dtype=K.floatx())
        batch_coll = np.zeros((current_batch_size, 2,),
                dtype=K.floatx())
        grayscale = self.color_mode == 'grayscale'
        # Build batch of image data
        for i, j in enumerate(index_array):
            fname = self.filenames[j]
            x = img_utils.load_img(os.path.join(self.directory, fname),
                    grayscale=grayscale,
                    crop_size=self.crop_size,
                    target_size=self.target_size)
            x = self.image_data_generator.random_transform(x)
            x = self.image_data_generator.standardize(x)
            batch_x[i] = x
            # Build batch of steering and collision data.
            # Each target is a pair [t, value]; t appears to flag whether the
            # sample carries a valid label for that head — confirm against
            # the training loss.
            if self.exp_type[index_array[i]] == 1:
                # Steering experiment (t=1)
                batch_steer[i,0] =1.0
                batch_steer[i,1] = self.ground_truth[index_array[i]]
                batch_coll[i] = np.array([1.0, 0.0])
            else:
                # Collision experiment (t=0)
                batch_steer[i] = np.array([0.0, 0.0])
                batch_coll[i,0] = 0.0
                batch_coll[i,1] = self.ground_truth[index_array[i]]
        batch_y = [batch_steer, batch_coll]
        return batch_x, batch_y
def compute_predictions_and_gt(model, generator, steps,
                               max_q_size=10,
                               pickle_safe=False, verbose=0):
    """
    Generate predictions and associated ground truth
    for the input samples from a data generator.
    The generator should return the same kind of data as accepted by
    `predict_on_batch`.
    Function adapted from keras `predict_generator`.
    # Arguments
        generator: Generator yielding batches of input samples.
        steps: Total number of steps (batches of samples)
            to yield from `generator` before stopping.
        max_q_size: Maximum size for the generator queue.
            NOTE(review): accepted for keras API compatibility but unused
            in this implementation.
        pickle_safe: If `True`, use process based threading.
            NOTE(review): accepted for keras API compatibility but unused
            in this implementation.
        verbose: verbosity mode, 0 or 1.
    # Returns
        Numpy array(s) of predictions and associated ground truth.
        For a single step, the raw per-batch lists are returned instead
        of concatenated arrays.
    # Raises
        ValueError: In case the generator yields
            data in an invalid format.
    """
    steps_done = 0
    all_outs = []
    all_labels = []
    all_ts = []
    if verbose == 1:
        progbar = Progbar(target=steps)
    while steps_done < steps:
        generator_output = next(generator)
        # Accept (x, y) or (x, y, sample_weight); the weights are ignored.
        if isinstance(generator_output, tuple):
            if len(generator_output) == 2:
                x, gt_lab = generator_output
            elif len(generator_output) == 3:
                x, gt_lab, _ = generator_output
            else:
                raise ValueError('output of generator should be '
                                 'a tuple `(x, y, sample_weight)` '
                                 'or `(x, y)`. Found: ' +
                                 str(generator_output))
        else:
            raise ValueError('Output not valid for current evaluation')
        # Debug timing of the forward pass (prints unconditionally).
        start_time = time.time()
        outs = model.predict_on_batch(x)
        time_diff = time.time() - start_time
        print("\n Time Diff: ", time_diff)
        print("Batch Size: ", len(x))
        print("FPS: ", len(x)/time_diff)
        if not isinstance(outs, list):
            outs = [outs]
        if not isinstance(gt_lab, list):
            gt_lab = [gt_lab]
        if not all_outs:
            for out in outs:
                # Len of this list is related to the number of
                # outputs per model (1 in our case)
                all_outs.append([])
        if not all_labels:
            # Len of list related to the number of gt_commands
            # per model (1 in our case)
            for lab in gt_lab:
                all_labels.append([])
                all_ts.append([])
        for i, out in enumerate(outs):
            all_outs[i].append(out)
        for i, lab in enumerate(gt_lab):
            # Labels come as (batch, 2): column 1 is the ground-truth value,
            # column 0 the experiment-type flag t.
            all_labels[i].append(lab[:,1])
            all_ts[i].append(lab[:,0])
        steps_done += 1
        if verbose == 1:
            progbar.update(steps_done)
    if steps_done == 1:
        return [out for out in all_outs], [lab for lab in all_labels], np.concatenate(all_ts[0])
    else:
        return np.squeeze(np.array([np.concatenate(out) for out in all_outs])).T, \
               np.array([np.concatenate(lab) for lab in all_labels]).T, \
               np.concatenate(all_ts[0])
def hard_mining_mse(k):
    """
    Compute MSE for steering evaluation and hard-mining for the current batch.
    # Arguments
        k: number of samples for hard-mining (hardest samples kept).
    # Returns
        custom_mse: average MSE over the k hardest samples of the batch.
    """
    def custom_mse(y_true, y_pred):
        # Parameter t indicates the type of experiment (t=1 -> steering).
        t = y_true[:,0]
        # Number of steering samples (a symbolic tensor, not a Python int).
        samples_steer = tf.cast(tf.equal(t,1), tf.int32)
        n_samples_steer = tf.reduce_sum(samples_steer)
        # NOTE(review): `n_samples_steer` is symbolic, so this Python `==`
        # is evaluated once at graph-construction time and is always False;
        # only the `else` branch is traced. A runtime guard would require
        # `tf.cond` -- confirm before relying on the zero-sample path.
        if n_samples_steer == 0:
            return 0.0
        else:
            # Predicted and real steerings. `squeeze_dims` is the
            # deprecated TF1 spelling of `axis`.
            pred_steer = tf.squeeze(y_pred, squeeze_dims=-1)
            true_steer = y_true[:,1]
            # Steering loss: squared error masked to steering samples via t.
            l_steer = tf.multiply(t, K.square(pred_steer - true_steer))
            # Hard mining: keep the k (or fewer) largest per-sample errors.
            k_min = tf.minimum(k, n_samples_steer)
            _, indices = tf.nn.top_k(l_steer, k=k_min)
            max_l_steer = tf.gather(l_steer, indices)
            # Divides by k (not k_min): batches with fewer than k steering
            # samples are deliberately down-weighted.
            hard_l_steer = tf.divide(tf.reduce_sum(max_l_steer), tf.cast(k,tf.float32))
            return hard_l_steer
    return custom_mse
def hard_mining_entropy(k):
    """
    Compute binary cross-entropy for collision evaluation and hard-mining.
    # Arguments
        k: Number of samples for hard-mining (hardest samples kept).
    # Returns
        custom_bin_crossentropy: average binary cross-entropy over the k
        hardest samples of the current batch.
    """
    def custom_bin_crossentropy(y_true, y_pred):
        # Parameter t indicates the type of experiment (t=0 -> collision).
        t = y_true[:,0]
        # Number of collision samples (a symbolic tensor, not a Python int).
        samples_coll = tf.cast(tf.equal(t,0), tf.int32)
        n_samples_coll = tf.reduce_sum(samples_coll)
        # NOTE(review): symbolic tensor compared with Python `==`; this is
        # resolved at graph-construction time and is always False, so only
        # the `else` branch is traced (see hard_mining_mse for the same
        # issue). A runtime guard would require `tf.cond`.
        if n_samples_coll == 0:
            return 0.0
        else:
            # Predicted and real labels. `squeeze_dims` is the deprecated
            # TF1 spelling of `axis`.
            pred_coll = tf.squeeze(y_pred, squeeze_dims=-1)
            true_coll = y_true[:,1]
            # Collision loss, masked to collision samples via (1-t).
            # Assumes the old Keras backend signature
            # binary_crossentropy(target, output) -- TODO confirm for the
            # pinned Keras version.
            l_coll = tf.multiply((1-t), K.binary_crossentropy(true_coll, pred_coll))
            # Hard mining: keep the k (or fewer) largest per-sample losses.
            k_min = tf.minimum(k, n_samples_coll)
            _, indices = tf.nn.top_k(l_coll, k=k_min)
            max_l_coll = tf.gather(l_coll, indices)
            # Divides by k (not k_min): short batches are down-weighted.
            hard_l_coll = tf.divide(tf.reduce_sum(max_l_coll), tf.cast(k, tf.float32))
            return hard_l_coll
    return custom_bin_crossentropy
def modelToJson(model, json_model_path):
    """Write the model's JSON architecture description to ``json_model_path``."""
    with open(json_model_path, "w") as handle:
        handle.write(model.to_json())
def jsonToModel(json_model_path):
    """Rebuild a Keras model from the JSON architecture file at ``json_model_path``."""
    with open(json_model_path, 'r') as json_file:
        serialized_architecture = json_file.read()
    return model_from_json(serialized_architecture)
def write_to_file(dictionary, fname):
    """Dump ``dictionary`` to ``fname`` as JSON and report the written path."""
    with open(fname, "w") as out_file:
        json.dump(dictionary, out_file)
    print("Written file {}".format(fname))
| 35.206573 | 96 | 0.595146 | import re
import os
import numpy as np
import tensorflow as tf
import json
import time
from keras import backend as K
from keras.preprocessing.image import Iterator
from keras.preprocessing.image import ImageDataGenerator
from keras.utils.generic_utils import Progbar
from keras.models import model_from_json
import img_utils
class DroneDataGenerator(ImageDataGenerator):
def flow_from_directory(self, directory, target_size=(224,224),
crop_size=(250,250), color_mode='grayscale', batch_size=32,
shuffle=True, seed=None, follow_links=False):
return DroneDirectoryIterator(
directory, self,
target_size=target_size, crop_size=crop_size, color_mode=color_mode,
batch_size=batch_size, shuffle=shuffle, seed=seed,
follow_links=follow_links)
class DroneDirectoryIterator(Iterator):
def __init__(self, directory, image_data_generator,
target_size=(224,224), crop_size = (250,250), color_mode='grayscale',
batch_size=32, shuffle=True, seed=None, follow_links=False):
self.directory = directory
self.image_data_generator = image_data_generator
self.target_size = tuple(target_size)
self.crop_size = tuple(crop_size)
self.follow_links = follow_links
if color_mode not in {'rgb', 'grayscale'}:
raise ValueError('Invalid color mode:', color_mode,
'; expected "rgb" or "grayscale".')
self.color_mode = color_mode
if self.color_mode == 'rgb':
self.image_shape = self.crop_size + (3,)
else:
self.image_shape = self.crop_size + (1,)
self.samples = 0
experiments = []
for subdir in sorted(os.listdir(directory)):
if os.path.isdir(os.path.join(directory, subdir)):
experiments.append(subdir)
self.num_experiments = len(experiments)
self.formats = {'png', 'jpg'}
self.filenames = []
self.ground_truth = []
self.exp_type = []
for subdir in experiments:
subpath = os.path.join(directory, subdir)
self._decode_experiment_dir(subpath)
self.ground_truth = np.array(self.ground_truth, dtype = K.floatx())
assert self.samples > 0, "Did not find any data"
print('Found {} images belonging to {} experiments.'.format(
self.samples, self.num_experiments))
super(DroneDirectoryIterator, self).__init__(self.samples,
batch_size, shuffle, seed)
def _recursive_list(self, subpath):
return sorted(os.walk(subpath, followlinks=self.follow_links),
key=lambda tpl: tpl[0])
def _decode_experiment_dir(self, dir_subpath):
steerings_filename = os.path.join(dir_subpath, "sync_steering.txt")
labels_filename = os.path.join(dir_subpath, "labels.txt")
try:
ground_truth = np.loadtxt(steerings_filename, usecols=0,
delimiter=',', skiprows=1)
exp_type = 1
except OSError as e:
try:
ground_truth = np.loadtxt(labels_filename, usecols=0)
exp_type = 0
except OSError as e:
print("Neither steerings nor labels found in dir {}".format(
dir_subpath))
raise IOError
image_dir_path = os.path.join(dir_subpath, "images")
for root, _, files in self._recursive_list(image_dir_path):
sorted_files = sorted(files,
key = lambda fname: int(re.search(r'\d+',fname).group()))
for frame_number, fname in enumerate(sorted_files):
is_valid = False
for extension in self.formats:
if fname.lower().endswith('.' + extension):
is_valid = True
break
if is_valid:
absolute_path = os.path.join(root, fname)
self.filenames.append(os.path.relpath(absolute_path,
self.directory))
self.ground_truth.append(ground_truth[frame_number])
self.exp_type.append(exp_type)
self.samples += 1
def next(self):
with self.lock:
index_array = next(self.index_generator)
return self._get_batches_of_transformed_samples(index_array)
def _get_batches_of_transformed_samples(self, index_array) :
current_batch_size = index_array.shape[0]
batch_x = np.zeros((current_batch_size,) + self.image_shape,
dtype=K.floatx())
batch_steer = np.zeros((current_batch_size, 2,),
dtype=K.floatx())
batch_coll = np.zeros((current_batch_size, 2,),
dtype=K.floatx())
grayscale = self.color_mode == 'grayscale'
for i, j in enumerate(index_array):
fname = self.filenames[j]
x = img_utils.load_img(os.path.join(self.directory, fname),
grayscale=grayscale,
crop_size=self.crop_size,
target_size=self.target_size)
x = self.image_data_generator.random_transform(x)
x = self.image_data_generator.standardize(x)
batch_x[i] = x
if self.exp_type[index_array[i]] == 1:
batch_steer[i,0] =1.0
batch_steer[i,1] = self.ground_truth[index_array[i]]
batch_coll[i] = np.array([1.0, 0.0])
else:
batch_steer[i] = np.array([0.0, 0.0])
batch_coll[i,0] = 0.0
batch_coll[i,1] = self.ground_truth[index_array[i]]
batch_y = [batch_steer, batch_coll]
return batch_x, batch_y
def compute_predictions_and_gt(model, generator, steps,
max_q_size=10,
pickle_safe=False, verbose=0):
steps_done = 0
all_outs = []
all_labels = []
all_ts = []
if verbose == 1:
progbar = Progbar(target=steps)
while steps_done < steps:
generator_output = next(generator)
if isinstance(generator_output, tuple):
if len(generator_output) == 2:
x, gt_lab = generator_output
elif len(generator_output) == 3:
x, gt_lab, _ = generator_output
else:
raise ValueError('output of generator should be '
'a tuple `(x, y, sample_weight)` '
'or `(x, y)`. Found: ' +
str(generator_output))
else:
raise ValueError('Output not valid for current evaluation')
start_time = time.time()
outs = model.predict_on_batch(x)
time_diff = time.time() - start_time
print("\n Time Diff: ", time_diff)
print("Batch Size: ", len(x))
print("FPS: ", len(x)/time_diff)
if not isinstance(outs, list):
outs = [outs]
if not isinstance(gt_lab, list):
gt_lab = [gt_lab]
if not all_outs:
for out in outs:
all_outs.append([])
if not all_labels:
for lab in gt_lab:
all_labels.append([])
all_ts.append([])
for i, out in enumerate(outs):
all_outs[i].append(out)
for i, lab in enumerate(gt_lab):
all_labels[i].append(lab[:,1])
all_ts[i].append(lab[:,0])
steps_done += 1
if verbose == 1:
progbar.update(steps_done)
if steps_done == 1:
return [out for out in all_outs], [lab for lab in all_labels], np.concatenate(all_ts[0])
else:
return np.squeeze(np.array([np.concatenate(out) for out in all_outs])).T, \
np.array([np.concatenate(lab) for lab in all_labels]).T, \
np.concatenate(all_ts[0])
def hard_mining_mse(k):
def custom_mse(y_true, y_pred):
t = y_true[:,0]
samples_steer = tf.cast(tf.equal(t,1), tf.int32)
n_samples_steer = tf.reduce_sum(samples_steer)
if n_samples_steer == 0:
return 0.0
else:
pred_steer = tf.squeeze(y_pred, squeeze_dims=-1)
true_steer = y_true[:,1]
l_steer = tf.multiply(t, K.square(pred_steer - true_steer))
k_min = tf.minimum(k, n_samples_steer)
_, indices = tf.nn.top_k(l_steer, k=k_min)
max_l_steer = tf.gather(l_steer, indices)
hard_l_steer = tf.divide(tf.reduce_sum(max_l_steer), tf.cast(k,tf.float32))
return hard_l_steer
return custom_mse
def hard_mining_entropy(k):
def custom_bin_crossentropy(y_true, y_pred):
t = y_true[:,0]
samples_coll = tf.cast(tf.equal(t,0), tf.int32)
n_samples_coll = tf.reduce_sum(samples_coll)
if n_samples_coll == 0:
return 0.0
else:
pred_coll = tf.squeeze(y_pred, squeeze_dims=-1)
true_coll = y_true[:,1]
l_coll = tf.multiply((1-t), K.binary_crossentropy(true_coll, pred_coll))
k_min = tf.minimum(k, n_samples_coll)
_, indices = tf.nn.top_k(l_coll, k=k_min)
max_l_coll = tf.gather(l_coll, indices)
hard_l_coll = tf.divide(tf.reduce_sum(max_l_coll), tf.cast(k, tf.float32))
return hard_l_coll
return custom_bin_crossentropy
def modelToJson(model, json_model_path):
model_json = model.to_json()
with open(json_model_path,"w") as f:
f.write(model_json)
def jsonToModel(json_model_path):
with open(json_model_path, 'r') as json_file:
loaded_model_json = json_file.read()
model = model_from_json(loaded_model_json)
return model
def write_to_file(dictionary, fname):
with open(fname, "w") as f:
json.dump(dictionary,f)
print("Written file {}".format(fname))
| true | true |
1c459f2b6ff309defaa99622a9e67444b25d1a67 | 309 | py | Python | testing_8709/main.py | akvrdata/testing_8709 | b9987a6a14d582a062f08d9de13f9b46f38989b1 | [
"MIT"
] | null | null | null | testing_8709/main.py | akvrdata/testing_8709 | b9987a6a14d582a062f08d9de13f9b46f38989b1 | [
"MIT"
] | null | null | null | testing_8709/main.py | akvrdata/testing_8709 | b9987a6a14d582a062f08d9de13f9b46f38989b1 | [
"MIT"
] | null | null | null | import sys
import click
@click.command()
@click.option('--count',default=1,help='Number of prints required')
@click.option('--name',help='name to print')
def hello(count, name):
    '''Click Cli testing'''
    # `name` may be None when the option is omitted; the greeting is built
    # once and echoed `count` times.
    greeting = 'Hello %s' % name
    for _ in range(count):
        click.echo(greeting)
if __name__ == '__main__':
hello() | 23.769231 | 67 | 0.653722 | import sys
import click
@click.command()
@click.option('--count',default=1,help='Number of prints required')
@click.option('--name',help='name to print')
def hello(count,name):
for x in range(count):
click.echo('Hello %s' %name)
if __name__ == '__main__':
hello() | true | true |
1c459f2e63f5d6cbc44f6b3304bb888e1f9f90a0 | 3,711 | py | Python | bauh/api/http.py | Flash1232/bauh | 6f65556c05ae272c1dbbd557c7f80a606658eb56 | [
"Zlib"
] | 507 | 2019-08-12T16:15:55.000Z | 2022-03-28T15:49:39.000Z | bauh/api/http.py | Flash1232/bauh | 6f65556c05ae272c1dbbd557c7f80a606658eb56 | [
"Zlib"
] | 176 | 2019-08-14T02:35:21.000Z | 2022-03-31T21:43:56.000Z | bauh/api/http.py | Flash1232/bauh | 6f65556c05ae272c1dbbd557c7f80a606658eb56 | [
"Zlib"
] | 57 | 2019-09-02T04:09:22.000Z | 2022-03-21T21:37:16.000Z | import logging
import time
import traceback
from typing import Optional
import requests
import yaml
from bauh.commons import system
class HttpClient:
    """Convenience wrapper around ``requests`` adding retries, a default
    timeout and content helpers."""

    def __init__(self, logger: logging.Logger, max_attempts: int = 2, timeout: int = 30, sleep: float = 0.5):
        """
        Args:
            logger: destination for connection warnings/errors.
            max_attempts: maximum number of tries performed by :meth:`get`.
            timeout: per-request timeout in seconds.
            sleep: pause in seconds between retry attempts (0 disables it).
        """
        self.max_attempts = max_attempts
        self.session = requests.Session()
        self.timeout = timeout
        self.sleep = sleep
        self.logger = logger

    def get(self, url: str, params: dict = None, headers: dict = None, allow_redirects: bool = True, ignore_ssl: bool = False, single_call: bool = False, session: bool = True) -> Optional[requests.Response]:
        """Perform a GET request, retrying up to ``max_attempts`` times.

        Args:
            url: target URL.
            params: optional query parameters.
            headers: optional request headers.
            allow_redirects: whether redirects are followed.
            ignore_ssl: disable TLS certificate verification.
            single_call: return the first response regardless of its status.
            session: use the shared ``requests.Session`` (True) or a
                one-off request (False).

        Returns:
            The response when the status is 200 (or whatever the first
            attempt returned when ``single_call``); otherwise None.

        Raises:
            requests.exceptions.ConnectionError: re-raised so callers can
                detect that the network is down.
        """
        cur_attempts = 1
        while cur_attempts <= self.max_attempts:
            cur_attempts += 1
            try:
                args = {'timeout': self.timeout, 'allow_redirects': allow_redirects}
                if params:
                    args['params'] = params
                if headers:
                    args['headers'] = headers
                if ignore_ssl:
                    args['verify'] = False
                if session:
                    res = self.session.get(url, **args)
                else:
                    res = requests.get(url, **args)
                if res.status_code == 200 or single_call:
                    return res
                if self.sleep > 0:
                    time.sleep(self.sleep)
            except requests.exceptions.ConnectionError:
                self.logger.error('Internet seems to be off')
                raise
            except Exception:
                self.logger.error("Could not retrieve data from '{}'".format(url))
                traceback.print_exc()
                continue
        self.logger.warning("Could not retrieve data from '{}'".format(url))

    def get_json(self, url: str, params: dict = None, headers: dict = None, allow_redirects: bool = True, session: bool = True):
        """GET ``url`` and decode the body as JSON, or None on failure."""
        res = self.get(url, params=params, headers=headers, allow_redirects=allow_redirects, session=session)
        return res.json() if res else None

    def get_yaml(self, url: str, params: dict = None, headers: dict = None, allow_redirects: bool = True, session: bool = True):
        """GET ``url`` and decode the body as YAML, or None on failure."""
        res = self.get(url, params=params, headers=headers, allow_redirects=allow_redirects, session=session)
        return yaml.safe_load(res.text) if res else None

    def get_content_length_in_bytes(self, url: str, session: bool = True) -> Optional[int]:
        """Return the Content-Length of ``url`` in bytes, or None when the
        URL is unreachable, the status is not 200, or the header is
        missing/malformed."""
        params = {'url': url, 'allow_redirects': True, 'stream': True}
        try:
            if session:
                res = self.session.get(**params)
            else:
                res = requests.get(**params)
        except requests.exceptions.ConnectionError:
            self.logger.info("Internet seems to be off. Could not reach '{}'".format(url))
            return
        try:
            if res.status_code == 200:
                size = res.headers.get('Content-Length')
                if size:
                    try:
                        return int(size)
                    except ValueError:
                        # Malformed header value: behave as if it were absent.
                        pass
        finally:
            # The request was made with stream=True and the body is never
            # consumed; close it so the connection returns to the pool
            # (previously leaked).
            res.close()

    def get_content_length(self, url: str, session: bool = True) -> Optional[str]:
        """Return the Content-Length of ``url`` as a human-readable string."""
        size = self.get_content_length_in_bytes(url, session)
        if size:
            return system.get_human_size_str(size)

    def exists(self, url: str, session: bool = True, timeout: int = 5) -> bool:
        """Check whether ``url`` is reachable via a HEAD request.

        A 403 also counts as existing, since some servers deny anonymous
        HEAD access to resources that are nevertheless present.
        """
        params = {'url': url, 'allow_redirects': True, 'verify': False, 'timeout': timeout}
        if session:
            res = self.session.head(**params)
        else:
            # Bug fix: this branch previously issued a GET through the
            # shared session, ignoring `session=False`. Use a session-less
            # HEAD instead, mirroring the session branch.
            res = requests.head(**params)
        return res.status_code in (200, 403)
| 35.009434 | 207 | 0.565346 | import logging
import time
import traceback
from typing import Optional
import requests
import yaml
from bauh.commons import system
class HttpClient:
def __init__(self, logger: logging.Logger, max_attempts: int = 2, timeout: int = 30, sleep: float = 0.5):
self.max_attempts = max_attempts
self.session = requests.Session()
self.timeout = timeout
self.sleep = sleep
self.logger = logger
def get(self, url: str, params: dict = None, headers: dict = None, allow_redirects: bool = True, ignore_ssl: bool = False, single_call: bool = False, session: bool = True) -> Optional[requests.Response]:
cur_attempts = 1
while cur_attempts <= self.max_attempts:
cur_attempts += 1
try:
args = {'timeout': self.timeout, 'allow_redirects': allow_redirects}
if params:
args['params'] = params
if headers:
args['headers'] = headers
if ignore_ssl:
args['verify'] = False
if session:
res = self.session.get(url, **args)
else:
res = requests.get(url, **args)
if res.status_code == 200:
return res
if single_call:
return res
if self.sleep > 0:
time.sleep(self.sleep)
except Exception as e:
if isinstance(e, requests.exceptions.ConnectionError):
self.logger.error('Internet seems to be off')
raise
self.logger.error("Could not retrieve data from '{}'".format(url))
traceback.print_exc()
continue
self.logger.warning("Could not retrieve data from '{}'".format(url))
def get_json(self, url: str, params: dict = None, headers: dict = None, allow_redirects: bool = True, session: bool = True):
res = self.get(url, params=params, headers=headers, allow_redirects=allow_redirects, session=session)
return res.json() if res else None
def get_yaml(self, url: str, params: dict = None, headers: dict = None, allow_redirects: bool = True, session: bool = True):
res = self.get(url, params=params, headers=headers, allow_redirects=allow_redirects, session=session)
return yaml.safe_load(res.text) if res else None
def get_content_length_in_bytes(self, url: str, session: bool = True) -> Optional[int]:
params = {'url': url, 'allow_redirects': True, 'stream': True}
try:
if session:
res = self.session.get(**params)
else:
res = requests.get(**params)
except requests.exceptions.ConnectionError:
self.logger.info("Internet seems to be off. Could not reach '{}'".format(url))
return
if res.status_code == 200:
size = res.headers.get('Content-Length')
if size:
try:
return int(size)
except:
pass
def get_content_length(self, url: str, session: bool = True) -> Optional[str]:
size = self.get_content_length_in_bytes(url, session)
if size:
return system.get_human_size_str(size)
def exists(self, url: str, session: bool = True, timeout: int = 5) -> bool:
params = {'url': url, 'allow_redirects': True, 'verify': False, 'timeout': timeout}
if session:
res = self.session.head(**params)
else:
res = self.session.get(**params)
return res.status_code in (200, 403)
| true | true |
1c459fbfb4d5f376b961ba213a2581525628f906 | 398 | py | Python | accounts/migrations/0002_account_points.py | ebar0n/palermo-coin | 63dc14fce31fbeae50ec7ebf5ea97efbb1ec18fd | [
"MIT"
] | null | null | null | accounts/migrations/0002_account_points.py | ebar0n/palermo-coin | 63dc14fce31fbeae50ec7ebf5ea97efbb1ec18fd | [
"MIT"
] | 15 | 2019-05-13T23:40:06.000Z | 2022-03-11T23:39:57.000Z | accounts/migrations/0002_account_points.py | ebar0n/leviatan-backend | 63dc14fce31fbeae50ec7ebf5ea97efbb1ec18fd | [
"MIT"
] | null | null | null | # Generated by Django 2.1.7 on 2019-03-07 07:57
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the ``points`` counter to ``accounts.Account``, defaulting to 0."""

    dependencies = [
        ('accounts', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='account',
            name='points',
            # Non-editable: the counter is managed by application code,
            # not through forms/admin.
            field=models.PositiveIntegerField(default=0, editable=False),
        ),
    ]
| 20.947368 | 73 | 0.603015 |
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='account',
name='points',
field=models.PositiveIntegerField(default=0, editable=False),
),
]
| true | true |
1c45a0ca7dda5396a87bdbca7a0a71105cce95b6 | 1,359 | py | Python | python2/timeout.py | SLongofono/Python-Misc | c6c2735f65b7f06e31996140c2921315b1a6cf9e | [
"MIT"
] | 2 | 2017-07-24T17:46:13.000Z | 2017-12-09T16:00:40.000Z | python2/timeout.py | SLongofono/Python-Misc | c6c2735f65b7f06e31996140c2921315b1a6cf9e | [
"MIT"
] | null | null | null | python2/timeout.py | SLongofono/Python-Misc | c6c2735f65b7f06e31996140c2921315b1a6cf9e | [
"MIT"
] | 1 | 2018-09-18T15:18:47.000Z | 2018-09-18T15:18:47.000Z | def timed_func(f, args=(), kwargs=None, timeout=30, default=None, errormsg="Timeout error"):
# Since kwargs are mutable, assume they don't exist via optional arguments. If they do in fact exist,
# they will exist in this context and be assigned. Otherwise, set to an empty dict and proceed.
kwargs = kwargs or {}
import signal
class TimeoutError(Exception):
pass
def timeout_handler(signum, frame):
raise TimeoutError
# Register a signal to our handler
signal.signal(signal.SIGALRM, timeout_handler)
# Trigger an alarm after timeout seconds
signal.alarm(timeout)
# Try a function call:
# If it returns normally before the timeout, pass along the value
# Otherwise, print the specific error and return the default value
try:
result = f(*args, **kwargs)
except TimeoutError:
result = default
print(errormsg)
finally:
signal.alarm(0)
return result
# Silly function that never returns
def forever():
    """Block indefinitely; test workload for timed_func's timeout path."""
    import time
    while True:
        time.sleep(1)
# Function that may or may not complete depending on the timeout
def andever(a, b):
    """Add ``b`` to ``a`` repeatedly and return the first total above 1e8.

    ``b`` is added at least once before the threshold check; the function
    never returns if the threshold cannot be exceeded (e.g. ``b <= 0``).
    """
    threshold = 100000000
    total = a
    while True:
        total += b
        if total > threshold:
            return total
# Demo: `forever` never returns, so the first call times out after ~2 s,
# prints "failed to update" and yields the default. `andever(1, 2)` needs
# ~50M iterations and may or may not beat its 5 s budget on this machine.
print(timed_func(forever, timeout=2, default="no response", errormsg="failed to update"))
print(timed_func(andever, (1,2), timeout=5, default=-1, errormsg="computation timeout"))
| 28.3125 | 103 | 0.734364 | def timed_func(f, args=(), kwargs=None, timeout=30, default=None, errormsg="Timeout error"):
# they will exist in this context and be assigned. Otherwise, set to an empty dict and proceed.
kwargs = kwargs or {}
import signal
class TimeoutError(Exception):
pass
def timeout_handler(signum, frame):
raise TimeoutError
# Register a signal to our handler
signal.signal(signal.SIGALRM, timeout_handler)
# Trigger an alarm after timeout seconds
signal.alarm(timeout)
# Try a function call:
# If it returns normally before the timeout, pass along the value
# Otherwise, print the specific error and return the default value
try:
result = f(*args, **kwargs)
except TimeoutError:
result = default
print(errormsg)
finally:
signal.alarm(0)
return result
# Silly function that never returns
def forever():
import time
while True:
time.sleep(1)
# Function that may or may not complete depending on the timeout
def andever(a,b):
result = a
while True:
result += b
#if result > 200000000:
if result > 100000000:
return result
# Test
print(timed_func(forever, timeout=2, default="no response", errormsg="failed to update"))
print(timed_func(andever, (1,2), timeout=5, default=-1, errormsg="computation timeout"))
| true | true |
1c45a0f0a16e4c957d53072ae53309de03cc22ef | 6,090 | py | Python | docs/conf.py | open-datastudio/datastudio | 5055579adf969ad6d7491454b30ab2fedbaaa067 | [
"MIT"
] | 10 | 2020-06-23T13:45:44.000Z | 2021-11-04T13:31:43.000Z | docs/conf.py | open-datastudio/datastudio | 5055579adf969ad6d7491454b30ab2fedbaaa067 | [
"MIT"
] | 1 | 2020-06-23T23:15:10.000Z | 2020-08-11T04:41:25.000Z | docs/conf.py | open-datastudio/datastudio | 5055579adf969ad6d7491454b30ab2fedbaaa067 | [
"MIT"
] | 2 | 2021-11-20T21:24:36.000Z | 2022-01-05T03:35:32.000Z | # -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
# Sphinx configuration for the Open Data Studio documentation.
project = u'Open Data Studio'
copyright = u'Open Data Studio Authors'
author = u'Open Data Studio Authors'

# The short X.Y version
version = u''
# The full version, including alpha/beta/rc tags
release = u''


# -- General configuration ---------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Sphinx extension module names: built-ins are named 'sphinx.ext.*'.
# NOTE: 'aafigure.sphinxext' is third-party and must be pip-installed
# alongside Sphinx for the build to succeed.
extensions = [
    'sphinx.ext.autosectionlabel',
    'aafigure.sphinxext'
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = [u'_build', 'Thumbs.db', '.DS_Store']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None


# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}

html_logo = '_static/open-datastudio-logo.svg'

# Enable link of 'View page source'
# html_show_sourcelink = False

# Show an "Edit on GitHub" link in the page header instead of
# 'View page source'.
# reference: https://docs.readthedocs.io/en/latest/vcs.html
html_context = {
    # Enable the "Edit in GitHub" link within the header of each page.
    'display_github': True,
    # These variables produce the per-page GitHub URL:
    # https://{github_host}/{github_user}/{github_repo}/blob/{github_version}{pagename}{suffix}
    'github_user': 'open-datastudio',
    'github_repo': 'datastudio',
    'github_version': 'master/docs/',
}

# -- Options for HTMLHelp output ---------------------------------------------

# Output file base name for HTML help builder.
htmlhelp_basename = 'OpenDataStudioDoc'


# -- Options for LaTeX output ------------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'OPENDATASTUDIO.tex', u'Open Data Studio Documentation',
     u'Open Data Studio', 'manual'),
]


# -- Options for manual page output ------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'open data studio', u'Open Data Studio Documentation',
     [author], 1)
]


# -- Options for Texinfo output ----------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'Open Data Studio', u'Open Data Studio Documentation',
     author, 'Open Data Studio', 'Cloud data tools',
     'Miscellaneous'),
]


# -- Options for Epub output -------------------------------------------------

# Bibliographic Dublin Core info.
epub_title = project

# The unique identifier of the text. This can be an ISBN number
# or the project homepage.
#
# epub_identifier = ''

# A unique identification for the text.
#
# epub_uid = ''

# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
| 31.71875 | 93 | 0.663054 |
project = u'Open Data Studio'
copyright = u'Open Data Studio Authors'
author = u'Open Data Studio Authors'
version = u''
release = u''
extensions = [
'sphinx.ext.autosectionlabel',
'aafigure.sphinxext'
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
language = None
exclude_patterns = [u'_build', 'Thumbs.db', '.DS_Store']
pygments_style = None
html_theme = 'sphinx_rtd_theme'
html_static_path = ['_static']
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
html_logo = '_static/open-datastudio-logo.svg'
# Enable link of 'View page source'
#html_show_sourcelink = False
# Add 'Edit on Github' link instead of 'View page source'
# reference:https://docs.readthedocs.io/en/latest/vcs.html
html_context = {
# Enable the "Edit in GitHub link within the header of each page.
'display_github': True,
# Set the following variables to generate the resulting github URL for each page.
# Format Template: https://{{ github_host|default("github.com") }}/{{ github_user }}
#/{{ github_repo }}/blob/{{ github_version }}{{ conf_py_path }}{{ pagename }}{{ suffix }}
#https://github.com/runawayhorse001/SphinxGithub/blob/master/doc/index.rst
'github_user': 'open-datastudio',
'github_repo': 'datastudio',
'github_version': 'master/docs/',
}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'OpenDataStudioDoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'OPENDATASTUDIO.tex', u'Open Data Studio Documentation',
u'Open Data Studio', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'open data studio', u'Open Data Studio Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Open Data Studio', u'Open Data Studio Documentation',
author, 'Open Data Studio', 'Cloud data tools',
'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
| true | true |
1c45a10a9ddde743dce9b343e4d18f568bb05e72 | 3,531 | py | Python | python/paddle/fluid/tests/unittests/dist_mnist.py | hshen14/Paddle | 0962be9c800d29e0804fc3135163bdfba1564c61 | [
"Apache-2.0"
] | 2 | 2019-04-03T05:36:17.000Z | 2020-04-29T03:38:54.000Z | python/paddle/fluid/tests/unittests/dist_mnist.py | hshen14/Paddle | 0962be9c800d29e0804fc3135163bdfba1564c61 | [
"Apache-2.0"
] | null | null | null | python/paddle/fluid/tests/unittests/dist_mnist.py | hshen14/Paddle | 0962be9c800d29e0804fc3135163bdfba1564c61 | [
"Apache-2.0"
] | 3 | 2019-01-07T06:50:29.000Z | 2019-03-13T08:48:23.000Z | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import numpy as np
import argparse
import time
import math
import paddle
import paddle.fluid as fluid
import paddle.fluid.profiler as profiler
from paddle.fluid import core
import unittest
from multiprocessing import Process
import os
import signal
from functools import reduce
from test_dist_base import TestDistRunnerBase, runtime_main
DTYPE = "float32"
paddle.dataset.mnist.fetch()
# Fix seed for test
fluid.default_startup_program().random_seed = 1
fluid.default_main_program().random_seed = 1
def cnn_model(data):
conv_pool_1 = fluid.nets.simple_img_conv_pool(
input=data,
filter_size=5,
num_filters=20,
pool_size=2,
pool_stride=2,
act="relu",
param_attr=fluid.ParamAttr(initializer=fluid.initializer.Constant(
value=0.01)))
conv_pool_2 = fluid.nets.simple_img_conv_pool(
input=conv_pool_1,
filter_size=5,
num_filters=50,
pool_size=2,
pool_stride=2,
act="relu",
param_attr=fluid.ParamAttr(initializer=fluid.initializer.Constant(
value=0.01)))
SIZE = 10
input_shape = conv_pool_2.shape
param_shape = [reduce(lambda a, b: a * b, input_shape[1:], 1)] + [SIZE]
scale = (2.0 / (param_shape[0]**2 * SIZE))**0.5
predict = fluid.layers.fc(
input=conv_pool_2,
size=SIZE,
act="softmax",
param_attr=fluid.param_attr.ParamAttr(
initializer=fluid.initializer.Constant(value=0.01)))
return predict
class TestDistMnist2x2(TestDistRunnerBase):
def get_model(self, batch_size=2):
# Input data
images = fluid.layers.data(name='pixel', shape=[1, 28, 28], dtype=DTYPE)
label = fluid.layers.data(name='label', shape=[1], dtype='int64')
# Train program
predict = cnn_model(images)
cost = fluid.layers.cross_entropy(input=predict, label=label)
avg_cost = fluid.layers.mean(x=cost)
# Evaluator
batch_size_tensor = fluid.layers.create_tensor(dtype='int64')
batch_acc = fluid.layers.accuracy(
input=predict, label=label, total=batch_size_tensor)
inference_program = fluid.default_main_program().clone()
# Optimization
# TODO(typhoonzero): fix distributed adam optimizer
# opt = fluid.optimizer.AdamOptimizer(
# learning_rate=0.001, beta1=0.9, beta2=0.999)
opt = fluid.optimizer.Momentum(learning_rate=self.lr, momentum=0.9)
# Reader
train_reader = paddle.batch(
paddle.dataset.mnist.test(), batch_size=batch_size)
test_reader = paddle.batch(
paddle.dataset.mnist.test(), batch_size=batch_size)
opt.minimize(avg_cost)
return inference_program, avg_cost, train_reader, test_reader, batch_acc, predict
if __name__ == "__main__":
runtime_main(TestDistMnist2x2)
| 32.394495 | 89 | 0.687624 |
from __future__ import print_function
import numpy as np
import argparse
import time
import math
import paddle
import paddle.fluid as fluid
import paddle.fluid.profiler as profiler
from paddle.fluid import core
import unittest
from multiprocessing import Process
import os
import signal
from functools import reduce
from test_dist_base import TestDistRunnerBase, runtime_main
DTYPE = "float32"
paddle.dataset.mnist.fetch()
fluid.default_startup_program().random_seed = 1
fluid.default_main_program().random_seed = 1
def cnn_model(data):
conv_pool_1 = fluid.nets.simple_img_conv_pool(
input=data,
filter_size=5,
num_filters=20,
pool_size=2,
pool_stride=2,
act="relu",
param_attr=fluid.ParamAttr(initializer=fluid.initializer.Constant(
value=0.01)))
conv_pool_2 = fluid.nets.simple_img_conv_pool(
input=conv_pool_1,
filter_size=5,
num_filters=50,
pool_size=2,
pool_stride=2,
act="relu",
param_attr=fluid.ParamAttr(initializer=fluid.initializer.Constant(
value=0.01)))
SIZE = 10
input_shape = conv_pool_2.shape
param_shape = [reduce(lambda a, b: a * b, input_shape[1:], 1)] + [SIZE]
scale = (2.0 / (param_shape[0]**2 * SIZE))**0.5
predict = fluid.layers.fc(
input=conv_pool_2,
size=SIZE,
act="softmax",
param_attr=fluid.param_attr.ParamAttr(
initializer=fluid.initializer.Constant(value=0.01)))
return predict
class TestDistMnist2x2(TestDistRunnerBase):
def get_model(self, batch_size=2):
images = fluid.layers.data(name='pixel', shape=[1, 28, 28], dtype=DTYPE)
label = fluid.layers.data(name='label', shape=[1], dtype='int64')
predict = cnn_model(images)
cost = fluid.layers.cross_entropy(input=predict, label=label)
avg_cost = fluid.layers.mean(x=cost)
batch_size_tensor = fluid.layers.create_tensor(dtype='int64')
batch_acc = fluid.layers.accuracy(
input=predict, label=label, total=batch_size_tensor)
inference_program = fluid.default_main_program().clone()
opt = fluid.optimizer.Momentum(learning_rate=self.lr, momentum=0.9)
train_reader = paddle.batch(
paddle.dataset.mnist.test(), batch_size=batch_size)
test_reader = paddle.batch(
paddle.dataset.mnist.test(), batch_size=batch_size)
opt.minimize(avg_cost)
return inference_program, avg_cost, train_reader, test_reader, batch_acc, predict
if __name__ == "__main__":
runtime_main(TestDistMnist2x2)
| true | true |
1c45a12fb0bf22d70b2259e645866d62d1c2fa9f | 5,240 | py | Python | tests/test_cli.py | steffenschumacher/NIPAP | 200ec08ce02ba9f782b276510bc7bb23b20d7570 | [
"MIT"
] | 1 | 2018-12-07T15:59:27.000Z | 2018-12-07T15:59:27.000Z | tests/test_cli.py | steffenschumacher/NIPAP | 200ec08ce02ba9f782b276510bc7bb23b20d7570 | [
"MIT"
] | 1 | 2021-07-24T14:44:10.000Z | 2021-07-24T14:44:10.000Z | tests/test_cli.py | steffenschumacher/NIPAP | 200ec08ce02ba9f782b276510bc7bb23b20d7570 | [
"MIT"
] | 1 | 2020-05-27T15:28:03.000Z | 2020-05-27T15:28:03.000Z | #!/usr/bin/env python
# vim: et :
import logging
import unittest
import sys
sys.path.append('../nipap/')
from nipap.backend import Nipap
from nipap.authlib import SqliteAuth
from nipap.nipapconfig import NipapConfig
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
log_format = "%(levelname)-8s %(message)s"
import xmlrpclib
server_url = "http://unittest:gottatest@127.0.0.1:1337/XMLRPC"
s = xmlrpclib.Server(server_url, allow_none=1);
ad = { 'authoritative_source': 'nipap' }
nipap_bin = '../nipap-cli/nipap'
class NipapCliTest(unittest.TestCase):
""" Tests the NIPAP CLI
We presume the database is empty
"""
maxDiff = None
logger = None
cfg = None
nipap = None
def setUp(self):
# logging
self.logger = logging.getLogger(self.__class__.__name__)
# NIPAP
self.cfg = NipapConfig('/etc/nipap/nipap.conf')
self.nipap = Nipap()
# create dummy auth object
# As the authentication is performed before the query hits the Nipap
# class, it does not matter what user we use here
self.auth = SqliteAuth('local', 'unittest', 'unittest', 'unittest')
self.auth.authenticated_as = 'unittest'
self.auth.full_name = 'Unit test'
# have to delete hosts before we can delete the rest
self.nipap._execute("DELETE FROM ip_net_plan WHERE masklen(prefix) = 32")
# the rest
self.nipap._execute("DELETE FROM ip_net_plan")
# delete all except for the default VRF with id 0
self.nipap._execute("DELETE FROM ip_net_vrf WHERE id > 0")
# set default info for VRF 0
self.nipap._execute("UPDATE ip_net_vrf SET name = 'default', description = 'The default VRF, typically the Internet.' WHERE id = 0")
self.nipap._execute("DELETE FROM ip_net_pool")
self.nipap._execute("DELETE FROM ip_net_asn")
def _mangle_prefix_result(self, res):
""" Mangle prefix result for easier testing
We can never predict the values of things like the ID (okay, that
one is actually kind of doable) or the added and last_modified
timestamp. This function will make sure the values are present but
then strip them to make it easier to test against an expected
result.
"""
if isinstance(res, list):
# res from list_prefix
for p in res:
self.assertIn('added', p)
self.assertIn('last_modified', p)
del(p['added'])
del(p['last_modified'])
del(p['total_addresses'])
del(p['used_addresses'])
del(p['free_addresses'])
elif isinstance(res, dict) and 'result' in res:
# res from smart search
for p in res['result']:
self.assertIn('added', p)
self.assertIn('last_modified', p)
del(p['added'])
del(p['last_modified'])
del(p['total_addresses'])
del(p['used_addresses'])
del(p['free_addresses'])
elif isinstance(res, dict):
# just one single prefix
self.assertIn('added', p)
self.assertIn('last_modified', p)
del(p['added'])
del(p['last_modified'])
del(res['total_addresses'])
del(res['used_addresses'])
del(res['free_addresses'])
return res
def _run_cmd(self, cmd):
""" Run a command
"""
import subprocess
return subprocess.check_output(cmd)
def test_prefix_add_list(self):
""" Add a prefix and verify result in database
"""
ref = {
'prefix': '1.3.3.0/24',
'type': 'assignment',
'status': 'assigned',
'description': 'foo description',
'comment': 'comment bar',
'country': 'AB',
'alarm_priority': 'high',
'monitor': 'true',
'order_id': '123',
'customer_id': '66'
}
cmd = [nipap_bin, 'address', 'add']
for key in ref:
cmd.append(key)
cmd.append(ref[key])
ref['display_prefix'] = '1.3.3.0/24'
ref['indent'] = 0
ref['family'] = 4
ref['monitor'] = True
ref['pool_id'] = None
ref['pool_name'] = None
ref['vrf_id'] = 0
ref['vrf_name'] = 'default'
ref['vrf_rt'] = None
ref['external_key'] = None
ref['node'] = None
ref['authoritative_source'] = 'nipap'
ref['vlan'] = None
ref['inherited_tags'] = []
ref['tags'] = []
ref['avps'] = {}
ref['expires'] = None
self._run_cmd(cmd)
res = self._mangle_prefix_result(s.list_prefix({ 'auth': ad, 'spec': {} }))
del(res[0]['id'])
self.assertEqual(res, [ ref, ])
if __name__ == '__main__':
# set up logging
log = logging.getLogger()
logging.basicConfig()
log.setLevel(logging.INFO)
if sys.version_info >= (2,7):
unittest.main(verbosity=2)
else:
unittest.main()
| 29.438202 | 140 | 0.55687 |
import logging
import unittest
import sys
sys.path.append('../nipap/')
from nipap.backend import Nipap
from nipap.authlib import SqliteAuth
from nipap.nipapconfig import NipapConfig
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
log_format = "%(levelname)-8s %(message)s"
import xmlrpclib
server_url = "http://unittest:gottatest@127.0.0.1:1337/XMLRPC"
s = xmlrpclib.Server(server_url, allow_none=1);
ad = { 'authoritative_source': 'nipap' }
nipap_bin = '../nipap-cli/nipap'
class NipapCliTest(unittest.TestCase):
maxDiff = None
logger = None
cfg = None
nipap = None
def setUp(self):
self.logger = logging.getLogger(self.__class__.__name__)
self.cfg = NipapConfig('/etc/nipap/nipap.conf')
self.nipap = Nipap()
self.auth = SqliteAuth('local', 'unittest', 'unittest', 'unittest')
self.auth.authenticated_as = 'unittest'
self.auth.full_name = 'Unit test'
self.nipap._execute("DELETE FROM ip_net_plan WHERE masklen(prefix) = 32")
self.nipap._execute("DELETE FROM ip_net_plan")
self.nipap._execute("DELETE FROM ip_net_vrf WHERE id > 0")
self.nipap._execute("UPDATE ip_net_vrf SET name = 'default', description = 'The default VRF, typically the Internet.' WHERE id = 0")
self.nipap._execute("DELETE FROM ip_net_pool")
self.nipap._execute("DELETE FROM ip_net_asn")
def _mangle_prefix_result(self, res):
if isinstance(res, list):
for p in res:
self.assertIn('added', p)
self.assertIn('last_modified', p)
del(p['added'])
del(p['last_modified'])
del(p['total_addresses'])
del(p['used_addresses'])
del(p['free_addresses'])
elif isinstance(res, dict) and 'result' in res:
for p in res['result']:
self.assertIn('added', p)
self.assertIn('last_modified', p)
del(p['added'])
del(p['last_modified'])
del(p['total_addresses'])
del(p['used_addresses'])
del(p['free_addresses'])
elif isinstance(res, dict):
self.assertIn('added', p)
self.assertIn('last_modified', p)
del(p['added'])
del(p['last_modified'])
del(res['total_addresses'])
del(res['used_addresses'])
del(res['free_addresses'])
return res
def _run_cmd(self, cmd):
import subprocess
return subprocess.check_output(cmd)
def test_prefix_add_list(self):
ref = {
'prefix': '1.3.3.0/24',
'type': 'assignment',
'status': 'assigned',
'description': 'foo description',
'comment': 'comment bar',
'country': 'AB',
'alarm_priority': 'high',
'monitor': 'true',
'order_id': '123',
'customer_id': '66'
}
cmd = [nipap_bin, 'address', 'add']
for key in ref:
cmd.append(key)
cmd.append(ref[key])
ref['display_prefix'] = '1.3.3.0/24'
ref['indent'] = 0
ref['family'] = 4
ref['monitor'] = True
ref['pool_id'] = None
ref['pool_name'] = None
ref['vrf_id'] = 0
ref['vrf_name'] = 'default'
ref['vrf_rt'] = None
ref['external_key'] = None
ref['node'] = None
ref['authoritative_source'] = 'nipap'
ref['vlan'] = None
ref['inherited_tags'] = []
ref['tags'] = []
ref['avps'] = {}
ref['expires'] = None
self._run_cmd(cmd)
res = self._mangle_prefix_result(s.list_prefix({ 'auth': ad, 'spec': {} }))
del(res[0]['id'])
self.assertEqual(res, [ ref, ])
if __name__ == '__main__':
log = logging.getLogger()
logging.basicConfig()
log.setLevel(logging.INFO)
if sys.version_info >= (2,7):
unittest.main(verbosity=2)
else:
unittest.main()
| true | true |
1c45a18a21fd6fbd0b288b2271b398a0ed9f080d | 12,540 | py | Python | napari/_qt/widgets/qt_viewer_dock_widget.py | Mishrasubha/napari | c4d1038fc3ed30dc228949cbdedf12826ec2efc2 | [
"BSD-3-Clause"
] | null | null | null | napari/_qt/widgets/qt_viewer_dock_widget.py | Mishrasubha/napari | c4d1038fc3ed30dc228949cbdedf12826ec2efc2 | [
"BSD-3-Clause"
] | 3 | 2020-11-14T08:35:18.000Z | 2021-07-26T10:06:32.000Z | napari/_qt/widgets/qt_viewer_dock_widget.py | Mishrasubha/napari | c4d1038fc3ed30dc228949cbdedf12826ec2efc2 | [
"BSD-3-Clause"
] | null | null | null | import warnings
from functools import reduce
from itertools import count
from operator import ior
from typing import List, Optional
from qtpy.QtCore import Qt
from qtpy.QtWidgets import (
QDockWidget,
QFrame,
QHBoxLayout,
QLabel,
QPushButton,
QSizePolicy,
QVBoxLayout,
QWidget,
)
from ...utils.translations import trans
from ..utils import combine_widgets, qt_signals_blocked
counter = count()
_sentinel = object()
_SHORTCUT_DEPRECATION_STRING = trans._(
'The shortcut parameter is deprecated since version 0.4.8, please use the action and shortcut manager APIs. The new action manager and shortcut API allow user configuration and localisation. (got {shortcut})',
shortcut="{shortcut}",
)
class QtViewerDockWidget(QDockWidget):
"""Wrap a QWidget in a QDockWidget and forward viewer events
Parameters
----------
qt_viewer : QtViewer
The QtViewer instance that this dock widget will belong to.
widget : QWidget
`widget` that will be added as QDockWidget's main widget.
name : str
Name of dock widget.
area : str
Side of the main window to which the new dock widget will be added.
Must be in {'left', 'right', 'top', 'bottom'}
allowed_areas : list[str], optional
Areas, relative to main window, that the widget is allowed dock.
Each item in list must be in {'left', 'right', 'top', 'bottom'}
By default, all areas are allowed.
shortcut : str, optional
Keyboard shortcut to appear in dropdown menu.
.. deprecated:: 0.4.8
The shortcut parameter is deprecated since version 0.4.8, please use
the action and shortcut manager APIs. The new action manager and
shortcut API allow user configuration and localisation.
add_vertical_stretch : bool, optional
Whether to add stretch to the bottom of vertical widgets (pushing
widgets up towards the top of the allotted area, instead of letting
them distribute across the vertical space). By default, True.
"""
def __init__(
self,
qt_viewer,
widget: QWidget,
*,
name: str = '',
area: str = 'right',
allowed_areas: Optional[List[str]] = None,
shortcut=_sentinel,
object_name: str = '',
add_vertical_stretch=True,
):
self.qt_viewer = qt_viewer
super().__init__(name)
self._parent = qt_viewer
self.name = name
areas = {
'left': Qt.LeftDockWidgetArea,
'right': Qt.RightDockWidgetArea,
'top': Qt.TopDockWidgetArea,
'bottom': Qt.BottomDockWidgetArea,
}
if area not in areas:
raise ValueError(
trans._(
'area argument must be in {areas}',
deferred=True,
areas=list(areas.keys()),
)
)
self.area = area
self.qt_area = areas[area]
if shortcut is not _sentinel:
warnings.warn(
_SHORTCUT_DEPRECATION_STRING.format(shortcut=shortcut),
FutureWarning,
stacklevel=2,
)
else:
shortcut = None
self._shortcut = shortcut
if allowed_areas:
if not isinstance(allowed_areas, (list, tuple)):
raise TypeError(
trans._(
'`allowed_areas` must be a list or tuple',
deferred=True,
)
)
if any(area not in areas for area in allowed_areas):
raise ValueError(
trans._(
'all allowed_areas argument must be in {areas}',
deferred=True,
areas=list(areas.keys()),
)
)
allowed_areas = reduce(ior, [areas[a] for a in allowed_areas])
else:
allowed_areas = Qt.AllDockWidgetAreas
self.setAllowedAreas(allowed_areas)
self.setMinimumHeight(50)
self.setMinimumWidth(50)
# FIXME:
self.setObjectName(object_name or name)
is_vertical = area in {'left', 'right'}
widget = combine_widgets(widget, vertical=is_vertical)
self.setWidget(widget)
if is_vertical and add_vertical_stretch:
self._maybe_add_vertical_stretch(widget)
self._features = self.features()
self.dockLocationChanged.connect(self._set_title_orientation)
# custom title bar
self.title = QtCustomTitleBar(self, title=self.name)
self.setTitleBarWidget(self.title)
self.visibilityChanged.connect(self._on_visibility_changed)
def destroyOnClose(self):
"""Destroys dock plugin dock widget when 'x' is clicked."""
self.qt_viewer.viewer.window.remove_dock_widget(self)
def _maybe_add_vertical_stretch(self, widget):
"""Add vertical stretch to the bottom of a vertical layout only
...if there is not already a widget that wants vertical space
(like a textedit or listwidget or something).
"""
exempt_policies = {
QSizePolicy.Expanding,
QSizePolicy.MinimumExpanding,
QSizePolicy.Ignored,
}
if widget.sizePolicy().verticalPolicy() in exempt_policies:
return
# not uncommon to see people shadow the builtin layout() method
# which breaks our ability to add vertical stretch...
try:
wlayout = widget.layout()
if wlayout is None:
return
except TypeError:
return
for i in range(wlayout.count()):
wdg = wlayout.itemAt(i).widget()
if (
wdg is not None
and wdg.sizePolicy().verticalPolicy() in exempt_policies
):
return
# not all widgets have addStretch...
if hasattr(wlayout, 'addStretch'):
wlayout.addStretch(next(counter))
@property
def shortcut(self):
warnings.warn(
_SHORTCUT_DEPRECATION_STRING,
FutureWarning,
stacklevel=2,
)
return self._shortcut
def setFeatures(self, features):
super().setFeatures(features)
self._features = self.features()
def keyPressEvent(self, event):
# if you subclass QtViewerDockWidget and override the keyPressEvent
# method, be sure to call super().keyPressEvent(event) at the end of
# your method to pass uncaught key-combinations to the viewer.
return self.qt_viewer.keyPressEvent(event)
def _set_title_orientation(self, area):
if area in (Qt.LeftDockWidgetArea, Qt.RightDockWidgetArea):
features = self._features
if features & self.DockWidgetVerticalTitleBar:
features = features ^ self.DockWidgetVerticalTitleBar
else:
features = self._features | self.DockWidgetVerticalTitleBar
self.setFeatures(features)
@property
def is_vertical(self):
if not self.isFloating():
par = self.parent()
if par and hasattr(par, 'dockWidgetArea'):
return par.dockWidgetArea(self) in (
Qt.LeftDockWidgetArea,
Qt.RightDockWidgetArea,
)
return self.size().height() > self.size().width()
def _on_visibility_changed(self, visible):
try:
actions = [
action.text()
for action in self.qt_viewer.viewer.window.plugins_menu.actions()
]
idx = actions.index(self.name)
current_action = (
self.qt_viewer.viewer.window.plugins_menu.actions()[idx]
)
current_action.setChecked(visible)
self.setVisible(visible)
except (AttributeError, ValueError):
# AttributeError: This error happens when the plugins menu is not yet built.
# ValueError: This error is when the action is from the windows menu.
pass
if not visible:
return
with qt_signals_blocked(self):
self.setTitleBarWidget(None)
if not self.isFloating():
self.title = QtCustomTitleBar(
self, title=self.name, vertical=not self.is_vertical
)
self.setTitleBarWidget(self.title)
def setWidget(self, widget):
widget._parent = self
super().setWidget(widget)
class QtCustomTitleBar(QLabel):
"""A widget to be used as the titleBar in the QtViewerDockWidget.
Keeps vertical size minimal, has a hand cursor and styles (in stylesheet)
for hover. Close and float buttons.
Parameters
----------
parent : QDockWidget
The QtViewerDockWidget to which this titlebar belongs
title : str
A string to put in the titlebar.
vertical : bool
Whether this titlebar is oriented vertically or not.
"""
def __init__(self, parent, title: str = '', vertical=False):
super().__init__(parent)
self.setObjectName("QtCustomTitleBar")
self.setProperty('vertical', str(vertical))
self.vertical = vertical
self.setToolTip(trans._('drag to move. double-click to float'))
line = QFrame(self)
line.setObjectName("QtCustomTitleBarLine")
add_close = False
try:
# if the plugins menu is already created, check to see if this is a plugin
# dock widget. If it is, then add the close button option to the title bar.
actions = [
action.text()
for action in self.parent().qt_viewer.viewer.window.plugins_menu.actions()
]
if self.parent().name in actions:
add_close = True
self.close_button = QPushButton(self)
self.close_button.setToolTip(trans._('close this panel'))
self.close_button.setObjectName("QTitleBarCloseButton")
self.close_button.setCursor(Qt.ArrowCursor)
self.close_button.clicked.connect(
lambda: self.parent().destroyOnClose()
)
else:
add_close = False
except AttributeError:
pass
self.hide_button = QPushButton(self)
self.hide_button.setToolTip(trans._('hide this panel'))
self.hide_button.setObjectName("QTitleBarHideButton")
self.hide_button.setCursor(Qt.ArrowCursor)
self.hide_button.clicked.connect(lambda: self.parent().close())
self.float_button = QPushButton(self)
self.float_button.setToolTip(trans._('float this panel'))
self.float_button.setObjectName("QTitleBarFloatButton")
self.float_button.setCursor(Qt.ArrowCursor)
self.float_button.clicked.connect(
lambda: self.parent().setFloating(not self.parent().isFloating())
)
self.title = QLabel(title, self)
self.title.setSizePolicy(
QSizePolicy(QSizePolicy.Policy.Maximum, QSizePolicy.Policy.Maximum)
)
if vertical:
layout = QVBoxLayout()
layout.setSpacing(4)
layout.setContentsMargins(0, 8, 0, 8)
line.setFixedWidth(1)
if add_close:
layout.addWidget(self.close_button, 0, Qt.AlignHCenter)
layout.addWidget(self.hide_button, 0, Qt.AlignHCenter)
layout.addWidget(self.float_button, 0, Qt.AlignHCenter)
layout.addWidget(line, 0, Qt.AlignHCenter)
self.title.hide()
else:
layout = QHBoxLayout()
layout.setSpacing(4)
layout.setContentsMargins(8, 1, 8, 0)
line.setFixedHeight(1)
if add_close:
layout.addWidget(self.close_button)
layout.addWidget(self.hide_button)
layout.addWidget(self.float_button)
layout.addWidget(line)
layout.addWidget(self.title)
self.setLayout(layout)
self.setCursor(Qt.OpenHandCursor)
def sizeHint(self):
# this seems to be the correct way to set the height of the titlebar
szh = super().sizeHint()
if self.vertical:
szh.setWidth(20)
else:
szh.setHeight(20)
return szh
| 35.12605 | 213 | 0.596332 | import warnings
from functools import reduce
from itertools import count
from operator import ior
from typing import List, Optional
from qtpy.QtCore import Qt
from qtpy.QtWidgets import (
QDockWidget,
QFrame,
QHBoxLayout,
QLabel,
QPushButton,
QSizePolicy,
QVBoxLayout,
QWidget,
)
from ...utils.translations import trans
from ..utils import combine_widgets, qt_signals_blocked
counter = count()
_sentinel = object()
_SHORTCUT_DEPRECATION_STRING = trans._(
'The shortcut parameter is deprecated since version 0.4.8, please use the action and shortcut manager APIs. The new action manager and shortcut API allow user configuration and localisation. (got {shortcut})',
shortcut="{shortcut}",
)
class QtViewerDockWidget(QDockWidget):
def __init__(
self,
qt_viewer,
widget: QWidget,
*,
name: str = '',
area: str = 'right',
allowed_areas: Optional[List[str]] = None,
shortcut=_sentinel,
object_name: str = '',
add_vertical_stretch=True,
):
self.qt_viewer = qt_viewer
super().__init__(name)
self._parent = qt_viewer
self.name = name
areas = {
'left': Qt.LeftDockWidgetArea,
'right': Qt.RightDockWidgetArea,
'top': Qt.TopDockWidgetArea,
'bottom': Qt.BottomDockWidgetArea,
}
if area not in areas:
raise ValueError(
trans._(
'area argument must be in {areas}',
deferred=True,
areas=list(areas.keys()),
)
)
self.area = area
self.qt_area = areas[area]
if shortcut is not _sentinel:
warnings.warn(
_SHORTCUT_DEPRECATION_STRING.format(shortcut=shortcut),
FutureWarning,
stacklevel=2,
)
else:
shortcut = None
self._shortcut = shortcut
if allowed_areas:
if not isinstance(allowed_areas, (list, tuple)):
raise TypeError(
trans._(
'`allowed_areas` must be a list or tuple',
deferred=True,
)
)
if any(area not in areas for area in allowed_areas):
raise ValueError(
trans._(
'all allowed_areas argument must be in {areas}',
deferred=True,
areas=list(areas.keys()),
)
)
allowed_areas = reduce(ior, [areas[a] for a in allowed_areas])
else:
allowed_areas = Qt.AllDockWidgetAreas
self.setAllowedAreas(allowed_areas)
self.setMinimumHeight(50)
self.setMinimumWidth(50)
self.setObjectName(object_name or name)
is_vertical = area in {'left', 'right'}
widget = combine_widgets(widget, vertical=is_vertical)
self.setWidget(widget)
if is_vertical and add_vertical_stretch:
self._maybe_add_vertical_stretch(widget)
self._features = self.features()
self.dockLocationChanged.connect(self._set_title_orientation)
self.title = QtCustomTitleBar(self, title=self.name)
self.setTitleBarWidget(self.title)
self.visibilityChanged.connect(self._on_visibility_changed)
def destroyOnClose(self):
self.qt_viewer.viewer.window.remove_dock_widget(self)
def _maybe_add_vertical_stretch(self, widget):
exempt_policies = {
QSizePolicy.Expanding,
QSizePolicy.MinimumExpanding,
QSizePolicy.Ignored,
}
if widget.sizePolicy().verticalPolicy() in exempt_policies:
return
try:
wlayout = widget.layout()
if wlayout is None:
return
except TypeError:
return
for i in range(wlayout.count()):
wdg = wlayout.itemAt(i).widget()
if (
wdg is not None
and wdg.sizePolicy().verticalPolicy() in exempt_policies
):
return
if hasattr(wlayout, 'addStretch'):
wlayout.addStretch(next(counter))
@property
def shortcut(self):
warnings.warn(
_SHORTCUT_DEPRECATION_STRING,
FutureWarning,
stacklevel=2,
)
return self._shortcut
def setFeatures(self, features):
super().setFeatures(features)
self._features = self.features()
def keyPressEvent(self, event):
return self.qt_viewer.keyPressEvent(event)
def _set_title_orientation(self, area):
if area in (Qt.LeftDockWidgetArea, Qt.RightDockWidgetArea):
features = self._features
if features & self.DockWidgetVerticalTitleBar:
features = features ^ self.DockWidgetVerticalTitleBar
else:
features = self._features | self.DockWidgetVerticalTitleBar
self.setFeatures(features)
@property
def is_vertical(self):
if not self.isFloating():
par = self.parent()
if par and hasattr(par, 'dockWidgetArea'):
return par.dockWidgetArea(self) in (
Qt.LeftDockWidgetArea,
Qt.RightDockWidgetArea,
)
return self.size().height() > self.size().width()
def _on_visibility_changed(self, visible):
try:
actions = [
action.text()
for action in self.qt_viewer.viewer.window.plugins_menu.actions()
]
idx = actions.index(self.name)
current_action = (
self.qt_viewer.viewer.window.plugins_menu.actions()[idx]
)
current_action.setChecked(visible)
self.setVisible(visible)
except (AttributeError, ValueError):
pass
if not visible:
return
with qt_signals_blocked(self):
self.setTitleBarWidget(None)
if not self.isFloating():
self.title = QtCustomTitleBar(
self, title=self.name, vertical=not self.is_vertical
)
self.setTitleBarWidget(self.title)
def setWidget(self, widget):
widget._parent = self
super().setWidget(widget)
class QtCustomTitleBar(QLabel):
def __init__(self, parent, title: str = '', vertical=False):
super().__init__(parent)
self.setObjectName("QtCustomTitleBar")
self.setProperty('vertical', str(vertical))
self.vertical = vertical
self.setToolTip(trans._('drag to move. double-click to float'))
line = QFrame(self)
line.setObjectName("QtCustomTitleBarLine")
add_close = False
try:
actions = [
action.text()
for action in self.parent().qt_viewer.viewer.window.plugins_menu.actions()
]
if self.parent().name in actions:
add_close = True
self.close_button = QPushButton(self)
self.close_button.setToolTip(trans._('close this panel'))
self.close_button.setObjectName("QTitleBarCloseButton")
self.close_button.setCursor(Qt.ArrowCursor)
self.close_button.clicked.connect(
lambda: self.parent().destroyOnClose()
)
else:
add_close = False
except AttributeError:
pass
self.hide_button = QPushButton(self)
self.hide_button.setToolTip(trans._('hide this panel'))
self.hide_button.setObjectName("QTitleBarHideButton")
self.hide_button.setCursor(Qt.ArrowCursor)
self.hide_button.clicked.connect(lambda: self.parent().close())
self.float_button = QPushButton(self)
self.float_button.setToolTip(trans._('float this panel'))
self.float_button.setObjectName("QTitleBarFloatButton")
self.float_button.setCursor(Qt.ArrowCursor)
self.float_button.clicked.connect(
lambda: self.parent().setFloating(not self.parent().isFloating())
)
self.title = QLabel(title, self)
self.title.setSizePolicy(
QSizePolicy(QSizePolicy.Policy.Maximum, QSizePolicy.Policy.Maximum)
)
if vertical:
layout = QVBoxLayout()
layout.setSpacing(4)
layout.setContentsMargins(0, 8, 0, 8)
line.setFixedWidth(1)
if add_close:
layout.addWidget(self.close_button, 0, Qt.AlignHCenter)
layout.addWidget(self.hide_button, 0, Qt.AlignHCenter)
layout.addWidget(self.float_button, 0, Qt.AlignHCenter)
layout.addWidget(line, 0, Qt.AlignHCenter)
self.title.hide()
else:
layout = QHBoxLayout()
layout.setSpacing(4)
layout.setContentsMargins(8, 1, 8, 0)
line.setFixedHeight(1)
if add_close:
layout.addWidget(self.close_button)
layout.addWidget(self.hide_button)
layout.addWidget(self.float_button)
layout.addWidget(line)
layout.addWidget(self.title)
self.setLayout(layout)
self.setCursor(Qt.OpenHandCursor)
def sizeHint(self):
    """Return the base widget's size hint with the cross-axis fixed to 20 px.

    The title bar is kept thin: 20 px wide when the bar is vertical,
    20 px tall when it is horizontal; the other dimension is whatever
    the base class computed.
    """
    hint = super().sizeHint()
    # Pin only the axis across the bar; leave the long axis untouched.
    if self.vertical:
        hint.setWidth(20)
    else:
        hint.setHeight(20)
    return hint
| true | true |
1c45a1a090a13d50476e4eb2e61b77dfeabe3a7e | 22,311 | py | Python | test/functional/importmulti.py | DeepPool/test | c6d99f019667ea4bf51139adff2a98d46c0015ed | [
"MIT"
] | null | null | null | test/functional/importmulti.py | DeepPool/test | c6d99f019667ea4bf51139adff2a98d46c0015ed | [
"MIT"
] | null | null | null | test/functional/importmulti.py | DeepPool/test | c6d99f019667ea4bf51139adff2a98d46c0015ed | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the importmulti RPC."""
from test_framework.test_framework import DietBitcoinTestFramework
from test_framework.util import *
class ImportMultiTest (DietBitcoinTestFramework):
    """Functional tests for the ``importmulti`` RPC.

    Exercises importmulti with every supported combination of
    address/raw-scriptPubKey targets, public keys, private keys, redeem
    scripts and the ``internal`` / ``watchonly`` flags, asserting both
    the per-request RPC result and the resulting wallet state (via
    ``validateaddress`` and ``listunspent``).
    """

    def set_test_params(self):
        # Two nodes on a fresh regtest chain: node 0 owns the keys,
        # node 1 performs the imports.
        self.num_nodes = 2
        self.setup_clean_chain = True

    def setup_network(self):
        # Start the nodes only; no connect calls are made here, so each
        # node mines on its own chain.
        self.setup_nodes()

    def run_test (self):
        """Drive importmulti through its success and failure scenarios."""
        self.log.info("Mining blocks...")
        self.nodes[0].generate(1)
        self.nodes[1].generate(1)
        # Median-time-past of node 1's tip; successful imports with
        # "timestamp": "now" are expected to record this value.
        timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']

        # keyword definition
        # NOTE(review): these four constants are never used below.
        PRIV_KEY = 'privkey'
        PUB_KEY = 'pubkey'
        ADDRESS_KEY = 'address'
        SCRIPT_KEY = 'script'

        node0_address1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        node0_address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        node0_address3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())

        # Check only one address
        assert_equal(node0_address1['ismine'], True)

        # Node 1 sync test (nodes are not connected: node 1 only has
        # the single block it mined itself)
        assert_equal(self.nodes[1].getblockcount(),1)

        # Address Test - before import: node 1 must not know the key yet
        address_info = self.nodes[1].validateaddress(node0_address1['address'])
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)

        # RPC importmulti -----------------------------------------------

        # DietBitcoin Address
        self.log.info("Should import an address")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": address['address']
            },
            "timestamp": "now",
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], True)
        assert_equal(address_assert['ismine'], False)
        assert_equal(address_assert['timestamp'], timestamp)
        # Remember this watch-only import so later sections can verify
        # its timestamp gets replaced and survives a node restart.
        watchonly_address = address['address']
        watchonly_timestamp = timestamp

        self.log.info("Should not import an invalid address")
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": "not valid address",
            },
            "timestamp": "now",
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -5)
        assert_equal(result[0]['error']['message'], 'Invalid address')

        # ScriptPubKey + internal
        self.log.info("Should import a scriptPubKey with internal flag")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
            "internal": True
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], True)
        assert_equal(address_assert['ismine'], False)
        assert_equal(address_assert['timestamp'], timestamp)

        # ScriptPubKey + !internal: a raw hex scriptPubKey is rejected
        # unless "internal" is set.
        self.log.info("Should not import a scriptPubKey without internal flag")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -8)
        assert_equal(result[0]['error']['message'], 'Internal must be set for hex scriptPubKey')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)

        # Address + Public key + !Internal
        self.log.info("Should import an address with public key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": address['address']
            },
            "timestamp": "now",
            "pubkeys": [ address['pubkey'] ]
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], True)
        assert_equal(address_assert['ismine'], False)
        assert_equal(address_assert['timestamp'], timestamp)

        # ScriptPubKey + Public key + internal
        self.log.info("Should import a scriptPubKey with internal and with public key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        request = [{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
            "pubkeys": [ address['pubkey'] ],
            "internal": True
        }]
        result = self.nodes[1].importmulti(request)
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], True)
        assert_equal(address_assert['ismine'], False)
        assert_equal(address_assert['timestamp'], timestamp)

        # ScriptPubKey + Public key + !internal
        self.log.info("Should not import a scriptPubKey without internal and with public key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        request = [{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
            "pubkeys": [ address['pubkey'] ]
        }]
        result = self.nodes[1].importmulti(request)
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -8)
        assert_equal(result[0]['error']['message'], 'Internal must be set for hex scriptPubKey')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)

        # Address + Private key + !watchonly: importing the key makes
        # the address fully "mine", not watch-only.
        self.log.info("Should import an address with private key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": address['address']
            },
            "timestamp": "now",
            "keys": [ self.nodes[0].dumpprivkey(address['address']) ]
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], True)
        assert_equal(address_assert['timestamp'], timestamp)

        self.log.info("Should not import an address with private key if is already imported")
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": address['address']
            },
            "timestamp": "now",
            "keys": [ self.nodes[0].dumpprivkey(address['address']) ]
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -4)
        assert_equal(result[0]['error']['message'], 'The wallet already contains the private key for this address or script')

        # Address + Private key + watchonly: the two options conflict.
        self.log.info("Should not import an address with private key and with watchonly")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": address['address']
            },
            "timestamp": "now",
            "keys": [ self.nodes[0].dumpprivkey(address['address']) ],
            "watchonly": True
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -8)
        assert_equal(result[0]['error']['message'], 'Incompatibility found between watchonly and keys')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)

        # ScriptPubKey + Private key + internal
        self.log.info("Should import a scriptPubKey with internal and with private key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
            "keys": [ self.nodes[0].dumpprivkey(address['address']) ],
            "internal": True
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], True)
        assert_equal(address_assert['timestamp'], timestamp)

        # ScriptPubKey + Private key + !internal
        self.log.info("Should not import a scriptPubKey without internal and with private key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
            "keys": [ self.nodes[0].dumpprivkey(address['address']) ]
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -8)
        assert_equal(result[0]['error']['message'], 'Internal must be set for hex scriptPubKey')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)

        # P2SH address
        sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
        # Mine 100 blocks (presumably to mature node 1's coinbase so it
        # has spendable funds — TODO confirm maturity window), then fund
        # the multisig script.
        self.nodes[1].generate(100)
        transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
        self.nodes[1].generate(1)
        timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
        # NOTE(review): `transaction` is fetched but never used (here
        # and in the three analogous sections below).
        transaction = self.nodes[1].gettransaction(transactionid)

        self.log.info("Should import a p2sh")
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": multi_sig_script['address']
            },
            "timestamp": "now",
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(multi_sig_script['address'])
        assert_equal(address_assert['isscript'], True)
        assert_equal(address_assert['iswatchonly'], True)
        assert_equal(address_assert['timestamp'], timestamp)
        # Without the redeem script the output is neither spendable nor
        # solvable.
        p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0]
        assert_equal(p2shunspent['spendable'], False)
        assert_equal(p2shunspent['solvable'], False)

        # P2SH + Redeem script
        sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
        self.nodes[1].generate(100)
        transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
        self.nodes[1].generate(1)
        timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
        transaction = self.nodes[1].gettransaction(transactionid)

        self.log.info("Should import a p2sh with respective redeem script")
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": multi_sig_script['address']
            },
            "timestamp": "now",
            "redeemscript": multi_sig_script['redeemScript']
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(multi_sig_script['address'])
        assert_equal(address_assert['timestamp'], timestamp)
        # Redeem script known but no keys: solvable, still not spendable.
        p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0]
        assert_equal(p2shunspent['spendable'], False)
        assert_equal(p2shunspent['solvable'], True)

        # P2SH + Redeem script + Private Keys + !Watchonly
        sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
        self.nodes[1].generate(100)
        transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
        self.nodes[1].generate(1)
        timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
        transaction = self.nodes[1].gettransaction(transactionid)

        self.log.info("Should import a p2sh with respective redeem script and private keys")
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": multi_sig_script['address']
            },
            "timestamp": "now",
            "redeemscript": multi_sig_script['redeemScript'],
            "keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address'])]
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(multi_sig_script['address'])
        assert_equal(address_assert['timestamp'], timestamp)
        p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0]
        assert_equal(p2shunspent['spendable'], False)
        assert_equal(p2shunspent['solvable'], True)

        # P2SH + Redeem script + Private Keys + Watchonly: keys together
        # with watchonly must be rejected.
        sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
        self.nodes[1].generate(100)
        transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
        self.nodes[1].generate(1)
        timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
        transaction = self.nodes[1].gettransaction(transactionid)

        # NOTE(review): this log message is copied from the success case
        # above but this section asserts a failure.
        self.log.info("Should import a p2sh with respective redeem script and private keys")
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": multi_sig_script['address']
            },
            "timestamp": "now",
            "redeemscript": multi_sig_script['redeemScript'],
            "keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address'])],
            "watchonly": True
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -8)
        assert_equal(result[0]['error']['message'], 'Incompatibility found between watchonly and keys')

        # Address + Public key + !Internal + Wrong pubkey
        self.log.info("Should not import an address with a wrong public key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": address['address']
            },
            "timestamp": "now",
            "pubkeys": [ address2['pubkey'] ]
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -5)
        assert_equal(result[0]['error']['message'], 'Consistency check failed')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)

        # ScriptPubKey + Public key + internal + Wrong pubkey
        self.log.info("Should not import a scriptPubKey with internal and with a wrong public key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        request = [{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
            "pubkeys": [ address2['pubkey'] ],
            "internal": True
        }]
        result = self.nodes[1].importmulti(request)
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -5)
        assert_equal(result[0]['error']['message'], 'Consistency check failed')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)

        # Address + Private key + !watchonly + Wrong private key
        self.log.info("Should not import an address with a wrong private key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": address['address']
            },
            "timestamp": "now",
            "keys": [ self.nodes[0].dumpprivkey(address2['address']) ]
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -5)
        assert_equal(result[0]['error']['message'], 'Consistency check failed')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)

        # ScriptPubKey + Private key + internal + Wrong private key
        self.log.info("Should not import a scriptPubKey with internal and with a wrong private key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
            "keys": [ self.nodes[0].dumpprivkey(address2['address']) ],
            "internal": True
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -5)
        assert_equal(result[0]['error']['message'], 'Consistency check failed')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)

        # Importing existing watch only address with new timestamp should replace saved timestamp.
        assert_greater_than(timestamp, watchonly_timestamp)
        self.log.info("Should replace previously saved watch only timestamp.")
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": watchonly_address,
            },
            "timestamp": "now",
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(watchonly_address)
        assert_equal(address_assert['iswatchonly'], True)
        assert_equal(address_assert['ismine'], False)
        assert_equal(address_assert['timestamp'], timestamp)
        watchonly_timestamp = timestamp

        # restart nodes to check for proper serialization/deserialization of watch only address
        self.stop_nodes()
        self.start_nodes()
        address_assert = self.nodes[1].validateaddress(watchonly_address)
        assert_equal(address_assert['iswatchonly'], True)
        assert_equal(address_assert['ismine'], False)
        assert_equal(address_assert['timestamp'], watchonly_timestamp)

        # Bad or missing timestamps
        self.log.info("Should throw on invalid or missing timestamp values")
        assert_raises_message(JSONRPCException, 'Missing required timestamp field for key',
            self.nodes[1].importmulti, [{
                "scriptPubKey": address['scriptPubKey'],
            }])
        assert_raises_message(JSONRPCException, 'Expected number or "now" timestamp value for key. got type string',
            self.nodes[1].importmulti, [{
                "scriptPubKey": address['scriptPubKey'],
                "timestamp": "",
            }])
if __name__ == '__main__':
    # Script entry point: run the importmulti test suite.
    ImportMultiTest().main()
| 48.084052 | 137 | 0.631841 |
from test_framework.test_framework import DietBitcoinTestFramework
from test_framework.util import *
class ImportMultiTest (DietBitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
def setup_network(self):
self.setup_nodes()
def run_test (self):
self.log.info("Mining blocks...")
self.nodes[0].generate(1)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
PRIV_KEY = 'privkey'
PUB_KEY = 'pubkey'
ADDRESS_KEY = 'address'
SCRIPT_KEY = 'script'
node0_address1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
node0_address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
node0_address3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
assert_equal(node0_address1['ismine'], True)
assert_equal(self.nodes[1].getblockcount(),1)
address_info = self.nodes[1].validateaddress(node0_address1['address'])
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], False)
self.log.info("Should import an address")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": address['address']
},
"timestamp": "now",
}])
assert_equal(result[0]['success'], True)
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], True)
assert_equal(address_assert['ismine'], False)
assert_equal(address_assert['timestamp'], timestamp)
watchonly_address = address['address']
watchonly_timestamp = timestamp
self.log.info("Should not import an invalid address")
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": "not valid address",
},
"timestamp": "now",
}])
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -5)
assert_equal(result[0]['error']['message'], 'Invalid address')
self.log.info("Should import a scriptPubKey with internal flag")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
"internal": True
}])
assert_equal(result[0]['success'], True)
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], True)
assert_equal(address_assert['ismine'], False)
assert_equal(address_assert['timestamp'], timestamp)
self.log.info("Should not import a scriptPubKey without internal flag")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
}])
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -8)
assert_equal(result[0]['error']['message'], 'Internal must be set for hex scriptPubKey')
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
self.log.info("Should import an address with public key")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": address['address']
},
"timestamp": "now",
"pubkeys": [ address['pubkey'] ]
}])
assert_equal(result[0]['success'], True)
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], True)
assert_equal(address_assert['ismine'], False)
assert_equal(address_assert['timestamp'], timestamp)
self.log.info("Should import a scriptPubKey with internal and with public key")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
request = [{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
"pubkeys": [ address['pubkey'] ],
"internal": True
}]
result = self.nodes[1].importmulti(request)
assert_equal(result[0]['success'], True)
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], True)
assert_equal(address_assert['ismine'], False)
assert_equal(address_assert['timestamp'], timestamp)
self.log.info("Should not import a scriptPubKey without internal and with public key")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
request = [{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
"pubkeys": [ address['pubkey'] ]
}]
result = self.nodes[1].importmulti(request)
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -8)
assert_equal(result[0]['error']['message'], 'Internal must be set for hex scriptPubKey')
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
self.log.info("Should import an address with private key")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": address['address']
},
"timestamp": "now",
"keys": [ self.nodes[0].dumpprivkey(address['address']) ]
}])
assert_equal(result[0]['success'], True)
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], True)
assert_equal(address_assert['timestamp'], timestamp)
self.log.info("Should not import an address with private key if is already imported")
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": address['address']
},
"timestamp": "now",
"keys": [ self.nodes[0].dumpprivkey(address['address']) ]
}])
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -4)
assert_equal(result[0]['error']['message'], 'The wallet already contains the private key for this address or script')
self.log.info("Should not import an address with private key and with watchonly")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": address['address']
},
"timestamp": "now",
"keys": [ self.nodes[0].dumpprivkey(address['address']) ],
"watchonly": True
}])
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -8)
assert_equal(result[0]['error']['message'], 'Incompatibility found between watchonly and keys')
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
self.log.info("Should import a scriptPubKey with internal and with private key")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
"keys": [ self.nodes[0].dumpprivkey(address['address']) ],
"internal": True
}])
assert_equal(result[0]['success'], True)
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], True)
assert_equal(address_assert['timestamp'], timestamp)
self.log.info("Should not import a scriptPubKey without internal and with private key")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
"keys": [ self.nodes[0].dumpprivkey(address['address']) ]
}])
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -8)
assert_equal(result[0]['error']['message'], 'Internal must be set for hex scriptPubKey')
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
self.nodes[1].generate(100)
transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
transaction = self.nodes[1].gettransaction(transactionid)
self.log.info("Should import a p2sh")
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": multi_sig_script['address']
},
"timestamp": "now",
}])
assert_equal(result[0]['success'], True)
address_assert = self.nodes[1].validateaddress(multi_sig_script['address'])
assert_equal(address_assert['isscript'], True)
assert_equal(address_assert['iswatchonly'], True)
assert_equal(address_assert['timestamp'], timestamp)
p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0]
assert_equal(p2shunspent['spendable'], False)
assert_equal(p2shunspent['solvable'], False)
sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
self.nodes[1].generate(100)
transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
transaction = self.nodes[1].gettransaction(transactionid)
self.log.info("Should import a p2sh with respective redeem script")
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": multi_sig_script['address']
},
"timestamp": "now",
"redeemscript": multi_sig_script['redeemScript']
}])
assert_equal(result[0]['success'], True)
address_assert = self.nodes[1].validateaddress(multi_sig_script['address'])
assert_equal(address_assert['timestamp'], timestamp)
p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0]
assert_equal(p2shunspent['spendable'], False)
assert_equal(p2shunspent['solvable'], True)
sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
self.nodes[1].generate(100)
transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
transaction = self.nodes[1].gettransaction(transactionid)
self.log.info("Should import a p2sh with respective redeem script and private keys")
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": multi_sig_script['address']
},
"timestamp": "now",
"redeemscript": multi_sig_script['redeemScript'],
"keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address'])]
}])
assert_equal(result[0]['success'], True)
address_assert = self.nodes[1].validateaddress(multi_sig_script['address'])
assert_equal(address_assert['timestamp'], timestamp)
p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0]
assert_equal(p2shunspent['spendable'], False)
assert_equal(p2shunspent['solvable'], True)
sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
self.nodes[1].generate(100)
transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
transaction = self.nodes[1].gettransaction(transactionid)
self.log.info("Should import a p2sh with respective redeem script and private keys")
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": multi_sig_script['address']
},
"timestamp": "now",
"redeemscript": multi_sig_script['redeemScript'],
"keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address'])],
"watchonly": True
}])
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -8)
assert_equal(result[0]['error']['message'], 'Incompatibility found between watchonly and keys')
self.log.info("Should not import an address with a wrong public key")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": address['address']
},
"timestamp": "now",
"pubkeys": [ address2['pubkey'] ]
}])
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -5)
assert_equal(result[0]['error']['message'], 'Consistency check failed')
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
self.log.info("Should not import a scriptPubKey with internal and with a wrong public key")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
request = [{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
"pubkeys": [ address2['pubkey'] ],
"internal": True
}]
result = self.nodes[1].importmulti(request)
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -5)
assert_equal(result[0]['error']['message'], 'Consistency check failed')
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
self.log.info("Should not import an address with a wrong private key")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": address['address']
},
"timestamp": "now",
"keys": [ self.nodes[0].dumpprivkey(address2['address']) ]
}])
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -5)
assert_equal(result[0]['error']['message'], 'Consistency check failed')
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
self.log.info("Should not import a scriptPubKey with internal and with a wrong private key")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
"keys": [ self.nodes[0].dumpprivkey(address2['address']) ],
"internal": True
}])
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -5)
assert_equal(result[0]['error']['message'], 'Consistency check failed')
address_assert = self.nodes[1].validateaddress(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
assert_greater_than(timestamp, watchonly_timestamp)
self.log.info("Should replace previously saved watch only timestamp.")
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": watchonly_address,
},
"timestamp": "now",
}])
assert_equal(result[0]['success'], True)
address_assert = self.nodes[1].validateaddress(watchonly_address)
assert_equal(address_assert['iswatchonly'], True)
assert_equal(address_assert['ismine'], False)
assert_equal(address_assert['timestamp'], timestamp)
watchonly_timestamp = timestamp
self.stop_nodes()
self.start_nodes()
address_assert = self.nodes[1].validateaddress(watchonly_address)
assert_equal(address_assert['iswatchonly'], True)
assert_equal(address_assert['ismine'], False)
assert_equal(address_assert['timestamp'], watchonly_timestamp)
self.log.info("Should throw on invalid or missing timestamp values")
assert_raises_message(JSONRPCException, 'Missing required timestamp field for key',
self.nodes[1].importmulti, [{
"scriptPubKey": address['scriptPubKey'],
}])
assert_raises_message(JSONRPCException, 'Expected number or "now" timestamp value for key. got type string',
self.nodes[1].importmulti, [{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "",
}])
# Entry point: run the importmulti functional test when executed directly.
if __name__ == '__main__':
    ImportMultiTest().main()
| true | true |
1c45a2de98069c080d2cca90e61524a21453a51c | 1,957 | py | Python | examples/get-started/play_mp3/example_test.py | kigor302/esp-adf | 7feaf6c4b23d2a06850f96c302eebb814516239c | [
"MIT-0"
] | 12 | 2021-04-15T14:15:27.000Z | 2022-01-17T03:40:35.000Z | examples/get-started/play_mp3/example_test.py | Tianxiaomo/esp-adf | fae539c3035b2c041f49c5b01cdc4c99038595b0 | [
"MIT-0"
] | 2 | 2021-04-03T22:00:11.000Z | 2021-10-03T18:27:39.000Z | examples/get-started/play_mp3/example_test.py | Tianxiaomo/esp-adf | fae539c3035b2c041f49c5b01cdc4c99038595b0 | [
"MIT-0"
] | 4 | 2021-06-22T10:08:07.000Z | 2021-11-17T23:21:04.000Z | import os
import sys
# this is a test case written with tiny-test-fw.
# to run test cases outside tiny-test-fw,
# we need to set environment variable `TEST_FW_PATH`,
# then get and insert `TEST_FW_PATH` to sys path before import FW module
test_fw_path = os.getenv("TEST_FW_PATH")
if test_fw_path and test_fw_path not in sys.path:
sys.path.insert(0, test_fw_path)
auto_test_path = os.getenv("AUTO_TEST_PATH")
if auto_test_path and auto_test_path not in sys.path:
sys.path.insert(0, auto_test_path)
import TinyFW
import NormalProject
from NormalProject.ProjectDUT import ProDUT
from NormalProject.ProjectApp import Example
from BasicUtility.RecordAudioFile import AudioRecord
import ADFExampleTest
@NormalProject.example_test(env_tag="Example_AUDIO_PLAY", ignore=True)
@ADFExampleTest.play_test(os.path.join(os.getenv("ADF_PATH"), "examples/get-started/play_mp3/main/adf_music.mp3"),
                          os.path.join(os.getenv("ADF_PATH"), "examples/get-started/play_mp3/main/dest.wav"))
def example_test_play_mp3(env, extra_data):
    """Flash the play_mp3 example onto a DUT and verify its console output.

    The decorators register this test with the test framework and compare the
    recorded audio (dest.wav) against the reference mp3 — assumes ADF_PATH is
    set in the environment (TODO confirm the runner guarantees this).
    """
    # Build the example from the ADF tree and obtain the device under test.
    dut1 = env.get_dut("play_mp3", "examples/get-started/play_mp3", pro_path=os.getenv("ADF_PATH"))
    # start test
    dut1.start_app()
    dut1.reset()
    # Each expect() below waits for the corresponding log line printed by the
    # example's main(); the first and last use longer timeouts because they
    # bracket codec init and full playback respectively.
    dut1.expect("[ 1 ] Start audio codec chip", timeout=30)
    dut1.expect("[ 2 ] Create audio pipeline, add all elements to pipeline, and subscribe pipeline event")
    dut1.expect("[2.1] Create mp3 decoder to decode mp3 file and set custom read callback")
    dut1.expect("[2.2] Create i2s stream to write data to codec chip")
    dut1.expect("[2.3] Register all elements to audio pipeline")
    dut1.expect("[2.4] Link it together [mp3_music_read_cb]-->mp3_decoder-->i2s_stream-->[codec_chip]")
    dut1.expect("[ 3 ] Setup event listener")
    dut1.expect("[3.1] Listening event from all elements of pipeline")
    dut1.expect("[ 4 ] Start audio_pipeline")
    dut1.expect("[ 5 ] Stop audio_pipeline", timeout=30)
# Allow running this test standalone, outside the CI test runner.
if __name__ == '__main__':
    example_test_play_mp3()
| 39.938776 | 114 | 0.748084 | import os
import sys
test_fw_path = os.getenv("TEST_FW_PATH")
if test_fw_path and test_fw_path not in sys.path:
sys.path.insert(0, test_fw_path)
auto_test_path = os.getenv("AUTO_TEST_PATH")
if auto_test_path and auto_test_path not in sys.path:
sys.path.insert(0, auto_test_path)
import TinyFW
import NormalProject
from NormalProject.ProjectDUT import ProDUT
from NormalProject.ProjectApp import Example
from BasicUtility.RecordAudioFile import AudioRecord
import ADFExampleTest
@NormalProject.example_test(env_tag="Example_AUDIO_PLAY", ignore=True)
@ADFExampleTest.play_test(os.path.join(os.getenv("ADF_PATH"), "examples/get-started/play_mp3/main/adf_music.mp3"),
                          os.path.join(os.getenv("ADF_PATH"), "examples/get-started/play_mp3/main/dest.wav"))
def example_test_play_mp3(env, extra_data):
    """Flash the play_mp3 example onto a DUT and check its console output.

    The play_test decorator compares the captured audio (dest.wav) with the
    reference mp3 — presumably requires ADF_PATH to be set; verify in CI.
    """
    # Build the example and acquire the device under test.
    dut1 = env.get_dut("play_mp3", "examples/get-started/play_mp3", pro_path=os.getenv("ADF_PATH"))
    dut1.start_app()
    dut1.reset()
    # Wait for the example's log lines in order; longer timeouts bracket
    # codec initialisation and the full playback.
    dut1.expect("[ 1 ] Start audio codec chip", timeout=30)
    dut1.expect("[ 2 ] Create audio pipeline, add all elements to pipeline, and subscribe pipeline event")
    dut1.expect("[2.1] Create mp3 decoder to decode mp3 file and set custom read callback")
    dut1.expect("[2.2] Create i2s stream to write data to codec chip")
    dut1.expect("[2.3] Register all elements to audio pipeline")
    dut1.expect("[2.4] Link it together [mp3_music_read_cb]-->mp3_decoder-->i2s_stream-->[codec_chip]")
    dut1.expect("[ 3 ] Setup event listener")
    dut1.expect("[3.1] Listening event from all elements of pipeline")
    dut1.expect("[ 4 ] Start audio_pipeline")
    dut1.expect("[ 5 ] Stop audio_pipeline", timeout=30)
example_test_play_mp3()
| true | true |
1c45a4aba3bdd23727ad80971a816dcd80684560 | 2,390 | py | Python | lib/util.py | ks-tec/Hydroponic | d9347f82698841d85c0a45908e8671b36c50ffce | [
"MIT"
] | 1 | 2021-05-27T13:32:45.000Z | 2021-05-27T13:32:45.000Z | lib/util.py | ks-tec/Hydroponic | d9347f82698841d85c0a45908e8671b36c50ffce | [
"MIT"
] | null | null | null | lib/util.py | ks-tec/Hydroponic | d9347f82698841d85c0a45908e8671b36c50ffce | [
"MIT"
] | null | null | null | # MicroPython utility methods.
#
# Copyright (c) 2020 ks-tec
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to dealin the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sellcopies of the Software, and to permit persons to whom the Software
# is furnished to do so, subject to the following conditions:
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE NOT LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS INTHE SOFTWARE.
def strtobool(value):
  """
  This method convert string to bool.
  Return False for values of the keywords "false" "f" "no" "n" "off" "0" or 0.
  Or, return True for values of the keywords "true" "t" "yes" "y" "on" "1" or 1.
  Keyword matching is case insensitive.

  Args:
    value : string value, or the integer 0 or 1

  Return:
    Return False for values of the keywords "false" "f" "no" "n" "off" "0" or 0.
    Or, return True for values of the keywords "true" "t" "yes" "y" "on" "1" or 1.

  Raises:
    TypeError  : The type of parameter is neither string nor 0/1.
    ValueError : The parameter value can not be interpreted as a bool value.
  """
  if type(value) is not str and value not in [0, 1]:
    raise TypeError("The type of parameter value must be string.")

  # Bug fix: the documented integer inputs 0 and 1 previously fell through to
  # value.lower() and crashed with AttributeError. Handle them first.
  if not isinstance(value, str):
    return bool(value)

  if value.lower() in ["false", "f", "no", "n", "off", "0"]:
    return False
  elif value.lower() in ["true", "t", "yes", "y", "on", "1"]:
    return True
  else:
    raise ValueError("not supported bool value.")
def conv_temperature_unit(value, unit):
  """
  Convert a temperature in degrees Celsius to the requested unit.

  Args:
    value : temperature value in degrees Celsius
    unit  : target unit, "C" or "F" (case insensitive)

  Return:
    The temperature expressed in the requested unit
    (unchanged for "C", value * 1.8 + 32 for "F").

  Raises:
    TypeError  : The type of parameter unit is not string.
    ValueError : The parameter unit is not a supported temperature unit.
  """
  # Bug fix: the original check inspected `value` instead of `unit` and was
  # inverted, so the unit argument was never actually validated.
  if not isinstance(unit, str):
    raise TypeError("the type of parameter unit must be string.")

  if unit.upper() == "C":
    pass
  elif unit.upper() == "F":
    value = value * 1.8 + 32
  else:
    raise ValueError("not supported temperature unit.")

  return value
| 35.147059 | 82 | 0.684519 |
def strtobool(value):
  """Convert a truthy/falsy string (or the integer 0/1) to a bool.

  Raises TypeError for unsupported types and ValueError for strings that
  cannot be interpreted as a bool.
  """
  if type(value) is not str and value not in [0, 1]:
    raise TypeError("The type of parameter value must be string.")
  # Bug fix: integer inputs 0/1 previously reached value.lower() and crashed
  # with AttributeError; return their bool value directly.
  if not isinstance(value, str):
    return bool(value)
  if value.lower() in ["false", "f", "no", "n", "off", "0"]:
    return False
  elif value.lower() in ["true", "t", "yes", "y", "on", "1"]:
    return True
  else:
    raise ValueError("not supported bool value.")
def conv_temperature_unit(value, unit):
  """Convert a Celsius temperature to the requested unit ("C" or "F").

  Raises TypeError if unit is not a string and ValueError for any other unit.
  """
  # Bug fix: the original check tested `value` instead of `unit` and was
  # inverted, so the unit argument was never validated.
  if not isinstance(unit, str):
    raise TypeError("the type of parameter unit must be string.")
  if unit.upper() == "C":
    pass
  elif unit.upper() == "F":
    value = value * 1.8 + 32
  else:
    raise ValueError("not supported temperature unit.")
  return value
| true | true |
1c45a56482a78277a224da1cf5efdb87161f30b9 | 626 | py | Python | manage.py | agamgn/django-Tourism | ee8fae54981d135cbd7ddaf9131eb77ea7b2fb8a | [
"MIT"
] | 9 | 2019-06-30T06:34:22.000Z | 2021-11-09T17:21:16.000Z | manage.py | agamgn/django-Tourism | ee8fae54981d135cbd7ddaf9131eb77ea7b2fb8a | [
"MIT"
] | 14 | 2019-12-22T02:04:18.000Z | 2022-03-11T23:44:38.000Z | manage.py | agamgn/django-Tourism | ee8fae54981d135cbd7ddaf9131eb77ea7b2fb8a | [
"MIT"
] | 3 | 2019-06-30T06:35:57.000Z | 2019-12-18T03:42:43.000Z | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run Django administrative tasks from the command line."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'treval.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as err:
        # Re-raise with a friendlier hint while keeping the original cause.
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from err
    execute_from_command_line(sys.argv)
# Standard Django entry point: dispatch to the management CLI when this
# script is run directly (e.g. `python manage.py runserver`).
if __name__ == '__main__':
    main()
| 28.454545 | 73 | 0.682109 |
import os
import sys
def main():
    """Run Django administrative tasks from the command line."""
    # Point Django at this project's settings unless already configured.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'treval.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Re-raise with a friendlier hint, chaining the original cause.
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)
# Standard Django entry point: dispatch to the management CLI when run directly.
if __name__ == '__main__':
    main()
| true | true |
1c45a58e90e653de1bb431003c78566d25a7d67b | 57,074 | py | Python | improver/ensemble_copula_coupling/ensemble_copula_coupling.py | VictoriaLouiseS/improver | 86470bff973e21fbd5f24e26047871ad3bc2f3db | [
"BSD-3-Clause"
] | null | null | null | improver/ensemble_copula_coupling/ensemble_copula_coupling.py | VictoriaLouiseS/improver | 86470bff973e21fbd5f24e26047871ad3bc2f3db | [
"BSD-3-Clause"
] | 3 | 2020-04-25T12:55:42.000Z | 2020-07-23T11:50:46.000Z | improver/ensemble_copula_coupling/ensemble_copula_coupling.py | Kat-90/improver | a5c31be3430df429ae38e7c16e267fcbc2af1858 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# (C) British Crown Copyright 2017-2020 Met Office.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
This module defines the plugins required for Ensemble Copula Coupling.
"""
import warnings
import iris
import numpy as np
from iris.exceptions import CoordinateNotFoundError, InvalidCubeError
from scipy import stats
from improver import BasePlugin
from improver.calibration.utilities import convert_cube_data_to_2d
from improver.ensemble_copula_coupling.utilities import (
choose_set_of_percentiles,
concatenate_2d_array_with_2d_array_endpoints,
create_cube_with_percentiles,
get_bounds_of_distribution,
insert_lower_and_upper_endpoint_to_1d_array,
restore_non_percentile_dimensions,
)
from improver.metadata.probabilistic import (
extract_diagnostic_name,
find_percentile_coordinate,
find_threshold_coordinate,
)
from improver.utilities.cube_checker import (
check_cube_coordinates,
check_for_x_and_y_axes,
)
from improver.utilities.cube_manipulation import (
MergeCubes,
enforce_coordinate_ordering,
get_dim_coord_names,
)
from improver.utilities.indexing_operations import choose
class RebadgePercentilesAsRealizations(BasePlugin):
    """
    Class to rebadge percentiles as ensemble realizations.
    This will allow the quantisation to percentiles to be completed, without
    a subsequent EnsembleReordering step to restore spatial correlations,
    if required.
    """

    @staticmethod
    def process(cube, ensemble_realization_numbers=None):
        """
        Rebadge percentiles as ensemble realizations. The ensemble
        realization numbering will depend upon the number of percentiles in
        the input cube i.e. 0, 1, 2, 3, ..., n-1, if there are n percentiles.

        Args:
            cube (iris.cube.Cube):
                Cube containing a percentile coordinate, which will be
                rebadged as ensemble realization.
            ensemble_realization_numbers (numpy.ndarray):
                An array containing the ensemble numbers required in the output
                realization coordinate. Default is None, meaning the
                realization coordinate will be numbered 0, 1, 2 ... n-1 for n
                percentiles on the input cube.

        Returns:
            iris.cube.Cube:
                The input cube with its percentile coordinate renamed to
                "realization", with units "1" and int32 points.

        Raises:
            InvalidCubeError:
                If the realization coordinate already exists on the cube.
        """
        percentile_coord_name = find_percentile_coordinate(cube).name()

        # Bug fix: validate *before* mutating the cube. Previously the
        # percentile points were renumbered first, so raising here left the
        # caller's cube half-converted.
        try:
            cube.coord("realization")
        except CoordinateNotFoundError:
            pass
        else:
            raise InvalidCubeError(
                "Cannot rebadge percentile coordinate to realization "
                "coordinate because a realization coordinate already exists."
            )

        if ensemble_realization_numbers is None:
            ensemble_realization_numbers = np.arange(
                len(cube.coord(percentile_coord_name).points), dtype=np.int32
            )

        cube.coord(percentile_coord_name).points = ensemble_realization_numbers
        cube.coord(percentile_coord_name).rename("realization")
        cube.coord("realization").units = "1"
        # Enforce int32 in case caller-supplied numbers had a wider dtype.
        cube.coord("realization").points = cube.coord("realization").points.astype(
            np.int32
        )
        return cube
class ResamplePercentiles(BasePlugin):
    """
    Class for resampling percentiles from an existing set of percentiles.
    In combination with the Ensemble Reordering plugin, this is a variant of
    Ensemble Copula Coupling.
    This class includes the ability to linearly interpolate from an
    input set of percentiles to a different output set of percentiles.
    """
    def __init__(self, ecc_bounds_warning=False):
        """
        Initialise the class.
        Args:
            ecc_bounds_warning (bool):
                If true and ECC bounds are exceeded by the percentile values,
                a warning will be generated rather than an exception.
                Default value is FALSE.
        """
        # Governs whether out-of-bounds forecast values warn (True) or
        # raise (False) when padding the distribution endpoints.
        self.ecc_bounds_warning = ecc_bounds_warning
    def _add_bounds_to_percentiles_and_forecast_at_percentiles(
        self, percentiles, forecast_at_percentiles, bounds_pairing
    ):
        """
        Padding of the lower and upper bounds of the percentiles for a
        given phenomenon, and padding of forecast values using the
        constant lower and upper bounds.
        Args:
            percentiles (numpy.ndarray):
                Array of percentiles from a Cumulative Distribution Function.
            forecast_at_percentiles (numpy.ndarray):
                Array containing the underlying forecast values at each
                percentile.
            bounds_pairing (tuple):
                Lower and upper bound to be used as the ends of the
                cumulative distribution function.
        Returns:
            (tuple): tuple containing the percentiles padded with 0 and 100
            at each end, and the forecast values padded with the lower and
            upper bounds of the distribution.
        Raises:
            ValueError: If the percentile points are outside the ECC bounds
                and self.ecc_bounds_warning is False.
            ValueError: If the percentiles are not in ascending order.
        Warns:
            Warning: If the percentile points are outside the ECC bounds
                and self.ecc_bounds_warning is True.
        """
        lower_bound, upper_bound = bounds_pairing
        # Treat the distribution bounds as the forecast values at the 0th and
        # 100th percentiles, so the CDF is fully defined for interpolation.
        percentiles = insert_lower_and_upper_endpoint_to_1d_array(percentiles, 0, 100)
        forecast_at_percentiles_with_endpoints = concatenate_2d_array_with_2d_array_endpoints(
            forecast_at_percentiles, lower_bound, upper_bound
        )
        # Any decrease along a padded row means a forecast value lies outside
        # the ECC bounds, which would make the CDF non-monotonic.
        if np.any(np.diff(forecast_at_percentiles_with_endpoints) < 0):
            out_of_bounds_vals = forecast_at_percentiles_with_endpoints[
                np.where(np.diff(forecast_at_percentiles_with_endpoints) < 0)
            ]
            msg = (
                "Forecast values exist that fall outside the expected extrema "
                "values that are defined as bounds in "
                "ensemble_copula_coupling/constants.py. "
                "Applying the extrema values as end points to the distribution "
                "would result in non-monotonically increasing values. "
                "The defined extremes are {}, whilst the following forecast "
                "values exist outside this range: {}.".format(
                    bounds_pairing, out_of_bounds_vals
                )
            )
            if self.ecc_bounds_warning:
                warn_msg = msg + (
                    " The percentile values that have "
                    "exceeded the existing bounds will be used "
                    "as new bounds."
                )
                warnings.warn(warn_msg)
                # Widen the bounds to cover the data, then re-pad so that the
                # rows become monotonic again.
                if upper_bound < forecast_at_percentiles_with_endpoints.max():
                    upper_bound = forecast_at_percentiles_with_endpoints.max()
                if lower_bound > forecast_at_percentiles_with_endpoints.min():
                    lower_bound = forecast_at_percentiles_with_endpoints.min()
                forecast_at_percentiles_with_endpoints = concatenate_2d_array_with_2d_array_endpoints(
                    forecast_at_percentiles, lower_bound, upper_bound
                )
            else:
                raise ValueError(msg)
        if np.any(np.diff(percentiles) < 0):
            msg = (
                "The percentiles must be in ascending order."
                "The input percentiles were {}".format(percentiles)
            )
            raise ValueError(msg)
        return percentiles, forecast_at_percentiles_with_endpoints
    def _interpolate_percentiles(
        self,
        forecast_at_percentiles,
        desired_percentiles,
        bounds_pairing,
        percentile_coord_name,
    ):
        """
        Interpolation of forecast for a set of percentiles from an initial
        set of percentiles to a new set of percentiles. This is constructed
        by linearly interpolating between the original set of percentiles
        to a new set of percentiles.
        Args:
            forecast_at_percentiles (iris.cube.Cube):
                Cube containing a percentile coordinate.
            desired_percentiles (numpy.ndarray):
                Array of the desired percentiles.
            bounds_pairing (tuple):
                Lower and upper bound to be used as the ends of the
                cumulative distribution function.
            percentile_coord_name (str):
                Name of required percentile coordinate.
        Returns:
            iris.cube.Cube:
                Cube containing values for the required diagnostic e.g.
                air_temperature at the required percentiles.
        """
        original_percentiles = forecast_at_percentiles.coord(
            percentile_coord_name
        ).points
        # Ensure that the percentile dimension is first, so that the
        # conversion to a 2d array produces data in the desired order.
        enforce_coordinate_ordering(forecast_at_percentiles, percentile_coord_name)
        forecast_at_reshaped_percentiles = convert_cube_data_to_2d(
            forecast_at_percentiles, coord=percentile_coord_name
        )
        (
            original_percentiles,
            forecast_at_reshaped_percentiles,
        ) = self._add_bounds_to_percentiles_and_forecast_at_percentiles(
            original_percentiles, forecast_at_reshaped_percentiles, bounds_pairing
        )
        forecast_at_interpolated_percentiles = np.empty(
            (len(desired_percentiles), forecast_at_reshaped_percentiles.shape[0]),
            dtype=np.float32,
        )
        # Interpolate each grid point (row of the 2d array) separately from
        # the original percentile set onto the desired percentile set.
        for index in range(forecast_at_reshaped_percentiles.shape[0]):
            forecast_at_interpolated_percentiles[:, index] = np.interp(
                desired_percentiles,
                original_percentiles,
                forecast_at_reshaped_percentiles[index, :],
            )
        # Reshape forecast_at_percentiles, so the percentiles dimension is
        # first, and any other dimension coordinates follow.
        forecast_at_percentiles_data = restore_non_percentile_dimensions(
            forecast_at_interpolated_percentiles,
            next(forecast_at_percentiles.slices_over(percentile_coord_name)),
            len(desired_percentiles),
        )
        # Build the output cube from a single-percentile slice of the input,
        # removing the old percentile coordinate before re-creating it.
        template_cube = next(forecast_at_percentiles.slices_over(percentile_coord_name))
        template_cube.remove_coord(percentile_coord_name)
        percentile_cube = create_cube_with_percentiles(
            desired_percentiles, template_cube, forecast_at_percentiles_data,
        )
        return percentile_cube
    def process(
        self, forecast_at_percentiles, no_of_percentiles=None, sampling="quantile"
    ):
        """
        1. Creates a list of percentiles.
        2. Accesses the lower and upper bound pair of the forecast values,
           in order to specify lower and upper bounds for the percentiles.
        3. Interpolate the percentile coordinate into an alternative
           set of percentiles using linear interpolation.
        Args:
            forecast_at_percentiles (iris.cube.Cube):
                Cube expected to contain a percentile coordinate.
            no_of_percentiles (int or None):
                Number of percentiles
                If None, the number of percentiles within the input
                forecast_at_percentiles cube is used as the
                number of percentiles.
            sampling (str):
                Type of sampling of the distribution to produce a set of
                percentiles e.g. quantile or random.
                Accepted options for sampling are:
                * Quantile: A regular set of equally-spaced percentiles aimed
                      at dividing a Cumulative Distribution Function into
                      blocks of equal probability.
                * Random: A random set of ordered percentiles.
        Returns:
            iris.cube.Cube:
                Cube with forecast values at the desired set of percentiles.
                The percentile coordinate is always the zeroth dimension.
        """
        percentile_coord = find_percentile_coordinate(forecast_at_percentiles)
        if no_of_percentiles is None:
            no_of_percentiles = len(
                forecast_at_percentiles.coord(percentile_coord).points
            )
        percentiles = choose_set_of_percentiles(no_of_percentiles, sampling=sampling)
        # ECC bounds give plausible extremes for this diagnostic, used to
        # anchor the ends of the CDF, looked up in the cube's units.
        cube_units = forecast_at_percentiles.units
        bounds_pairing = get_bounds_of_distribution(
            forecast_at_percentiles.name(), cube_units
        )
        forecast_at_percentiles = self._interpolate_percentiles(
            forecast_at_percentiles,
            percentiles,
            bounds_pairing,
            percentile_coord.name(),
        )
        return forecast_at_percentiles
class ConvertProbabilitiesToPercentiles(BasePlugin):
"""
Class for generating percentiles from probabilities.
In combination with the Ensemble Reordering plugin, this is a variant
Ensemble Copula Coupling.
This class includes the ability to interpolate between probabilities
specified using multiple thresholds in order to generate the percentiles,
see Figure 1 from Flowerdew, 2014.
Scientific Reference:
Flowerdew, J., 2014.
Calibrated ensemble reliability whilst preserving spatial structure.
Tellus Series A, Dynamic Meteorology and Oceanography, 66, 22662.
"""
    def __init__(self, ecc_bounds_warning=False):
        """
        Initialise the class.
        Args:
            ecc_bounds_warning (bool):
                If true and ECC bounds are exceeded by the percentile values,
                a warning will be generated rather than an exception.
                Default value is FALSE.
        """
        # Governs whether thresholds exceeding the ECC bounds warn (True)
        # or raise (False) when the CDF endpoints are added.
        self.ecc_bounds_warning = ecc_bounds_warning
    def _add_bounds_to_thresholds_and_probabilities(
        self, threshold_points, probabilities_for_cdf, bounds_pairing
    ):
        """
        Padding of the lower and upper bounds of the distribution for a
        given phenomenon for the threshold_points, and padding of
        probabilities of 0 and 1 to the forecast probabilities.
        Args:
            threshold_points (numpy.ndarray):
                Array of threshold values used to calculate the probabilities.
            probabilities_for_cdf (numpy.ndarray):
                Array containing the probabilities used for constructing an
                cumulative distribution function i.e. probabilities
                below threshold.
            bounds_pairing (tuple):
                Lower and upper bound to be used as the ends of the
                cumulative distribution function.
        Returns:
            (tuple): tuple containing:
                **threshold_points** (numpy.ndarray):
                    Array of threshold values padded with the lower and upper
                    bound of the distribution.
                **probabilities_for_cdf** (numpy.ndarray):
                    Array containing the probabilities padded with 0 and 1 at
                    each end.
        Raises:
            ValueError: If the thresholds exceed the ECC bounds for
                the diagnostic and self.ecc_bounds_warning is False.
        Warns:
            Warning: If the thresholds exceed the ECC bounds for
                the diagnostic and self.ecc_bounds_warning is True.
        """
        lower_bound, upper_bound = bounds_pairing
        # The bounds become the end points of the CDF (probability 0 and 1).
        threshold_points_with_endpoints = insert_lower_and_upper_endpoint_to_1d_array(
            threshold_points, lower_bound, upper_bound
        )
        probabilities_for_cdf = concatenate_2d_array_with_2d_array_endpoints(
            probabilities_for_cdf, 0, 1
        )
        # A decrease in the padded thresholds means a threshold lies outside
        # the ECC bounds, so the CDF abscissa would not be ascending.
        if np.any(np.diff(threshold_points_with_endpoints) < 0):
            msg = (
                "The calculated threshold values {} are not in ascending "
                "order as required for the cumulative distribution "
                "function (CDF). This is due to the threshold values "
                "exceeding the range given by the ECC bounds {}.".format(
                    threshold_points_with_endpoints, bounds_pairing
                )
            )
            # If ecc_bounds_warning has been set, generate a warning message
            # rather than raising an exception so that subsequent processing
            # can continue. Then apply the new bounds as necessary to
            # ensure the threshold values and endpoints are in ascending
            # order and avoid problems further along the processing chain.
            if self.ecc_bounds_warning:
                warn_msg = msg + (
                    " The threshold points that have "
                    "exceeded the existing bounds will be used "
                    "as new bounds."
                )
                warnings.warn(warn_msg)
                if upper_bound < max(threshold_points_with_endpoints):
                    upper_bound = max(threshold_points_with_endpoints)
                if lower_bound > min(threshold_points_with_endpoints):
                    lower_bound = min(threshold_points_with_endpoints)
                threshold_points_with_endpoints = insert_lower_and_upper_endpoint_to_1d_array(
                    threshold_points, lower_bound, upper_bound
                )
            else:
                raise ValueError(msg)
        return threshold_points_with_endpoints, probabilities_for_cdf
    def _probabilities_to_percentiles(
        self, forecast_probabilities, percentiles, bounds_pairing
    ):
        """
        Conversion of probabilities to percentiles through the construction
        of an cumulative distribution function. This is effectively
        constructed by linear interpolation from the probabilities associated
        with each threshold to a set of percentiles.
        Args:
            forecast_probabilities (iris.cube.Cube):
                Cube with a threshold coordinate.
            percentiles (numpy.ndarray):
                Array of percentiles, at which the corresponding values will be
                calculated.
            bounds_pairing (tuple):
                Lower and upper bound to be used as the ends of the
                cumulative distribution function.
        Returns:
            iris.cube.Cube:
                Cube containing values for the required diagnostic e.g.
                air_temperature at the required percentiles.
        Raises:
            NotImplementedError: If the threshold coordinate has an
                spp__relative_to_threshold attribute that is not either
                "above" or "below".
        Warns:
            Warning: If the probability values are not ascending, so the
                resulting cdf is not monotonically increasing.
        """
        threshold_coord = find_threshold_coordinate(forecast_probabilities)
        threshold_unit = threshold_coord.units
        threshold_points = threshold_coord.points
        # Ensure that the percentile dimension is first, so that the
        # conversion to a 2d array produces data in the desired order.
        enforce_coordinate_ordering(forecast_probabilities, threshold_coord.name())
        prob_slices = convert_cube_data_to_2d(
            forecast_probabilities, coord=threshold_coord.name()
        )
        # The requirement below for a monotonically changing probability
        # across thresholds can be thwarted by precision errors of order 1E-10,
        # as such, here we round to a precision of 9 decimal places.
        prob_slices = np.around(prob_slices, 9)
        # Invert probabilities for data thresholded above thresholds.
        relation = find_threshold_coordinate(forecast_probabilities).attributes[
            "spp__relative_to_threshold"
        ]
        if relation == "above":
            probabilities_for_cdf = 1 - prob_slices
        elif relation == "below":
            probabilities_for_cdf = prob_slices
        else:
            msg = (
                "Probabilities to percentiles only implemented for "
                "thresholds above or below a given value."
                "The relation to threshold is given as {}".format(relation)
            )
            raise NotImplementedError(msg)
        # Anchor the CDF with the ECC bounds (probability 0 and 1 endpoints).
        (
            threshold_points,
            probabilities_for_cdf,
        ) = self._add_bounds_to_thresholds_and_probabilities(
            threshold_points, probabilities_for_cdf, bounds_pairing
        )
        if np.any(np.diff(probabilities_for_cdf) < 0):
            msg = (
                "The probability values used to construct the "
                "Cumulative Distribution Function (CDF) "
                "must be ascending i.e. in order to yield "
                "a monotonically increasing CDF."
                "The probabilities are {}".format(probabilities_for_cdf)
            )
            warnings.warn(msg)
        # Convert percentiles into fractions.
        percentiles_as_fractions = np.array(
            [x / 100.0 for x in percentiles], dtype=np.float32
        )
        forecast_at_percentiles = (
            # pylint: disable=unsubscriptable-object
            np.empty(
                (len(percentiles), probabilities_for_cdf.shape[0]), dtype=np.float32
            )
        )
        # pylint: disable=unsubscriptable-object
        # Invert the CDF at each grid point: interpolate from probability
        # space back onto the threshold (physical value) axis.
        for index in range(probabilities_for_cdf.shape[0]):
            forecast_at_percentiles[:, index] = np.interp(
                percentiles_as_fractions,
                probabilities_for_cdf[index, :],
                threshold_points,
            )
        # Reshape forecast_at_percentiles, so the percentiles dimension is
        # first, and any other dimension coordinates follow.
        forecast_at_percentiles = restore_non_percentile_dimensions(
            forecast_at_percentiles,
            next(forecast_probabilities.slices_over(threshold_coord)),
            len(percentiles),
        )
        # Build the output cube: strip the threshold coordinate and the
        # "probability_of_..." prefix, then attach the percentile coordinate.
        template_cube = next(forecast_probabilities.slices_over(threshold_coord.name()))
        template_cube.rename(extract_diagnostic_name(template_cube.name()))
        template_cube.remove_coord(threshold_coord.name())
        percentile_cube = create_cube_with_percentiles(
            percentiles,
            template_cube,
            forecast_at_percentiles,
            cube_unit=threshold_unit,
        )
        return percentile_cube
def process(
    self,
    forecast_probabilities,
    no_of_percentiles=None,
    percentiles=None,
    sampling="quantile",
):
    """
    Convert a cube of probabilities relative to thresholds into a cube of
    forecast values at a set of percentiles.

    The threshold coordinate is treated as a cumulative distribution
    function closed off by bounds looked up for the diagnostic, and is
    linearly interpolated onto the requested percentiles
    (see Figure 1 from Flowerdew, 2014).

    Args:
        forecast_probabilities (iris.cube.Cube):
            Cube containing a threshold coordinate.
        no_of_percentiles (int):
            Number of percentiles. If None and percentiles is not set,
            the number of thresholds within the input cube is used as the
            number of percentiles. Mutually exclusive with percentiles.
        percentiles (list of float):
            The desired percentile values in the interval [0, 100].
            Mutually exclusive with no_of_percentiles.
        sampling (str):
            Type of sampling of the distribution to produce a set of
            percentiles, e.g. quantile (equally-spaced percentiles) or
            random (a random set of ordered percentiles).

    Returns:
        iris.cube.Cube:
            Cube with forecast values at the desired set of percentiles.
            The percentile coordinate is always the zeroth dimension.

    Raises:
        ValueError: If both no_of_percentiles and percentiles are provided.
    """
    if no_of_percentiles is not None and percentiles is not None:
        raise ValueError(
            "Cannot specify both no_of_percentiles and percentiles to "
            "{}".format(self.__class__.__name__)
        )

    threshold_coordinate = find_threshold_coordinate(forecast_probabilities)
    diagnostic_name = extract_diagnostic_name(forecast_probabilities.name())

    # Decide which percentiles to interpolate onto. A scalar percentile is
    # promoted to a single-element list before conversion to float32.
    if percentiles is None:
        if no_of_percentiles is None:
            no_of_percentiles = len(
                forecast_probabilities.coord(threshold_coordinate.name()).points
            )
        percentiles = choose_set_of_percentiles(no_of_percentiles, sampling=sampling)
    elif not isinstance(percentiles, (tuple, list)):
        percentiles = [percentiles]
    percentiles = np.array(percentiles, dtype=np.float32)

    threshold_units = forecast_probabilities.coord(threshold_coordinate.name()).units
    bounds_pairing = get_bounds_of_distribution(diagnostic_name, threshold_units)

    # If a cube still has multiple realizations, slice over these to reduce
    # the memory requirements into manageable chunks.
    try:
        realization_slices = forecast_probabilities.slices_over("realization")
    except CoordinateNotFoundError:
        realization_slices = [forecast_probabilities]

    per_realization_results = iris.cube.CubeList(
        [
            self._probabilities_to_percentiles(
                realization_slice, percentiles, bounds_pairing
            )
            for realization_slice in realization_slices
        ]
    )
    return per_realization_results.merge_cube()
class ConvertLocationAndScaleParameters:
    """
    Base class to support the plugins that compute percentiles and
    probabilities from the location and scale parameters.
    """

    def __init__(self, distribution="norm", shape_parameters=None):
        """
        Initialise the class.

        In order to construct percentiles or probabilities from the location
        or scale parameter, the distribution for the resulting output needs
        to be selected. For use with the outputs from EMOS, where it has been
        assumed that the outputs from minimising the CRPS follow a particular
        distribution, then the same distribution should be selected, as used
        for the CRPS minimisation. The conversion to percentiles and
        probabilities from the location and scale parameter relies upon
        functionality within scipy.stats.

        Args:
            distribution (str):
                Name of a distribution supported by scipy.stats.
            shape_parameters (numpy.ndarray or None):
                For use with distributions in scipy.stats (e.g. truncnorm) that
                require the specification of shape parameters to be able to
                define the shape of the distribution. For the truncated normal
                distribution, the shape parameters should be appropriate for
                the distribution constructed from the location and scale
                parameters provided, e.g. [0, np.inf] for a lower bound of
                zero.

        Raises:
            AttributeError: If the requested distribution does not exist in
                scipy.stats.
            ValueError: If the truncated normal distribution is requested
                without shape parameters.
        """
        try:
            self.distribution = getattr(stats, distribution)
        except AttributeError as err:
            msg = (
                "The distribution requested {} is not a valid distribution "
                "in scipy.stats. {}".format(distribution, err)
            )
            # Fix: chain the underlying scipy lookup failure explicitly so
            # tracebacks show the cause rather than "during handling of...".
            raise AttributeError(msg) from err
        if shape_parameters is None:
            if self.distribution.name == "truncnorm":
                raise ValueError(
                    "For the truncated normal distribution, "
                    "shape parameters must be specified."
                )
            shape_parameters = []
        self.shape_parameters = shape_parameters

    def __repr__(self):
        """Represent the configured plugin instance as a string."""
        result = (
            "<ConvertLocationAndScaleParameters: distribution: {}; "
            "shape_parameters: {}>"
        )
        return result.format(self.distribution.name, self.shape_parameters)

    def _rescale_shape_parameters(self, location_parameter, scale_parameter):
        """
        Rescale the shape parameters for the desired location and scale
        parameters for the truncated normal distribution. The shape parameters
        for any other distribution will remain unchanged.

        scipy.stats.truncnorm interprets shape parameters relative to a
        standard normal distribution, so to construct a distribution with
        specific location and scale parameters they must be rescaled using:

        .. math::
            a\\_rescaled = (a - location\\_parameter)/scale\\_parameter

            b\\_rescaled = (b - location\\_parameter)/scale\\_parameter

        Please see :data:`scipy.stats.truncnorm` for further information.

        Args:
            location_parameter (numpy.ndarray):
                Location parameter to be used to scale the shape parameters.
            scale_parameter (numpy.ndarray):
                Scale parameter to be used to scale the shape parameters.
        """
        if self.distribution.name == "truncnorm":
            # Comprehension replaces the manual append loop; note this
            # mutates self.shape_parameters in place, as before.
            self.shape_parameters = [
                (value - location_parameter) / scale_parameter
                for value in self.shape_parameters
            ]
class ConvertLocationAndScaleParametersToPercentiles(
    BasePlugin, ConvertLocationAndScaleParameters
):
    """
    Plugin focusing on generating percentiles from location and scale
    parameters. In combination with the EnsembleReordering plugin, this is
    Ensemble Copula Coupling.
    """

    def __repr__(self):
        """Represent the configured plugin instance as a string."""
        result = (
            "<ConvertLocationAndScaleParametersToPercentiles: "
            "distribution: {}; shape_parameters: {}>"
        )
        return result.format(self.distribution.name, self.shape_parameters)

    def _location_and_scale_parameters_to_percentiles(
        self, location_parameter, scale_parameter, template_cube, percentiles
    ):
        """
        Function returning percentiles based on the supplied location and
        scale parameters.

        Args:
            location_parameter (iris.cube.Cube):
                Location parameter of calibrated distribution.
            scale_parameter (iris.cube.Cube):
                Scale parameter of the calibrated distribution.
            template_cube (iris.cube.Cube):
                Template cube containing either a percentile or realization
                coordinate. All coordinates apart from the percentile or
                realization coordinate will be copied from the template cube.
                Metadata will also be copied from this cube.
            percentiles (list):
                Percentiles at which to calculate the value of the phenomenon
                at.

        Returns:
            iris.cube.Cube:
                Cube containing the values for the phenomenon at each of the
                percentiles requested.

        Raises:
            ValueError: If any of the resulting percentile values are
                nans and these nans are not caused by a scale parameter of
                zero.
        """
        # Remove any mask that may be applied to location and scale parameters
        # and replace with ones
        location_data = np.ma.filled(location_parameter.data, 1).flatten()
        scale_data = np.ma.filled(scale_parameter.data, 1).flatten()
        # Convert percentiles into fractions.
        percentiles = np.array([x / 100.0 for x in percentiles], dtype=np.float32)
        result = np.zeros((len(percentiles), location_data.shape[0]), dtype=np.float32)
        self._rescale_shape_parameters(location_data, np.sqrt(scale_data))
        percentile_method = self.distribution(
            *self.shape_parameters, loc=location_data, scale=np.sqrt(scale_data)
        )
        # Loop over percentiles, and use the distribution as the
        # "percentile_method" with the location and scale parameter to
        # calculate the values at each percentile.
        for index, percentile in enumerate(percentiles):
            percentile_list = np.repeat(percentile, len(location_data))
            result[index, :] = percentile_method.ppf(percentile_list)
            # If percent point function (PPF) returns NaNs, fill in
            # mean instead of NaN values. NaN will only be generated if the
            # variance is zero. Therefore, if the variance is zero, the mean
            # value is used for all gridpoints with a NaN.
            if np.any(scale_data == 0):
                nan_index = np.argwhere(np.isnan(result[index, :]))
                result[index, nan_index] = location_data[nan_index]
            if np.any(np.isnan(result)):
                msg = (
                    "NaNs are present within the result for the {} "
                    "percentile. Unable to calculate the percent point "
                    "function."
                )
                # Bug fix: the "{}" placeholder was previously never filled
                # in, so the raised message contained a literal "{}".
                raise ValueError(msg.format(percentile))
        # Convert percentiles back into percentages.
        percentiles = [x * 100.0 for x in percentiles]
        # Reshape forecast_at_percentiles, so the percentiles dimension is
        # first, and any other dimension coordinates follow.
        result = result.reshape((len(percentiles),) + location_parameter.data.shape)
        # Strip whichever probabilistic coordinate the template carries so a
        # fresh percentile coordinate can be attached.
        for prob_coord_name in ["realization", "percentile"]:
            if template_cube.coords(prob_coord_name, dim_coords=True):
                prob_coord = template_cube.coord(prob_coord_name)
                template_slice = next(template_cube.slices_over(prob_coord))
                template_slice.remove_coord(prob_coord)
        percentile_cube = create_cube_with_percentiles(
            percentiles, template_slice, result
        )
        # Define a mask to be reapplied later: masked wherever either input
        # parameter was masked.
        mask = np.logical_or(
            np.ma.getmaskarray(location_parameter.data),
            np.ma.getmaskarray(scale_parameter.data),
        )
        # Make the mask defined above fit the data size and then apply to the
        # percentile cube.
        mask_array = np.stack([mask] * len(percentiles))
        percentile_cube.data = np.ma.masked_where(mask_array, percentile_cube.data)
        # Remove cell methods associated with finding the ensemble mean
        percentile_cube.cell_methods = {}
        return percentile_cube

    def process(
        self,
        location_parameter,
        scale_parameter,
        template_cube,
        no_of_percentiles=None,
        percentiles=None,
    ):
        """
        Generate ensemble percentiles from the location and scale parameters.

        Args:
            location_parameter (iris.cube.Cube):
                Cube containing the location parameters.
            scale_parameter (iris.cube.Cube):
                Cube containing the scale parameters.
            template_cube (iris.cube.Cube):
                Template cube containing either a percentile or realization
                coordinate. All coordinates apart from the percentile or
                realization coordinate will be copied from the template cube.
                Metadata will also be copied from this cube.
            no_of_percentiles (int):
                Integer defining the number of percentiles that will be
                calculated from the location and scale parameters.
            percentiles (list):
                List of percentiles that will be generated from the location
                and scale parameters provided.

        Returns:
            iris.cube.Cube:
                Cube for calibrated percentiles.
                The percentile coordinate is always the zeroth dimension.

        Raises:
            ValueError: If both "no_of_percentiles" and "percentiles" are
                supplied as keyword arguments.
        """
        if no_of_percentiles and percentiles:
            msg = (
                "Please specify either the number of percentiles or "
                "provide a list of percentiles. The number of percentiles "
                "provided was {} and the list of percentiles "
                "provided was {}".format(no_of_percentiles, percentiles)
            )
            raise ValueError(msg)
        if no_of_percentiles:
            percentiles = choose_set_of_percentiles(no_of_percentiles)
        calibrated_forecast_percentiles = self._location_and_scale_parameters_to_percentiles(
            location_parameter, scale_parameter, template_cube, percentiles
        )
        return calibrated_forecast_percentiles
class ConvertLocationAndScaleParametersToProbabilities(
    BasePlugin, ConvertLocationAndScaleParameters
):
    """
    Plugin to generate probabilities relative to given thresholds from the
    location and scale parameters of a distribution.
    """

    def __repr__(self):
        """Represent the configured plugin instance as a string."""
        result = (
            "<ConvertLocationAndScaleParametersToProbabilities: "
            "distribution: {}; shape_parameters: {}>"
        )
        return result.format(self.distribution.name, self.shape_parameters)

    def _check_template_cube(self, cube):
        """
        The template cube is expected to contain a leading threshold dimension
        followed by spatial (y/x) dimensions. This check raises an error if
        this is not the case. If the cube contains the expected dimensions,
        a threshold leading order is enforced.

        Args:
            cube (iris.cube.Cube):
                A cube whose dimensions are checked to ensure they match what
                is expected.

        Raises:
            ValueError: If cube is not of the expected dimensions.
        """
        check_for_x_and_y_axes(cube, require_dim_coords=True)
        dim_coords = get_dim_coord_names(cube)
        msg = (
            "{} expects a cube with only a leading threshold dimension, "
            "followed by spatial (y/x) dimensions. "
            "Got dimensions: {}".format(self.__class__.__name__, dim_coords)
        )
        try:
            threshold_coord = find_threshold_coordinate(cube)
        except CoordinateNotFoundError as err:
            # Fix: chain the original lookup failure so the traceback
            # records why the threshold coordinate was missing.
            raise ValueError(msg) from err
        # Fewer than four dimensions means threshold plus (optionally)
        # spatial dimensions only; promote threshold to the leading position.
        if len(dim_coords) < 4:
            enforce_coordinate_ordering(cube, threshold_coord.name())
            return
        raise ValueError(msg)

    @staticmethod
    def _check_unit_compatibility(
        location_parameter, scale_parameter, probability_cube_template
    ):
        """
        The location parameter, scale parameters, and threshold values come
        from three different cubes. They should all be in the same base unit,
        with the units of the scale parameter being the squared units of the
        location parameter and threshold values. This is a sanity check to
        ensure the units are as expected, converting units of the location
        parameter and scale parameter if possible.

        Args:
            location_parameter (iris.cube.Cube):
                Cube of location parameter values.
            scale_parameter (iris.cube.Cube):
                Cube of scale parameter values.
            probability_cube_template (iris.cube.Cube):
                Cube containing threshold values.

        Raises:
            ValueError: If units of input cubes are not compatible.
        """
        threshold_units = find_threshold_coordinate(probability_cube_template).units
        try:
            location_parameter.convert_units(threshold_units)
            # The scale parameter is a variance, hence the squared units.
            scale_parameter.convert_units(threshold_units ** 2)
        except ValueError as err:
            msg = (
                "Error: {} This is likely because the mean "
                "variance and template cube threshold units are "
                "not equivalent/compatible.".format(err)
            )
            # Fix: chain the unit-conversion failure explicitly.
            raise ValueError(msg) from err

    def _location_and_scale_parameters_to_probabilities(
        self, location_parameter, scale_parameter, probability_cube_template
    ):
        """
        Function returning probabilities relative to provided thresholds based
        on the supplied location and scale parameters.

        Args:
            location_parameter (iris.cube.Cube):
                Predictor for the calibrated forecast location parameter.
            scale_parameter (iris.cube.Cube):
                Scale parameter for the calibrated forecast.
            probability_cube_template (iris.cube.Cube):
                A probability cube that has a threshold coordinate, where the
                probabilities are defined as above or below the threshold by
                the spp__relative_to_threshold attribute. This cube matches
                the desired output cube format.

        Returns:
            iris.cube.Cube:
                Cube containing the data expressed as probabilities relative to
                the provided thresholds in the way described by
                spp__relative_to_threshold.
        """
        # Define a mask to be reapplied later: masked wherever either input
        # parameter was masked.
        loc_mask = np.ma.getmaskarray(location_parameter.data)
        scale_mask = np.ma.getmaskarray(scale_parameter.data)
        mask = np.logical_or(loc_mask, scale_mask)
        # Remove any mask that may be applied to location and scale parameters
        # and replace with ones
        location_parameter.data = np.ma.filled(location_parameter.data, 1)
        scale_parameter.data = np.ma.filled(scale_parameter.data, 1)

        thresholds = find_threshold_coordinate(probability_cube_template).points
        relative_to_threshold = find_threshold_coordinate(
            probability_cube_template
        ).attributes["spp__relative_to_threshold"]

        self._rescale_shape_parameters(
            location_parameter.data.flatten(), np.sqrt(scale_parameter.data).flatten()
        )

        # Loop over thresholds, and use the specified distribution with the
        # location and scale parameter to calculate the probabilities relative
        # to each threshold.
        probabilities = np.empty_like(probability_cube_template.data)
        distribution = self.distribution(
            *self.shape_parameters,
            loc=location_parameter.data.flatten(),
            scale=np.sqrt(scale_parameter.data.flatten()),
        )

        # cdf gives P(X <= threshold); the survival function sf gives the
        # complement for "above"-threshold probabilities.
        probability_method = distribution.cdf
        if relative_to_threshold == "above":
            probability_method = distribution.sf

        for index, threshold in enumerate(thresholds):
            # pylint: disable=unsubscriptable-object
            probabilities[index, ...] = np.reshape(
                probability_method(threshold), probabilities.shape[1:]
            )

        probability_cube = probability_cube_template.copy(data=probabilities)
        # Make the mask defined above fit the data size and then apply to the
        # probability cube.
        mask_array = np.array([mask] * len(probabilities))
        probability_cube.data = np.ma.masked_where(mask_array, probability_cube.data)
        return probability_cube

    def process(self, location_parameter, scale_parameter, probability_cube_template):
        """
        Generate probabilities from the location and scale parameters of the
        distribution.

        Args:
            location_parameter (iris.cube.Cube):
                Cube containing the location parameters.
            scale_parameter (iris.cube.Cube):
                Cube containing the scale parameters.
            probability_cube_template (iris.cube.Cube):
                A probability cube that has a threshold coordinate, where the
                probabilities are defined as above or below the threshold by
                the spp__relative_to_threshold attribute. This cube matches
                the desired output cube format.

        Returns:
            iris.cube.Cube:
                A cube of diagnostic data expressed as probabilities relative
                to the thresholds found in the probability_cube_template.
        """
        self._check_template_cube(probability_cube_template)
        self._check_unit_compatibility(
            location_parameter, scale_parameter, probability_cube_template
        )
        probability_cube = self._location_and_scale_parameters_to_probabilities(
            location_parameter, scale_parameter, probability_cube_template
        )
        return probability_cube
class EnsembleReordering(BasePlugin):
    """
    Plugin for applying the reordering step of Ensemble Copula Coupling,
    in order to generate ensemble realizations with multivariate structure
    from percentiles. The percentiles are assumed to be in ascending order.

    Reference:
    Schefzik, R., Thorarinsdottir, T.L. & Gneiting, T., 2013.
    Uncertainty Quantification in Complex Simulation Models Using Ensemble
    Copula Coupling.
    Statistical Science, 28(4), pp.616-640.
    """

    @staticmethod
    def _recycle_raw_ensemble_realizations(
        post_processed_forecast_percentiles,
        raw_forecast_realizations,
        percentile_coord_name,
    ):
        """
        Function to determine whether there is a mismatch between the number
        of percentiles and the number of raw forecast realizations. If more
        percentiles are requested than ensemble realizations, then the ensemble
        realizations are recycled. This assumes that the identity of the
        ensemble realizations within the raw ensemble forecast is random, such
        that the raw ensemble realizations are exchangeable. If fewer
        percentiles are requested than ensemble realizations, then only the
        first n ensemble realizations are used.

        Args:
            post_processed_forecast_percentiles (iris.cube.Cube):
                Cube for post-processed percentiles.
                The percentiles are assumed
                to be in ascending order.
            raw_forecast_realizations (iris.cube.Cube):
                Cube containing the raw (not post-processed) forecasts.
            percentile_coord_name (str):
                Name of required percentile coordinate.

        Returns:
            iris cube.Cube:
                Cube for the raw ensemble forecast, where the raw ensemble
                realizations have either been recycled or constrained,
                depending upon the number of percentiles present
                in the post-processed forecast cube.
        """
        plen = len(
            post_processed_forecast_percentiles.coord(percentile_coord_name).points
        )
        mlen = len(raw_forecast_realizations.coord("realization").points)
        # Counts already match: the raw ensemble can be used unchanged.
        if plen == mlen:
            pass
        else:
            raw_forecast_realizations_extended = iris.cube.CubeList()
            realization_list = []
            mpoints = raw_forecast_realizations.coord("realization").points
            # Loop over the number of percentiles and finding the
            # corresponding ensemble realization number. The ensemble
            # realization numbers are recycled e.g. 1, 2, 3, 1, 2, 3, etc.
            for index in range(plen):
                realization_list.append(mpoints[index % len(mpoints)])
            # Assume that the ensemble realizations are ascending linearly.
            # NOTE(review): realization_list[0] is a numpy scalar, so
            # scalar + list broadcasts to an ndarray of plen consecutive
            # realization numbers starting at the first raw number.
            new_realization_numbers = realization_list[0] + list(range(plen))
            # Extract the realizations required in the realization_list from
            # the raw_forecast_realizations. Edit the realization number as
            # appropriate and append to a cubelist containing rebadged
            # raw ensemble realizations.
            for realization, index in zip(realization_list, new_realization_numbers):
                constr = iris.Constraint(realization=realization)
                raw_forecast_realization = raw_forecast_realizations.extract(constr)
                raw_forecast_realization.coord("realization").points = index
                raw_forecast_realizations_extended.append(raw_forecast_realization)
            raw_forecast_realizations = MergeCubes()(
                raw_forecast_realizations_extended, slice_over_realization=True
            )
        return raw_forecast_realizations

    @staticmethod
    def rank_ecc(
        post_processed_forecast_percentiles,
        raw_forecast_realizations,
        random_ordering=False,
        random_seed=None,
    ):
        """
        Function to apply Ensemble Copula Coupling. This ranks the
        post-processed forecast realizations based on a ranking determined from
        the raw forecast realizations.

        Args:
            post_processed_forecast_percentiles (iris.cube.Cube):
                Cube for post-processed percentiles. The percentiles are
                assumed to be in ascending order.
            raw_forecast_realizations (iris.cube.Cube):
                Cube containing the raw (not post-processed) forecasts.
                The probabilistic dimension is assumed to be the zeroth
                dimension.
            random_ordering (bool):
                If random_ordering is True, the post-processed forecasts are
                reordered randomly, rather than using the ordering of the
                raw ensemble.
            random_seed (int or None):
                If random_seed is an integer, the integer value is used for
                the random seed.
                If random_seed is None, no random seed is set, so the random
                values generated are not reproducible.

        Returns:
            iris.cube.Cube:
                Cube for post-processed realizations where at a particular grid
                point, the ranking of the values within the ensemble matches
                the ranking from the raw ensemble.
        """
        results = iris.cube.CubeList([])
        for rawfc, calfc in zip(
            raw_forecast_realizations.slices_over("time"),
            post_processed_forecast_percentiles.slices_over("time"),
        ):
            if random_seed is not None:
                random_seed = int(random_seed)
            # NOTE(review): random_seed is rebound to a RandomState here, so
            # with an integer seed and more than one time slice the
            # int(random_seed) call above would fail on the second
            # iteration — confirm intended multi-time behaviour.
            random_seed = np.random.RandomState(random_seed)
            random_data = random_seed.rand(*rawfc.data.shape)
            if random_ordering:
                # Returns the indices that would sort the array.
                # As these indices are from a random dataset, only an argsort
                # is used.
                ranking = np.argsort(random_data, axis=0)
            else:
                # Lexsort returns the indices sorted firstly by the
                # primary key, the raw forecast data (unless random_ordering
                # is enabled), and secondly by the secondary key, an array of
                # random data, in order to split tied values randomly.
                sorting_index = np.lexsort((random_data, rawfc.data), axis=0)
                # Returns the indices that would sort the array.
                ranking = np.argsort(sorting_index, axis=0)
            # Index the post-processed forecast data using the ranking array.
            # The following uses a custom choose function that reproduces the
            # required elements of the np.choose method without the limitation
            # of having < 32 arrays or a leading dimension < 32 in the
            # input data array. This function allows indexing of a 3d array
            # using a 3d array.
            mask = np.ma.getmask(calfc.data)
            calfc.data = choose(ranking, calfc.data)
            # Reapply the original mask (choose strips it).
            if mask is not np.ma.nomask:
                calfc.data = np.ma.MaskedArray(calfc.data, mask, dtype=np.float32)
            results.append(calfc)
        # Ensure we haven't lost any dimensional coordinates with only one
        # value in.
        results = results.merge_cube()
        results = check_cube_coordinates(post_processed_forecast_percentiles, results)
        return results

    def process(
        self,
        post_processed_forecast,
        raw_forecast,
        random_ordering=False,
        random_seed=None,
    ):
        """
        Reorder post-processed forecast using the ordering of the
        raw ensemble.

        Args:
            post_processed_forecast (iris.cube.Cube):
                The cube containing the post-processed
                forecast realizations.
            raw_forecast (iris.cube.Cube):
                The cube containing the raw (not post-processed)
                forecast.
            random_ordering (bool):
                If random_ordering is True, the post-processed forecasts are
                reordered randomly, rather than using the ordering of the
                raw ensemble.
            random_seed (int):
                If random_seed is an integer, the integer value is used for
                the random seed.
                If random_seed is None, no random seed is set, so the random
                values generated are not reproducible.

        Returns:
            iris.cube.Cube:
                Cube containing the new ensemble realizations where all points
                within the dataset have been reordered in comparison to the
                input percentiles.
        """
        percentile_coord_name = find_percentile_coordinate(
            post_processed_forecast
        ).name()

        # Both cubes need their probabilistic dimension leading before the
        # per-gridpoint ranking is computed.
        enforce_coordinate_ordering(post_processed_forecast, percentile_coord_name)
        enforce_coordinate_ordering(raw_forecast, "realization")

        raw_forecast = self._recycle_raw_ensemble_realizations(
            post_processed_forecast, raw_forecast, percentile_coord_name
        )
        post_processed_forecast_realizations = self.rank_ecc(
            post_processed_forecast,
            raw_forecast,
            random_ordering=random_ordering,
            random_seed=random_seed,
        )
        plugin = RebadgePercentilesAsRealizations()
        post_processed_forecast_realizations = plugin(
            post_processed_forecast_realizations
        )
        enforce_coordinate_ordering(post_processed_forecast_realizations, "realization")
        return post_processed_forecast_realizations
| 42.816204 | 102 | 0.644286 |
import warnings
import iris
import numpy as np
from iris.exceptions import CoordinateNotFoundError, InvalidCubeError
from scipy import stats
from improver import BasePlugin
from improver.calibration.utilities import convert_cube_data_to_2d
from improver.ensemble_copula_coupling.utilities import (
choose_set_of_percentiles,
concatenate_2d_array_with_2d_array_endpoints,
create_cube_with_percentiles,
get_bounds_of_distribution,
insert_lower_and_upper_endpoint_to_1d_array,
restore_non_percentile_dimensions,
)
from improver.metadata.probabilistic import (
extract_diagnostic_name,
find_percentile_coordinate,
find_threshold_coordinate,
)
from improver.utilities.cube_checker import (
check_cube_coordinates,
check_for_x_and_y_axes,
)
from improver.utilities.cube_manipulation import (
MergeCubes,
enforce_coordinate_ordering,
get_dim_coord_names,
)
from improver.utilities.indexing_operations import choose
class RebadgePercentilesAsRealizations(BasePlugin):
    """
    Rebadge the percentile coordinate of a cube as an ensemble realization
    coordinate, so percentile outputs can be treated as realizations
    downstream.
    """

    @staticmethod
    def process(cube, ensemble_realization_numbers=None):
        """
        Rebadge the percentile coordinate of the input cube as a realization
        coordinate with integer points.

        Args:
            cube (iris.cube.Cube):
                Cube with a percentile coordinate. Modified in place.
            ensemble_realization_numbers (numpy.ndarray or None):
                Realization numbers to assign; defaults to 0..n-1 where n is
                the number of percentiles.

        Returns:
            iris.cube.Cube:
                The input cube with the percentile coordinate rebadged as
                realization.

        Raises:
            InvalidCubeError: If the cube already has a realization
                coordinate.
        """
        percentile_coord_name = find_percentile_coordinate(cube).name()

        # Bug fix: validate before mutating. Previously the percentile
        # coordinate points were overwritten before this check raised,
        # leaving the caller's cube corrupted on the error path.
        if cube.coords("realization"):
            raise InvalidCubeError(
                "Cannot rebadge percentile coordinate to realization "
                "coordinate because a realization coordinate already exists."
            )

        if ensemble_realization_numbers is None:
            ensemble_realization_numbers = np.arange(
                len(cube.coord(percentile_coord_name).points), dtype=np.int32
            )
        cube.coord(percentile_coord_name).points = ensemble_realization_numbers

        cube.coord(percentile_coord_name).rename("realization")
        # Realization numbers are dimensionless integers.
        cube.coord("realization").units = "1"
        cube.coord("realization").points = cube.coord("realization").points.astype(
            np.int32
        )
        return cube
class ResamplePercentiles(BasePlugin):
    """
    Resample an input set of percentiles onto an alternative set of
    percentiles by linear interpolation of the cumulative distribution
    function (CDF), closed off with diagnostic-specific end points.
    """

    def __init__(self, ecc_bounds_warning=False):
        """
        Initialise the class.

        Args:
            ecc_bounds_warning (bool):
                If True, forecast values outside the expected ECC bounds
                trigger a warning and the bounds are widened to fit the data;
                if False, such values raise a ValueError.
        """
        self.ecc_bounds_warning = ecc_bounds_warning

    def _add_bounds_to_percentiles_and_forecast_at_percentiles(
        self, percentiles, forecast_at_percentiles, bounds_pairing
    ):
        """
        Pad the percentiles with the 0th and 100th percentile, and the
        forecast values with the lower and upper bounds of the distribution,
        so the CDF is closed at both ends before interpolation.

        Args:
            percentiles (numpy.ndarray):
                1d array of percentiles.
            forecast_at_percentiles (numpy.ndarray):
                2d array (site/grid point, percentile) of forecast values.
            bounds_pairing (tuple):
                (lower, upper) end points used to close the CDF.

        Returns:
            tuple: padded percentiles and padded forecast values.

        Raises:
            ValueError: If the padded forecast values decrease anywhere and
                ecc_bounds_warning is False, or if the percentiles are not in
                ascending order.
        """
        lower_bound, upper_bound = bounds_pairing
        percentiles = insert_lower_and_upper_endpoint_to_1d_array(percentiles, 0, 100)
        forecast_at_percentiles_with_endpoints = concatenate_2d_array_with_2d_array_endpoints(
            forecast_at_percentiles, lower_bound, upper_bound
        )
        # A decrease after padding means data fell outside the fixed bounds.
        if np.any(np.diff(forecast_at_percentiles_with_endpoints) < 0):
            out_of_bounds_vals = forecast_at_percentiles_with_endpoints[
                np.where(np.diff(forecast_at_percentiles_with_endpoints) < 0)
            ]
            msg = (
                "Forecast values exist that fall outside the expected extrema "
                "values that are defined as bounds in "
                "ensemble_copula_coupling/constants.py. "
                "Applying the extrema values as end points to the distribution "
                "would result in non-monotonically increasing values. "
                "The defined extremes are {}, whilst the following forecast "
                "values exist outside this range: {}.".format(
                    bounds_pairing, out_of_bounds_vals
                )
            )
            if self.ecc_bounds_warning:
                warn_msg = msg + (
                    " The percentile values that have "
                    "exceeded the existing bounds will be used "
                    "as new bounds."
                )
                warnings.warn(warn_msg)
                # Widen the bounds to cover the offending data, then re-pad.
                if upper_bound < forecast_at_percentiles_with_endpoints.max():
                    upper_bound = forecast_at_percentiles_with_endpoints.max()
                if lower_bound > forecast_at_percentiles_with_endpoints.min():
                    lower_bound = forecast_at_percentiles_with_endpoints.min()
                forecast_at_percentiles_with_endpoints = concatenate_2d_array_with_2d_array_endpoints(
                    forecast_at_percentiles, lower_bound, upper_bound
                )
            else:
                raise ValueError(msg)
        if np.any(np.diff(percentiles) < 0):
            msg = (
                "The percentiles must be in ascending order."
                "The input percentiles were {}".format(percentiles)
            )
            raise ValueError(msg)
        return percentiles, forecast_at_percentiles_with_endpoints

    def _interpolate_percentiles(
        self,
        forecast_at_percentiles,
        desired_percentiles,
        bounds_pairing,
        percentile_coord_name,
    ):
        """
        Interpolate the cube's percentile values onto the desired
        percentiles, site by site, and rebuild a percentile cube.

        Args:
            forecast_at_percentiles (iris.cube.Cube):
                Cube with a percentile coordinate.
            desired_percentiles (numpy.ndarray):
                Percentiles to interpolate onto.
            bounds_pairing (tuple):
                (lower, upper) end points used to close the CDF.
            percentile_coord_name (str):
                Name of the percentile coordinate.

        Returns:
            iris.cube.Cube:
                Cube of forecast values at the desired percentiles.
        """
        original_percentiles = forecast_at_percentiles.coord(
            percentile_coord_name
        ).points

        # Ensure that the percentile dimension is first, so that the
        # conversion to a 2d array produces data in the desired order.
        enforce_coordinate_ordering(forecast_at_percentiles, percentile_coord_name)
        forecast_at_reshaped_percentiles = convert_cube_data_to_2d(
            forecast_at_percentiles, coord=percentile_coord_name
        )

        (
            original_percentiles,
            forecast_at_reshaped_percentiles,
        ) = self._add_bounds_to_percentiles_and_forecast_at_percentiles(
            original_percentiles, forecast_at_reshaped_percentiles, bounds_pairing
        )

        # Interpolate each site's padded CDF onto the desired percentiles.
        forecast_at_interpolated_percentiles = np.empty(
            (len(desired_percentiles), forecast_at_reshaped_percentiles.shape[0]),
            dtype=np.float32,
        )
        for index in range(forecast_at_reshaped_percentiles.shape[0]):
            forecast_at_interpolated_percentiles[:, index] = np.interp(
                desired_percentiles,
                original_percentiles,
                forecast_at_reshaped_percentiles[index, :],
            )

        # Reshape forecast_at_percentiles, so the percentiles dimension is
        # first, and any other dimension coordinates follow.
        forecast_at_percentiles_data = restore_non_percentile_dimensions(
            forecast_at_interpolated_percentiles,
            next(forecast_at_percentiles.slices_over(percentile_coord_name)),
            len(desired_percentiles),
        )

        template_cube = next(forecast_at_percentiles.slices_over(percentile_coord_name))
        template_cube.remove_coord(percentile_coord_name)
        percentile_cube = create_cube_with_percentiles(
            desired_percentiles, template_cube, forecast_at_percentiles_data,
        )
        return percentile_cube

    def process(
        self, forecast_at_percentiles, no_of_percentiles=None, sampling="quantile"
    ):
        """
        Resample the input percentile cube onto a new set of percentiles.

        Args:
            forecast_at_percentiles (iris.cube.Cube):
                Cube with a percentile coordinate.
            no_of_percentiles (int or None):
                Number of output percentiles; defaults to the number of
                percentiles on the input cube.
            sampling (str):
                Sampling scheme passed to choose_set_of_percentiles,
                e.g. "quantile" or "random".

        Returns:
            iris.cube.Cube:
                Cube of forecast values at the resampled percentiles.
        """
        percentile_coord = find_percentile_coordinate(forecast_at_percentiles)

        if no_of_percentiles is None:
            no_of_percentiles = len(
                forecast_at_percentiles.coord(percentile_coord).points
            )
        percentiles = choose_set_of_percentiles(no_of_percentiles, sampling=sampling)

        cube_units = forecast_at_percentiles.units
        bounds_pairing = get_bounds_of_distribution(
            forecast_at_percentiles.name(), cube_units
        )

        forecast_at_percentiles = self._interpolate_percentiles(
            forecast_at_percentiles,
            percentiles,
            bounds_pairing,
            percentile_coord.name(),
        )
        return forecast_at_percentiles
class ConvertProbabilitiesToPercentiles(BasePlugin):
    """Convert probabilities relative to thresholds into values at percentiles.

    The threshold probabilities are treated as samples of a cumulative
    distribution function (CDF); values at the requested percentiles are
    obtained by linear interpolation along the threshold axis.
    """

    def __init__(self, ecc_bounds_warning=False):
        """Initialise the plugin.

        Args:
            ecc_bounds_warning (bool):
                If True, thresholds that fall outside the ECC bounds raise a
                warning and the bounds are widened to accommodate them,
                instead of raising a ValueError.
        """
        self.ecc_bounds_warning = ecc_bounds_warning

    def _add_bounds_to_thresholds_and_probabilities(
        self, threshold_points, probabilities_for_cdf, bounds_pairing
    ):
        """Pad the thresholds with the ECC lower/upper bounds and pad the
        probabilities with the matching CDF endpoints (0 and 1).

        Args:
            threshold_points (numpy.ndarray): 1d array of threshold values.
            probabilities_for_cdf (numpy.ndarray): 2d array of probabilities
                forming the CDF, thresholds as the trailing dimension.
            bounds_pairing (tuple): (lower, upper) bounds for the diagnostic.

        Returns:
            tuple: (threshold_points_with_endpoints, probabilities_for_cdf).

        Raises:
            ValueError: if the padded thresholds are not ascending and
                self.ecc_bounds_warning is False.
        """
        lower_bound, upper_bound = bounds_pairing
        threshold_points_with_endpoints = insert_lower_and_upper_endpoint_to_1d_array(
            threshold_points, lower_bound, upper_bound
        )
        # A CDF runs from 0 at the lower bound to 1 at the upper bound.
        probabilities_for_cdf = concatenate_2d_array_with_2d_array_endpoints(
            probabilities_for_cdf, 0, 1
        )
        if np.any(np.diff(threshold_points_with_endpoints) < 0):
            msg = (
                "The calculated threshold values {} are not in ascending "
                "order as required for the cumulative distribution "
                "function (CDF). This is due to the threshold values "
                "exceeding the range given by the ECC bounds {}.".format(
                    threshold_points_with_endpoints, bounds_pairing
                )
            )
            # If ecc_bounds_warning has been set, generate a warning message
            # rather than raising an exception so that subsequent processing
            # can continue. Then apply the new bounds as necessary to
            # ensure the threshold values and endpoints are in ascending
            # order and avoid problems further along the processing chain.
            if self.ecc_bounds_warning:
                warn_msg = msg + (
                    " The threshold points that have "
                    "exceeded the existing bounds will be used "
                    "as new bounds."
                )
                warnings.warn(warn_msg)
                if upper_bound < max(threshold_points_with_endpoints):
                    upper_bound = max(threshold_points_with_endpoints)
                if lower_bound > min(threshold_points_with_endpoints):
                    lower_bound = min(threshold_points_with_endpoints)
                threshold_points_with_endpoints = insert_lower_and_upper_endpoint_to_1d_array(
                    threshold_points, lower_bound, upper_bound
                )
            else:
                raise ValueError(msg)
        return threshold_points_with_endpoints, probabilities_for_cdf

    def _probabilities_to_percentiles(
        self, forecast_probabilities, percentiles, bounds_pairing
    ):
        """Interpolate the CDF implied by the threshold probabilities onto
        the requested percentiles.

        Args:
            forecast_probabilities (iris.cube.Cube): Probability cube with a
                threshold coordinate.
            percentiles (numpy.ndarray): Percentiles (0-100) to extract.
            bounds_pairing (tuple): ECC (lower, upper) bounds.

        Returns:
            iris.cube.Cube: Cube with a leading percentile coordinate.

        Raises:
            NotImplementedError: if the threshold relation is neither
                "above" nor "below".
        """
        threshold_coord = find_threshold_coordinate(forecast_probabilities)
        threshold_unit = threshold_coord.units
        threshold_points = threshold_coord.points
        # Ensure that the percentile dimension is first, so that the
        # conversion to a 2d array produces data in the desired order.
        enforce_coordinate_ordering(forecast_probabilities, threshold_coord.name())
        prob_slices = convert_cube_data_to_2d(
            forecast_probabilities, coord=threshold_coord.name()
        )
        # The requirement below for a monotonically changing probability
        # across thresholds can be thwarted by precision errors of order 1E-10,
        # as such, here we round to a precision of 9 decimal places.
        prob_slices = np.around(prob_slices, 9)
        # Invert probabilities for data thresholded above thresholds.
        relation = find_threshold_coordinate(forecast_probabilities).attributes[
            "spp__relative_to_threshold"
        ]
        if relation == "above":
            probabilities_for_cdf = 1 - prob_slices
        elif relation == "below":
            probabilities_for_cdf = prob_slices
        else:
            msg = (
                "Probabilities to percentiles only implemented for "
                "thresholds above or below a given value."
                "The relation to threshold is given as {}".format(relation)
            )
            raise NotImplementedError(msg)
        (
            threshold_points,
            probabilities_for_cdf,
        ) = self._add_bounds_to_thresholds_and_probabilities(
            threshold_points, probabilities_for_cdf, bounds_pairing
        )
        # Non-monotonic probabilities only trigger a warning here so that
        # processing can continue; np.interp tolerates the input.
        if np.any(np.diff(probabilities_for_cdf) < 0):
            msg = (
                "The probability values used to construct the "
                "Cumulative Distribution Function (CDF) "
                "must be ascending i.e. in order to yield "
                "a monotonically increasing CDF."
                "The probabilities are {}".format(probabilities_for_cdf)
            )
            warnings.warn(msg)
        # Convert percentiles into fractions.
        percentiles_as_fractions = np.array(
            [x / 100.0 for x in percentiles], dtype=np.float32
        )
        forecast_at_percentiles = (
            # pylint: disable=unsubscriptable-object
            np.empty(
                (len(percentiles), probabilities_for_cdf.shape[0]), dtype=np.float32
            )
        )
        # pylint: disable=unsubscriptable-object
        for index in range(probabilities_for_cdf.shape[0]):
            forecast_at_percentiles[:, index] = np.interp(
                percentiles_as_fractions,
                probabilities_for_cdf[index, :],
                threshold_points,
            )
        # Reshape forecast_at_percentiles, so the percentiles dimension is
        # first, and any other dimension coordinates follow.
        forecast_at_percentiles = restore_non_percentile_dimensions(
            forecast_at_percentiles,
            next(forecast_probabilities.slices_over(threshold_coord)),
            len(percentiles),
        )
        template_cube = next(forecast_probabilities.slices_over(threshold_coord.name()))
        template_cube.rename(extract_diagnostic_name(template_cube.name()))
        template_cube.remove_coord(threshold_coord.name())
        percentile_cube = create_cube_with_percentiles(
            percentiles,
            template_cube,
            forecast_at_percentiles,
            cube_unit=threshold_unit,
        )
        return percentile_cube

    def process(
        self,
        forecast_probabilities,
        no_of_percentiles=None,
        percentiles=None,
        sampling="quantile",
    ):
        """Convert a probability cube into a percentile cube.

        Args:
            forecast_probabilities (iris.cube.Cube): Probability cube with a
                threshold coordinate.
            no_of_percentiles (int or None): Number of percentiles to
                generate. Mutually exclusive with *percentiles*. Defaults to
                the number of thresholds on the input cube.
            percentiles (number, tuple, list or None): Explicit percentile
                values to extract.
            sampling (str): Sampling scheme for choose_set_of_percentiles.

        Returns:
            iris.cube.Cube: Forecast values at the requested percentiles.

        Raises:
            ValueError: if both no_of_percentiles and percentiles are given.
        """
        if no_of_percentiles is not None and percentiles is not None:
            raise ValueError(
                "Cannot specify both no_of_percentiles and percentiles to "
                "{}".format(self.__class__.__name__)
            )
        threshold_coord = find_threshold_coordinate(forecast_probabilities)
        phenom_name = extract_diagnostic_name(forecast_probabilities.name())
        if no_of_percentiles is None:
            no_of_percentiles = len(
                forecast_probabilities.coord(threshold_coord.name()).points
            )
        if percentiles is None:
            percentiles = choose_set_of_percentiles(
                no_of_percentiles, sampling=sampling
            )
        elif not isinstance(percentiles, (tuple, list)):
            percentiles = [percentiles]
        percentiles = np.array(percentiles, dtype=np.float32)
        cube_units = forecast_probabilities.coord(threshold_coord.name()).units
        bounds_pairing = get_bounds_of_distribution(phenom_name, cube_units)
        # If a cube still has multiple realizations, slice over these to reduce
        # the memory requirements into manageable chunks.
        try:
            slices_over_realization = forecast_probabilities.slices_over("realization")
        except CoordinateNotFoundError:
            slices_over_realization = [forecast_probabilities]
        cubelist = iris.cube.CubeList([])
        for cube_realization in slices_over_realization:
            cubelist.append(
                self._probabilities_to_percentiles(
                    cube_realization, percentiles, bounds_pairing
                )
            )
        forecast_at_percentiles = cubelist.merge_cube()
        return forecast_at_percentiles
class ConvertLocationAndScaleParameters:
    """Mixin holding the scipy.stats distribution and any shape parameters
    used when converting location/scale parameters into percentiles or
    probabilities."""

    def __init__(self, distribution="norm", shape_parameters=None):
        """Look up the requested distribution in scipy.stats.

        Args:
            distribution (str): Name of a scipy.stats distribution.
            shape_parameters (list or None): Shape parameters required by the
                distribution. Mandatory for "truncnorm" (truncation points).

        Raises:
            AttributeError: if the distribution is not in scipy.stats.
            ValueError: if truncnorm is requested without shape parameters.
        """
        try:
            self.distribution = getattr(stats, distribution)
        except AttributeError as err:
            msg = (
                "The distribution requested {} is not a valid distribution "
                "in scipy.stats. {}".format(distribution, err)
            )
            # Chain the original lookup error so the cause is preserved in
            # the traceback (the original code discarded it).
            raise AttributeError(msg) from err
        if shape_parameters is None:
            if self.distribution.name == "truncnorm":
                raise ValueError(
                    "For the truncated normal distribution, "
                    "shape parameters must be specified."
                )
            shape_parameters = []
        self.shape_parameters = shape_parameters

    def __repr__(self):
        """Represent the configured instance as a string."""
        result = (
            "<ConvertLocationAndScaleParameters: distribution: {}; "
            "shape_parameters: {}>"
        )
        return result.format(self.distribution.name, self.shape_parameters)

    def _rescale_shape_parameters(self, location_parameter, scale_parameter):
        """For truncnorm, rescale the shape parameters (absolute truncation
        points) into standard-deviation units relative to the location
        parameter, as scipy.stats.truncnorm expects. Other distributions are
        left untouched.

        Args:
            location_parameter (numpy.ndarray or float): Location parameter.
            scale_parameter (numpy.ndarray or float): Scale parameter
                (standard-deviation units).
        """
        if self.distribution.name == "truncnorm":
            self.shape_parameters = [
                (value - location_parameter) / scale_parameter
                for value in self.shape_parameters
            ]
class ConvertLocationAndScaleParametersToPercentiles(
    BasePlugin, ConvertLocationAndScaleParameters
):
    """Generate forecast values at percentiles from the location and scale
    parameters of a statistical distribution."""

    def __repr__(self):
        """Represent the configured plugin instance as a string."""
        result = (
            "<ConvertLocationAndScaleParametersToPercentiles: "
            "distribution: {}; shape_parameters: {}>"
        )
        return result.format(self.distribution.name, self.shape_parameters)

    def _location_and_scale_parameters_to_percentiles(
        self, location_parameter, scale_parameter, template_cube, percentiles
    ):
        """Evaluate the distribution's percent point function (inverse CDF)
        at each requested percentile.

        Args:
            location_parameter (iris.cube.Cube): Location parameter (mean).
            scale_parameter (iris.cube.Cube): Scale parameter; square-rooted
                before use, so this is treated as a variance.
            template_cube (iris.cube.Cube): Cube providing metadata for the
                output; any leading realization/percentile dimension is
                stripped.
            percentiles (list): Percentiles (0-100) to calculate.

        Returns:
            iris.cube.Cube: Cube with a leading percentile coordinate and a
            mask combining the location and scale parameter masks.

        Raises:
            ValueError: if NaNs remain after zero-variance substitution.
        """
        # Remove any mask that may be applied to location and scale parameters
        # and replace with ones, so the PPF evaluation is well defined; the
        # combined mask is reapplied to the output below.
        location_data = np.ma.filled(location_parameter.data, 1).flatten()
        scale_data = np.ma.filled(scale_parameter.data, 1).flatten()
        # Convert percentiles into fractions.
        percentiles = np.array([x / 100.0 for x in percentiles], dtype=np.float32)
        result = np.zeros((len(percentiles), location_data.shape[0]), dtype=np.float32)
        self._rescale_shape_parameters(location_data, np.sqrt(scale_data))
        percentile_method = self.distribution(
            *self.shape_parameters, loc=location_data, scale=np.sqrt(scale_data)
        )
        # Loop over percentiles, and use the distribution as the
        # "percentile_method" with the location and scale parameter to
        # calculate the values at each percentile.
        for index, percentile in enumerate(percentiles):
            percentile_list = np.repeat(percentile, len(location_data))
            result[index, :] = percentile_method.ppf(percentile_list)
            # If percent point function (PPF) returns NaNs, fill in
            # mean instead of NaN values. NaN will only be generated if the
            # variance is zero. Therefore, if the variance is zero, the mean
            # value is used for all gridpoints with a NaN.
            if np.any(scale_data == 0):
                nan_index = np.argwhere(np.isnan(result[index, :]))
                result[index, nan_index] = location_data[nan_index]
        if np.any(np.isnan(result)):
            # Bug fix: the original message left the "{}" placeholder
            # unfilled because .format() was never called.
            msg = (
                "NaNs are present within the result for the {} "
                "percentile(s). Unable to calculate the percent point "
                "function.".format([value * 100 for value in percentiles])
            )
            raise ValueError(msg)
        # Convert percentiles back into percentages.
        percentiles = [x * 100.0 for x in percentiles]
        # Reshape the result, so the percentiles dimension is first, and any
        # other dimension coordinates follow.
        result = result.reshape((len(percentiles),) + location_parameter.data.shape)
        # Strip any probabilistic (realization/percentile) dimension from the
        # template before attaching the new percentile coordinate. Bug fix:
        # template_slice was previously unbound when neither coordinate was
        # present, causing a NameError; fall back to the template itself.
        template_slice = template_cube
        for prob_coord_name in ["realization", "percentile"]:
            if template_cube.coords(prob_coord_name, dim_coords=True):
                prob_coord = template_cube.coord(prob_coord_name)
                template_slice = next(template_cube.slices_over(prob_coord))
                template_slice.remove_coord(prob_coord)
        percentile_cube = create_cube_with_percentiles(
            percentiles, template_slice, result
        )
        # Define a mask to be reapplied later
        mask = np.logical_or(
            np.ma.getmaskarray(location_parameter.data),
            np.ma.getmaskarray(scale_parameter.data),
        )
        # Make the mask defined above fit the data size and then apply to the
        # percentile cube.
        mask_array = np.stack([mask] * len(percentiles))
        percentile_cube.data = np.ma.masked_where(mask_array, percentile_cube.data)
        # Remove cell methods associated with finding the ensemble mean
        percentile_cube.cell_methods = {}
        return percentile_cube

    def process(
        self,
        location_parameter,
        scale_parameter,
        template_cube,
        no_of_percentiles=None,
        percentiles=None,
    ):
        """Generate percentiles from location and scale parameters.

        Args:
            location_parameter (iris.cube.Cube): Location parameter.
            scale_parameter (iris.cube.Cube): Scale parameter (variance).
            template_cube (iris.cube.Cube): Template for the output metadata.
            no_of_percentiles (int or None): Number of equally-spaced
                percentiles to generate. Mutually exclusive with percentiles.
            percentiles (list or None): Explicit percentiles to calculate.

        Returns:
            iris.cube.Cube: Forecast values at the requested percentiles.

        Raises:
            ValueError: if both no_of_percentiles and percentiles are given.
        """
        if no_of_percentiles and percentiles:
            msg = (
                "Please specify either the number of percentiles or "
                "provide a list of percentiles. The number of percentiles "
                "provided was {} and the list of percentiles "
                "provided was {}".format(no_of_percentiles, percentiles)
            )
            raise ValueError(msg)
        if no_of_percentiles:
            percentiles = choose_set_of_percentiles(no_of_percentiles)
        calibrated_forecast_percentiles = self._location_and_scale_parameters_to_percentiles(
            location_parameter, scale_parameter, template_cube, percentiles
        )
        return calibrated_forecast_percentiles
class ConvertLocationAndScaleParametersToProbabilities(
    BasePlugin, ConvertLocationAndScaleParameters
):
    """Generate probabilities relative to thresholds from the location and
    scale parameters of a statistical distribution."""

    def __repr__(self):
        """Represent the configured plugin instance as a string."""
        result = (
            "<ConvertLocationAndScaleParametersToProbabilities: "
            "distribution: {}; shape_parameters: {}>"
        )
        return result.format(self.distribution.name, self.shape_parameters)

    def _check_template_cube(self, cube):
        """Check the template has only a leading threshold dimension followed
        by spatial (y/x) dimensions, and reorder it threshold-first.

        Args:
            cube (iris.cube.Cube): Probability cube template.

        Raises:
            ValueError: if no threshold coordinate is found or the cube has
                four or more dimensions.
        """
        check_for_x_and_y_axes(cube, require_dim_coords=True)
        dim_coords = get_dim_coord_names(cube)
        msg = (
            "{} expects a cube with only a leading threshold dimension, "
            "followed by spatial (y/x) dimensions. "
            "Got dimensions: {}".format(self.__class__.__name__, dim_coords)
        )
        try:
            threshold_coord = find_threshold_coordinate(cube)
        except CoordinateNotFoundError:
            raise ValueError(msg)
        if len(dim_coords) < 4:
            enforce_coordinate_ordering(cube, threshold_coord.name())
            return
        raise ValueError(msg)

    @staticmethod
    def _check_unit_compatibility(
        location_parameter, scale_parameter, probability_cube_template
    ):
        """Convert the parameter cubes to the threshold units in place.

        The scale parameter is converted to the squared threshold units,
        i.e. it is treated as a variance.

        Raises:
            ValueError: if the units are not convertible.
        """
        threshold_units = find_threshold_coordinate(probability_cube_template).units
        try:
            location_parameter.convert_units(threshold_units)
            scale_parameter.convert_units(threshold_units ** 2)
        except ValueError as err:
            msg = (
                "Error: {} This is likely because the mean "
                "variance and template cube threshold units are "
                "not equivalent/compatible.".format(err)
            )
            raise ValueError(msg)

    def _location_and_scale_parameters_to_probabilities(
        self, location_parameter, scale_parameter, probability_cube_template
    ):
        """Evaluate the distribution's CDF (or survival function for
        "above"-threshold diagnostics) at each threshold.

        Args:
            location_parameter (iris.cube.Cube): Location parameter.
            scale_parameter (iris.cube.Cube): Scale parameter (variance;
                square-rooted before use).
            probability_cube_template (iris.cube.Cube): Template defining the
                thresholds and output metadata.

        Returns:
            iris.cube.Cube: Probabilities with the combined input mask
            reapplied.
        """
        # Define a mask to be reapplied later
        loc_mask = np.ma.getmaskarray(location_parameter.data)
        scale_mask = np.ma.getmaskarray(scale_parameter.data)
        mask = np.logical_or(loc_mask, scale_mask)
        # Remove any mask that may be applied to location and scale parameters
        # and replace with ones
        location_parameter.data = np.ma.filled(location_parameter.data, 1)
        scale_parameter.data = np.ma.filled(scale_parameter.data, 1)
        thresholds = find_threshold_coordinate(probability_cube_template).points
        relative_to_threshold = find_threshold_coordinate(
            probability_cube_template
        ).attributes["spp__relative_to_threshold"]
        self._rescale_shape_parameters(
            location_parameter.data.flatten(), np.sqrt(scale_parameter.data).flatten()
        )
        # Loop over thresholds, and use the specified distribution with the
        # location and scale parameter to calculate the probabilities relative
        # to each threshold.
        probabilities = np.empty_like(probability_cube_template.data)
        distribution = self.distribution(
            *self.shape_parameters,
            loc=location_parameter.data.flatten(),
            scale=np.sqrt(scale_parameter.data.flatten()),
        )
        # The survival function (1 - CDF) gives probabilities of exceedance.
        probability_method = distribution.cdf
        if relative_to_threshold == "above":
            probability_method = distribution.sf
        for index, threshold in enumerate(thresholds):
            # pylint: disable=unsubscriptable-object
            probabilities[index, ...] = np.reshape(
                probability_method(threshold), probabilities.shape[1:]
            )
        probability_cube = probability_cube_template.copy(data=probabilities)
        # Make the mask defined above fit the data size and then apply to the
        # probability cube.
        mask_array = np.array([mask] * len(probabilities))
        probability_cube.data = np.ma.masked_where(mask_array, probability_cube.data)
        return probability_cube

    def process(self, location_parameter, scale_parameter, probability_cube_template):
        """Generate probabilities from the location and scale parameters.

        Args:
            location_parameter (iris.cube.Cube): Location parameter.
            scale_parameter (iris.cube.Cube): Scale parameter (variance).
            probability_cube_template (iris.cube.Cube): Template defining the
                thresholds and output metadata.

        Returns:
            iris.cube.Cube: Probabilities relative to the thresholds.
        """
        self._check_template_cube(probability_cube_template)
        self._check_unit_compatibility(
            location_parameter, scale_parameter, probability_cube_template
        )
        probability_cube = self._location_and_scale_parameters_to_probabilities(
            location_parameter, scale_parameter, probability_cube_template
        )
        return probability_cube
class EnsembleReordering(BasePlugin):
    """Ensemble Copula Coupling: reorder post-processed percentile values
    using the rank structure of the raw ensemble, producing realizations."""

    @staticmethod
    def _recycle_raw_ensemble_realizations(
        post_processed_forecast_percentiles,
        raw_forecast_realizations,
        percentile_coord_name,
    ):
        """Ensure the raw ensemble has as many realizations as there are
        percentiles, recycling (repeating) raw members where necessary.

        Args:
            post_processed_forecast_percentiles (iris.cube.Cube): Cube with a
                percentile coordinate.
            raw_forecast_realizations (iris.cube.Cube): Raw ensemble cube.
            percentile_coord_name (str): Name of the percentile coordinate.

        Returns:
            iris.cube.Cube: Raw ensemble with len(percentiles) realizations.
        """
        plen = len(
            post_processed_forecast_percentiles.coord(percentile_coord_name).points
        )
        mlen = len(raw_forecast_realizations.coord("realization").points)
        if plen == mlen:
            pass
        else:
            raw_forecast_realizations_extended = iris.cube.CubeList()
            realization_list = []
            mpoints = raw_forecast_realizations.coord("realization").points
            # Loop over the number of percentiles and finding the
            # corresponding ensemble realization number. The ensemble
            # realization numbers are recycled e.g. 1, 2, 3, 1, 2, 3, etc.
            for index in range(plen):
                realization_list.append(mpoints[index % len(mpoints)])
            # Assume that the ensemble realizations are ascending linearly.
            # NOTE(review): numpy scalar + list broadcasts to an ndarray of
            # plen consecutive numbers starting from the first realization.
            new_realization_numbers = realization_list[0] + list(range(plen))
            # Extract the realizations required in the realization_list from
            # the raw_forecast_realizations. Edit the realization number as
            # appropriate and append to a cubelist containing rebadged
            # raw ensemble realizations.
            for realization, index in zip(realization_list, new_realization_numbers):
                constr = iris.Constraint(realization=realization)
                raw_forecast_realization = raw_forecast_realizations.extract(constr)
                raw_forecast_realization.coord("realization").points = index
                raw_forecast_realizations_extended.append(raw_forecast_realization)
            raw_forecast_realizations = MergeCubes()(
                raw_forecast_realizations_extended, slice_over_realization=True
            )
        return raw_forecast_realizations

    @staticmethod
    def rank_ecc(
        post_processed_forecast_percentiles,
        raw_forecast_realizations,
        random_ordering=False,
        random_seed=None,
    ):
        """Reorder the sorted percentile values so each realization follows
        the rank of the corresponding raw ensemble member at each point.

        Args:
            post_processed_forecast_percentiles (iris.cube.Cube): Percentile
                cube with the percentile dimension leading.
            raw_forecast_realizations (iris.cube.Cube): Raw ensemble with the
                realization dimension leading.
            random_ordering (bool): If True, shuffle randomly instead of
                following the raw ensemble ranks.
            random_seed (int or None): Seed for reproducible tie-breaking.

        Returns:
            iris.cube.Cube: Reordered post-processed forecast.
        """
        results = iris.cube.CubeList([])
        for rawfc, calfc in zip(
            raw_forecast_realizations.slices_over("time"),
            post_processed_forecast_percentiles.slices_over("time"),
        ):
            if random_seed is not None:
                random_seed = int(random_seed)
            # After the first iteration random_seed is a RandomState, so
            # later time slices continue the same random stream.
            random_seed = np.random.RandomState(random_seed)
            random_data = random_seed.rand(*rawfc.data.shape)
            if random_ordering:
                # Returns the indices that would sort the array.
                # As these indices are from a random dataset, only an argsort
                # is used.
                ranking = np.argsort(random_data, axis=0)
            else:
                # Lexsort returns the indices sorted firstly by the
                # primary key, the raw forecast data (unless random_ordering
                # is enabled), and secondly by the secondary key, an array of
                # random data, in order to split tied values randomly.
                sorting_index = np.lexsort((random_data, rawfc.data), axis=0)
                # Returns the indices that would sort the array.
                ranking = np.argsort(sorting_index, axis=0)
            # Index the post-processed forecast data using the ranking array.
            # The following uses a custom choose function that reproduces the
            # required elements of the np.choose method without the limitation
            # of having < 32 arrays or a leading dimension < 32 in the
            # input data array. This function allows indexing of a 3d array
            # using a 3d array.
            mask = np.ma.getmask(calfc.data)
            calfc.data = choose(ranking, calfc.data)
            if mask is not np.ma.nomask:
                calfc.data = np.ma.MaskedArray(calfc.data, mask, dtype=np.float32)
            results.append(calfc)
        # Ensure we haven't lost any dimensional coordinates with only one
        # point when merging the per-time slices back together.
        results = results.merge_cube()
        results = check_cube_coordinates(post_processed_forecast_percentiles, results)
        return results

    def process(
        self,
        post_processed_forecast,
        raw_forecast,
        random_ordering=False,
        random_seed=None,
    ):
        """Reorder the post-processed percentile forecast using the raw
        ensemble and rebadge the percentiles as realizations.

        Args:
            post_processed_forecast (iris.cube.Cube): Percentile cube.
            raw_forecast (iris.cube.Cube): Raw ensemble cube.
            random_ordering (bool): Passed through to rank_ecc.
            random_seed (int or None): Passed through to rank_ecc.

        Returns:
            iris.cube.Cube: Realization cube, realization dimension leading.
        """
        percentile_coord_name = find_percentile_coordinate(
            post_processed_forecast
        ).name()
        enforce_coordinate_ordering(post_processed_forecast, percentile_coord_name)
        enforce_coordinate_ordering(raw_forecast, "realization")
        raw_forecast = self._recycle_raw_ensemble_realizations(
            post_processed_forecast, raw_forecast, percentile_coord_name
        )
        post_processed_forecast_realizations = self.rank_ecc(
            post_processed_forecast,
            raw_forecast,
            random_ordering=random_ordering,
            random_seed=random_seed,
        )
        plugin = RebadgePercentilesAsRealizations()
        post_processed_forecast_realizations = plugin(
            post_processed_forecast_realizations
        )
        enforce_coordinate_ordering(post_processed_forecast_realizations, "realization")
        return post_processed_forecast_realizations
| true | true |
1c45a614492dc6ca48e3d950527282f5ff9aa377 | 784 | py | Python | examples/dagster_examples/intro_tutorial/config.py | bambielli-flex/dagster | 30b75ba7c62fc536bc827f177c1dc6ba20f5ae20 | [
"Apache-2.0"
] | null | null | null | examples/dagster_examples/intro_tutorial/config.py | bambielli-flex/dagster | 30b75ba7c62fc536bc827f177c1dc6ba20f5ae20 | [
"Apache-2.0"
] | null | null | null | examples/dagster_examples/intro_tutorial/config.py | bambielli-flex/dagster | 30b75ba7c62fc536bc827f177c1dc6ba20f5ae20 | [
"Apache-2.0"
] | null | null | null | # encoding: utf-8
# py27 compat
from dagster import Field, PipelineDefinition, execute_pipeline, solid, types
@solid(config_field=Field(types.String, is_optional=True, default_value='en-us'))
def configurable_hello(context):
    """Return a greeting in the language selected by the solid config."""
    language = context.solid_config
    if language.startswith('haw'):
        return 'Aloha honua!'
    if language.startswith('cn'):
        return '你好, 世界!'
    return 'Hello, world!'
def define_configurable_hello_pipeline():
    """Build the single-solid demo pipeline."""
    pipeline_solids = [configurable_hello]
    return PipelineDefinition(name='configurable_hello_pipeline', solids=pipeline_solids)
def test_intro_tutorial_part_four():
    """Smoke-test the pipeline with the Chinese greeting config."""
    environment = {'solids': {'configurable_hello': {'config': 'cn'}}}
    execute_pipeline(define_configurable_hello_pipeline(), environment)
| 31.36 | 98 | 0.714286 |
from dagster import Field, PipelineDefinition, execute_pipeline, solid, types
@solid(config_field=Field(types.String, is_optional=True, default_value='en-us'))
def configurable_hello(context):
    """Greet the world in the language chosen via the solid config."""
    lang = context.solid_config
    if lang.startswith('haw'):
        return 'Aloha honua!'
    if lang.startswith('cn'):
        return '你好, 世界!'
    return 'Hello, world!'
def define_configurable_hello_pipeline():
    """Construct the demo pipeline containing the configurable_hello solid."""
    return PipelineDefinition(
        name='configurable_hello_pipeline', solids=[configurable_hello]
    )
def test_intro_tutorial_part_four():
    """Run the demo pipeline with the 'cn' greeting configuration."""
    run_config = {'solids': {'configurable_hello': {'config': 'cn'}}}
    execute_pipeline(define_configurable_hello_pipeline(), run_config)
| true | true |
1c45a68d0192fabe44b1195622b98bb7d5868d24 | 3,238 | py | Python | kaplot/astro/wcsgrid.py | maartenbreddels/kaplot | 305026209f8026094d54373e14541f4f039501d5 | [
"MIT"
] | null | null | null | kaplot/astro/wcsgrid.py | maartenbreddels/kaplot | 305026209f8026094d54373e14541f4f039501d5 | [
"MIT"
] | null | null | null | kaplot/astro/wcsgrid.py | maartenbreddels/kaplot | 305026209f8026094d54373e14541f4f039501d5 | [
"MIT"
] | null | null | null | from kaplot.objects import PlotObject
import numarray
import kaplot
import kaplot.context
import kaplot.vector
class WcsGrid(PlotObject):
	"""Plot object drawing a world-coordinate-system graticule: lines of
	constant latitude and longitude mapped through a sky projection.

	NOTE(review): legacy numarray-era code; behaviour preserved as-is.
	"""
	def __init__(self, xticks, yticks, projection, longitudeoffset, lock=True, context=None, **kwargs):
		"""Store the grid definition and register for context changes.

		Args:
			xticks, yticks: longitudes/latitudes at which grid lines are drawn.
			projection: maps (lon, lat) to plot coordinates via
				forward()/forwardarray(); forward() may return None.
			longitudeoffset: offset used when deciding where latitude
				lines wrap around the dateline.
		"""
		PlotObject.__init__(self, lock=False, context=kaplot.context.mergeDicts(context, kwargs))
		self.xticks = xticks
		self.yticks = yticks
		self.projection = projection
		self.longitudeoffset = longitudeoffset
		# NOTE(review): this replaces the merged context passed to
		# PlotObject.__init__ with one built from kwargs only -- confirm
		# this is intentional.
		self.context = kaplot.context.buildContext(kwargs)
		self.callback = self.notifyChange
		self.context.addWeakListener(self.callback)
		if lock:
			self._lock()
	def plot(self, device):
		"""Draw the graticule on *device*, splitting latitude lines into
		separate polylines where they cross the (offset) dateline so no
		spurious segment is drawn across the map."""
		#xticks = self.xticks
		#yticks = self.yticks
		#xmask = (xticks >= lomin) == (xticks <= lomax)
		#ymask = (yticks >= lamin) == (yticks <= lamax)
		#xticks = compress(xmask, xticks)
		#yticks = compress(ymask, yticks)
		#la
		#yticks = arange(lamin, lamax, lagran)
		#xticks = arange(lomin, lomax, logran)
		lines = []
		xticks = numarray.array(self.xticks)
		yticks = numarray.array(self.yticks)
		#xticks = (xticks + 180) % 360 - 180
		lomin, lomax = min(xticks), max(xticks)
		lamin, lamax = min(yticks), max(yticks)
		# Sampling step: 40 segments across each axis range.
		logran = (lomax - lomin) / 40
		lagran = (lamax - lamin) / 40
		#print lomin, lomax
		#print lamin, lamax
		#print lomin, lomax, lamin, lamax
		#print xticks, yticks
		#print xticks, yticks
		#print dev.transformation.transform(xticks, yticks)
		#print "PHAT", lomin, lomax, len(yticks)
		for latitude in yticks[:]: #arange(lamin, lamax+lagran/2, lagran):
			x = numarray.arange(lomin, lomax+logran/2.0, logran)
			y = numarray.zeros(len(x)) + float(latitude)
			nx, ny = self.projection.forwardarray(x, y)
			#print "latitude", latitude
			#print "x=",x, "y=",y
			#print "new"
			#print "nx=",nx, "ny=",ny
			nx = []
			ny = []
			longoffset = self.longitudeoffset
			offset = 0 #(int(self.longitudeoffset) / 180) * 180
			longitudebegin = -180
			# Advance the offset until the first sample is below the wrap point.
			while ((x[0]-offset) >= (longitudebegin+longoffset)):
				offset += 180
			#print "offset", offset
			sigma = 0.0001
			# NOTE(review): this rebinds the arrays x and y to scalars.
			for x, y in zip(x, y):
				if ((x-offset) >= (longitudebegin+longoffset)):
					#print "jump", longoffset
					# Crossed the wrap point: close off the current polyline
					# just before the discontinuity ...
					p = self.projection.forward(longitudebegin+(longoffset-sigma)-offset, y)
					if p != None:
						nx.append(p[0])
						ny.append(p[1])
					offset += (180)
					if len(nx) >= 2:
						#print "plot", nx, ny
						device.plotPolyline(nx, ny)
					# ... and start a new one just after it.
					nx = []
					ny = []
					p = self.projection.forward(longitudebegin+(longoffset+sigma)-(offset-180), y)
					if p != None:
						nx.append(p[0])
						ny.append(p[1])
				#else:
				#	print "no jump"
				p = self.projection.forward(x, y)
				if p != None:
					nx.append(p[0])
					ny.append(p[1])
			#p = self.projection.forward(lomax, y)
			#if p != None:
			#	nx.append(p[0])
			#	ny.append(p[1])
			if len(nx) >= 2:
				#print "plot", nx, ny
				device.plotPolyline(nx, ny)
		for longitude in xticks: #arange(lomin, lomax+logran/2, logran):
			y = numarray.arange(lamin, lamax+lagran/2, lagran)
			x = numarray.zeros(len(y)) + float(longitude)
			nx, ny = self.projection.forwardarray(x, y)
			device.plotPolyline(nx, ny)
			#line = Polyline(x, y, linestyle="normal", linewidth=self.linewidth, color=self.color)
			#lines.append(line)
| 30.261682 | 100 | 0.647931 | from kaplot.objects import PlotObject
import numarray
import kaplot
import kaplot.context
import kaplot.vector
class WcsGrid(PlotObject):
	"""Plot object drawing a world-coordinate-system graticule: lines of
	constant latitude and longitude mapped through a sky projection.

	NOTE(review): legacy numarray-era code; behaviour preserved as-is.
	"""
	def __init__(self, xticks, yticks, projection, longitudeoffset, lock=True, context=None, **kwargs):
		"""Store the grid definition and register for context changes.

		Args:
			xticks, yticks: longitudes/latitudes at which grid lines are drawn.
			projection: maps (lon, lat) to plot coordinates via
				forward()/forwardarray(); forward() may return None.
			longitudeoffset: offset used when deciding where latitude
				lines wrap around the dateline.
		"""
		PlotObject.__init__(self, lock=False, context=kaplot.context.mergeDicts(context, kwargs))
		self.xticks = xticks
		self.yticks = yticks
		self.projection = projection
		self.longitudeoffset = longitudeoffset
		# NOTE(review): this replaces the merged context passed to
		# PlotObject.__init__ with one built from kwargs only -- confirm
		# this is intentional.
		self.context = kaplot.context.buildContext(kwargs)
		self.callback = self.notifyChange
		self.context.addWeakListener(self.callback)
		if lock:
			self._lock()
	def plot(self, device):
		"""Draw the graticule on *device*.

		Latitude lines are densely sampled in longitude and split into
		separate polylines where they cross the (offset) dateline, so no
		spurious segment is drawn across the map. Longitude lines are
		drawn as single polylines.
		"""
		lines = []
		xticks = numarray.array(self.xticks)
		yticks = numarray.array(self.yticks)
		lomin, lomax = min(xticks), max(xticks)
		lamin, lamax = min(yticks), max(yticks)
		# Sampling step: 40 segments across each axis range.
		logran = (lomax - lomin) / 40
		lagran = (lamax - lamin) / 40
		for latitude in yticks[:]:
			x = numarray.arange(lomin, lomax+logran/2.0, logran)
			y = numarray.zeros(len(x)) + float(latitude)
			nx, ny = self.projection.forwardarray(x, y)
			nx = []
			ny = []
			longoffset = self.longitudeoffset
			offset = 0
			longitudebegin = -180
			# Advance the offset until the first sample is below the wrap point.
			while ((x[0]-offset) >= (longitudebegin+longoffset)):
				offset += 180
			sigma = 0.0001
			# NOTE(review): this rebinds the arrays x and y to scalars.
			for x, y in zip(x, y):
				if ((x-offset) >= (longitudebegin+longoffset)):
					# Crossed the wrap point: close off the current polyline
					# just before the discontinuity ...
					p = self.projection.forward(longitudebegin+(longoffset-sigma)-offset, y)
					if p != None:
						nx.append(p[0])
						ny.append(p[1])
					offset += (180)
					if len(nx) >= 2:
						device.plotPolyline(nx, ny)
					# ... and start a new one just after it.
					nx = []
					ny = []
					p = self.projection.forward(longitudebegin+(longoffset+sigma)-(offset-180), y)
					if p != None:
						nx.append(p[0])
						ny.append(p[1])
				p = self.projection.forward(x, y)
				if p != None:
					nx.append(p[0])
					ny.append(p[1])
			if len(nx) >= 2:
				device.plotPolyline(nx, ny)
		for longitude in xticks:
			y = numarray.arange(lamin, lamax+lagran/2, lagran)
			x = numarray.zeros(len(y)) + float(longitude)
			nx, ny = self.projection.forwardarray(x, y)
			device.plotPolyline(nx, ny)
| true | true |
1c45a7a78535500c62f6eb5fd46da6f909d578fb | 1,034 | py | Python | manage.py | manuelen12/test_sale | 1d199fcfca8361edf704e0bb138a07e7d924f327 | [
"MIT"
] | null | null | null | manage.py | manuelen12/test_sale | 1d199fcfca8361edf704e0bb138a07e7d924f327 | [
"MIT"
] | null | null | null | manage.py | manuelen12/test_sale | 1d199fcfca8361edf704e0bb138a07e7d924f327 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
    # Point Django at the local settings module unless one is already set.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.local')
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django # noqa
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        raise
    # This allows easy placement of apps within the interior
    # test_venta directory.
    current_path = os.path.dirname(os.path.abspath(__file__))
    sys.path.append(os.path.join(current_path, 'test_venta'))
    # Dispatch to Django's command-line handler (runserver, migrate, ...).
    execute_from_command_line(sys.argv)
| 34.466667 | 77 | 0.658607 |
import os
import sys
if __name__ == '__main__':
    # Point Django at the local settings module unless one is already set.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.local')
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # Distinguish "Django is missing" from other import failures so
        # the real error is not masked.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        raise
    # Make apps inside the interior 'test_venta' directory importable.
    current_path = os.path.dirname(os.path.abspath(__file__))
    sys.path.append(os.path.join(current_path, 'test_venta'))
    # Dispatch to Django's command-line handler (runserver, migrate, ...).
    execute_from_command_line(sys.argv)
| true | true |
1c45a7b3b9bd4e9eca083311a86129a50d7c738e | 189 | py | Python | tests/web_platform/CSS2/normal_flow/test_block_in_inline_insert_014_nosplit_ref.py | fletchgraham/colosseum | 77be4896ee52b8f5956a3d77b5f2ccd2c8608e8f | [
"BSD-3-Clause"
] | null | null | null | tests/web_platform/CSS2/normal_flow/test_block_in_inline_insert_014_nosplit_ref.py | fletchgraham/colosseum | 77be4896ee52b8f5956a3d77b5f2ccd2c8608e8f | [
"BSD-3-Clause"
] | null | null | null | tests/web_platform/CSS2/normal_flow/test_block_in_inline_insert_014_nosplit_ref.py | fletchgraham/colosseum | 77be4896ee52b8f5956a3d77b5f2ccd2c8608e8f | [
"BSD-3-Clause"
] | 1 | 2020-01-16T01:56:41.000Z | 2020-01-16T01:56:41.000Z | from tests.utils import W3CTestCase
class TestBlockInInlineInsert014NosplitRef(W3CTestCase):
    """W3C CSS2 normal-flow reference case block-in-inline-insert-014-nosplit-ref.

    Test methods are generated by W3CTestCase.find_tests and injected into
    the class namespace via vars().update.
    """
    vars().update(W3CTestCase.find_tests(__file__, 'block-in-inline-insert-014-nosplit-ref'))
| 31.5 | 93 | 0.814815 | from tests.utils import W3CTestCase
class TestBlockInInlineInsert014NosplitRef(W3CTestCase):
    """W3C CSS2 normal-flow reference case block-in-inline-insert-014-nosplit-ref.

    Test methods are generated by W3CTestCase.find_tests and injected into
    the class namespace via vars().update.
    """
    vars().update(W3CTestCase.find_tests(__file__, 'block-in-inline-insert-014-nosplit-ref'))
| true | true |
1c45a859a5271dffa80a1d5cc1763cd482c9913a | 2,912 | py | Python | test/integration_tests/test_roles.py | poldracklab/bids-core | b87a1ef2d3e1c5a79a98c0f0ba82b1b2634bce0e | [
"MIT"
] | 1 | 2016-03-09T01:24:02.000Z | 2016-03-09T01:24:02.000Z | test/integration_tests/test_roles.py | poldracklab/bids-core | b87a1ef2d3e1c5a79a98c0f0ba82b1b2634bce0e | [
"MIT"
] | 15 | 2016-02-17T19:11:32.000Z | 2018-04-12T23:33:06.000Z | test/integration_tests/test_roles.py | poldracklab/bids-core | b87a1ef2d3e1c5a79a98c0f0ba82b1b2634bce0e | [
"MIT"
] | 4 | 2017-04-05T17:34:59.000Z | 2018-01-22T01:40:51.000Z | import requests
import os
import json
import time
from nose.tools import with_setup
base_url = 'http://localhost:8080/api'
adm_user = 'test@user.com'
user = 'other@user.com'
test_data = type('',(object,),{})()
def setup_db():
    """Create the test fixtures: a fresh group and a secondary user.

    All fixture requests run as the admin user with root privileges; the
    default session params are cleared before the test body runs.
    """
    global session
    session = requests.Session()
    # Perform the fixture requests as root.
    session.params = {
        'user': adm_user,
        'root': True
    }
    # Create a uniquely named group.
    test_data.group_id = 'test_group_' + str(int(time.time()*1000))
    group_payload = json.dumps({'_id': test_data.group_id})
    response = session.post(base_url + '/groups', data=group_payload)
    assert response.ok
    # Create the secondary (non-admin) user.
    user_payload = json.dumps({
        '_id': user,
        'firstname': 'Other',
        'lastname': 'User',
    })
    response = session.post(base_url + '/users', data=user_payload)
    assert response.ok
    session.params = {}
def teardown_db():
    """Delete the group and user created by setup_db, acting as root."""
    session.params = {'user': adm_user, 'root': True}
    for resource in ('/groups/' + test_data.group_id, '/users/' + user):
        response = session.delete(base_url + resource)
        assert response.ok
def _build_url_and_payload(method, user, access, site='local'):
    """Return (url, payload) for a roles request.

    POST requests target the roles collection URL and carry a JSON payload;
    any other method targets the specific role URL with no payload.
    """
    roles_url = os.path.join(base_url, 'groups', test_data.group_id, 'roles')
    if method != 'POST':
        return os.path.join(roles_url, site, user), None
    body = json.dumps({
        '_id': user,
        'site': site,
        'access': access
    })
    return roles_url, body
@with_setup(setup_db, teardown_db)
def test_roles():
    """Integration test for group role management.

    Exercises the full lifecycle: 404 before creation, POST to create an
    'rw' role, permission checks as the non-admin user, upgrade to 'admin'
    and downgrade back to 'rw', then deletion.
    """
    # Act as the admin user.
    session.params = {
        'user': adm_user
    }
    url_get, _ = _build_url_and_payload('GET', user, None)
    # The role does not exist yet.
    r = session.get(url_get)
    assert r.status_code == 404
    # Create a read/write role for the secondary user.
    url_post, payload = _build_url_and_payload('POST', user, 'rw')
    r = session.post(url_post, data=payload)
    assert r.ok
    r = session.get(url_get)
    assert r.ok
    content = json.loads(r.content)
    assert content['access'] == 'rw'
    assert content['_id'] == user
    # With only 'rw' access the secondary user may not read other roles.
    session.params = {
        'user': user
    }
    url_get_not_auth, _ = _build_url_and_payload('GET', adm_user, None)
    r = session.get(url_get_not_auth)
    assert r.status_code == 403
    # Upgrade the secondary user to 'admin' access.
    session.params = {
        'user': adm_user
    }
    payload = json.dumps({'access':'admin'})
    r = session.put(url_get, data=payload)
    assert r.ok
    # As an admin, the secondary user can now read other roles.
    session.params = {
        'user': user
    }
    r = session.get(url_get_not_auth)
    assert r.ok
    # Downgrade back to 'rw' access.
    session.params = {
        'user': adm_user
    }
    payload = json.dumps({'access':'rw'})
    r = session.put(url_get, data=payload)
    assert r.ok
    # Access to other roles is forbidden again.
    session.params = {
        'user': user
    }
    r = session.get(url_get_not_auth)
    assert r.status_code == 403
    # Delete the role and confirm it is gone.
    session.params = {
        'user': adm_user
    }
    r = session.delete(url_get)
    assert r.ok
    r = session.get(url_get)
    assert r.status_code == 404
| 25.321739 | 71 | 0.595467 | import requests
import os
import json
import time
from nose.tools import with_setup
# Service endpoint and the two identities exercised by the tests.
base_url = 'http://localhost:8080/api'
adm_user = 'test@user.com'  # site admin performing privileged calls
user = 'other@user.com'  # unprivileged user whose role is granted/revoked
# Anonymous namespace object used to share state between setup and tests.
test_data = type('',(object,),{})()
def setup_db():
    """Create the fixture state: an admin session, a fresh group and a user."""
    global session
    session = requests.Session()
    # Bootstrap requests run as the site admin with root privileges.
    session.params = {
        'user': adm_user,
        'root': True
    }
    # Millisecond timestamp keeps group ids unique across test runs.
    test_data.group_id = 'test_group_' + str(int(time.time()*1000))
    payload = {
        '_id': test_data.group_id
    }
    payload = json.dumps(payload)
    r = session.post(base_url + '/groups', data=payload)
    assert r.ok
    payload = {
        '_id': user,
        'firstname': 'Other',
        'lastname': 'User',
    }
    payload = json.dumps(payload)
    r = session.post(base_url + '/users', data=payload)
    assert r.ok
    # Drop the implicit credentials; each test sets its own.
    session.params = {}
def teardown_db():
session.params = {
'user': adm_user,
'root': True
}
r = session.delete(base_url + '/groups/' + test_data.group_id)
assert r.ok
r = session.delete(base_url + '/users/' + user)
assert r.ok
def _build_url_and_payload(method, user, access, site='local'):
url = os.path.join(base_url, 'groups', test_data.group_id, 'roles')
if method == 'POST':
payload = {
'_id': user,
'site': site,
'access': access
}
return url, json.dumps(payload)
else:
return os.path.join(url, site, user), None
@with_setup(setup_db, teardown_db)
def test_roles():
session.params = {
'user': adm_user
}
url_get, _ = _build_url_and_payload('GET', user, None)
r = session.get(url_get)
assert r.status_code == 404
url_post, payload = _build_url_and_payload('POST', user, 'rw')
r = session.post(url_post, data=payload)
assert r.ok
r = session.get(url_get)
assert r.ok
content = json.loads(r.content)
assert content['access'] == 'rw'
assert content['_id'] == user
session.params = {
'user': user
}
url_get_not_auth, _ = _build_url_and_payload('GET', adm_user, None)
r = session.get(url_get_not_auth)
assert r.status_code == 403
session.params = {
'user': adm_user
}
payload = json.dumps({'access':'admin'})
r = session.put(url_get, data=payload)
assert r.ok
session.params = {
'user': user
}
r = session.get(url_get_not_auth)
assert r.ok
session.params = {
'user': adm_user
}
payload = json.dumps({'access':'rw'})
r = session.put(url_get, data=payload)
assert r.ok
session.params = {
'user': user
}
r = session.get(url_get_not_auth)
assert r.status_code == 403
session.params = {
'user': adm_user
}
r = session.delete(url_get)
assert r.ok
r = session.get(url_get)
assert r.status_code == 404
| true | true |
1c45a92868008d359499e2e83998919eb99a0158 | 5,916 | py | Python | sdk/python/pulumi_azure_native/migrate/latest/group.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/migrate/latest/group.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/migrate/latest/group.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = ['Group']
# Fires once at import time: this 'latest' shim module is superseded by the
# top-level azure-native:migrate module.
warnings.warn("""The 'latest' version is deprecated. Please migrate to the resource in the top-level module: 'azure-native:migrate:Group'.""", DeprecationWarning)
class Group(pulumi.CustomResource):
    """A group created in a Migration project (auto-generated Pulumi resource,
    deprecated 'latest' API surface)."""
    # Emitted once when the class object is created at import time.
    warnings.warn("""The 'latest' version is deprecated. Please migrate to the resource in the top-level module: 'azure-native:migrate:Group'.""", DeprecationWarning)
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 e_tag: Optional[pulumi.Input[str]] = None,
                 group_name: Optional[pulumi.Input[str]] = None,
                 project_name: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 __props__=None,
                 __name__=None,
                 __opts__=None):
        """
        A group created in a Migration project.
        Latest API Version: 2019-10-01.
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] e_tag: For optimistic concurrency control.
        :param pulumi.Input[str] group_name: Unique name of a group within a project.
        :param pulumi.Input[str] project_name: Name of the Azure Migrate project.
        :param pulumi.Input[str] resource_group_name: Name of the Azure Resource Group that project is part of.
        """
        pulumi.log.warn("""Group is deprecated: The 'latest' version is deprecated. Please migrate to the resource in the top-level module: 'azure-native:migrate:Group'.""")
        # Legacy __name__/__opts__ keyword arguments are still honoured but
        # deprecated in favour of resource_name/opts.
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # Inputs are only populated when creating a new resource; a given
        # opts.id means we are looking up existing state instead.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()
            __props__['e_tag'] = e_tag
            __props__['group_name'] = group_name
            if project_name is None and not opts.urn:
                raise TypeError("Missing required property 'project_name'")
            __props__['project_name'] = project_name
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__['resource_group_name'] = resource_group_name
            # Output-only properties start as None; the engine fills them in.
            __props__['name'] = None
            __props__['properties'] = None
            __props__['type'] = None
        # Aliases keep URNs minted under older type tokens resolving here.
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:migrate/latest:Group"), pulumi.Alias(type_="azure-native:migrate:Group"), pulumi.Alias(type_="azure-nextgen:migrate:Group"), pulumi.Alias(type_="azure-native:migrate/v20191001:Group"), pulumi.Alias(type_="azure-nextgen:migrate/v20191001:Group")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(Group, __self__).__init__(
            'azure-native:migrate/latest:Group',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'Group':
        """
        Get an existing Group resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        # All properties are resolved from provider state, so seed with None.
        __props__ = dict()
        __props__["e_tag"] = None
        __props__["name"] = None
        __props__["properties"] = None
        __props__["type"] = None
        return Group(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="eTag")
    def e_tag(self) -> pulumi.Output[Optional[str]]:
        """
        For optimistic concurrency control.
        """
        return pulumi.get(self, "e_tag")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Name of the group.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def properties(self) -> pulumi.Output['outputs.GroupPropertiesResponse']:
        """
        Properties of the group.
        """
        return pulumi.get(self, "properties")
    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        Type of the object = [Microsoft.Migrate/assessmentProjects/groups].
        """
        return pulumi.get(self, "type")
    # Bridge Pulumi's camelCase wire names to Python snake_case and back.
    def translate_output_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
    def translate_input_property(self, prop):
        return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 42.869565 | 333 | 0.644523 |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = ['Group']
warnings.warn("""The 'latest' version is deprecated. Please migrate to the resource in the top-level module: 'azure-native:migrate:Group'.""", DeprecationWarning)
class Group(pulumi.CustomResource):
warnings.warn("""The 'latest' version is deprecated. Please migrate to the resource in the top-level module: 'azure-native:migrate:Group'.""", DeprecationWarning)
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
e_tag: Optional[pulumi.Input[str]] = None,
group_name: Optional[pulumi.Input[str]] = None,
project_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
pulumi.log.warn("""Group is deprecated: The 'latest' version is deprecated. Please migrate to the resource in the top-level module: 'azure-native:migrate:Group'.""")
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['e_tag'] = e_tag
__props__['group_name'] = group_name
if project_name is None and not opts.urn:
raise TypeError("Missing required property 'project_name'")
__props__['project_name'] = project_name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
__props__['name'] = None
__props__['properties'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:migrate/latest:Group"), pulumi.Alias(type_="azure-native:migrate:Group"), pulumi.Alias(type_="azure-nextgen:migrate:Group"), pulumi.Alias(type_="azure-native:migrate/v20191001:Group"), pulumi.Alias(type_="azure-nextgen:migrate/v20191001:Group")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(Group, __self__).__init__(
'azure-native:migrate/latest:Group',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'Group':
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
__props__["e_tag"] = None
__props__["name"] = None
__props__["properties"] = None
__props__["type"] = None
return Group(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="eTag")
def e_tag(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "e_tag")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> pulumi.Output['outputs.GroupPropertiesResponse']:
return pulumi.get(self, "properties")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
return pulumi.get(self, "type")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| true | true |
1c45a98c9736d722678cfe3cb4948c956cd7f2d7 | 6,212 | py | Python | tempest/api/object_storage/test_container_sync.py | azorge/tempest | 549dfc93fb7e3d6d8566064a60a6069deae5c8eb | [
"Apache-2.0"
] | 1 | 2021-05-21T08:24:02.000Z | 2021-05-21T08:24:02.000Z | tempest/api/object_storage/test_container_sync.py | azorge/tempest | 549dfc93fb7e3d6d8566064a60a6069deae5c8eb | [
"Apache-2.0"
] | null | null | null | tempest/api/object_storage/test_container_sync.py | azorge/tempest | 549dfc93fb7e3d6d8566064a60a6069deae5c8eb | [
"Apache-2.0"
] | null | null | null | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
from six.moves.urllib import parse as urlparse
import testtools
from tempest.api.object_storage import base
from tempest import config
from tempest.lib.common.utils import data_utils
from tempest.lib import decorators
from tempest import test
CONF = config.CONF
# This test can be quite long to run due to its
# dependency on container-sync process running interval.
# You can obviously reduce the container-sync interval in the
# container-server configuration.
class ContainerSyncTest(base.BaseObjectTest):
    """Exercises old-style (sync-key based) container synchronization
    between two operator accounts of the same Swift deployment."""
    # Maps container name -> (container_client, object_client) for its owner.
    clients = {}
    credentials = [['operator', CONF.object_storage.operator_role],
                   ['operator_alt', CONF.object_storage.operator_role]]
    @classmethod
    def setup_credentials(cls):
        super(ContainerSyncTest, cls).setup_credentials()
        cls.os = cls.os_roles_operator
        cls.os_alt = cls.os_roles_operator_alt
    @classmethod
    def setup_clients(cls):
        super(ContainerSyncTest, cls).setup_clients()
        cls.object_client_alt = cls.os_alt.object_client
        cls.container_client_alt = cls.os_alt.container_client
    @classmethod
    def resource_setup(cls):
        """Create one container per account and size the retry budget."""
        super(ContainerSyncTest, cls).resource_setup()
        cls.containers = []
        cls.objects = []
        # Default container-server config only allows localhost
        cls.local_ip = '127.0.0.1'
        # Must be configured according to the container-sync interval
        container_sync_timeout = CONF.object_storage.container_sync_timeout
        cls.container_sync_interval = \
            CONF.object_storage.container_sync_interval
        cls.attempts = \
            int(container_sync_timeout / cls.container_sync_interval)
        # define container and object clients
        cls.clients[data_utils.rand_name(name='TestContainerSync')] = \
            (cls.container_client, cls.object_client)
        cls.clients[data_utils.rand_name(name='TestContainerSync')] = \
            (cls.container_client_alt, cls.object_client_alt)
        for cont_name, client in cls.clients.items():
            client[0].create_container(cont_name)
            cls.containers.append(cont_name)
    @classmethod
    def resource_cleanup(cls):
        for client in cls.clients.values():
            cls.delete_containers(client[0], client[1])
        super(ContainerSyncTest, cls).resource_cleanup()
    def _test_container_synchronization(self, make_headers):
        # container to container synchronization
        # to allow/accept sync requests to/from other accounts
        # turn container synchronization on and create object in container
        for cont in (self.containers, self.containers[::-1]):
            cont_client = [self.clients[c][0] for c in cont]
            obj_client = [self.clients[c][1] for c in cont]
            headers = make_headers(cont[1], cont_client[1])
            resp, body = \
                cont_client[0].put(str(cont[0]), body=None, headers=headers)
            # create object in container
            object_name = data_utils.rand_name(name='TestSyncObject')
            data = object_name[::-1].encode()  # Raw data, we need bytes
            resp, _ = obj_client[0].create_object(cont[0], object_name, data)
            self.objects.append(object_name)
        # wait until container contents list is not empty
        cont_client = [self.clients[c][0] for c in self.containers]
        params = {'format': 'json'}
        while self.attempts > 0:
            object_lists = []
            for c_client, cont in zip(cont_client, self.containers):
                resp, object_list = c_client.list_container_contents(
                    cont, params=params)
                object_lists.append(dict(
                    (obj['name'], obj) for obj in object_list))
            # check that containers are not empty and have equal keys()
            # or wait for next attempt
            if object_lists[0] and object_lists[1] and \
                    set(object_lists[0].keys()) == set(object_lists[1].keys()):
                break
            else:
                time.sleep(self.container_sync_interval)
                self.attempts -= 1
        self.assertEqual(object_lists[0], object_lists[1],
                         'Different object lists in containers.')
        # Verify object content
        obj_clients = [(self.clients[c][1], c) for c in self.containers]
        for obj_client, cont in obj_clients:
            for obj_name in object_lists[0]:
                resp, object_content = obj_client.get_object(cont, obj_name)
                self.assertEqual(object_content, obj_name[::-1].encode())
    @test.attr(type='slow')
    @decorators.skip_because(bug='1317133')
    @decorators.idempotent_id('be008325-1bba-4925-b7dd-93b58f22ce9b')
    @testtools.skipIf(
        not CONF.object_storage_feature_enabled.container_sync,
        'Old-style container sync function is disabled')
    def test_container_synchronization(self):
        def make_headers(cont, cont_client):
            # tell first container to synchronize to a second
            client_proxy_ip = \
                urlparse.urlparse(cont_client.base_url).netloc.split(':')[0]
            client_base_url = \
                cont_client.base_url.replace(client_proxy_ip,
                                             self.local_ip)
            headers = {'X-Container-Sync-Key': 'sync_key',
                       'X-Container-Sync-To': "%s/%s" %
                       (client_base_url, str(cont))}
            return headers
        self._test_container_synchronization(make_headers)
| 41.413333 | 79 | 0.650193 |
import time
from six.moves.urllib import parse as urlparse
import testtools
from tempest.api.object_storage import base
from tempest import config
from tempest.lib.common.utils import data_utils
from tempest.lib import decorators
from tempest import test
CONF = config.CONF
class ContainerSyncTest(base.BaseObjectTest):
clients = {}
credentials = [['operator', CONF.object_storage.operator_role],
['operator_alt', CONF.object_storage.operator_role]]
@classmethod
def setup_credentials(cls):
super(ContainerSyncTest, cls).setup_credentials()
cls.os = cls.os_roles_operator
cls.os_alt = cls.os_roles_operator_alt
@classmethod
def setup_clients(cls):
super(ContainerSyncTest, cls).setup_clients()
cls.object_client_alt = cls.os_alt.object_client
cls.container_client_alt = cls.os_alt.container_client
@classmethod
def resource_setup(cls):
super(ContainerSyncTest, cls).resource_setup()
cls.containers = []
cls.objects = []
cls.local_ip = '127.0.0.1'
container_sync_timeout = CONF.object_storage.container_sync_timeout
cls.container_sync_interval = \
CONF.object_storage.container_sync_interval
cls.attempts = \
int(container_sync_timeout / cls.container_sync_interval)
cls.clients[data_utils.rand_name(name='TestContainerSync')] = \
(cls.container_client, cls.object_client)
cls.clients[data_utils.rand_name(name='TestContainerSync')] = \
(cls.container_client_alt, cls.object_client_alt)
for cont_name, client in cls.clients.items():
client[0].create_container(cont_name)
cls.containers.append(cont_name)
@classmethod
def resource_cleanup(cls):
for client in cls.clients.values():
cls.delete_containers(client[0], client[1])
super(ContainerSyncTest, cls).resource_cleanup()
def _test_container_synchronization(self, make_headers):
for cont in (self.containers, self.containers[::-1]):
cont_client = [self.clients[c][0] for c in cont]
obj_client = [self.clients[c][1] for c in cont]
headers = make_headers(cont[1], cont_client[1])
resp, body = \
cont_client[0].put(str(cont[0]), body=None, headers=headers)
object_name = data_utils.rand_name(name='TestSyncObject')
data = object_name[::-1].encode()
resp, _ = obj_client[0].create_object(cont[0], object_name, data)
self.objects.append(object_name)
cont_client = [self.clients[c][0] for c in self.containers]
params = {'format': 'json'}
while self.attempts > 0:
object_lists = []
for c_client, cont in zip(cont_client, self.containers):
resp, object_list = c_client.list_container_contents(
cont, params=params)
object_lists.append(dict(
(obj['name'], obj) for obj in object_list))
if object_lists[0] and object_lists[1] and \
set(object_lists[0].keys()) == set(object_lists[1].keys()):
break
else:
time.sleep(self.container_sync_interval)
self.attempts -= 1
self.assertEqual(object_lists[0], object_lists[1],
'Different object lists in containers.')
obj_clients = [(self.clients[c][1], c) for c in self.containers]
for obj_client, cont in obj_clients:
for obj_name in object_lists[0]:
resp, object_content = obj_client.get_object(cont, obj_name)
self.assertEqual(object_content, obj_name[::-1].encode())
@test.attr(type='slow')
@decorators.skip_because(bug='1317133')
@decorators.idempotent_id('be008325-1bba-4925-b7dd-93b58f22ce9b')
@testtools.skipIf(
not CONF.object_storage_feature_enabled.container_sync,
'Old-style container sync function is disabled')
def test_container_synchronization(self):
def make_headers(cont, cont_client):
client_proxy_ip = \
urlparse.urlparse(cont_client.base_url).netloc.split(':')[0]
client_base_url = \
cont_client.base_url.replace(client_proxy_ip,
self.local_ip)
headers = {'X-Container-Sync-Key': 'sync_key',
'X-Container-Sync-To': "%s/%s" %
(client_base_url, str(cont))}
return headers
self._test_container_synchronization(make_headers)
| true | true |
1c45aa09ed045489b6f2e606842645ee253d9d21 | 15,307 | py | Python | scripts/compile_frontend.py | jillmnolan/devtools-frontend | ea371f55d17d9b5909785c7c636be866f44cc352 | [
"BSD-3-Clause"
] | 1 | 2018-02-18T03:46:04.000Z | 2018-02-18T03:46:04.000Z | scripts/compile_frontend.py | jillmnolan/devtools-frontend | ea371f55d17d9b5909785c7c636be866f44cc352 | [
"BSD-3-Clause"
] | null | null | null | scripts/compile_frontend.py | jillmnolan/devtools-frontend | ea371f55d17d9b5909785c7c636be866f44cc352 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import argparse
import os
import os.path as path
import re
import shutil
import subprocess
import sys
import tempfile
from build import modular_build
from build import generate_protocol_externs
import dependency_preprocessor
import utils
# Prefer simplejson (faster C-extension implementation) when available and
# fall back to the stdlib json module otherwise.
try:
    import simplejson as json
except ImportError:
    import json
# Paths need cygwin -> Windows translation when running under cygwin Python.
is_cygwin = sys.platform == 'cygwin'
def popen(arguments):
    """Launch *arguments* as a child process, folding stderr into piped stdout."""
    return subprocess.Popen(
        arguments, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
def to_platform_path(filepath):
    """Turn a '/cygdrive/<x>/...' path into '<x>:...' when running on cygwin."""
    if is_cygwin:
        return re.sub(r'^/cygdrive/(\w)', '\\1:', filepath)
    return filepath
def to_platform_path_exact(filepath):
    """Ask cygpath for the exact Windows form of *filepath*, backslash-escaped."""
    if not is_cygwin:
        return filepath
    windows_path, _ = popen(['cygpath', '-w', filepath]).communicate()
    # pylint: disable=E1103
    # Double the backslashes so the result survives later string embedding.
    return windows_path.strip().replace('\\', '\\\\')
# Repository layout: this script lives in devtools/scripts/.
scripts_path = path.dirname(path.abspath(__file__))
devtools_path = path.dirname(scripts_path)
inspector_path = path.join(path.dirname(devtools_path), 'core', 'inspector')
# TODO(dgozman): move these checks to v8.
v8_inspector_path = path.normpath(path.join(path.dirname(devtools_path), os.pardir, os.pardir, os.pardir, 'v8', 'src', 'inspector'))
devtools_frontend_path = path.join(devtools_path, 'front_end')
global_externs_file = to_platform_path(path.join(devtools_frontend_path, 'externs.js'))
protocol_externs_file = path.join(devtools_frontend_path, 'protocol_externs.js')
runtime_file = to_platform_path(path.join(devtools_frontend_path, 'Runtime.js'))
# Bundled Closure Compiler plus the two helper jars built under scripts/.
closure_compiler_jar = to_platform_path(path.join(scripts_path, 'closure', 'compiler.jar'))
closure_runner_jar = to_platform_path(path.join(scripts_path, 'closure', 'closure_runner', 'closure_runner.jar'))
jsdoc_validator_jar = to_platform_path(path.join(scripts_path, 'jsdoc_validator', 'jsdoc_validator.jar'))
# JsDoc tags whose {...} type expressions are validated by the regexes below.
type_checked_jsdoc_tags_list = ['param', 'return', 'type', 'enum']
type_checked_jsdoc_tags_or = '|'.join(type_checked_jsdoc_tags_list)
# Basic regex for invalid JsDoc types: an object type name ([A-Z][_A-Za-z0-9.]+[A-Za-z0-9]) not preceded by '!', '?', ':' (this, new), or '.' (object property).
invalid_type_regex = re.compile(r'@(?:' + type_checked_jsdoc_tags_or +
                                r')\s*\{.*(?<![!?:._A-Za-z0-9])([A-Z][_A-Za-z0-9.]+[A-Za-z0-9])[^/]*\}')
invalid_type_designator_regex = re.compile(r'@(?:' + type_checked_jsdoc_tags_or + r')\s*.*(?<![{: ])([?!])=?\}')
invalid_non_object_type_regex = re.compile(r'@(?:' + type_checked_jsdoc_tags_or + r')\s*\{.*(![a-z]+)[^/]*\}')
# Lines containing these words make has_errors() report failure.
error_warning_regex = re.compile(r'WARNING|ERROR')
loaded_css_regex = re.compile(r'(?:registerRequiredCSS|WebInspector\.View\.createStyleElement)\s*\(\s*"(.+)"\s*\)')
java_build_regex = re.compile(r'\w+ version "(\d+)\.(\d+)')
def log_error(message):
    """Report *message* on stdout in the ERROR format build bots scan for."""
    print 'ERROR: ' + message
def error_excepthook(exctype, value, traceback):
    # Prefix uncaught exceptions with 'ERROR' so log scanners (see
    # has_errors) flag the compile step as failed, then fall through to the
    # default hook to print the usual traceback.
    print 'ERROR:'
    sys.__excepthook__(exctype, value, traceback)
# Install the wrapper globally for the lifetime of this script.
sys.excepthook = error_excepthook
# Application descriptor basenames (front_end/*.json module graphs) compiled.
application_descriptors = [
    'inspector',
    'toolbox',
    'integration_test_runner',
    'formatter_worker',
    'heap_snapshot_worker',
]
# Namespaces for which no extern stub is generated.
skipped_namespaces = {
    'Console',  # Closure uses Console as a namespace item so we cannot override it right now.
    'Gonzales',  # third party module defined in front_end/externs.js
    'Terminal',  # third party module defined in front_end/externs.js
}
def has_errors(output):
    """Return True when *output* contains a Closure WARNING or ERROR line."""
    # PEP 8: comparisons against the None no-match sentinel must use
    # identity ('is not None'), not equality.
    return re.search(error_warning_regex, output) is not None
class JSDocChecker:
def __init__(self, descriptors, java_exec):
self._error_found = False
self._all_files = descriptors.all_compiled_files()
self._java_exec = java_exec
def check(self):
print 'Verifying JSDoc comments...'
self._verify_jsdoc()
self._run_jsdoc_validator()
return self._error_found
def _run_jsdoc_validator(self):
files = [to_platform_path(f) for f in self._all_files]
file_list = tempfile.NamedTemporaryFile(mode='wt', delete=False)
try:
file_list.write('\n'.join(files))
finally:
file_list.close()
proc = popen(self._java_exec + ['-jar', jsdoc_validator_jar, '--files-list-name', to_platform_path_exact(file_list.name)])
(out, _) = proc.communicate()
if out:
print('JSDoc validator output:%s%s' % (os.linesep, out))
self._error_found = True
os.remove(file_list.name)
def _verify_jsdoc(self):
for full_file_name in self._all_files:
line_index = 0
with open(full_file_name, 'r') as sourceFile:
for line in sourceFile:
line_index += 1
if line.rstrip():
self._verify_jsdoc_line(full_file_name, line_index, line)
def _verify_jsdoc_line(self, file_name, line_index, line):
def print_error(message, error_position):
print '%s:%s: ERROR - %s%s%s%s%s%s' % (file_name, line_index, message, os.linesep, line, os.linesep,
' ' * error_position + '^', os.linesep)
known_css = {}
match = re.search(invalid_type_regex, line)
if match:
print_error('Type "%s" nullability not marked explicitly with "?" (nullable) or "!" (non-nullable)' % match.group(1),
match.start(1))
self._error_found = True
match = re.search(invalid_non_object_type_regex, line)
if match:
print_error('Non-object type explicitly marked with "!" (non-nullable), which is the default and should be omitted',
match.start(1))
self._error_found = True
match = re.search(invalid_type_designator_regex, line)
if match:
print_error('Type nullability indicator misplaced, should precede type', match.start(1))
self._error_found = True
match = re.search(loaded_css_regex, line)
if match:
file = path.join(devtools_frontend_path, match.group(1))
exists = known_css.get(file)
if exists is None:
exists = path.isfile(file)
known_css[file] = exists
if not exists:
print_error('Dynamically loaded CSS stylesheet is missing in the source tree', match.start(1))
self._error_found = True
def find_java():
required_major = 1
required_minor = 7
exec_command = None
has_server_jvm = True
java_path = utils.which('java')
if not java_path:
print 'NOTE: No Java executable found in $PATH.'
sys.exit(1)
is_ok = False
java_version_out, _ = popen([java_path, '-version']).communicate()
# pylint: disable=E1103
match = re.search(java_build_regex, java_version_out)
if match:
major = int(match.group(1))
minor = int(match.group(2))
is_ok = major >= required_major and minor >= required_minor
if is_ok:
exec_command = [java_path, '-Xms1024m', '-server', '-XX:+TieredCompilation']
check_server_proc = popen(exec_command + ['-version'])
check_server_proc.communicate()
if check_server_proc.returncode != 0:
# Not all Java installs have server JVMs.
exec_command = exec_command.remove('-server')
has_server_jvm = False
if not is_ok:
print 'NOTE: Java executable version %d.%d or above not found in $PATH.' % (required_major, required_minor)
sys.exit(1)
print 'Java executable: %s%s' % (java_path, '' if has_server_jvm else ' (no server JVM)')
return exec_command
# Flags shared by every Closure Compiler invocation below. '--checks-only'
# means we type-check the frontend without emitting any compiled JS.
common_closure_args = [
    '--summary_detail_level',
    '3',
    '--jscomp_error',
    'visibility',
    '--jscomp_warning',
    'missingOverride',
    '--compilation_level',
    'SIMPLE_OPTIMIZATIONS',
    '--warning_level',
    'VERBOSE',
    '--language_in=ECMASCRIPT_2017',
    '--language_out=ES5_STRICT',
    '--extra_annotation_name',
    'suppressReceiverCheck',
    '--extra_annotation_name',
    'suppressGlobalPropertiesCheck',
    '--checks-only',
    '--allow_method_call_decomposing',
]
def check_conditional_dependencies(modules_by_name):
    """Ensure unconditional modules never depend on experiment- or
    condition-gated ones; logs each violation and returns True if any."""
    errors_found = False
    for module_name, module in modules_by_name.items():
        # Test runners are allowed to pull in anything.
        if 'test_runner' in module_name:
            continue
        for dep_name in module.get('dependencies', []):
            dep = modules_by_name[dep_name]
            if not (dep.get('experiment') or dep.get('condition')):
                continue
            log_error('Module "%s" may not depend on the conditional module "%s"' % (module_name, dep_name))
            errors_found = True
    return errors_found
def prepare_closure_frontend_compile(temp_devtools_path, descriptors, namespace_externs_path):
    """Stage preprocessed sources and write the Closure argument file.
    Copies the frontend into a temp tree with dependency-enforced sources,
    then returns the path of a file holding the full compiler command for
    the closure_runner jar.
    """
    temp_frontend_path = path.join(temp_devtools_path, 'front_end')
    checker = dependency_preprocessor.DependencyPreprocessor(descriptors, temp_frontend_path, devtools_frontend_path)
    checker.enforce_dependencies()
    command = common_closure_args + [
        '--externs',
        to_platform_path(global_externs_file),
        '--externs',
        namespace_externs_path,
        '--js',
        runtime_file,
    ]
    all_files = descriptors.all_compiled_files()
    args = []
    for file in all_files:
        args.extend(['--js', file])
        # Protocol externs are only needed alongside InspectorBackend.js.
        if "InspectorBackend.js" in file:
            args.extend(['--js', protocol_externs_file])
    command += args
    # Redirect every path into the preprocessed copy of the tree.
    command = [arg.replace(devtools_frontend_path, temp_frontend_path) for arg in command]
    compiler_args_file = tempfile.NamedTemporaryFile(mode='wt', delete=False)
    try:
        compiler_args_file.write('devtools_frontend %s' % (' '.join(command)))
    finally:
        compiler_args_file.close()
    return compiler_args_file.name
def generate_namespace_externs(modules_by_name):
    """Write an externs file declaring one global object per module namespace.
    Returns the (platform-specific) path of the generated temp file.
    """
    special_case_namespaces_path = path.join(path.dirname(path.abspath(__file__)), 'special_case_namespaces.json')
    with open(special_case_namespaces_path) as json_file:
        special_case_namespaces = json.load(json_file)
    def map_module_to_namespace(module):
        """Map a module name to its JS namespace, honouring special cases."""
        return special_case_namespaces.get(module, to_camel_case(module))
    def to_camel_case(snake_string):
        """Turn e.g. 'heap_snapshot_worker' into 'HeapSnapshotWorker'."""
        components = snake_string.split('_')
        return ''.join(x.title() for x in components)
    all_namespaces = [map_module_to_namespace(module) for module in modules_by_name]
    namespaces = [namespace for namespace in all_namespaces if namespace not in skipped_namespaces]
    namespaces.sort()
    # delete=False: the Closure invocation reopens the file by name later.
    namespace_externs_file = tempfile.NamedTemporaryFile(mode='wt', delete=False)
    try:
        for namespace in namespaces:
            namespace_externs_file.write('/** @const */\n')
            namespace_externs_file.write('var %s = {};\n' % namespace)
    finally:
        namespace_externs_file.close()
    namespace_externs_path = to_platform_path(namespace_externs_file.name)
    return namespace_externs_path
def main():
    """Compile the devtools frontend and devtools_compatibility.js.

    Runs dependency, JSDoc and closure-compiler checks, prints all compiler
    output, removes the generated temp files, and exits with status 1 if any
    error was detected.
    """
    global protocol_externs_file
    errors_found = False
    parser = argparse.ArgumentParser()
    parser.add_argument('--protocol-externs-file')
    args, _ = parser.parse_known_args()
    if args.protocol_externs_file:
        protocol_externs_file = args.protocol_externs_file
    else:
        # No pre-generated externs supplied: derive them from the protocol
        # definitions in the tree.
        generate_protocol_externs.generate_protocol_externs(protocol_externs_file,
                                                            path.join(inspector_path, 'browser_protocol.json'),
                                                            path.join(v8_inspector_path, 'js_protocol.json'))
    loader = modular_build.DescriptorLoader(devtools_frontend_path)
    descriptors = loader.load_applications(application_descriptors)
    modules_by_name = descriptors.modules
    java_exec = find_java()
    errors_found |= check_conditional_dependencies(modules_by_name)
    print 'Compiling frontend...'
    temp_devtools_path = tempfile.mkdtemp()
    namespace_externs_path = generate_namespace_externs(modules_by_name)
    compiler_args_file_path = prepare_closure_frontend_compile(temp_devtools_path, descriptors, namespace_externs_path)
    # Start the (long) frontend compile first so it runs concurrently with
    # the devtools_compatibility compile and the JSDoc checks below.
    frontend_compile_proc = popen(
        java_exec + ['-jar', closure_runner_jar, '--compiler-args-file', to_platform_path_exact(compiler_args_file_path)])
    print 'Compiling devtools_compatibility.js...'
    closure_compiler_command = java_exec + ['-jar', closure_compiler_jar] + common_closure_args
    devtools_js_compile_command = closure_compiler_command + [
        '--externs', to_platform_path(global_externs_file), '--externs',
        to_platform_path(path.join(devtools_frontend_path, 'host', 'InspectorFrontendHostAPI.js')),
        '--jscomp_off=externsValidation', '--js', to_platform_path(path.join(devtools_frontend_path, 'devtools_compatibility.js'))
    ]
    devtools_js_compile_proc = popen(devtools_js_compile_command)
    errors_found |= JSDocChecker(descriptors, java_exec).check()
    (devtools_js_compile_out, _) = devtools_js_compile_proc.communicate()
    print 'devtools_compatibility.js compilation output:%s' % os.linesep, devtools_js_compile_out
    errors_found |= has_errors(devtools_js_compile_out)
    (frontend_compile_out, _) = frontend_compile_proc.communicate()
    print 'devtools frontend compilation output:'
    for line in frontend_compile_out.splitlines():
        # START/END_MODULE markers are closure_runner bookkeeping; hide them.
        if "@@ START_MODULE" in line or "@@ END_MODULE" in line:
            continue
        print line
    errors_found |= has_errors(frontend_compile_out)
    os.remove(protocol_externs_file)
    os.remove(namespace_externs_path)
    os.remove(compiler_args_file_path)
    shutil.rmtree(temp_devtools_path, True)
    if errors_found:
        print 'ERRORS DETECTED'
        sys.exit(1)
    print 'DONE - compiled without errors'
if __name__ == "__main__":
    main()
| 39.148338 | 160 | 0.688639 |
import argparse
import os
import os.path as path
import re
import shutil
import subprocess
import sys
import tempfile
from build import modular_build
from build import generate_protocol_externs
import dependency_preprocessor
import utils
try:
import simplejson as json
except ImportError:
import json
is_cygwin = sys.platform == 'cygwin'


def popen(arguments):
    """Launch *arguments*, capturing stdout and stderr on a single pipe."""
    return subprocess.Popen(arguments,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)


def to_platform_path(filepath):
    """Translate a cygwin /cygdrive path into a native drive-letter path."""
    if is_cygwin:
        return re.sub(r'^/cygdrive/(\w)', '\\1:', filepath)
    return filepath


def to_platform_path_exact(filepath):
    """Ask cygpath for the exact native form of *filepath*, escaped for Java."""
    if not is_cygwin:
        return filepath
    output, _ = popen(['cygpath', '-w', filepath]).communicate()
    return output.strip().replace('\\', '\\\\')
# Checkout layout, derived from this script's own location.
scripts_path = path.dirname(path.abspath(__file__))
devtools_path = path.dirname(scripts_path)
inspector_path = path.join(path.dirname(devtools_path), 'core', 'inspector')
v8_inspector_path = path.normpath(path.join(path.dirname(devtools_path), os.pardir, os.pardir, os.pardir, 'v8', 'src', 'inspector'))
devtools_frontend_path = path.join(devtools_path, 'front_end')
# Externs and runtime inputs for the closure compiler.
global_externs_file = to_platform_path(path.join(devtools_frontend_path, 'externs.js'))
protocol_externs_file = path.join(devtools_frontend_path, 'protocol_externs.js')
runtime_file = to_platform_path(path.join(devtools_frontend_path, 'Runtime.js'))
# Bundled Java tools used by the checks below.
closure_compiler_jar = to_platform_path(path.join(scripts_path, 'closure', 'compiler.jar'))
closure_runner_jar = to_platform_path(path.join(scripts_path, 'closure', 'closure_runner', 'closure_runner.jar'))
jsdoc_validator_jar = to_platform_path(path.join(scripts_path, 'jsdoc_validator', 'jsdoc_validator.jar'))
# Regexes used by the line-by-line JSDoc verification in JSDocChecker.
type_checked_jsdoc_tags_list = ['param', 'return', 'type', 'enum']
type_checked_jsdoc_tags_or = '|'.join(type_checked_jsdoc_tags_list)
invalid_type_regex = re.compile(r'@(?:' + type_checked_jsdoc_tags_or +
                                r')\s*\{.*(?<![!?:._A-Za-z0-9])([A-Z][_A-Za-z0-9.]+[A-Za-z0-9])[^/]*\}')
invalid_type_designator_regex = re.compile(r'@(?:' + type_checked_jsdoc_tags_or + r')\s*.*(?<![{: ])([?!])=?\}')
invalid_non_object_type_regex = re.compile(r'@(?:' + type_checked_jsdoc_tags_or + r')\s*\{.*(![a-z]+)[^/]*\}')
error_warning_regex = re.compile(r'WARNING|ERROR')
loaded_css_regex = re.compile(r'(?:registerRequiredCSS|WebInspector\.View\.createStyleElement)\s*\(\s*"(.+)"\s*\)')
java_build_regex = re.compile(r'\w+ version "(\d+)\.(\d+)')
def log_error(message):
    """Print *message* to stdout prefixed with 'ERROR: '."""
    # Parenthesized single-argument print works identically under Python 2
    # and 3, matching the print(...) form already used in this script.
    print('ERROR: ' + message)
def error_excepthook(exctype, value, traceback):
    """Excepthook that prints an 'ERROR:' marker on stdout for any uncaught
    exception, then delegates to the default hook for the traceback."""
    # Parenthesized single-argument print works identically under Python 2
    # and 3, matching the print(...) form already used in this script.
    print('ERROR:')
    sys.__excepthook__(exctype, value, traceback)
sys.excepthook = error_excepthook
# Application descriptors compiled and checked by main().
application_descriptors = [
    'inspector',
    'toolbox',
    'integration_test_runner',
    'formatter_worker',
    'heap_snapshot_worker',
]
# Module namespaces that must not get a generated externs declaration.
skipped_namespaces = {
    'Console', # Closure uses Console as a namespace item so we cannot override it right now.
    'Gonzales', # third party module defined in front_end/externs.js
    'Terminal', # third party module defined in front_end/externs.js
}
def has_errors(output):
    """Return True if compiler *output* contains a WARNING or ERROR marker."""
    return error_warning_regex.search(output) is not None
class JSDocChecker:
    """Validates JSDoc annotations in the frontend sources.

    Runs two passes over every compiled file: a line-by-line regex check for
    common annotation mistakes (nullability markers, misplaced designators,
    missing CSS stylesheets) and the external jsdoc_validator jar.
    """

    def __init__(self, descriptors, java_exec):
        self._error_found = False
        self._all_files = descriptors.all_compiled_files()
        self._java_exec = java_exec
        # Cache of CSS-file existence results, shared across all scanned
        # lines. (Previously this dict was recreated for every line inside
        # _verify_jsdoc_line, so the cache never actually cached anything.)
        self._known_css = {}

    def check(self):
        """Run all JSDoc checks; returns True if any error was found."""
        print('Verifying JSDoc comments...')
        self._verify_jsdoc()
        self._run_jsdoc_validator()
        return self._error_found

    def _run_jsdoc_validator(self):
        """Invoke the jsdoc_validator jar on every compiled file."""
        files = [to_platform_path(f) for f in self._all_files]
        file_list = tempfile.NamedTemporaryFile(mode='wt', delete=False)
        try:
            file_list.write('\n'.join(files))
        finally:
            file_list.close()
        proc = popen(self._java_exec + ['-jar', jsdoc_validator_jar, '--files-list-name', to_platform_path_exact(file_list.name)])
        (out, _) = proc.communicate()
        if out:
            # Any output from the validator is treated as an error.
            print('JSDoc validator output:%s%s' % (os.linesep, out))
            self._error_found = True
        os.remove(file_list.name)

    def _verify_jsdoc(self):
        """Scan every non-blank line of every compiled file."""
        for full_file_name in self._all_files:
            line_index = 0
            with open(full_file_name, 'r') as sourceFile:
                for line in sourceFile:
                    line_index += 1
                    if line.rstrip():
                        self._verify_jsdoc_line(full_file_name, line_index, line)

    def _verify_jsdoc_line(self, file_name, line_index, line):
        """Check a single source line for JSDoc/CSS problems."""

        def print_error(message, error_position):
            print('%s:%s: ERROR - %s%s%s%s%s%s' % (file_name, line_index, message, os.linesep, line, os.linesep,
                                                   ' ' * error_position + '^', os.linesep))

        match = re.search(invalid_type_regex, line)
        if match:
            print_error('Type "%s" nullability not marked explicitly with "?" (nullable) or "!" (non-nullable)' % match.group(1),
                        match.start(1))
            self._error_found = True
        match = re.search(invalid_non_object_type_regex, line)
        if match:
            print_error('Non-object type explicitly marked with "!" (non-nullable), which is the default and should be omitted',
                        match.start(1))
            self._error_found = True
        match = re.search(invalid_type_designator_regex, line)
        if match:
            print_error('Type nullability indicator misplaced, should precede type', match.start(1))
            self._error_found = True
        match = re.search(loaded_css_regex, line)
        if match:
            file = path.join(devtools_frontend_path, match.group(1))
            # Use the instance-level cache so each stylesheet is stat'ed at
            # most once per run.
            exists = self._known_css.get(file)
            if exists is None:
                exists = path.isfile(file)
                self._known_css[file] = exists
            if not exists:
                print_error('Dynamically loaded CSS stylesheet is missing in the source tree', match.start(1))
                self._error_found = True
def find_java():
    """Locate a Java >= 1.7 executable and build its launch command.

    Exits the process (status 1) if no suitable Java is found. Returns the
    argv prefix (java path plus JVM flags) used to run the closure jars;
    '-server' is dropped when the install has no server JVM.
    """
    required_major = 1
    required_minor = 7
    exec_command = None
    has_server_jvm = True
    java_path = utils.which('java')
    if not java_path:
        print('NOTE: No Java executable found in $PATH.')
        sys.exit(1)
    is_ok = False
    # 'java -version' reports on stderr, which popen() merges into stdout.
    java_version_out, _ = popen([java_path, '-version']).communicate()
    # pylint: disable=E1103
    match = re.search(java_build_regex, java_version_out)
    if match:
        major = int(match.group(1))
        minor = int(match.group(2))
        is_ok = major >= required_major and minor >= required_minor
    if is_ok:
        exec_command = [java_path, '-Xms1024m', '-server', '-XX:+TieredCompilation']
        check_server_proc = popen(exec_command + ['-version'])
        check_server_proc.communicate()
        if check_server_proc.returncode != 0:
            # Not all Java installs have server JVMs. list.remove() mutates
            # in place and returns None, so its result must not be assigned
            # back (the previous code did, making find_java() return None on
            # such installs and crashing the callers).
            exec_command.remove('-server')
            has_server_jvm = False
    if not is_ok:
        print('NOTE: Java executable version %d.%d or above not found in $PATH.' % (required_major, required_minor))
        sys.exit(1)
    print('Java executable: %s%s' % (java_path, '' if has_server_jvm else ' (no server JVM)'))
    return exec_command
# Flags shared by every closure compiler invocation in this script.
common_closure_args = [
    '--summary_detail_level',
    '3',
    '--jscomp_error',
    'visibility',
    '--jscomp_warning',
    'missingOverride',
    '--compilation_level',
    'SIMPLE_OPTIMIZATIONS',
    '--warning_level',
    'VERBOSE',
    '--language_in=ECMASCRIPT_2017',
    '--language_out=ES5_STRICT',
    '--extra_annotation_name',
    'suppressReceiverCheck',
    '--extra_annotation_name',
    'suppressGlobalPropertiesCheck',
    '--checks-only',
    '--allow_method_call_decomposing',
]
def check_conditional_dependencies(modules_by_name):
    """Return True if any non-test module depends on a conditional module.

    A dependency is conditional when its descriptor carries an 'experiment'
    or 'condition' key; test_runner modules are exempt from the check.
    Every violation is reported via log_error().
    """
    errors_found = False
    for name, module in modules_by_name.items():
        if 'test_runner' in name:
            continue
        for dep_name in module.get('dependencies', []):
            dependency = modules_by_name[dep_name]
            if dependency.get('experiment') or dependency.get('condition'):
                log_error('Module "%s" may not depend on the conditional module "%s"' % (name, dep_name))
                errors_found = True
    return errors_found
def prepare_closure_frontend_compile(temp_devtools_path, descriptors, namespace_externs_path):
    """Build the closure-runner arguments file for the frontend compile.

    Assembles the closure compiler command line (common flags, externs,
    Runtime.js, plus every compiled file listed by *descriptors*), rewrites
    source paths to the staged copy under *temp_devtools_path*, and writes
    the command to a temporary args file.

    Returns the path of the args file; the caller is expected to delete it.
    """
    temp_frontend_path = path.join(temp_devtools_path, 'front_end')
    # NOTE(review): DependencyPreprocessor appears to stage a checked copy of
    # the sources under temp_frontend_path -- the .replace() below relies on
    # that; confirm against dependency_preprocessor.py.
    checker = dependency_preprocessor.DependencyPreprocessor(descriptors, temp_frontend_path, devtools_frontend_path)
    checker.enforce_dependencies()
    command = common_closure_args + [
        '--externs',
        to_platform_path(global_externs_file),
        '--externs',
        namespace_externs_path,
        '--js',
        runtime_file,
    ]
    all_files = descriptors.all_compiled_files()
    args = []
    for file in all_files:
        args.extend(['--js', file])
        # InspectorBackend also needs the generated protocol externs.
        if "InspectorBackend.js" in file:
            args.extend(['--js', protocol_externs_file])
    command += args
    # Point every source path at the staged copy rather than the checkout.
    command = [arg.replace(devtools_frontend_path, temp_frontend_path) for arg in command]
    compiler_args_file = tempfile.NamedTemporaryFile(mode='wt', delete=False)
    try:
        compiler_args_file.write('devtools_frontend %s' % (' '.join(command)))
    finally:
        compiler_args_file.close()
    return compiler_args_file.name
def generate_namespace_externs(modules_by_name):
    """Write a temporary externs file declaring one global per module namespace.

    Module names are converted to CamelCase, with overrides read from
    special_case_namespaces.json next to this script; namespaces listed in
    skipped_namespaces are omitted. Returns the platform path of the file;
    the caller is expected to delete it.
    """
    special_case_namespaces_path = path.join(path.dirname(path.abspath(__file__)), 'special_case_namespaces.json')
    with open(special_case_namespaces_path) as json_file:
        special_case_namespaces = json.load(json_file)
    def map_module_to_namespace(module):
        # Prefer the explicit override, else derive CamelCase from snake_case.
        return special_case_namespaces.get(module, to_camel_case(module))
    def to_camel_case(snake_string):
        components = snake_string.split('_')
        return ''.join(x.title() for x in components)
    all_namespaces = [map_module_to_namespace(module) for module in modules_by_name]
    namespaces = [namespace for namespace in all_namespaces if namespace not in skipped_namespaces]
    namespaces.sort()
    namespace_externs_file = tempfile.NamedTemporaryFile(mode='wt', delete=False)
    try:
        for namespace in namespaces:
            namespace_externs_file.write('/** @const */\n')
            namespace_externs_file.write('var %s = {};\n' % namespace)
    finally:
        namespace_externs_file.close()
    namespace_externs_path = to_platform_path(namespace_externs_file.name)
    return namespace_externs_path
def main():
    """Compile the devtools frontend and devtools_compatibility.js.

    Runs dependency, JSDoc and closure-compiler checks, prints all compiler
    output, removes the generated temp files, and exits with status 1 if any
    error was detected.
    """
    global protocol_externs_file
    errors_found = False
    parser = argparse.ArgumentParser()
    parser.add_argument('--protocol-externs-file')
    args, _ = parser.parse_known_args()
    if args.protocol_externs_file:
        protocol_externs_file = args.protocol_externs_file
    else:
        # No pre-generated externs supplied: derive them from the protocol
        # definitions in the tree.
        generate_protocol_externs.generate_protocol_externs(protocol_externs_file,
                                                            path.join(inspector_path, 'browser_protocol.json'),
                                                            path.join(v8_inspector_path, 'js_protocol.json'))
    loader = modular_build.DescriptorLoader(devtools_frontend_path)
    descriptors = loader.load_applications(application_descriptors)
    modules_by_name = descriptors.modules
    java_exec = find_java()
    errors_found |= check_conditional_dependencies(modules_by_name)
    print 'Compiling frontend...'
    temp_devtools_path = tempfile.mkdtemp()
    namespace_externs_path = generate_namespace_externs(modules_by_name)
    compiler_args_file_path = prepare_closure_frontend_compile(temp_devtools_path, descriptors, namespace_externs_path)
    # Start the (long) frontend compile first so it runs concurrently with
    # the devtools_compatibility compile and the JSDoc checks below.
    frontend_compile_proc = popen(
        java_exec + ['-jar', closure_runner_jar, '--compiler-args-file', to_platform_path_exact(compiler_args_file_path)])
    print 'Compiling devtools_compatibility.js...'
    closure_compiler_command = java_exec + ['-jar', closure_compiler_jar] + common_closure_args
    devtools_js_compile_command = closure_compiler_command + [
        '--externs', to_platform_path(global_externs_file), '--externs',
        to_platform_path(path.join(devtools_frontend_path, 'host', 'InspectorFrontendHostAPI.js')),
        '--jscomp_off=externsValidation', '--js', to_platform_path(path.join(devtools_frontend_path, 'devtools_compatibility.js'))
    ]
    devtools_js_compile_proc = popen(devtools_js_compile_command)
    errors_found |= JSDocChecker(descriptors, java_exec).check()
    (devtools_js_compile_out, _) = devtools_js_compile_proc.communicate()
    print 'devtools_compatibility.js compilation output:%s' % os.linesep, devtools_js_compile_out
    errors_found |= has_errors(devtools_js_compile_out)
    (frontend_compile_out, _) = frontend_compile_proc.communicate()
    print 'devtools frontend compilation output:'
    for line in frontend_compile_out.splitlines():
        # START/END_MODULE markers are closure_runner bookkeeping; hide them.
        if "@@ START_MODULE" in line or "@@ END_MODULE" in line:
            continue
        print line
    errors_found |= has_errors(frontend_compile_out)
    os.remove(protocol_externs_file)
    os.remove(namespace_externs_path)
    os.remove(compiler_args_file_path)
    shutil.rmtree(temp_devtools_path, True)
    if errors_found:
        print 'ERRORS DETECTED'
        sys.exit(1)
    print 'DONE - compiled without errors'
if __name__ == "__main__":
    main()
| false | true |
1c45ab721c9d7842215f9675276f0e2745f79bac | 14,462 | py | Python | external/workload-automation/wa/framework/signal.py | qais-yousef/lisa | 8343e26bf0565589928a69ccbe67b1be03403db7 | [
"Apache-2.0"
] | 1 | 2020-11-30T16:14:02.000Z | 2020-11-30T16:14:02.000Z | external/workload-automation/wa/framework/signal.py | qais-yousef/lisa | 8343e26bf0565589928a69ccbe67b1be03403db7 | [
"Apache-2.0"
] | null | null | null | external/workload-automation/wa/framework/signal.py | qais-yousef/lisa | 8343e26bf0565589928a69ccbe67b1be03403db7 | [
"Apache-2.0"
] | 1 | 2020-10-09T11:40:00.000Z | 2020-10-09T11:40:00.000Z | # Copyright 2013-2018 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
This module wraps louie signalling mechanism. It relies on modified version of loiue
that has prioritization added to handler invocation.
"""
import sys
import logging
from contextlib import contextmanager
import wrapt
from louie import dispatcher # pylint: disable=wrong-import-order
from wa.utils.types import prioritylist, enum
logger = logging.getLogger('signal')
class Signal(object):
    """Named marker for a stage/state of a WA run that handlers subscribe to.

    Signal instances with the same name refer to the same execution stage.
    ``invert_priority`` flips the handler ordering applied in connect();
    it is conventionally set to True on signals emitted *after* a stage has
    completed, so that high-priority callbacks run nearest to the event.
    """

    def __init__(self, name, description='no description', invert_priority=False):
        self.name = name
        self.description = description
        self.invert_priority = invert_priority

    def __str__(self):
        return self.name

    def __repr__(self):
        return self.name

    def __hash__(self):
        return id(self.name)
# Signals associated with run-related events
RUN_STARTED = Signal('run-started', 'sent at the beginning of the run')
RUN_INITIALIZED = Signal('run-initialized', 'set after the run has been initialized')
RUN_ABORTED = Signal('run-aborted', 'set when the run has been aborted due to a keyboard interrupt')
RUN_FAILED = Signal('run-failed', 'set if the run has failed to complete all jobs.')
RUN_FINALIZED = Signal('run-finalized', 'set after the run has been finalized')
RUN_COMPLETED = Signal('run-completed', 'set upon completion of the run (regardless of whether or not it has failed')
# Signals associated with job-related events
JOB_STARTED = Signal('job-started', 'set when a a new job has been started')
JOB_ABORTED = Signal('job-aborted',
description='''
sent if a job has been aborted due to a keyboard interrupt.
.. note:: While the status of every job that has not had a
chance to run due to being interrupted will be
set to "ABORTED", this signal will only be sent
for the job that was actually running at the
time.
''')
JOB_FAILED = Signal('job-failed', description='set if the job has failed')
JOB_RESTARTED = Signal('job-restarted')
JOB_COMPLETED = Signal('job-completed')
# Signals associated with particular stages of workload execution
BEFORE_WORKLOAD_INITIALIZED = Signal('before-workload-initialized',
invert_priority=True)
SUCCESSFUL_WORKLOAD_INITIALIZED = Signal('successful-workload-initialized')
AFTER_WORKLOAD_INITIALIZED = Signal('after-workload-initialized')
BEFORE_WORKLOAD_SETUP = Signal('before-workload-setup', invert_priority=True)
SUCCESSFUL_WORKLOAD_SETUP = Signal('successful-workload-setup')
AFTER_WORKLOAD_SETUP = Signal('after-workload-setup')
BEFORE_WORKLOAD_EXECUTION = Signal('before-workload-execution', invert_priority=True)
SUCCESSFUL_WORKLOAD_EXECUTION = Signal('successful-workload-execution')
AFTER_WORKLOAD_EXECUTION = Signal('after-workload-execution')
BEFORE_WORKLOAD_RESULT_EXTRACTION = Signal('before-workload-result-extracton', invert_priority=True)
SUCCESSFUL_WORKLOAD_RESULT_EXTRACTION = Signal('successful-workload-result-extracton')
AFTER_WORKLOAD_RESULT_EXTRACTION = Signal('after-workload-result-extracton')
BEFORE_WORKLOAD_OUTPUT_UPDATE = Signal('before-workload-output-update',
invert_priority=True)
SUCCESSFUL_WORKLOAD_OUTPUT_UPDATE = Signal('successful-workload-output-update')
AFTER_WORKLOAD_OUTPUT_UPDATE = Signal('after-workload-output-update')
BEFORE_WORKLOAD_TEARDOWN = Signal('before-workload-teardown', invert_priority=True)
SUCCESSFUL_WORKLOAD_TEARDOWN = Signal('successful-workload-teardown')
AFTER_WORKLOAD_TEARDOWN = Signal('after-workload-teardown')
BEFORE_WORKLOAD_FINALIZED = Signal('before-workload-finalized', invert_priority=True)
SUCCESSFUL_WORKLOAD_FINALIZED = Signal('successful-workload-finalized')
AFTER_WORKLOAD_FINALIZED = Signal('after-workload-finalized')
# Signals indicating exceptional conditions
ERROR_LOGGED = Signal('error-logged')
WARNING_LOGGED = Signal('warning-logged')
# These are paired events -- if the before_event is sent, the after_ signal is
# guaranteed to also be sent. In particular, the after_ signals will be sent
# even if there is an error, so you cannot assume in the handler that the
# device has booted successfully. In most cases, you should instead use the
# non-paired signals below.
BEFORE_RUN_INIT = Signal('before-run-init', invert_priority=True)
SUCCESSFUL_RUN_INIT = Signal('successful-run-init')
AFTER_RUN_INIT = Signal('after-run-init')
BEFORE_JOB = Signal('before-job', invert_priority=True)
SUCCESSFUL_JOB = Signal('successful-job')
AFTER_JOB = Signal('after-job')
BEFORE_JOB_QUEUE_EXECUTION = Signal('before-job-queue-execution', invert_priority=True)
SUCCESSFUL_JOB_QUEUE_EXECUTION = Signal('successful-job-queue-execution')
AFTER_JOB_QUEUE_EXECUTION = Signal('after-job-queue-execution')
BEFORE_JOB_TARGET_CONFIG = Signal('before-job-target-config', invert_priority=True)
SUCCESSFUL_JOB_TARGET_CONFIG = Signal('successful-job-target-config')
AFTER_JOB_TARGET_CONFIG = Signal('after-job-target-config')
BEFORE_JOB_OUTPUT_PROCESSED = Signal('before-job-output-processed',
invert_priority=True)
SUCCESSFUL_JOB_OUTPUT_PROCESSED = Signal('successful-job-output-processed')
AFTER_JOB_OUTPUT_PROCESSED = Signal('after-job-output-processed')
BEFORE_FLASHING = Signal('before-flashing', invert_priority=True)
SUCCESSFUL_FLASHING = Signal('successful-flashing')
AFTER_FLASHING = Signal('after-flashing')
BEFORE_REBOOT = Signal('before-reboot', invert_priority=True)
SUCCESSFUL_REBOOT = Signal('successful-reboot')
AFTER_REBOOT = Signal('after-reboot')
BEFORE_TARGET_CONNECT = Signal('before-target-connect', invert_priority=True)
SUCCESSFUL_TARGET_CONNECT = Signal('successful-target-connect')
AFTER_TARGET_CONNECT = Signal('after-target-connect')
BEFORE_TARGET_DISCONNECT = Signal('before-target-disconnect', invert_priority=True)
SUCCESSFUL_TARGET_DISCONNECT = Signal('successful-target-disconnect')
AFTER_TARGET_DISCONNECT = Signal('after-target-disconnect')
BEFORE_RUN_OUTPUT_PROCESSED = Signal(
'before-run-output-processed', invert_priority=True)
SUCCESSFUL_RUN_OUTPUT_PROCESSED = Signal(
'successful-run-output-processed')
AFTER_RUN_OUTPUT_PROCESSED = Signal(
'after-run-output-processed')
CallbackPriority = enum(['extremely_low', 'very_low', 'low', 'normal',
'high', 'very_high', 'extremely_high'], -30, 10)
class _prioritylist_wrapper(prioritylist):
    """
    prioritylist whose append() is a no-op. louie invokes append() on the
    receivers container when connecting a handler; the handler is instead
    inserted at the requested priority inside connect() below, *before*
    louie's own connect() gets invoked, so this append must do nothing.
    """
    def append(self, *args, **kwargs):
        pass
def connect(handler, signal, sender=dispatcher.Any, priority=0):
    """
    Connects a callback to a signal, so that the callback will be automatically invoked
    when that signal is sent.
    Parameters:
        :handler: This can be any callable that takes the right arguments for
                  the signal. For most signals this means a single argument that
                  will be an ``ExecutionContext`` instance. But please see documentation
                  for individual signals in the :ref:`signals reference <instruments_method_map>`.
        :signal: The signal to which the handler will be subscribed. Please see
                 :ref:`signals reference <instruments_method_map>` for the list of standard WA
                 signals.
                 .. note:: There is nothing that prevents instruments from sending their
                           own signals that are not part of the standard set. However the signal
                           must always be an :class:`wa.core.signal.Signal` instance.
        :sender: The handler will be invoked only for the signals emitted by this sender. By
                 default, this is set to :class:`louie.dispatcher.Any`, so the handler will
                 be invoked for signals from any sender.
        :priority: An integer (positive or negative) the specifies the priority of the handler.
                   Handlers with higher priority will be called before handlers with lower
                   priority. The call order of handlers with the same priority is not specified.
                   Defaults to 0.
                   .. note:: Priorities for some signals are inverted (so highest priority
                             handlers get executed last). Please see :ref:`signals reference <instruments_method_map>`
                             for details.
    """
    logger.debug('Connecting {} to {}({}) with priority {}'.format(handler, signal, sender, priority))
    if getattr(signal, 'invert_priority', False):
        priority = -priority
    # Mirror louie's internal registry layout: connections[id(sender)][signal]
    # holds the receiver collection for that (sender, signal) pair. Insert
    # the handler at its priority *before* delegating to louie, whose own
    # append() call is neutralised by _prioritylist_wrapper.
    senderkey = id(sender)
    if senderkey in dispatcher.connections:
        signals = dispatcher.connections[senderkey]
    else:
        dispatcher.connections[senderkey] = signals = {}
    if signal in signals:
        receivers = signals[signal]
    else:
        receivers = signals[signal] = _prioritylist_wrapper()
    receivers.add(handler, priority)
    dispatcher.connect(handler, signal, sender)
def disconnect(handler, signal, sender=dispatcher.Any):
    """Unsubscribe a previously connected *handler* from *signal*.

    When *sender* is given, the handler is removed only for signals emitted
    by that sender; by default it is removed for signals from any sender.
    """
    message = 'Disconnecting {} from {}({})'.format(handler, signal, sender)
    logger.debug(message)
    dispatcher.disconnect(handler, signal, sender)
def send(signal, sender=dispatcher.Anonymous, *args, **kwargs):
    """Dispatch *signal* from *sender*, invoking every connected handler.

    Additional positional and keyword arguments are forwarded to the
    handlers. Returns louie's dispatcher.send() result.
    """
    message = 'Sending {} from {}'.format(signal, sender)
    logger.debug(message)
    return dispatcher.send(signal, sender, *args, **kwargs)
# Hook used by safe_send()/wrap() to report handler exceptions. This will
# normally be rebound to log_error() by init_logging(); see wa.utils.log.
# Done this way (a module-level reference rather than an import) to prevent
# a circular import dependency.
log_error_func = logger.error
def safe_send(signal, sender=dispatcher.Anonymous,
              propagate=None, *args, **kwargs):
    """Like send(), but exceptions raised by handlers are caught and logged.

    Exceptions whose type appears in *propagate* (defaults to
    ``[KeyboardInterrupt]``) are re-raised instead of being swallowed.
    """
    propagate_types = tuple(propagate) if propagate is not None else (KeyboardInterrupt,)
    try:
        logger.debug('Safe-sending {} from {}'.format(signal, sender))
        send(signal, sender, *args, **kwargs)
    except Exception as e:  # pylint: disable=broad-except
        if isinstance(e, propagate_types):
            raise e
        log_error_func(e)
@contextmanager
def wrap(signal_name, sender=dispatcher.Anonymous, *args, **kwargs):  # pylint: disable=keyword-arg-before-vararg
    """Wraps the suite in before/after signals, ensuring
    that after signal is always sent.

    *signal_name* is the shared stem of a signal triple, e.g. 'JOB' selects
    BEFORE_JOB / SUCCESSFUL_JOB / AFTER_JOB (looked up in this module's
    globals; ValueError if the triple does not exist). The SUCCESSFUL_
    signal is sent only when the wrapped suite did not raise; the AFTER_
    signal is always sent. Pass safe=True to dispatch via safe_send().
    Remaining arguments are forwarded to the signal handlers.
    """
    safe = kwargs.pop('safe', False)
    signal_name = signal_name.upper().replace('-', '_')
    send_func = safe_send if safe else send
    try:
        before_signal = globals()['BEFORE_' + signal_name]
        success_signal = globals()['SUCCESSFUL_' + signal_name]
        after_signal = globals()['AFTER_' + signal_name]
    except KeyError:
        raise ValueError('Invalid wrapped signal name: {}'.format(signal_name))
    try:
        send_func(before_signal, sender, *args, **kwargs)
        yield
        send_func(success_signal, sender, *args, **kwargs)
    finally:
        # Runs for both outcomes: log any in-flight exception, then always
        # emit the AFTER_ signal.
        _, exc, _ = sys.exc_info()
        if exc:
            log_error_func(exc)
        send_func(after_signal, sender, *args, **kwargs)
def wrapped(signal_name, sender=dispatcher.Anonymous, safe=False):
    """A decorator for wrapping function in signal dispatch.

    *signal_name* selects the BEFORE_/SUCCESSFUL_/AFTER_ signal triple (see
    wrap()); *safe* selects safe_send() dispatch.
    """
    @wrapt.decorator
    def signal_wrapped(wrapped_func, _, args, kwargs):
        def signal_wrapper(*args, **kwargs):
            # 'safe' must be forwarded by keyword: wrap() only pops it from
            # **kwargs and relays any extra positional arguments on to the
            # signal handlers, so passing it positionally both disabled the
            # flag and leaked a spurious argument to every handler.
            with wrap(signal_name, sender, safe=safe):
                return wrapped_func(*args, **kwargs)
        return signal_wrapper(*args, **kwargs)
    return signal_wrapped
| 42.163265 | 118 | 0.691675 |
import sys
import logging
from contextlib import contextmanager
import wrapt
from louie import dispatcher
from wa.utils.types import prioritylist, enum
logger = logging.getLogger('signal')
class Signal(object):
    """Named marker for a stage/state of a WA run that handlers subscribe to.

    Signal instances with the same name refer to the same execution stage;
    ``invert_priority`` flips the handler ordering applied in connect().
    """

    def __init__(self, name, description='no description', invert_priority=False):
        self.name = name
        self.description = description
        self.invert_priority = invert_priority

    def __str__(self):
        return self.name

    def __repr__(self):
        return self.name

    def __hash__(self):
        return id(self.name)
RUN_STARTED = Signal('run-started', 'sent at the beginning of the run')
RUN_INITIALIZED = Signal('run-initialized', 'set after the run has been initialized')
RUN_ABORTED = Signal('run-aborted', 'set when the run has been aborted due to a keyboard interrupt')
RUN_FAILED = Signal('run-failed', 'set if the run has failed to complete all jobs.')
RUN_FINALIZED = Signal('run-finalized', 'set after the run has been finalized')
RUN_COMPLETED = Signal('run-completed', 'set upon completion of the run (regardless of whether or not it has failed')
JOB_STARTED = Signal('job-started', 'set when a a new job has been started')
JOB_ABORTED = Signal('job-aborted',
description='''
sent if a job has been aborted due to a keyboard interrupt.
.. note:: While the status of every job that has not had a
chance to run due to being interrupted will be
set to "ABORTED", this signal will only be sent
for the job that was actually running at the
time.
''')
JOB_FAILED = Signal('job-failed', description='set if the job has failed')
JOB_RESTARTED = Signal('job-restarted')
JOB_COMPLETED = Signal('job-completed')
BEFORE_WORKLOAD_INITIALIZED = Signal('before-workload-initialized',
invert_priority=True)
SUCCESSFUL_WORKLOAD_INITIALIZED = Signal('successful-workload-initialized')
AFTER_WORKLOAD_INITIALIZED = Signal('after-workload-initialized')
BEFORE_WORKLOAD_SETUP = Signal('before-workload-setup', invert_priority=True)
SUCCESSFUL_WORKLOAD_SETUP = Signal('successful-workload-setup')
AFTER_WORKLOAD_SETUP = Signal('after-workload-setup')
BEFORE_WORKLOAD_EXECUTION = Signal('before-workload-execution', invert_priority=True)
SUCCESSFUL_WORKLOAD_EXECUTION = Signal('successful-workload-execution')
AFTER_WORKLOAD_EXECUTION = Signal('after-workload-execution')
BEFORE_WORKLOAD_RESULT_EXTRACTION = Signal('before-workload-result-extracton', invert_priority=True)
SUCCESSFUL_WORKLOAD_RESULT_EXTRACTION = Signal('successful-workload-result-extracton')
AFTER_WORKLOAD_RESULT_EXTRACTION = Signal('after-workload-result-extracton')
BEFORE_WORKLOAD_OUTPUT_UPDATE = Signal('before-workload-output-update',
invert_priority=True)
SUCCESSFUL_WORKLOAD_OUTPUT_UPDATE = Signal('successful-workload-output-update')
AFTER_WORKLOAD_OUTPUT_UPDATE = Signal('after-workload-output-update')
BEFORE_WORKLOAD_TEARDOWN = Signal('before-workload-teardown', invert_priority=True)
SUCCESSFUL_WORKLOAD_TEARDOWN = Signal('successful-workload-teardown')
AFTER_WORKLOAD_TEARDOWN = Signal('after-workload-teardown')
BEFORE_WORKLOAD_FINALIZED = Signal('before-workload-finalized', invert_priority=True)
SUCCESSFUL_WORKLOAD_FINALIZED = Signal('successful-workload-finalized')
AFTER_WORKLOAD_FINALIZED = Signal('after-workload-finalized')
ERROR_LOGGED = Signal('error-logged')
WARNING_LOGGED = Signal('warning-logged')
BEFORE_RUN_INIT = Signal('before-run-init', invert_priority=True)
SUCCESSFUL_RUN_INIT = Signal('successful-run-init')
AFTER_RUN_INIT = Signal('after-run-init')
BEFORE_JOB = Signal('before-job', invert_priority=True)
SUCCESSFUL_JOB = Signal('successful-job')
AFTER_JOB = Signal('after-job')
BEFORE_JOB_QUEUE_EXECUTION = Signal('before-job-queue-execution', invert_priority=True)
SUCCESSFUL_JOB_QUEUE_EXECUTION = Signal('successful-job-queue-execution')
AFTER_JOB_QUEUE_EXECUTION = Signal('after-job-queue-execution')
BEFORE_JOB_TARGET_CONFIG = Signal('before-job-target-config', invert_priority=True)
SUCCESSFUL_JOB_TARGET_CONFIG = Signal('successful-job-target-config')
AFTER_JOB_TARGET_CONFIG = Signal('after-job-target-config')
BEFORE_JOB_OUTPUT_PROCESSED = Signal('before-job-output-processed',
invert_priority=True)
SUCCESSFUL_JOB_OUTPUT_PROCESSED = Signal('successful-job-output-processed')
AFTER_JOB_OUTPUT_PROCESSED = Signal('after-job-output-processed')
BEFORE_FLASHING = Signal('before-flashing', invert_priority=True)
SUCCESSFUL_FLASHING = Signal('successful-flashing')
AFTER_FLASHING = Signal('after-flashing')
BEFORE_REBOOT = Signal('before-reboot', invert_priority=True)
SUCCESSFUL_REBOOT = Signal('successful-reboot')
AFTER_REBOOT = Signal('after-reboot')
BEFORE_TARGET_CONNECT = Signal('before-target-connect', invert_priority=True)
SUCCESSFUL_TARGET_CONNECT = Signal('successful-target-connect')
AFTER_TARGET_CONNECT = Signal('after-target-connect')
BEFORE_TARGET_DISCONNECT = Signal('before-target-disconnect', invert_priority=True)
SUCCESSFUL_TARGET_DISCONNECT = Signal('successful-target-disconnect')
AFTER_TARGET_DISCONNECT = Signal('after-target-disconnect')
BEFORE_RUN_OUTPUT_PROCESSED = Signal(
'before-run-output-processed', invert_priority=True)
SUCCESSFUL_RUN_OUTPUT_PROCESSED = Signal(
'successful-run-output-processed')
AFTER_RUN_OUTPUT_PROCESSED = Signal(
'after-run-output-processed')
CallbackPriority = enum(['extremely_low', 'very_low', 'low', 'normal',
'high', 'very_high', 'extremely_high'], -30, 10)
class _prioritylist_wrapper(prioritylist):
    """A ``prioritylist`` whose ``append()`` is a deliberate no-op.

    ``connect()`` below inserts handlers itself via ``add()`` so that the
    requested priority is respected; making ``append()`` do nothing ensures
    that any subsequent plain-list append (presumably performed by
    ``dispatcher.connect()`` -- confirm against the dispatcher's internals)
    cannot add the handler a second time or bypass the priority ordering.
    """

    def append(self, *args, **kwargs):
        pass
def connect(handler, signal, sender=dispatcher.Any, priority=0):
    """Register *handler* for *signal* emitted by *sender*.

    The handler is inserted into the per-signal receivers list according to
    *priority*; for signals created with ``invert_priority=True`` the given
    priority is negated before insertion.
    """
    logger.debug('Connecting {} to {}({}) with priority {}'.format(handler, signal, sender, priority))
    if getattr(signal, 'invert_priority', False):
        priority = -priority
    # Make sure a receivers priority-list exists for this (sender, signal)
    # pair, then insert the handler ourselves so the priority is honoured.
    sender_key = id(sender)
    signal_map = dispatcher.connections.setdefault(sender_key, {})
    receivers = signal_map.get(signal)
    if receivers is None:
        receivers = signal_map[signal] = _prioritylist_wrapper()
    receivers.add(handler, priority)
    # The wrapper's no-op append() prevents this call from re-adding the
    # handler through the plain list interface.
    dispatcher.connect(handler, signal, sender)
def disconnect(handler, signal, sender=dispatcher.Any):
    """Unregister *handler* from *signal* notifications sent by *sender*.

    Delegates directly to the underlying dispatcher.
    """
    logger.debug('Disconnecting {} from {}({})'.format(handler, signal, sender))
    dispatcher.disconnect(handler, signal, sender)
def send(signal, sender=dispatcher.Anonymous, *args, **kwargs):
    """Dispatch *signal* from *sender*, forwarding any extra arguments.

    Returns whatever ``dispatcher.send()`` returns.
    """
    logger.debug('Sending {} from {}'.format(signal, sender))
    return dispatcher.send(signal, sender, *args, **kwargs)
# Reporting hook used when an exception is swallowed rather than propagated.
log_error_func = logger.error


def safe_send(signal, sender=dispatcher.Anonymous,
              propagate=None, *args, **kwargs):
    """Send *signal*, logging handler exceptions instead of raising them.

    :param propagate: exception types that should still be raised; defaults
        to ``[KeyboardInterrupt]``. A fresh list is created per call when
        ``None`` is passed (avoids the mutable-default pitfall).
    """
    if propagate is None:
        propagate = [KeyboardInterrupt]
    try:
        logger.debug('Safe-sending {} from {}'.format(signal, sender))
        send(signal, sender, *args, **kwargs)
    except Exception as e:
        # Re-raise only the whitelisted exception types; everything else is
        # reported through log_error_func and suppressed.
        if any(isinstance(e, p) for p in propagate):
            raise e
        log_error_func(e)
@contextmanager
def wrap(signal_name, sender=dispatcher.Anonymous, *args, **kwargs):
    """Dispatch the BEFORE_/SUCCESSFUL_/AFTER_ triple around the managed block.

    BEFORE_<NAME> is sent on entry, SUCCESSFUL_<NAME> only if the block
    completes without raising, and AFTER_<NAME> is always sent on exit.
    Pass ``safe=True`` (keyword only -- it is popped from ``kwargs``) to
    dispatch via :func:`safe_send` instead of :func:`send`; remaining
    arguments are forwarded to the dispatched signals.

    :raises ValueError: if no BEFORE_/SUCCESSFUL_/AFTER_ signals named after
        *signal_name* exist in this module.
    """
    safe = kwargs.pop('safe', False)
    # 'run-init' -> 'RUN_INIT', matching the module-level signal constants.
    signal_name = signal_name.upper().replace('-', '_')
    send_func = safe_send if safe else send
    try:
        before_signal = globals()['BEFORE_' + signal_name]
        success_signal = globals()['SUCCESSFUL_' + signal_name]
        after_signal = globals()['AFTER_' + signal_name]
    except KeyError:
        raise ValueError('Invalid wrapped signal name: {}'.format(signal_name))
    try:
        send_func(before_signal, sender, *args, **kwargs)
        yield
        send_func(success_signal, sender, *args, **kwargs)
    finally:
        # If the block raised, log the exception, but still emit AFTER_<NAME>.
        _, exc, _ = sys.exc_info()
        if exc:
            log_error_func(exc)
        send_func(after_signal, sender, *args, **kwargs)
def wrapped(signal_name, sender=dispatcher.Anonymous, safe=False):
    """Decorator that wraps a function call in before/success/after signal
    dispatch (see :func:`wrap`).

    :param signal_name: name of the signal triple, e.g. ``'run-init'``.
    :param sender: sender passed on to the dispatched signals.
    :param safe: if ``True``, dispatch via :func:`safe_send` so handler
        exceptions are logged instead of propagated.
    """
    @wrapt.decorator
    def signal_wrapped(wrapped_func, _, args, kwargs):
        def signal_wrapper(*args, **kwargs):
            # FIX: ``safe`` must be passed as a keyword. wrap() only reads it
            # from **kwargs (kwargs.pop('safe', False)); passing it
            # positionally dropped the flag and forwarded the boolean to the
            # dispatched signals as an extra argument.
            with wrap(signal_name, sender, safe=safe):
                return wrapped_func(*args, **kwargs)

        return signal_wrapper(*args, **kwargs)

    return signal_wrapped
| true | true |
1c45ac250287c61459664f4104f27b4fea00e83d | 61 | py | Python | language-python-test/test/features/comprehensions/set_comprehension.py | wbadart/language-python | 6c048c215ff7fe4a5d5cc36ba3c17a666af74821 | [
"BSD-3-Clause"
] | null | null | null | language-python-test/test/features/comprehensions/set_comprehension.py | wbadart/language-python | 6c048c215ff7fe4a5d5cc36ba3c17a666af74821 | [
"BSD-3-Clause"
] | null | null | null | language-python-test/test/features/comprehensions/set_comprehension.py | wbadart/language-python | 6c048c215ff7fe4a5d5cc36ba3c17a666af74821 | [
"BSD-3-Clause"
] | null | null | null | { x + y for x in [1,2,3] if x > 1 for y in [4,5,6] if y < 6}
| 30.5 | 60 | 0.459016 | { x + y for x in [1,2,3] if x > 1 for y in [4,5,6] if y < 6}
| true | true |
1c45ad4927dd2f22598e965b4d772bbae5f47434 | 1,172 | py | Python | tests/api/ils/eitems/test_eitems_crud.py | NRodriguezcuellar/invenio-app-ils | 144a25a6c56330b214c6fd0b832220fa71f2e68a | [
"MIT"
] | 41 | 2018-09-04T13:00:46.000Z | 2022-03-24T20:45:56.000Z | tests/api/ils/eitems/test_eitems_crud.py | NRodriguezcuellar/invenio-app-ils | 144a25a6c56330b214c6fd0b832220fa71f2e68a | [
"MIT"
] | 720 | 2017-03-10T08:02:41.000Z | 2022-01-14T15:36:37.000Z | tests/api/ils/eitems/test_eitems_crud.py | NRodriguezcuellar/invenio-app-ils | 144a25a6c56330b214c6fd0b832220fa71f2e68a | [
"MIT"
] | 54 | 2017-03-09T16:05:29.000Z | 2022-03-17T08:34:51.000Z | # -*- coding: utf-8 -*-
#
# Copyright (C) 2021 CERN.
#
# Invenio-Circulation is free software; you can redistribute it and/or modify
# it under the terms of the MIT License; see LICENSE file for more details.
"""Tests eitems CRUD."""
import pytest
from invenio_app_ils.eitems.api import EItem
from invenio_app_ils.errors import DocumentNotFoundError
def test_eitem_refs(app, testdata):
    """Test creation of an eitem."""
    eitem = EItem.create(
        dict(
            pid="eitemid-99",
            document_pid="docid-1",
            created_by=dict(type="script", value="demo"),
        )
    )
    # The created record gains a JSON schema, and the related document is
    # stored as a JSON reference ($ref).
    assert "$schema" in eitem
    assert "document" in eitem and "$ref" in eitem["document"]
    # Resolving the references replaces $ref with the actual document record,
    # which exposes its title.
    eitem = EItem.get_record_by_pid("eitemid-4")
    eitem = eitem.replace_refs()
    assert "document" in eitem and eitem["document"]["title"]
def test_eitem_validation(db, testdata):
    """Test validation when updating an eitem."""
    eitem_pid = testdata["eitems"][0]["pid"]
    eitem = EItem.get_record_by_pid(eitem_pid)
    # change document pid to one that does not exist; committing must fail
    # referential validation
    eitem["document_pid"] = "not_found_doc"
    with pytest.raises(DocumentNotFoundError):
        eitem.commit()
| 27.904762 | 77 | 0.669795 |
import pytest
from invenio_app_ils.eitems.api import EItem
from invenio_app_ils.errors import DocumentNotFoundError
def test_eitem_refs(app, testdata):
    """Test creation of an eitem."""
    eitem = EItem.create(
        dict(
            pid="eitemid-99",
            document_pid="docid-1",
            created_by=dict(type="script", value="demo"),
        )
    )
    # The created record gains a JSON schema and a $ref to its document.
    assert "$schema" in eitem
    assert "document" in eitem and "$ref" in eitem["document"]
    # replace_refs() resolves the $ref into the actual document record.
    eitem = EItem.get_record_by_pid("eitemid-4")
    eitem = eitem.replace_refs()
    assert "document" in eitem and eitem["document"]["title"]
def test_eitem_validation(db, testdata):
    """Test validation when updating an eitem."""
    eitem_pid = testdata["eitems"][0]["pid"]
    eitem = EItem.get_record_by_pid(eitem_pid)
    # Point the record at a non-existing document: commit must fail.
    eitem["document_pid"] = "not_found_doc"
    with pytest.raises(DocumentNotFoundError):
        eitem.commit()
| true | true |
1c45ad5c3147af9dff358391d91445cf2f8d76bf | 3,131 | py | Python | from_cpython/Lib/test/test_normalization.py | aisk/pyston | ac69cfef0621dbc8901175e84fa2b5cb5781a646 | [
"BSD-2-Clause",
"Apache-2.0"
] | 9 | 2015-04-15T10:58:49.000Z | 2018-09-24T09:11:33.000Z | Lib/test/test_normalization.py | odsod/cpython-internals-course | 55fffca28e83ac0f30029c60113a3110451dfa08 | [
"PSF-2.0"
] | 2 | 2020-02-17T22:31:09.000Z | 2020-02-18T04:31:55.000Z | Lib/test/test_normalization.py | odsod/cpython-internals-course | 55fffca28e83ac0f30029c60113a3110451dfa08 | [
"PSF-2.0"
] | 9 | 2015-03-13T18:27:27.000Z | 2018-12-03T15:38:51.000Z | from test.test_support import run_unittest, open_urlresource
import unittest
from httplib import HTTPException
import sys
import os
from unicodedata import normalize, unidata_version
# Unicode normalization conformance data; the URL is pinned to the UCD
# version this Python's unicodedata module was compiled against.
TESTDATAFILE = "NormalizationTest.txt"
TESTDATAURL = "http://www.unicode.org/Public/" + unidata_version + "/ucd/" + TESTDATAFILE
def check_version(testfile):
    """Return True if the first line of *testfile* mentions the UCD version
    that this interpreter's unicodedata module was built against."""
    first_line = testfile.readline()
    return unidata_version in first_line
class RangeError(Exception):
    """Raised by unistr() for code points above sys.maxunicode."""
    pass
def NFC(str):
    """Shorthand for normalize('NFC', str)."""
    return normalize("NFC", str)

def NFKC(str):
    """Shorthand for normalize('NFKC', str)."""
    return normalize("NFKC", str)

def NFD(str):
    """Shorthand for normalize('NFD', str)."""
    return normalize("NFD", str)

def NFKD(str):
    """Shorthand for normalize('NFKD', str)."""
    return normalize("NFKD", str)
def unistr(data):
    """Turn a space-separated string of hex code points into a unicode string.

    Raises RangeError if any code point exceeds sys.maxunicode.
    """
    codepoints = [int(token, 16) for token in data.split(" ")]
    for codepoint in codepoints:
        if codepoint > sys.maxunicode:
            raise RangeError
    return u"".join([unichr(codepoint) for codepoint in codepoints])
class NormalizationTest(unittest.TestCase):
    def test_main(self):
        """Run every line of the UCD NormalizationTest data file through
        NFC/NFD/NFKC/NFKD and check the conformance identities asserted
        below; afterwards, verify that all code points not listed in Part1
        are invariant under all four forms."""
        part = None
        part1_data = {}
        # Hit the exception early
        try:
            testdata = open_urlresource(TESTDATAURL, check_version)
        except (IOError, HTTPException):
            self.skipTest("Could not retrieve " + TESTDATAURL)
        for line in testdata:
            if '#' in line:
                line = line.split('#')[0]
            line = line.strip()
            if not line:
                continue
            if line.startswith("@Part"):
                part = line.split()[0]
                continue
            try:
                c1,c2,c3,c4,c5 = [unistr(x) for x in line.split(';')[:-1]]
            except RangeError:
                # Skip unsupported characters;
                # try at least adding c1 if we are in part1
                if part == "@Part1":
                    try:
                        c1 = unistr(line.split(';')[0])
                    except RangeError:
                        pass
                    else:
                        part1_data[c1] = 1
                continue

            # Perform tests
            self.assertTrue(c2 == NFC(c1) == NFC(c2) == NFC(c3), line)
            self.assertTrue(c4 == NFC(c4) == NFC(c5), line)
            self.assertTrue(c3 == NFD(c1) == NFD(c2) == NFD(c3), line)
            self.assertTrue(c5 == NFD(c4) == NFD(c5), line)
            self.assertTrue(c4 == NFKC(c1) == NFKC(c2) == \
                            NFKC(c3) == NFKC(c4) == NFKC(c5),
                            line)
            self.assertTrue(c5 == NFKD(c1) == NFKD(c2) == \
                            NFKD(c3) == NFKD(c4) == NFKD(c5),
                            line)

            # Record part 1 data
            if part == "@Part1":
                part1_data[c1] = 1

        # Perform tests for all other data
        for c in range(sys.maxunicode+1):
            X = unichr(c)
            if X in part1_data:
                continue
            self.assertTrue(X == NFC(X) == NFD(X) == NFKC(X) == NFKD(X), c)

    def test_bug_834676(self):
        # Check for bug 834676: normalizing a lone surrogate pair must not
        # crash the interpreter.
        normalize('NFC', u'\ud55c\uae00')
def test_main():
    """Run the normalization test suite."""
    run_unittest(NormalizationTest)

if __name__ == "__main__":
    test_main()
| 30.398058 | 89 | 0.516448 | from test.test_support import run_unittest, open_urlresource
import unittest
from httplib import HTTPException
import sys
import os
from unicodedata import normalize, unidata_version
# Conformance data file, downloaded from the UCD matching this Python's
# compiled unicodedata version.
TESTDATAFILE = "NormalizationTest.txt"
TESTDATAURL = "http://www.unicode.org/Public/" + unidata_version + "/ucd/" + TESTDATAFILE
def check_version(testfile):
    """Return whether the data file's first line mentions this Python's
    unicodedata version."""
    hdr = testfile.readline()
    return unidata_version in hdr
class RangeError(Exception):
    """Raised by unistr() for code points above sys.maxunicode."""
    pass
def NFC(str):
    """Shorthand for normalize('NFC', str)."""
    return normalize("NFC", str)

def NFKC(str):
    """Shorthand for normalize('NFKC', str)."""
    return normalize("NFKC", str)

def NFD(str):
    """Shorthand for normalize('NFD', str)."""
    return normalize("NFD", str)

def NFKD(str):
    """Shorthand for normalize('NFKD', str)."""
    return normalize("NFKD", str)
def unistr(data):
    """Turn a space-separated string of hex code points into a unicode
    string; raise RangeError if any exceeds sys.maxunicode."""
    data = [int(x, 16) for x in data.split(" ")]
    for x in data:
        if x > sys.maxunicode:
            raise RangeError
    return u"".join([unichr(x) for x in data])
class NormalizationTest(unittest.TestCase):
    def test_main(self):
        """Check the NFC/NFD/NFKC/NFKD conformance identities for every line
        of the UCD NormalizationTest file, then verify that code points not
        listed in Part1 are invariant under all four forms."""
        part = None
        part1_data = {}
        # Fetch the data file eagerly so a network failure skips the test.
        try:
            testdata = open_urlresource(TESTDATAURL, check_version)
        except (IOError, HTTPException):
            self.skipTest("Could not retrieve " + TESTDATAURL)
        for line in testdata:
            # Strip comments and blank lines; track the current @Part marker.
            if '#' in line:
                line = line.split('#')[0]
            line = line.strip()
            if not line:
                continue
            if line.startswith("@Part"):
                part = line.split()[0]
                continue
            try:
                c1,c2,c3,c4,c5 = [unistr(x) for x in line.split(';')[:-1]]
            except RangeError:
                # Skip unsupported characters; still record c1 for Part1 if
                # it alone is representable.
                if part == "@Part1":
                    try:
                        c1 = unistr(line.split(';')[0])
                    except RangeError:
                        pass
                    else:
                        part1_data[c1] = 1
                continue
            # Conformance identities from the data file.
            self.assertTrue(c2 == NFC(c1) == NFC(c2) == NFC(c3), line)
            self.assertTrue(c4 == NFC(c4) == NFC(c5), line)
            self.assertTrue(c3 == NFD(c1) == NFD(c2) == NFD(c3), line)
            self.assertTrue(c5 == NFD(c4) == NFD(c5), line)
            self.assertTrue(c4 == NFKC(c1) == NFKC(c2) == \
                            NFKC(c3) == NFKC(c4) == NFKC(c5),
                            line)
            self.assertTrue(c5 == NFKD(c1) == NFKD(c2) == \
                            NFKD(c3) == NFKD(c4) == NFKD(c5),
                            line)
            # Record part 1 data.
            if part == "@Part1":
                part1_data[c1] = 1
        # Every code point not listed in Part1 must be normalization-invariant.
        for c in range(sys.maxunicode+1):
            X = unichr(c)
            if X in part1_data:
                continue
            self.assertTrue(X == NFC(X) == NFD(X) == NFKC(X) == NFKD(X), c)
    def test_bug_834676(self):
        # Check for bug 834676: must not crash on a surrogate sequence.
        normalize('NFC', u'\ud55c\uae00')
run_unittest(NormalizationTest)
if __name__ == "__main__":
test_main()
| true | true |
1c45af1163ca30e3f1de7ee012519613a5a4350b | 66,206 | py | Python | test/test_datasets.py | CellEight/vision | e8dded4c05ee403633529cef2e09bf94b07f6170 | [
"BSD-3-Clause"
] | 1 | 2021-04-12T09:42:25.000Z | 2021-04-12T09:42:25.000Z | test/test_datasets.py | mvpzhangqiu/vision | e8dded4c05ee403633529cef2e09bf94b07f6170 | [
"BSD-3-Clause"
] | null | null | null | test/test_datasets.py | mvpzhangqiu/vision | e8dded4c05ee403633529cef2e09bf94b07f6170 | [
"BSD-3-Clause"
] | null | null | null | import contextlib
import sys
import os
import unittest
from unittest import mock
import numpy as np
import PIL
from PIL import Image
from torch._utils_internal import get_file_path_2
import torchvision
from torchvision.datasets import utils
from common_utils import get_tmp_dir
from fakedata_generation import svhn_root, places365_root, widerface_root, stl10_root
import xml.etree.ElementTree as ET
from urllib.request import Request, urlopen
import itertools
import datasets_utils
import pathlib
import pickle
from torchvision import datasets
import torch
import shutil
import json
import random
import bz2
import torch.nn.functional as F
import string
import io
import zipfile
try:
import scipy
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
try:
import av
HAS_PYAV = True
except ImportError:
HAS_PYAV = False
class DatasetTestcase(unittest.TestCase):
    """Base TestCase providing shared smoke checks for image datasets."""

    def generic_classification_dataset_test(self, dataset, num_images=1):
        """Assert length and that samples are (PIL image, int label) pairs."""
        self.assertEqual(len(dataset), num_images)
        img, target = dataset[0]
        self.assertTrue(isinstance(img, PIL.Image.Image))
        self.assertTrue(isinstance(target, int))

    def generic_segmentation_dataset_test(self, dataset, num_images=1):
        """Assert length and that samples are (PIL image, PIL mask) pairs."""
        self.assertEqual(len(dataset), num_images)
        img, target = dataset[0]
        self.assertTrue(isinstance(img, PIL.Image.Image))
        self.assertTrue(isinstance(target, PIL.Image.Image))
class Tester(DatasetTestcase):
    """Tests for SVHN and Places365 built on fake on-disk fixtures
    (see fakedata_generation.svhn_root / places365_root)."""

    @mock.patch('torchvision.datasets.SVHN._check_integrity')
    @unittest.skipIf(not HAS_SCIPY, "scipy unavailable")
    def test_svhn(self, mock_check):
        # Bypass checksum verification since the fixture files are synthetic.
        mock_check.return_value = True
        with svhn_root() as root:
            dataset = torchvision.datasets.SVHN(root, split="train")
            self.generic_classification_dataset_test(dataset, num_images=2)
            dataset = torchvision.datasets.SVHN(root, split="test")
            self.generic_classification_dataset_test(dataset, num_images=2)
            dataset = torchvision.datasets.SVHN(root, split="extra")
            self.generic_classification_dataset_test(dataset, num_images=2)

    def test_places365(self):
        """Smoke-test every (split, small) combination of Places365."""
        for split, small in itertools.product(("train-standard", "train-challenge", "val"), (False, True)):
            with places365_root(split=split, small=small) as places365:
                root, data = places365

                dataset = torchvision.datasets.Places365(root, split=split, small=small, download=True)
                self.generic_classification_dataset_test(dataset, num_images=len(data["imgs"]))

    def test_places365_transforms(self):
        """transform/target_transform must be applied to returned samples."""
        expected_image = "image"
        expected_target = "target"

        def transform(image):
            return expected_image

        def target_transform(target):
            return expected_target

        with places365_root() as places365:
            root, data = places365

            dataset = torchvision.datasets.Places365(
                root, transform=transform, target_transform=target_transform, download=True
            )
            actual_image, actual_target = dataset[0]

            self.assertEqual(actual_image, expected_image)
            self.assertEqual(actual_target, expected_target)

    def test_places365_devkit_download(self):
        """Downloaded devkit must populate classes, class_to_idx and imgs."""
        for split in ("train-standard", "train-challenge", "val"):
            with self.subTest(split=split):
                with places365_root(split=split) as places365:
                    root, data = places365

                    dataset = torchvision.datasets.Places365(root, split=split, download=True)

                    with self.subTest("classes"):
                        self.assertSequenceEqual(dataset.classes, data["classes"])

                    with self.subTest("class_to_idx"):
                        self.assertDictEqual(dataset.class_to_idx, data["class_to_idx"])

                    with self.subTest("imgs"):
                        self.assertSequenceEqual(dataset.imgs, data["imgs"])

    def test_places365_devkit_no_download(self):
        """Without download=True a missing devkit must raise RuntimeError."""
        for split in ("train-standard", "train-challenge", "val"):
            with self.subTest(split=split):
                with places365_root(split=split) as places365:
                    root, data = places365

                    with self.assertRaises(RuntimeError):
                        torchvision.datasets.Places365(root, split=split, download=False)

    def test_places365_images_download(self):
        """Every path listed in dataset.imgs must exist after download."""
        for split, small in itertools.product(("train-standard", "train-challenge", "val"), (False, True)):
            with self.subTest(split=split, small=small):
                with places365_root(split=split, small=small) as places365:
                    root, data = places365

                    dataset = torchvision.datasets.Places365(root, split=split, small=small, download=True)

                    assert all(os.path.exists(item[0]) for item in dataset.imgs)

    def test_places365_images_download_preexisting(self):
        """Download must refuse to overwrite a pre-existing images folder."""
        split = "train-standard"
        small = False
        images_dir = "data_large_standard"

        with places365_root(split=split, small=small) as places365:
            root, data = places365
            os.mkdir(os.path.join(root, images_dir))

            with self.assertRaises(RuntimeError):
                torchvision.datasets.Places365(root, split=split, small=small, download=True)

    def test_places365_repr_smoke(self):
        """repr() of the dataset must at least be a string."""
        with places365_root() as places365:
            root, data = places365

            dataset = torchvision.datasets.Places365(root, download=True)
            self.assertIsInstance(repr(dataset), str)
class STL10Tester(DatasetTestcase):
    """Tests for the STL10 dataset against a mocked root directory
    (fakedata_generation.stl10_root)."""

    @contextlib.contextmanager
    def mocked_root(self):
        with stl10_root() as (root, data):
            yield root, data

    @contextlib.contextmanager
    def mocked_dataset(self, pre_extract=False, download=True, **kwargs):
        """Yield (dataset, fixture-data); optionally extract the fixture
        archive beforehand to simulate a pre-existing download."""
        with self.mocked_root() as (root, data):
            if pre_extract:
                utils.extract_archive(os.path.join(root, data["archive"]))
            dataset = torchvision.datasets.STL10(root, download=download, **kwargs)
            yield dataset, data

    def test_not_found(self):
        with self.assertRaises(RuntimeError):
            with self.mocked_dataset(download=False):
                pass

    def test_splits(self):
        # 'train+unlabeled' sums the image counts of its '+'-joined parts.
        for split in ('train', 'train+unlabeled', 'unlabeled', 'test'):
            with self.mocked_dataset(split=split) as (dataset, data):
                num_images = sum([data["num_images_in_split"][part] for part in split.split("+")])
                self.generic_classification_dataset_test(dataset, num_images=num_images)

    def test_folds(self):
        for fold in range(10):
            with self.mocked_dataset(split="train", folds=fold) as (dataset, data):
                num_images = data["num_images_in_folds"][fold]
                self.assertEqual(len(dataset), num_images)

    def test_invalid_folds1(self):
        # Out-of-range fold index must be rejected.
        with self.assertRaises(ValueError):
            with self.mocked_dataset(folds=10):
                pass

    def test_invalid_folds2(self):
        # Non-integer fold must be rejected.
        with self.assertRaises(ValueError):
            with self.mocked_dataset(folds="0"):
                pass

    def test_transforms(self):
        """transform/target_transform must be applied to returned samples."""
        expected_image = "image"
        expected_target = "target"

        def transform(image):
            return expected_image

        def target_transform(target):
            return expected_target

        with self.mocked_dataset(transform=transform, target_transform=target_transform) as (dataset, _):
            actual_image, actual_target = dataset[0]

            self.assertEqual(actual_image, expected_image)
            self.assertEqual(actual_target, expected_target)

    def test_unlabeled(self):
        # The unlabeled split uses -1 as its placeholder label.
        with self.mocked_dataset(split="unlabeled") as (dataset, _):
            labels = [dataset[idx][1] for idx in range(len(dataset))]
            self.assertTrue(all([label == -1 for label in labels]))

    @unittest.mock.patch("torchvision.datasets.stl10.download_and_extract_archive")
    def test_download_preexisting(self, mock):
        # If the archive is already extracted, no download may be triggered.
        with self.mocked_dataset(pre_extract=True) as (dataset, data):
            mock.assert_not_called()

    def test_repr_smoke(self):
        with self.mocked_dataset() as (dataset, _):
            self.assertIsInstance(repr(dataset), str)
class Caltech101TestCase(datasets_utils.ImageDatasetTestCase):
    """Caltech101 tests; fakes both the image folders and the .mat
    annotation files."""
    DATASET_CLASS = datasets.Caltech101
    FEATURE_TYPES = (PIL.Image.Image, (int, np.ndarray, tuple))

    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(
        target_type=("category", "annotation", ["category", "annotation"])
    )
    REQUIRED_PACKAGES = ("scipy",)

    def inject_fake_data(self, tmpdir, config):
        root = pathlib.Path(tmpdir) / "caltech101"
        images = root / "101_ObjectCategories"
        annotations = root / "Annotations"

        # (image folder name, annotation folder name) pairs -- the two differ
        # for some categories in the real archive (e.g. Faces/Faces_2).
        categories = (("Faces", "Faces_2"), ("helicopter", "helicopter"), ("ying_yang", "ying_yang"))
        num_images_per_category = 2

        for image_category, annotation_category in categories:
            datasets_utils.create_image_folder(
                root=images,
                name=image_category,
                file_name_fn=lambda idx: f"image_{idx + 1:04d}.jpg",
                num_examples=num_images_per_category,
            )
            self._create_annotation_folder(
                root=annotations,
                name=annotation_category,
                file_name_fn=lambda idx: f"annotation_{idx + 1:04d}.mat",
                num_examples=num_images_per_category,
            )

        # This is included in the original archive, but is removed by the dataset. Thus, an empty directory suffices.
        os.makedirs(images / "BACKGROUND_Google")

        return num_images_per_category * len(categories)

    def _create_annotation_folder(self, root, name, file_name_fn, num_examples):
        """Create one annotation .mat file per example under root/name."""
        root = pathlib.Path(root) / name
        os.makedirs(root)

        for idx in range(num_examples):
            self._create_annotation_file(root, file_name_fn(idx))

    def _create_annotation_file(self, root, name):
        # Random 2xN contour, matching the 'obj_contour' field the dataset reads.
        mdict = dict(obj_contour=torch.rand((2, torch.randint(3, 6, size=())), dtype=torch.float64).numpy())
        datasets_utils.lazy_importer.scipy.io.savemat(str(pathlib.Path(root) / name), mdict)

    def test_combined_targets(self):
        """Requesting several target types must return them combined, with the
        same count and per-entry types as when requested individually."""
        target_types = ["category", "annotation"]

        individual_targets = []
        for target_type in target_types:
            with self.create_dataset(target_type=target_type) as (dataset, _):
                _, target = dataset[0]
                individual_targets.append(target)

        with self.create_dataset(target_type=target_types) as (dataset, _):
            _, combined_targets = dataset[0]

        actual = len(individual_targets)
        expected = len(combined_targets)
        self.assertEqual(
            actual,
            expected,
            f"The number of the returned combined targets does not match the the number targets if requested "
            f"individually: {actual} != {expected}",
        )

        for target_type, combined_target, individual_target in zip(target_types, combined_targets, individual_targets):
            with self.subTest(target_type=target_type):
                actual = type(combined_target)
                expected = type(individual_target)
                self.assertIs(
                    actual,
                    expected,
                    f"Type of the combined target does not match the type of the corresponding individual target: "
                    f"{actual} is not {expected}",
                )
class Caltech256TestCase(datasets_utils.ImageDatasetTestCase):
    """Caltech256 tests; fakes the '256_ObjectCategories' folder layout."""
    DATASET_CLASS = datasets.Caltech256

    def inject_fake_data(self, tmpdir, config):
        tmpdir = pathlib.Path(tmpdir) / "caltech256" / "256_ObjectCategories"

        # Sample of (index, category) pairs spanning first/middle/last classes.
        categories = ((1, "ak47"), (127, "laptop-101"), (257, "clutter"))
        num_images_per_category = 2

        for idx, category in categories:
            datasets_utils.create_image_folder(
                tmpdir,
                name=f"{idx:03d}.{category}",
                # NOTE: the lambda closes over the loop variable `idx`, which is
                # safe here because create_image_folder invokes it within the
                # same iteration.
                file_name_fn=lambda image_idx: f"{idx:03d}_{image_idx + 1:04d}.jpg",
                num_examples=num_images_per_category,
            )

        return num_images_per_category * len(categories)
class WIDERFaceTestCase(datasets_utils.ImageDatasetTestCase):
    """WIDERFace tests; fakes the image folders and per-split annotation files."""
    DATASET_CLASS = datasets.WIDERFace
    FEATURE_TYPES = (PIL.Image.Image, (dict, type(None)))  # test split returns None as target
    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(split=('train', 'val', 'test'))

    def inject_fake_data(self, tmpdir, config):
        widerface_dir = pathlib.Path(tmpdir) / 'widerface'
        annotations_dir = widerface_dir / 'wider_face_split'
        os.makedirs(annotations_dir)

        # NOTE(review): both names alias the SAME dict, so each split's
        # file-name offset equals its example count -- appears intentional for
        # fixture generation, but confirm before changing either mapping.
        split_to_idx = split_to_num_examples = {
            "train": 1,
            "val": 2,
            "test": 3,
        }

        # We need to create all folders regardless of the split in config
        for split in ('train', 'val', 'test'):
            split_idx = split_to_idx[split]
            num_examples = split_to_num_examples[split]

            datasets_utils.create_image_folder(
                root=tmpdir,
                name=widerface_dir / f'WIDER_{split}' / 'images' / '0--Parade',
                file_name_fn=lambda image_idx: f"0_Parade_marchingband_1_{split_idx + image_idx}.jpg",
                num_examples=num_examples,
            )

            annotation_file_name = {
                'train': annotations_dir / 'wider_face_train_bbx_gt.txt',
                'val': annotations_dir / 'wider_face_val_bbx_gt.txt',
                'test': annotations_dir / 'wider_face_test_filelist.txt',
            }[split]

            # train/val annotations carry one bounding box per image; the test
            # split lists file names only.
            annotation_content = {
                "train": "".join(
                    f"0--Parade/0_Parade_marchingband_1_{split_idx + image_idx}.jpg\n1\n449 330 122 149 0 0 0 0 0 0\n"
                    for image_idx in range(num_examples)
                ),
                "val": "".join(
                    f"0--Parade/0_Parade_marchingband_1_{split_idx + image_idx}.jpg\n1\n501 160 285 443 0 0 0 0 0 0\n"
                    for image_idx in range(num_examples)
                ),
                "test": "".join(
                    f"0--Parade/0_Parade_marchingband_1_{split_idx + image_idx}.jpg\n"
                    for image_idx in range(num_examples)
                ),
            }[split]

            with open(annotation_file_name, "w") as annotation_file:
                annotation_file.write(annotation_content)

        return split_to_num_examples[config["split"]]
class CityScapesTestCase(datasets_utils.ImageDatasetTestCase):
    """Cityscapes tests; fakes gtFine/gtCoarse ground truth and the
    leftImg8bit image folders for every mode/split combination."""
    DATASET_CLASS = datasets.Cityscapes
    TARGET_TYPES = (
        "instance",
        "semantic",
        "polygon",
        "color",
    )
    ADDITIONAL_CONFIGS = (
        *datasets_utils.combinations_grid(
            mode=("fine",), split=("train", "test", "val"), target_type=TARGET_TYPES
        ),
        *datasets_utils.combinations_grid(
            mode=("coarse",),
            split=("train", "train_extra", "val"),
            target_type=TARGET_TYPES,
        ),
    )
    FEATURE_TYPES = (PIL.Image.Image, (dict, PIL.Image.Image))

    def inject_fake_data(self, tmpdir, config):
        tmpdir = pathlib.Path(tmpdir)

        mode_to_splits = {
            "Coarse": ["train", "train_extra", "val"],
            "Fine": ["train", "test", "val"],
        }

        if config["split"] == "train":  # just for coverage of the number of samples
            cities = ["bochum", "bremen"]
        else:
            cities = ["bochum"]

        # Minimal polygon annotation with two labelled objects; returned via
        # the info dict so tests can compare against it.
        polygon_target = {
            "imgHeight": 1024,
            "imgWidth": 2048,
            "objects": [
                {
                    "label": "sky",
                    "polygon": [
                        [1241, 0],
                        [1234, 156],
                        [1478, 197],
                        [1611, 172],
                        [1606, 0],
                    ],
                },
                {
                    "label": "road",
                    "polygon": [
                        [0, 448],
                        [1331, 274],
                        [1473, 265],
                        [2047, 605],
                        [2047, 1023],
                        [0, 1023],
                    ],
                },
            ],
        }

        for mode in ["Coarse", "Fine"]:
            gt_dir = tmpdir / f"gt{mode}"
            for split in mode_to_splits[mode]:
                for city in cities:

                    def make_image(name, size=10):
                        datasets_utils.create_image_folder(
                            root=gt_dir / split,
                            name=city,
                            file_name_fn=lambda _: name,
                            size=size,
                            num_examples=1,
                        )

                    # instance/label id masks are single-channel; the color
                    # target is a 4-channel (RGBA) image.
                    make_image(f"{city}_000000_000000_gt{mode}_instanceIds.png")
                    make_image(f"{city}_000000_000000_gt{mode}_labelIds.png")
                    make_image(f"{city}_000000_000000_gt{mode}_color.png", size=(4, 10, 10))

                    polygon_target_name = gt_dir / split / city / f"{city}_000000_000000_gt{mode}_polygons.json"
                    with open(polygon_target_name, "w") as outfile:
                        json.dump(polygon_target, outfile)

        # Create leftImg8bit folder
        for split in ['test', 'train_extra', 'train', 'val']:
            for city in cities:
                datasets_utils.create_image_folder(
                    root=tmpdir / "leftImg8bit" / split,
                    name=city,
                    file_name_fn=lambda _: f"{city}_000000_000000_leftImg8bit.png",
                    num_examples=1,
                )

        info = {'num_examples': len(cities)}
        if config['target_type'] == 'polygon':
            info['expected_polygon_target'] = polygon_target
        return info

    def test_combined_targets(self):
        """Multiple target types must come back as a tuple in request order."""
        target_types = ['semantic', 'polygon', 'color']

        with self.create_dataset(target_type=target_types) as (dataset, _):
            output = dataset[0]
            self.assertTrue(isinstance(output, tuple))
            self.assertTrue(len(output) == 2)
            self.assertTrue(isinstance(output[0], PIL.Image.Image))
            self.assertTrue(isinstance(output[1], tuple))
            self.assertTrue(len(output[1]) == 3)
            self.assertTrue(isinstance(output[1][0], PIL.Image.Image))  # semantic
            self.assertTrue(isinstance(output[1][1], dict))  # polygon
            self.assertTrue(isinstance(output[1][2], PIL.Image.Image))  # color

    def test_feature_types_target_color(self):
        # The color target is an RGBA image (4 channels).
        with self.create_dataset(target_type='color') as (dataset, _):
            color_img, color_target = dataset[0]
            self.assertTrue(isinstance(color_img, PIL.Image.Image))
            self.assertTrue(np.array(color_target).shape[2] == 4)

    def test_feature_types_target_polygon(self):
        # The polygon target must round-trip the injected JSON annotation.
        with self.create_dataset(target_type='polygon') as (dataset, info):
            polygon_img, polygon_target = dataset[0]
            self.assertTrue(isinstance(polygon_img, PIL.Image.Image))
            self.assertEqual(polygon_target, info['expected_polygon_target'])
class ImageNetTestCase(datasets_utils.ImageDatasetTestCase):
    """ImageNet tests; fakes the train/val folder layout plus meta.bin."""
    DATASET_CLASS = datasets.ImageNet
    REQUIRED_PACKAGES = ('scipy',)
    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(split=('train', 'val'))

    def inject_fake_data(self, tmpdir, config):
        """Create a minimal fake ImageNet layout for the requested split and
        return the number of injected examples."""
        tmpdir = pathlib.Path(tmpdir)

        wnid = 'n01234567'
        if config['split'] == 'train':
            num_examples = 3
            datasets_utils.create_image_folder(
                root=tmpdir,
                name=tmpdir / 'train' / wnid / wnid,
                file_name_fn=lambda image_idx: f"{wnid}_{image_idx}.JPEG",
                num_examples=num_examples,
            )
        else:
            num_examples = 1
            # FIX: the callback previously returned the *literal* string
            # "ILSVRC2012_val_0000000{image_idx}.JPEG" (missing f-prefix and a
            # misspelt lambda parameter), so every generated file shared one
            # name; it only worked because num_examples was 1.
            datasets_utils.create_image_folder(
                root=tmpdir,
                name=tmpdir / 'val' / wnid,
                file_name_fn=lambda image_idx: f"ILSVRC2012_val_0000000{image_idx}.JPEG",
                num_examples=num_examples,
            )

        # meta.bin maps wnids to class labels; None stands for the unused
        # validation wnids entry.
        wnid_to_classes = {wnid: [1]}
        torch.save((wnid_to_classes, None), tmpdir / 'meta.bin')
        return num_examples
class CIFAR10TestCase(datasets_utils.ImageDatasetTestCase):
    """CIFAR10 tests; fakes the pickled batch/meta files. The archive layout
    is parameterized via _VERSION_CONFIG so CIFAR100 can reuse the logic."""
    DATASET_CLASS = datasets.CIFAR10
    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(train=(True, False))

    _VERSION_CONFIG = dict(
        base_folder="cifar-10-batches-py",
        train_files=tuple(f"data_batch_{idx}" for idx in range(1, 6)),
        test_files=("test_batch",),
        labels_key="labels",
        meta_file="batches.meta",
        num_categories=10,
        categories_key="label_names",
    )

    def inject_fake_data(self, tmpdir, config):
        tmpdir = pathlib.Path(tmpdir) / self._VERSION_CONFIG["base_folder"]
        os.makedirs(tmpdir)

        num_images_per_file = 1
        for name in itertools.chain(self._VERSION_CONFIG["train_files"], self._VERSION_CONFIG["test_files"]):
            self._create_batch_file(tmpdir, name, num_images_per_file)

        categories = self._create_meta_file(tmpdir)

        return dict(
            num_examples=num_images_per_file
            * len(self._VERSION_CONFIG["train_files"] if config["train"] else self._VERSION_CONFIG["test_files"]),
            categories=categories,
        )

    def _create_batch_file(self, root, name, num_images):
        """Write one pickled batch of flattened 32x32x3 images with random labels."""
        data = datasets_utils.create_image_or_video_tensor((num_images, 32 * 32 * 3))
        labels = np.random.randint(0, self._VERSION_CONFIG["num_categories"], size=num_images).tolist()
        self._create_binary_file(root, name, {"data": data, self._VERSION_CONFIG["labels_key"]: labels})

    def _create_meta_file(self, root):
        """Write the pickled meta file and return the generated category names
        (zero-padded indices, e.g. '0'..'9' for 10 categories)."""
        categories = [
            f"{idx:0{len(str(self._VERSION_CONFIG['num_categories'] - 1))}d}"
            for idx in range(self._VERSION_CONFIG["num_categories"])
        ]
        self._create_binary_file(
            root, self._VERSION_CONFIG["meta_file"], {self._VERSION_CONFIG["categories_key"]: categories}
        )
        return categories

    def _create_binary_file(self, root, name, content):
        # The real CIFAR archives are pickled dicts.
        with open(pathlib.Path(root) / name, "wb") as fh:
            pickle.dump(content, fh)

    def test_class_to_idx(self):
        """class_to_idx must enumerate the categories in meta-file order."""
        with self.create_dataset() as (dataset, info):
            expected = {category: label for label, category in enumerate(info["categories"])}
            actual = dataset.class_to_idx

            self.assertEqual(actual, expected)
class CIFAR100(CIFAR10TestCase):
    """CIFAR100 reuses the CIFAR10 test logic with its own archive layout."""
    DATASET_CLASS = datasets.CIFAR100

    _VERSION_CONFIG = dict(
        base_folder="cifar-100-python",
        train_files=("train",),
        test_files=("test",),
        labels_key="fine_labels",
        meta_file="meta",
        num_categories=100,
        categories_key="fine_label_names",
    )
class CelebATestCase(datasets_utils.ImageDatasetTestCase):
DATASET_CLASS = datasets.CelebA
FEATURE_TYPES = (PIL.Image.Image, (torch.Tensor, int, tuple, type(None)))
ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(
split=("train", "valid", "test", "all"),
target_type=("attr", "identity", "bbox", "landmarks", ["attr", "identity"]),
)
REQUIRED_PACKAGES = ("pandas",)
_SPLIT_TO_IDX = dict(train=0, valid=1, test=2)
def inject_fake_data(self, tmpdir, config):
base_folder = pathlib.Path(tmpdir) / "celeba"
os.makedirs(base_folder)
num_images, num_images_per_split = self._create_split_txt(base_folder)
datasets_utils.create_image_folder(
base_folder, "img_align_celeba", lambda idx: f"{idx + 1:06d}.jpg", num_images
)
attr_names = self._create_attr_txt(base_folder, num_images)
self._create_identity_txt(base_folder, num_images)
self._create_bbox_txt(base_folder, num_images)
self._create_landmarks_txt(base_folder, num_images)
return dict(num_examples=num_images_per_split[config["split"]], attr_names=attr_names)
def _create_split_txt(self, root):
num_images_per_split = dict(train=3, valid=2, test=1)
data = [
[self._SPLIT_TO_IDX[split]] for split, num_images in num_images_per_split.items() for _ in range(num_images)
]
self._create_txt(root, "list_eval_partition.txt", data)
num_images_per_split["all"] = num_images = sum(num_images_per_split.values())
return num_images, num_images_per_split
def _create_attr_txt(self, root, num_images):
header = ("5_o_Clock_Shadow", "Young")
data = torch.rand((num_images, len(header))).ge(0.5).int().mul(2).sub(1).tolist()
self._create_txt(root, "list_attr_celeba.txt", data, header=header, add_num_examples=True)
return header
def _create_identity_txt(self, root, num_images):
data = torch.randint(1, 4, size=(num_images, 1)).tolist()
self._create_txt(root, "identity_CelebA.txt", data)
def _create_bbox_txt(self, root, num_images):
header = ("x_1", "y_1", "width", "height")
data = torch.randint(10, size=(num_images, len(header))).tolist()
self._create_txt(
root, "list_bbox_celeba.txt", data, header=header, add_num_examples=True, add_image_id_to_header=True
)
def _create_landmarks_txt(self, root, num_images):
header = ("lefteye_x", "rightmouth_y")
data = torch.randint(10, size=(num_images, len(header))).tolist()
self._create_txt(root, "list_landmarks_align_celeba.txt", data, header=header, add_num_examples=True)
def _create_txt(self, root, name, data, header=None, add_num_examples=False, add_image_id_to_header=False):
with open(pathlib.Path(root) / name, "w") as fh:
if add_num_examples:
fh.write(f"{len(data)}\n")
if header:
if add_image_id_to_header:
header = ("image_id", *header)
fh.write(f"{' '.join(header)}\n")
for idx, line in enumerate(data, 1):
fh.write(f"{' '.join((f'{idx:06d}.jpg', *[str(value) for value in line]))}\n")
    def test_combined_targets(self):
        """Requesting several target types at once must return the same targets as individual requests."""
        target_types = ["attr", "identity", "bbox", "landmarks"]

        # Collect each target type on its own first ...
        individual_targets = []
        for target_type in target_types:
            with self.create_dataset(target_type=target_type) as (dataset, _):
                _, target = dataset[0]
                individual_targets.append(target)

        # ... then request all of them together.
        with self.create_dataset(target_type=target_types) as (dataset, _):
            _, combined_targets = dataset[0]

        actual = len(individual_targets)
        expected = len(combined_targets)
        self.assertEqual(
            actual,
            expected,
            f"The number of the returned combined targets does not match the the number targets if requested "
            f"individually: {actual} != {expected}",
        )

        # Only the types are compared, since the underlying fake data is regenerated per create_dataset() call.
        for target_type, combined_target, individual_target in zip(target_types, combined_targets, individual_targets):
            with self.subTest(target_type=target_type):
                actual = type(combined_target)
                expected = type(individual_target)
                self.assertIs(
                    actual,
                    expected,
                    f"Type of the combined target does not match the type of the corresponding individual target: "
                    f"{actual} is not {expected}",
                )
def test_no_target(self):
with self.create_dataset(target_type=[]) as (dataset, _):
_, target = dataset[0]
self.assertIsNone(target)
def test_attr_names(self):
with self.create_dataset() as (dataset, info):
self.assertEqual(tuple(dataset.attr_names), info["attr_names"])
class VOCSegmentationTestCase(datasets_utils.ImageDatasetTestCase):
    """Tests for ``datasets.VOCSegmentation`` backed by a fake VOCdevkit directory tree."""

    DATASET_CLASS = datasets.VOCSegmentation
    FEATURE_TYPES = (PIL.Image.Image, PIL.Image.Image)

    ADDITIONAL_CONFIGS = (
        *datasets_utils.combinations_grid(
            year=[f"20{year:02d}" for year in range(7, 13)], image_set=("train", "val", "trainval")
        ),
        dict(year="2007", image_set="test"),
        dict(year="2007-test", image_set="test"),
    )

    def inject_fake_data(self, tmpdir, config):
        """Create a fake VOC layout for the configured year/image_set; return split metadata."""
        # "2007-test" (or image_set == "test") selects the 2007 test layout.
        year, is_test_set = (
            ("2007", True)
            if config["year"] == "2007-test" or config["image_set"] == "test"
            else (config["year"], False)
        )
        image_set = config["image_set"]

        base_dir = pathlib.Path(tmpdir)
        if year == "2011":
            # The 2011 archive nests the devkit inside an extra "TrainVal" directory.
            base_dir /= "TrainVal"
        base_dir = base_dir / "VOCdevkit" / f"VOC{year}"
        os.makedirs(base_dir)

        num_images, num_images_per_image_set = self._create_image_set_files(base_dir, "ImageSets", is_test_set)
        datasets_utils.create_image_folder(base_dir, "JPEGImages", lambda idx: f"{idx:06d}.jpg", num_images)
        datasets_utils.create_image_folder(base_dir, "SegmentationClass", lambda idx: f"{idx:06d}.png", num_images)
        annotation = self._create_annotation_files(base_dir, "Annotations", num_images)

        return dict(num_examples=num_images_per_image_set[image_set], annotation=annotation)

    def _create_image_set_files(self, root, name, is_test_set):
        """Write split index files; return (total image count, per-split counts)."""
        root = pathlib.Path(root) / name
        src = pathlib.Path(root) / "Main"
        os.makedirs(src, exist_ok=True)

        idcs = dict(train=(0, 1, 2), val=(3, 4), test=(5,))
        idcs["trainval"] = (*idcs["train"], *idcs["val"])

        for image_set in ("test",) if is_test_set else ("train", "val", "trainval"):
            self._create_image_set_file(src, image_set, idcs[image_set])

        # The segmentation task reads its split files from a "Segmentation" sibling of "Main".
        shutil.copytree(src, root / "Segmentation")

        num_images = max(itertools.chain(*idcs.values())) + 1
        num_images_per_image_set = dict([(image_set, len(idcs_)) for image_set, idcs_ in idcs.items()])
        return num_images, num_images_per_image_set

    def _create_image_set_file(self, root, image_set, idcs):
        with open(pathlib.Path(root) / f"{image_set}.txt", "w") as fh:
            fh.writelines([f"{idx:06d}\n" for idx in idcs])

    def _create_annotation_files(self, root, name, num_images):
        """Write one XML annotation per image; all files have identical content, the last one's data is returned."""
        root = pathlib.Path(root) / name
        os.makedirs(root)

        for idx in range(num_images):
            annotation = self._create_annotation_file(root, f"{idx:06d}.xml")

        return annotation

    def _create_annotation_file(self, root, name):
        """Write a minimal VOC XML annotation (single "dog" object) and return its data as a dict."""
        def add_child(parent, name, text=None):
            child = ET.SubElement(parent, name)
            child.text = text
            return child

        def add_name(obj, name="dog"):
            add_child(obj, "name", name)
            return name

        def add_bndbox(obj, bndbox=None):
            if bndbox is None:
                bndbox = {"xmin": "1", "xmax": "2", "ymin": "3", "ymax": "4"}

            obj = add_child(obj, "bndbox")
            for name, text in bndbox.items():
                add_child(obj, name, text)

            return bndbox

        annotation = ET.Element("annotation")
        obj = add_child(annotation, "object")
        data = dict(name=add_name(obj), bndbox=add_bndbox(obj))

        with open(pathlib.Path(root) / name, "wb") as fh:
            fh.write(ET.tostring(annotation))

        return data
class VOCDetectionTestCase(VOCSegmentationTestCase):
    """Tests for ``datasets.VOCDetection``; reuses the VOC segmentation fake-data layout."""

    DATASET_CLASS = datasets.VOCDetection
    FEATURE_TYPES = (PIL.Image.Image, dict)

    def test_annotations(self):
        """The parsed XML target must round-trip the injected fake annotation."""
        with self.create_dataset() as (dataset, info):
            _, target = dataset[0]

            self.assertIn("annotation", target)
            annotation = target["annotation"]

            self.assertIn("object", annotation)
            objects = annotation["object"]
            self.assertEqual(len(objects), 1)

            # Named ``parsed_object`` to avoid shadowing the builtin ``object``.
            parsed_object = objects[0]
            self.assertEqual(parsed_object, info["annotation"])
class CocoDetectionTestCase(datasets_utils.ImageDatasetTestCase):
    """Tests for ``datasets.CocoDetection`` backed by fake images and a fake COCO annotation JSON."""

    DATASET_CLASS = datasets.CocoDetection
    FEATURE_TYPES = (PIL.Image.Image, list)

    REQUIRED_PACKAGES = ("pycocotools",)

    _IMAGE_FOLDER = "images"
    _ANNOTATIONS_FOLDER = "annotations"
    _ANNOTATIONS_FILE = "annotations.json"

    def dataset_args(self, tmpdir, config):
        # CocoDetection is constructed from (image root, annotation file).
        tmpdir = pathlib.Path(tmpdir)
        root = tmpdir / self._IMAGE_FOLDER
        annotation_file = tmpdir / self._ANNOTATIONS_FOLDER / self._ANNOTATIONS_FILE
        return root, annotation_file

    def inject_fake_data(self, tmpdir, config):
        """Create fake images plus a matching annotation file; return dataset metadata."""
        tmpdir = pathlib.Path(tmpdir)

        num_images = 3
        num_annotations_per_image = 2

        files = datasets_utils.create_image_folder(
            tmpdir, name=self._IMAGE_FOLDER, file_name_fn=lambda idx: f"{idx:012d}.jpg", num_examples=num_images
        )
        file_names = [file.relative_to(tmpdir / self._IMAGE_FOLDER) for file in files]

        annotation_folder = tmpdir / self._ANNOTATIONS_FOLDER
        os.makedirs(annotation_folder)
        info = self._create_annotation_file(
            annotation_folder, self._ANNOTATIONS_FILE, file_names, num_annotations_per_image
        )

        info["num_examples"] = num_images
        return info

    def _create_annotation_file(self, root, name, file_names, num_annotations_per_image):
        # COCO image ids are derived from the zero-padded numeric file stems.
        image_ids = [int(file_name.stem) for file_name in file_names]
        images = [dict(file_name=str(file_name), id=id) for file_name, id in zip(file_names, image_ids)]

        annotations, info = self._create_annotations(image_ids, num_annotations_per_image)
        self._create_json(root, name, dict(images=images, annotations=annotations))

        return info

    def _create_annotations(self, image_ids, num_annotations_per_image):
        """Create detection annotations (bbox only); subclasses override this for other tasks."""
        annotations = datasets_utils.combinations_grid(
            image_id=image_ids, bbox=([1.0, 2.0, 3.0, 4.0],) * num_annotations_per_image
        )
        for id, annotation in enumerate(annotations):
            annotation["id"] = id
        return annotations, dict()

    def _create_json(self, root, name, content):
        file = pathlib.Path(root) / name
        with open(file, "w") as fh:
            json.dump(content, fh)
        return file
class CocoCaptionsTestCase(CocoDetectionTestCase):
    """Tests for ``datasets.CocoCaptions``; reuses the COCO fixture with caption annotations."""

    DATASET_CLASS = datasets.CocoCaptions

    def _create_annotations(self, image_ids, num_annotations_per_image):
        # Captions are simply the stringified annotation indices.
        captions = [str(idx) for idx in range(num_annotations_per_image)]
        annotations = datasets_utils.combinations_grid(image_id=image_ids, caption=captions)
        for id, annotation in enumerate(annotations):
            annotation["id"] = id
        return annotations, dict(captions=captions)

    def test_captions(self):
        """Each sample must return all captions attached to its image."""
        with self.create_dataset() as (dataset, info):
            _, captions = dataset[0]
            self.assertEqual(tuple(captions), tuple(info["captions"]))
class UCF101TestCase(datasets_utils.VideoDatasetTestCase):
    """Tests for ``datasets.UCF101`` backed by fake videos and train/test split annotation files."""

    DATASET_CLASS = datasets.UCF101

    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(fold=(1, 2, 3), train=(True, False))

    _VIDEO_FOLDER = "videos"
    _ANNOTATIONS_FOLDER = "annotations"

    def dataset_args(self, tmpdir, config):
        tmpdir = pathlib.Path(tmpdir)
        root = tmpdir / self._VIDEO_FOLDER
        annotation_path = tmpdir / self._ANNOTATIONS_FOLDER
        return root, annotation_path

    def inject_fake_data(self, tmpdir, config):
        tmpdir = pathlib.Path(tmpdir)

        video_folder = tmpdir / self._VIDEO_FOLDER
        os.makedirs(video_folder)
        video_files = self._create_videos(video_folder)

        annotations_folder = tmpdir / self._ANNOTATIONS_FOLDER
        os.makedirs(annotations_folder)
        num_examples = self._create_annotation_files(annotations_folder, video_files, config["fold"], config["train"])

        return num_examples

    def _create_videos(self, root, num_examples_per_class=3):
        # File names follow the UCF101 convention: v_<class>_g<group>_c<clip>.avi
        def file_name_fn(cls, idx, clips_per_group=2):
            return f"v_{cls}_g{(idx // clips_per_group) + 1:02d}_c{(idx % clips_per_group) + 1:02d}.avi"

        video_files = [
            datasets_utils.create_video_folder(root, cls, lambda idx: file_name_fn(cls, idx), num_examples_per_class)
            for cls in ("ApplyEyeMakeup", "YoYo")
        ]
        return [path.relative_to(root) for path in itertools.chain(*video_files)]

    def _create_annotation_files(self, root, video_files, fold, train):
        """Write the requested fold's annotation file plus all other folds; return the split size."""
        # A random non-empty, non-exhaustive subset goes into the requested fold ...
        current_videos = random.sample(video_files, random.randrange(1, len(video_files) - 1))
        current_annotation = self._annotation_file_name(fold, train)
        self._create_annotation_file(root, current_annotation, current_videos)

        # ... and the remaining videos go into every other fold's annotation file.
        other_videos = set(video_files) - set(current_videos)
        other_annotations = [
            self._annotation_file_name(fold, train) for fold, train in itertools.product((1, 2, 3), (True, False))
        ]
        other_annotations.remove(current_annotation)
        for name in other_annotations:
            self._create_annotation_file(root, name, other_videos)

        return len(current_videos)

    def _annotation_file_name(self, fold, train):
        return f"{'train' if train else 'test'}list{fold:02d}.txt"

    def _create_annotation_file(self, root, name, video_files):
        with open(pathlib.Path(root) / name, "w") as fh:
            fh.writelines(f"{file}\n" for file in sorted(video_files))
class LSUNTestCase(datasets_utils.ImageDatasetTestCase):
    """Tests for ``datasets.LSUN`` backed by small fake LMDB databases."""

    DATASET_CLASS = datasets.LSUN

    REQUIRED_PACKAGES = ("lmdb",)
    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(
        classes=("train", "test", "val", ["bedroom_train", "church_outdoor_train"])
    )

    _CATEGORIES = (
        "bedroom",
        "bridge",
        "church_outdoor",
        "classroom",
        "conference_room",
        "dining_room",
        "kitchen",
        "living_room",
        "restaurant",
        "tower",
    )

    def inject_fake_data(self, tmpdir, config):
        """Create one fake LMDB database per requested class; return the total image count."""
        root = pathlib.Path(tmpdir)

        num_images = 0
        for cls in self._parse_classes(config["classes"]):
            num_images += self._create_lmdb(root, cls)

        return num_images

    @contextlib.contextmanager
    def create_dataset(
        self,
        *args, **kwargs
    ):
        with super().create_dataset(*args, **kwargs) as output:
            yield output
            # Currently datasets.LSUN caches the keys in the current directory rather than in the root directory. Thus,
            # this creates a number of unique _cache_* files in the current directory that will not be removed together
            # with the temporary directory
            for file in os.listdir(os.getcwd()):
                if file.startswith("_cache_"):
                    os.remove(file)

    def _parse_classes(self, classes):
        """Expand a split name like "train" into the per-category class list."""
        if not isinstance(classes, str):
            return classes

        split = classes
        if split == "test":
            return [split]

        return [f"{category}_{split}" for category in self._CATEGORIES]

    def _create_lmdb(self, root, cls):
        """Create an LMDB database filled with random PNG images keyed by random hex strings."""
        lmdb = datasets_utils.lazy_importer.lmdb
        hexdigits_lowercase = string.digits + string.ascii_lowercase[:6]

        folder = f"{cls}_lmdb"

        num_images = torch.randint(1, 4, size=()).item()
        format = "png"
        files = datasets_utils.create_image_folder(root, folder, lambda idx: f"{idx}.{format}", num_images)

        with lmdb.open(str(root / folder)) as env, env.begin(write=True) as txn:
            for file in files:
                key = "".join(random.choice(hexdigits_lowercase) for _ in range(40)).encode()

                buffer = io.BytesIO()
                Image.open(file).save(buffer, format)
                buffer.seek(0)
                value = buffer.read()

                txn.put(key, value)

                # The on-disk image was only needed to fill the database; remove it afterwards.
                os.remove(file)

        return num_images

    def test_not_found_or_corrupted(self):
        # LSUN does not raise built-in exception, but a custom one. It is expressive enough to not 'cast' it to
        # RuntimeError or FileNotFoundError that are normally checked by this test.
        with self.assertRaises(datasets_utils.lazy_importer.lmdb.Error):
            super().test_not_found_or_corrupted()
class Kinetics400TestCase(datasets_utils.VideoDatasetTestCase):
    """Tests for ``datasets.Kinetics400`` backed by fake class-labelled video folders."""

    DATASET_CLASS = datasets.Kinetics400

    def inject_fake_data(self, tmpdir, config):
        classes = ("Abseiling", "Zumba")
        num_videos_per_class = 2

        # Kinetics file names are 11-character YouTube-style ids over this alphabet.
        digits = string.ascii_letters + string.digits + "-_"
        for cls in classes:
            datasets_utils.create_video_folder(
                tmpdir,
                cls,
                lambda _: f"{datasets_utils.create_random_string(11, digits)}.avi",
                num_videos_per_class,
            )

        return num_videos_per_class * len(classes)
class HMDB51TestCase(datasets_utils.VideoDatasetTestCase):
    """Tests for ``datasets.HMDB51`` backed by fake videos and per-class split files."""

    DATASET_CLASS = datasets.HMDB51

    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(fold=(1, 2, 3), train=(True, False))

    _VIDEO_FOLDER = "videos"
    _SPLITS_FOLDER = "splits"
    _CLASSES = ("brush_hair", "wave")

    def dataset_args(self, tmpdir, config):
        tmpdir = pathlib.Path(tmpdir)
        root = tmpdir / self._VIDEO_FOLDER
        annotation_path = tmpdir / self._SPLITS_FOLDER
        return root, annotation_path

    def inject_fake_data(self, tmpdir, config):
        tmpdir = pathlib.Path(tmpdir)

        video_folder = tmpdir / self._VIDEO_FOLDER
        os.makedirs(video_folder)
        video_files = self._create_videos(video_folder)

        splits_folder = tmpdir / self._SPLITS_FOLDER
        os.makedirs(splits_folder)
        num_examples = self._create_split_files(splits_folder, video_files, config["fold"], config["train"])

        return num_examples

    def _create_videos(self, root, num_examples_per_class=3):
        """Create fake videos per class; return a list of (class, video paths) pairs."""
        def file_name_fn(cls, idx, clips_per_group=2):
            return f"{cls}_{(idx // clips_per_group) + 1:d}_{(idx % clips_per_group) + 1:d}.avi"

        return [
            (
                cls,
                datasets_utils.create_video_folder(
                    root,
                    cls,
                    lambda idx: file_name_fn(cls, idx),
                    num_examples_per_class,
                ),
            )
            for cls in self._CLASSES
        ]

    def _create_split_files(self, root, video_files, fold, train):
        """Write one split file per class; "1" marks train videos, "2" marks test videos."""
        num_videos = num_train_videos = 0

        for cls, videos in video_files:
            num_videos += len(videos)
            # Random non-empty, non-exhaustive train subset per class.
            train_videos = set(random.sample(videos, random.randrange(1, len(videos) - 1)))
            num_train_videos += len(train_videos)

            with open(pathlib.Path(root) / f"{cls}_test_split{fold}.txt", "w") as fh:
                fh.writelines(f"{file.name} {1 if file in train_videos else 2}\n" for file in videos)

        return num_train_videos if train else (num_videos - num_train_videos)
class OmniglotTestCase(datasets_utils.ImageDatasetTestCase):
    """Tests for ``datasets.Omniglot`` backed by fake alphabet/character image folders."""

    DATASET_CLASS = datasets.Omniglot

    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(background=(True, False))

    def inject_fake_data(self, tmpdir, config):
        target_folder = (
            pathlib.Path(tmpdir) / "omniglot-py" / f"images_{'background' if config['background'] else 'evaluation'}"
        )
        os.makedirs(target_folder)

        num_images = 0
        for name in ("Alphabet_of_the_Magi", "Tifinagh"):
            num_images += self._create_alphabet_folder(target_folder, name)

        return num_images

    def _create_alphabet_folder(self, root, name):
        """Create a random number of character folders, each with a random number of images."""
        num_images_total = 0
        for idx in range(torch.randint(1, 4, size=()).item()):
            num_images = torch.randint(1, 4, size=()).item()
            num_images_total += num_images

            datasets_utils.create_image_folder(
                root / name, f"character{idx:02d}", lambda image_idx: f"{image_idx:02d}.png", num_images
            )

        return num_images_total
class SBUTestCase(datasets_utils.ImageDatasetTestCase):
    """Tests for ``datasets.SBU`` backed by fake images, a URL list, and a caption list."""

    DATASET_CLASS = datasets.SBU
    FEATURE_TYPES = (PIL.Image.Image, str)

    def inject_fake_data(self, tmpdir, config):
        num_images = 3

        dataset_folder = pathlib.Path(tmpdir) / "dataset"
        images = datasets_utils.create_image_folder(tmpdir, "dataset", self._create_file_name, num_images)

        self._create_urls_txt(dataset_folder, images)
        self._create_captions_txt(dataset_folder, num_images)

        return num_images

    def _create_file_name(self, idx):
        # Mimics the Flickr naming scheme: <photo id>_<secret hash>.jpg
        part1 = datasets_utils.create_random_string(10, string.digits)
        part2 = datasets_utils.create_random_string(10, string.ascii_lowercase, string.digits[:6])
        return f"{part1}_{part2}.jpg"

    def _create_urls_txt(self, root, images):
        with open(root / "SBU_captioned_photo_dataset_urls.txt", "w") as fh:
            for image in images:
                fh.write(
                    f"http://static.flickr.com/{datasets_utils.create_random_string(4, string.digits)}/{image.name}\n"
                )

    def _create_captions_txt(self, root, num_images):
        with open(root / "SBU_captioned_photo_dataset_captions.txt", "w") as fh:
            for _ in range(num_images):
                fh.write(f"{datasets_utils.create_random_string(10)}\n")
class SEMEIONTestCase(datasets_utils.ImageDatasetTestCase):
    """Tests for ``datasets.SEMEION`` backed by a fake space-separated data file."""

    DATASET_CLASS = datasets.SEMEION

    def inject_fake_data(self, tmpdir, config):
        num_images = 3

        images = torch.rand(num_images, 256)
        labels = F.one_hot(torch.randint(10, size=(num_images,)))
        with open(pathlib.Path(tmpdir) / "semeion.data", "w") as fh:
            for image, one_hot_labels in zip(images, labels):
                # Each line holds 256 pixel values followed by a one-hot label vector.
                image_columns = " ".join([f"{pixel.item():.4f}" for pixel in image])
                labels_columns = " ".join([str(label.item()) for label in one_hot_labels])
                fh.write(f"{image_columns} {labels_columns}\n")

        return num_images
class USPSTestCase(datasets_utils.ImageDatasetTestCase):
    """Tests for ``datasets.USPS`` backed by a fake bzip2-compressed libsvm-style file."""

    DATASET_CLASS = datasets.USPS

    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(train=(True, False))

    def inject_fake_data(self, tmpdir, config):
        num_images = 2 if config["train"] else 1

        # Pixels are scaled into [-1, 1]; labels lie in 1..10.
        images = torch.rand(num_images, 256) * 2 - 1
        labels = torch.randint(1, 11, size=(num_images,))

        # The test split file carries a ".t" infix ("usps.t.bz2").
        with bz2.open(pathlib.Path(tmpdir) / f"usps{'.t' if not config['train'] else ''}.bz2", "w") as fh:
            for image, label in zip(images, labels):
                line = " ".join((str(label.item()), *[f"{idx}:{pixel:.6f}" for idx, pixel in enumerate(image, 1)]))
                fh.write(f"{line}\n".encode())

        return num_images
class SBDatasetTestCase(datasets_utils.ImageDatasetTestCase):
    """Tests for ``datasets.SBDataset`` backed by fake images and ``.mat`` target files."""

    DATASET_CLASS = datasets.SBDataset
    FEATURE_TYPES = (PIL.Image.Image, (np.ndarray, PIL.Image.Image))

    REQUIRED_PACKAGES = ("scipy.io", "scipy.sparse")

    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(
        image_set=("train", "val", "train_noval"), mode=("boundaries", "segmentation")
    )

    _NUM_CLASSES = 20

    def inject_fake_data(self, tmpdir, config):
        num_images, num_images_per_image_set = self._create_split_files(tmpdir)

        # Image sizes must match the sizes encoded in the .mat targets.
        sizes = self._create_target_folder(tmpdir, "cls", num_images)

        datasets_utils.create_image_folder(
            tmpdir, "img", lambda idx: f"{self._file_stem(idx)}.jpg", num_images, size=lambda idx: sizes[idx]
        )

        return num_images_per_image_set[config["image_set"]]

    def _create_split_files(self, root):
        """Write one index file per split; return (total image count, per-split counts)."""
        root = pathlib.Path(root)

        splits = dict(train=(0, 1, 2), train_noval=(0, 2), val=(3,))

        for split, idcs in splits.items():
            self._create_split_file(root, split, idcs)

        num_images = max(itertools.chain(*splits.values())) + 1
        num_images_per_split = dict([(split, len(idcs)) for split, idcs in splits.items()])
        return num_images, num_images_per_split

    def _create_split_file(self, root, name, idcs):
        with open(root / f"{name}.txt", "w") as fh:
            fh.writelines(f"{self._file_stem(idx)}\n" for idx in idcs)

    def _create_target_folder(self, root, name, num_images):
        """Write one ``.mat`` target per image; return the random (height, width) per image."""
        io = datasets_utils.lazy_importer.scipy.io

        target_folder = pathlib.Path(root) / name
        os.makedirs(target_folder)

        sizes = [torch.randint(1, 4, size=(2,)).tolist() for _ in range(num_images)]
        for idx, size in enumerate(sizes):
            content = dict(
                GTcls=dict(Boundaries=self._create_boundaries(size), Segmentation=self._create_segmentation(size))
            )
            io.savemat(target_folder / f"{self._file_stem(idx)}.mat", content)

        return sizes

    def _create_boundaries(self, size):
        # One sparse binary boundary map per class, mirroring the real dataset layout.
        sparse = datasets_utils.lazy_importer.scipy.sparse
        return [
            [sparse.csc_matrix(torch.randint(0, 2, size=size, dtype=torch.uint8).numpy())]
            for _ in range(self._NUM_CLASSES)
        ]

    def _create_segmentation(self, size):
        return torch.randint(0, self._NUM_CLASSES + 1, size=size, dtype=torch.uint8).numpy()

    def _file_stem(self, idx):
        return f"2008_{idx:06d}"
class FakeDataTestCase(datasets_utils.ImageDatasetTestCase):
    """Tests for ``datasets.FakeData``, which synthesizes its own images on the fly."""

    DATASET_CLASS = datasets.FakeData
    FEATURE_TYPES = (PIL.Image.Image, int)

    def dataset_args(self, tmpdir, config):
        # FakeData takes no root/annotation arguments.
        return ()

    def inject_fake_data(self, tmpdir, config):
        # Nothing to write to disk; the dataset length equals the configured size.
        return config["size"]

    def test_not_found_or_corrupted(self):
        self.skipTest("The data is generated at creation and thus cannot be non-existent or corrupted.")
class PhotoTourTestCase(datasets_utils.ImageDatasetTestCase):
    """Tests for ``datasets.PhotoTour`` backed by a fake zipped patch archive."""

    DATASET_CLASS = datasets.PhotoTour

    # The PhotoTour dataset returns examples with different features with respect to the 'train' parameter. Thus,
    # we overwrite 'FEATURE_TYPES' with a dummy value to satisfy the initial checks of the base class. Furthermore, we
    # overwrite the 'test_feature_types()' method to select the correct feature types before the test is run.
    FEATURE_TYPES = ()
    _TRAIN_FEATURE_TYPES = (torch.Tensor,)
    _TEST_FEATURE_TYPES = (torch.Tensor, torch.Tensor, torch.Tensor)

    # BUG FIX: this combinations_grid() call was previously a bare expression statement whose result was
    # discarded, so the train=False configuration was never exercised ('test_feature_types' below expects
    # config["train"] to vary). Assign it to ADDITIONAL_CONFIGS.
    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(train=(True, False))

    _NAME = "liberty"

    def dataset_args(self, tmpdir, config):
        return tmpdir, self._NAME

    def inject_fake_data(self, tmpdir, config):
        """Create a fake archive of patches, info file, and matches file; return the split size."""
        tmpdir = pathlib.Path(tmpdir)

        # In contrast to the original data, the fake images injected here comprise only a single patch. Thus,
        # num_images == num_patches.
        num_patches = 5

        image_files = self._create_images(tmpdir, self._NAME, num_patches)
        point_ids, info_file = self._create_info_file(tmpdir / self._NAME, num_patches)
        num_matches, matches_file = self._create_matches_file(tmpdir / self._NAME, num_patches, point_ids)

        self._create_archive(tmpdir, self._NAME, *image_files, info_file, matches_file)

        return num_patches if config["train"] else num_matches

    def _create_images(self, root, name, num_images):
        # The images in the PhotoTour dataset comprises of multiple grayscale patches of 64 x 64 pixels. Thus, the
        # smallest fake image is 64 x 64 pixels and comprises a single patch.
        return datasets_utils.create_image_folder(
            root, name, lambda idx: f"patches{idx:04d}.bmp", num_images, size=(1, 64, 64)
        )

    def _create_info_file(self, root, num_images):
        """Write ``info.txt`` mapping each patch to a random 3D point id."""
        point_ids = torch.randint(num_images, size=(num_images,)).tolist()

        file = root / "info.txt"
        with open(file, "w") as fh:
            fh.writelines([f"{point_id} 0\n" for point_id in point_ids])

        return point_ids, file

    def _create_matches_file(self, root, num_patches, point_ids):
        """Write the matches file containing every pairwise patch combination."""
        lines = [
            f"{patch_id1} {point_ids[patch_id1]} 0 {patch_id2} {point_ids[patch_id2]} 0\n"
            for patch_id1, patch_id2 in itertools.combinations(range(num_patches), 2)
        ]

        file = root / "m50_100000_100000_0.txt"
        with open(file, "w") as fh:
            fh.writelines(lines)

        return len(lines), file

    def _create_archive(self, root, name, *files):
        archive = root / f"{name}.zip"
        with zipfile.ZipFile(archive, "w") as zip:
            for file in files:
                zip.write(file, arcname=file.relative_to(root))

        return archive

    @datasets_utils.test_all_configs
    def test_feature_types(self, config):
        # Temporarily swap in the split-specific feature types before running the inherited check.
        feature_types = self.FEATURE_TYPES
        self.FEATURE_TYPES = self._TRAIN_FEATURE_TYPES if config["train"] else self._TEST_FEATURE_TYPES
        try:
            super().test_feature_types.__wrapped__(self, config)
        finally:
            self.FEATURE_TYPES = feature_types
class Flickr8kTestCase(datasets_utils.ImageDatasetTestCase):
    """Tests for ``datasets.Flickr8k`` backed by fake images and an HTML caption page."""

    DATASET_CLASS = datasets.Flickr8k
    FEATURE_TYPES = (PIL.Image.Image, list)

    _IMAGES_FOLDER = "images"
    _ANNOTATIONS_FILE = "captions.html"

    def dataset_args(self, tmpdir, config):
        tmpdir = pathlib.Path(tmpdir)
        root = tmpdir / self._IMAGES_FOLDER
        ann_file = tmpdir / self._ANNOTATIONS_FILE
        return str(root), str(ann_file)

    def inject_fake_data(self, tmpdir, config):
        num_images = 3
        num_captions_per_image = 3

        tmpdir = pathlib.Path(tmpdir)

        images = self._create_images(tmpdir, self._IMAGES_FOLDER, num_images)
        self._create_annotations_file(tmpdir, self._ANNOTATIONS_FILE, images, num_captions_per_image)

        return dict(num_examples=num_images, captions=self._create_captions(num_captions_per_image))

    def _create_images(self, root, name, num_images):
        return datasets_utils.create_image_folder(root, name, self._image_file_name, num_images)

    def _image_file_name(self, idx):
        # Mimics Flickr naming: <id>_<checksum>_<size flag>.jpg
        id = datasets_utils.create_random_string(10, string.digits)
        checksum = datasets_utils.create_random_string(10, string.digits, string.ascii_lowercase[:6])
        size = datasets_utils.create_random_string(1, "qwcko")
        return f"{id}_{checksum}_{size}.jpg"

    def _create_annotations_file(self, root, name, images, num_captions_per_image):
        with open(root / name, "w") as fh:
            fh.write("<table>")
            # The leading None emulates the "Image Not Found" entry of the real captions page.
            for image in (None, *images):
                self._add_image(fh, image, num_captions_per_image)
            fh.write("</table>")

    def _add_image(self, fh, image, num_captions_per_image):
        fh.write("<tr>")
        self._add_image_header(fh, image)
        fh.write("</tr><tr><td><ul>")
        self._add_image_captions(fh, num_captions_per_image)
        fh.write("</ul></td></tr>")

    def _add_image_header(self, fh, image=None):
        if image:
            url = f"http://www.flickr.com/photos/user/{image.name.split('_')[0]}/"
            data = f'<a href="{url}">{url}</a>'
        else:
            data = "Image Not Found"

        fh.write(f"<td>{data}</td>")

    def _add_image_captions(self, fh, num_captions_per_image):
        for caption in self._create_captions(num_captions_per_image):
            fh.write(f"<li>{caption}")

    def _create_captions(self, num_captions_per_image):
        # Captions are simply the stringified caption indices.
        return [str(idx) for idx in range(num_captions_per_image)]

    def test_captions(self):
        """Each sample's captions must match the injected captions."""
        with self.create_dataset() as (dataset, info):
            _, captions = dataset[0]
            self.assertSequenceEqual(captions, info["captions"])
class Flickr30kTestCase(Flickr8kTestCase):
    """Tests for ``datasets.Flickr30k``; reuses the Flickr8k fixture with a token-style caption file."""

    DATASET_CLASS = datasets.Flickr30k

    FEATURE_TYPES = (PIL.Image.Image, list)

    _ANNOTATIONS_FILE = "captions.token"

    def _image_file_name(self, idx):
        return f"{idx}.jpg"

    def _create_annotations_file(self, root, name, images, num_captions_per_image):
        # One "<file>#<caption idx>\t<caption>" line per (image, caption) pair.
        with open(root / name, "w") as fh:
            for image, (idx, caption) in itertools.product(
                images, enumerate(self._create_captions(num_captions_per_image))
            ):
                fh.write(f"{image.name}#{idx}\t{caption}\n")
class MNISTTestCase(datasets_utils.ImageDatasetTestCase):
    """Tests for ``datasets.MNIST`` backed by fake idx-format binary files."""

    DATASET_CLASS = datasets.MNIST

    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(train=(True, False))

    # Maps tensor dtype -> the dtype byte of the idx magic number.
    _MAGIC_DTYPES = {
        torch.uint8: 8,
        torch.int8: 9,
        torch.int16: 11,
        torch.int32: 12,
        torch.float32: 13,
        torch.float64: 14,
    }

    _IMAGES_SIZE = (28, 28)
    _IMAGES_DTYPE = torch.uint8

    _LABELS_SIZE = ()
    _LABELS_DTYPE = torch.uint8

    def inject_fake_data(self, tmpdir, config):
        raw_dir = pathlib.Path(tmpdir) / self.DATASET_CLASS.__name__ / "raw"
        os.makedirs(raw_dir, exist_ok=True)

        num_images = self._num_images(config)
        self._create_binary_file(
            raw_dir, self._images_file(config), (num_images, *self._IMAGES_SIZE), self._IMAGES_DTYPE
        )
        self._create_binary_file(
            raw_dir, self._labels_file(config), (num_images, *self._LABELS_SIZE), self._LABELS_DTYPE
        )
        return num_images

    def _num_images(self, config):
        return 2 if config["train"] else 1

    def _images_file(self, config):
        return f"{self._prefix(config)}-images-idx3-ubyte"

    def _labels_file(self, config):
        return f"{self._prefix(config)}-labels-idx1-ubyte"

    def _prefix(self, config):
        return "train" if config["train"] else "t10k"

    def _create_binary_file(self, root, filename, size, dtype):
        """Write an idx-format file: magic number and dimension sizes, then the raw data."""
        with open(pathlib.Path(root) / filename, "wb") as fh:
            for meta in (self._magic(dtype, len(size)), *size):
                fh.write(self._encode(meta))

            # If ever an MNIST variant is added that uses floating point data, this should be adapted.
            data = torch.randint(0, torch.iinfo(dtype).max + 1, size, dtype=dtype)
            fh.write(data.numpy().tobytes())

    def _magic(self, dtype, dims):
        # The magic number encodes the dtype in the third byte and the dimensionality in the fourth.
        return self._MAGIC_DTYPES[dtype] * 256 + dims

    def _encode(self, v):
        # Reverses the native byte order; assumes a little-endian host so the result is big-endian.
        return torch.tensor(v, dtype=torch.int32).numpy().tobytes()[::-1]
class FashionMNISTTestCase(MNISTTestCase):
    """Tests for ``datasets.FashionMNIST``; the fixture is identical to MNIST."""

    DATASET_CLASS = datasets.FashionMNIST
class KMNISTTestCase(MNISTTestCase):
    """Tests for ``datasets.KMNIST``; the fixture is identical to MNIST."""

    DATASET_CLASS = datasets.KMNIST
class EMNISTTestCase(MNISTTestCase):
    """Tests for ``datasets.EMNIST``; MNIST fixture with per-split file prefixes."""

    DATASET_CLASS = datasets.EMNIST

    DEFAULT_CONFIG = dict(split="byclass")
    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(
        split=("byclass", "bymerge", "balanced", "letters", "digits", "mnist"), train=(True, False)
    )

    def _prefix(self, config):
        return f"emnist-{config['split']}-{'train' if config['train'] else 'test'}"
class QMNISTTestCase(MNISTTestCase):
    """Tests for ``datasets.QMNIST``; MNIST fixture with extended int32 label records."""

    DATASET_CLASS = datasets.QMNIST

    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(what=("train", "test", "test10k", "nist"))

    # QMNIST labels are 8-column int32 records instead of single bytes.
    _LABELS_SIZE = (8,)
    _LABELS_DTYPE = torch.int32

    def _num_images(self, config):
        if config["what"] == "nist":
            return 3
        elif config["what"] == "train":
            return 2
        elif config["what"] == "test50k":
            # The split 'test50k' is defined as the last 50k images beginning at index 10000. Thus, we need to create
            # more than 10000 images for the dataset to not be empty. Since this takes significantly longer than the
            # creation of all other splits, this is excluded from the 'ADDITIONAL_CONFIGS' and is tested only once in
            # 'test_num_examples_test50k'.
            return 10001
        else:
            return 1

    def _labels_file(self, config):
        return f"{self._prefix(config)}-labels-idx2-int"

    def _prefix(self, config):
        if config["what"] == "nist":
            return "xnist"

        if config["what"] is None:
            what = "train" if config["train"] else "test"
        elif config["what"].startswith("test"):
            what = "test"
        else:
            what = config["what"]

        return f"qmnist-{what}"

    def test_num_examples_test50k(self):
        with self.create_dataset(what="test50k") as (dataset, info):
            # Since the split 'test50k' selects all images beginning from the index 10000, we subtract the number of
            # created examples by this.
            self.assertEqual(len(dataset), info["num_examples"] - 10000)
class DatasetFolderTestCase(datasets_utils.ImageDatasetTestCase):
    """Tests for ``datasets.DatasetFolder`` over mixed image/video extension folders."""

    DATASET_CLASS = datasets.DatasetFolder

    # The dataset has no fixed return type since it is defined by the loader parameter. For testing, we use a loader
    # that simply returns the path as type 'str' instead of loading anything. See the 'dataset_args()' method.
    FEATURE_TYPES = (str, int)

    _IMAGE_EXTENSIONS = ("jpg", "png")
    _VIDEO_EXTENSIONS = ("avi", "mp4")
    _EXTENSIONS = (*_IMAGE_EXTENSIONS, *_VIDEO_EXTENSIONS)

    # DatasetFolder has two mutually exclusive parameters: 'extensions' and 'is_valid_file'. One of both is required.
    # We only iterate over different 'extensions' here and handle the tests for 'is_valid_file' in the
    # 'test_is_valid_file()' method.
    DEFAULT_CONFIG = dict(extensions=_EXTENSIONS)
    ADDITIONAL_CONFIGS = (
        *datasets_utils.combinations_grid(extensions=[(ext,) for ext in _IMAGE_EXTENSIONS]),
        dict(extensions=_IMAGE_EXTENSIONS),
        *datasets_utils.combinations_grid(extensions=[(ext,) for ext in _VIDEO_EXTENSIONS]),
        dict(extensions=_VIDEO_EXTENSIONS),
    )

    def dataset_args(self, tmpdir, config):
        # The loader is an identity function, so samples are just the file paths.
        return tmpdir, lambda x: x

    def inject_fake_data(self, tmpdir, config):
        """Create one class folder per requested extension; return example counts and class names."""
        extensions = config["extensions"] or self._is_valid_file_to_extensions(config["is_valid_file"])

        num_examples_total = 0
        classes = []
        for ext, cls in zip(self._EXTENSIONS, string.ascii_letters):
            if ext not in extensions:
                continue

            # Pick the example factory matching the media type of this extension.
            create_example_folder = (
                datasets_utils.create_image_folder
                if ext in self._IMAGE_EXTENSIONS
                else datasets_utils.create_video_folder
            )

            num_examples = torch.randint(1, 3, size=()).item()
            create_example_folder(tmpdir, cls, lambda idx: self._file_name_fn(cls, ext, idx), num_examples)

            num_examples_total += num_examples
            classes.append(cls)

        return dict(num_examples=num_examples_total, classes=classes)

    def _file_name_fn(self, cls, ext, idx):
        return f"{cls}_{idx}.{ext}"

    def _is_valid_file_to_extensions(self, is_valid_file):
        return {ext for ext in self._EXTENSIONS if is_valid_file(f"foo.{ext}")}

    @datasets_utils.test_all_configs
    def test_is_valid_file(self, config):
        extensions = config.pop("extensions")
        # We need to explicitly pass extensions=None here or otherwise it would be filled by the value from the
        # DEFAULT_CONFIG.
        with self.create_dataset(
            config, extensions=None, is_valid_file=lambda file: pathlib.Path(file).suffix[1:] in extensions
        ) as (dataset, info):
            self.assertEqual(len(dataset), info["num_examples"])

    @datasets_utils.test_all_configs
    def test_classes(self, config):
        with self.create_dataset(config) as (dataset, info):
            self.assertSequenceEqual(dataset.classes, info["classes"])
class ImageFolderTestCase(datasets_utils.ImageDatasetTestCase):
    """Tests for ``datasets.ImageFolder`` with two fake classes of random size."""

    DATASET_CLASS = datasets.ImageFolder

    def inject_fake_data(self, tmpdir, config):
        num_examples_total = 0
        classes = ("a", "b")
        for cls in classes:
            num_examples = torch.randint(1, 3, size=()).item()
            num_examples_total += num_examples

            datasets_utils.create_image_folder(tmpdir, cls, lambda idx: f"{cls}_{idx}.png", num_examples)

        return dict(num_examples=num_examples_total, classes=classes)

    @datasets_utils.test_all_configs
    def test_classes(self, config):
        with self.create_dataset(config) as (dataset, info):
            self.assertSequenceEqual(dataset.classes, info["classes"])
class KittiTestCase(datasets_utils.ImageDatasetTestCase):
    """Tests for ``datasets.Kitti`` backed by a small fake on-disk layout."""

    DATASET_CLASS = datasets.Kitti
    FEATURE_TYPES = (PIL.Image.Image, (list, type(None)))  # test split returns None as target
    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(train=(True, False))

    def inject_fake_data(self, tmpdir, config):
        """Create fake KITTI images (and labels for the training split); return the split size."""
        kitti_dir = os.path.join(tmpdir, "Kitti", "raw")
        os.makedirs(kitti_dir)

        split_to_num_examples = {
            True: 1,
            False: 2,
        }

        # We need to create all folders(training and testing).
        for is_training in (True, False):
            num_examples = split_to_num_examples[is_training]

            datasets_utils.create_image_folder(
                root=kitti_dir,
                name=os.path.join("training" if is_training else "testing", "image_2"),
                file_name_fn=lambda image_idx: f"{image_idx:06d}.png",
                num_examples=num_examples,
            )
            if is_training:
                # BUG FIX: makedirs() previously ran inside the per-image loop without exist_ok, which
                # raises FileExistsError as soon as more than one training example is created. Create the
                # label directory once, before the loop.
                target_file_dir = os.path.join(kitti_dir, "training", "label_2")
                os.makedirs(target_file_dir, exist_ok=True)
                for image_idx in range(num_examples):
                    target_file_name = os.path.join(target_file_dir, f"{image_idx:06d}.txt")
                    target_contents = "Pedestrian 0.00 0 -0.20 712.40 143.00 810.73 307.92 1.89 0.48 1.20 1.84 1.47 8.41 0.01\n"  # noqa
                    with open(target_file_name, "w") as target_file:
                        target_file.write(target_contents)

        return split_to_num_examples[config["train"]]
if __name__ == "__main__":
    unittest.main()  # discover and run every TestCase defined in this module
# NOTE(review): extraction artifact — a dataset metadata row ("37.983936 | 136 | 0.634142")
# was fused with the first line of a comment-stripped duplicate of this file; the duplicate begins below.
import contextlib
import sys
import os
import unittest
from unittest import mock
import numpy as np
import PIL
from PIL import Image
from torch._utils_internal import get_file_path_2
import torchvision
from torchvision.datasets import utils
from common_utils import get_tmp_dir
from fakedata_generation import svhn_root, places365_root, widerface_root, stl10_root
import xml.etree.ElementTree as ET
from urllib.request import Request, urlopen
import itertools
import datasets_utils
import pathlib
import pickle
from torchvision import datasets
import torch
import shutil
import json
import random
import bz2
import torch.nn.functional as F
import string
import io
import zipfile
try:
import scipy
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
try:
import av
HAS_PYAV = True
except ImportError:
HAS_PYAV = False
class DatasetTestcase(unittest.TestCase):
    """Shared smoke-test helpers for the legacy (pre ``datasets_utils``) dataset tests."""

    def generic_classification_dataset_test(self, dataset, num_images=1):
        # A classification dataset yields (PIL image, integer label) pairs.
        self.assertEqual(len(dataset), num_images)
        img, target = dataset[0]
        self.assertTrue(isinstance(img, PIL.Image.Image))
        self.assertTrue(isinstance(target, int))

    def generic_segmentation_dataset_test(self, dataset, num_images=1):
        # A segmentation dataset yields (PIL image, PIL mask) pairs.
        self.assertEqual(len(dataset), num_images)
        img, target = dataset[0]
        self.assertTrue(isinstance(img, PIL.Image.Image))
        self.assertTrue(isinstance(target, PIL.Image.Image))
class Tester(DatasetTestcase):
@mock.patch('torchvision.datasets.SVHN._check_integrity')
@unittest.skipIf(not HAS_SCIPY, "scipy unavailable")
def test_svhn(self, mock_check):
mock_check.return_value = True
with svhn_root() as root:
dataset = torchvision.datasets.SVHN(root, split="train")
self.generic_classification_dataset_test(dataset, num_images=2)
dataset = torchvision.datasets.SVHN(root, split="test")
self.generic_classification_dataset_test(dataset, num_images=2)
dataset = torchvision.datasets.SVHN(root, split="extra")
self.generic_classification_dataset_test(dataset, num_images=2)
def test_places365(self):
for split, small in itertools.product(("train-standard", "train-challenge", "val"), (False, True)):
with places365_root(split=split, small=small) as places365:
root, data = places365
dataset = torchvision.datasets.Places365(root, split=split, small=small, download=True)
self.generic_classification_dataset_test(dataset, num_images=len(data["imgs"]))
def test_places365_transforms(self):
expected_image = "image"
expected_target = "target"
def transform(image):
return expected_image
def target_transform(target):
return expected_target
with places365_root() as places365:
root, data = places365
dataset = torchvision.datasets.Places365(
root, transform=transform, target_transform=target_transform, download=True
)
actual_image, actual_target = dataset[0]
self.assertEqual(actual_image, expected_image)
self.assertEqual(actual_target, expected_target)
def test_places365_devkit_download(self):
for split in ("train-standard", "train-challenge", "val"):
with self.subTest(split=split):
with places365_root(split=split) as places365:
root, data = places365
dataset = torchvision.datasets.Places365(root, split=split, download=True)
with self.subTest("classes"):
self.assertSequenceEqual(dataset.classes, data["classes"])
with self.subTest("class_to_idx"):
self.assertDictEqual(dataset.class_to_idx, data["class_to_idx"])
with self.subTest("imgs"):
self.assertSequenceEqual(dataset.imgs, data["imgs"])
def test_places365_devkit_no_download(self):
for split in ("train-standard", "train-challenge", "val"):
with self.subTest(split=split):
with places365_root(split=split) as places365:
root, data = places365
with self.assertRaises(RuntimeError):
torchvision.datasets.Places365(root, split=split, download=False)
def test_places365_images_download(self):
for split, small in itertools.product(("train-standard", "train-challenge", "val"), (False, True)):
with self.subTest(split=split, small=small):
with places365_root(split=split, small=small) as places365:
root, data = places365
dataset = torchvision.datasets.Places365(root, split=split, small=small, download=True)
assert all(os.path.exists(item[0]) for item in dataset.imgs)
def test_places365_images_download_preexisting(self):
split = "train-standard"
small = False
images_dir = "data_large_standard"
with places365_root(split=split, small=small) as places365:
root, data = places365
os.mkdir(os.path.join(root, images_dir))
with self.assertRaises(RuntimeError):
torchvision.datasets.Places365(root, split=split, small=small, download=True)
def test_places365_repr_smoke(self):
with places365_root() as places365:
root, data = places365
dataset = torchvision.datasets.Places365(root, download=True)
self.assertIsInstance(repr(dataset), str)
class STL10Tester(DatasetTestcase):
@contextlib.contextmanager
def mocked_root(self):
with stl10_root() as (root, data):
yield root, data
@contextlib.contextmanager
def mocked_dataset(self, pre_extract=False, download=True, **kwargs):
with self.mocked_root() as (root, data):
if pre_extract:
utils.extract_archive(os.path.join(root, data["archive"]))
dataset = torchvision.datasets.STL10(root, download=download, **kwargs)
yield dataset, data
def test_not_found(self):
with self.assertRaises(RuntimeError):
with self.mocked_dataset(download=False):
pass
def test_splits(self):
for split in ('train', 'train+unlabeled', 'unlabeled', 'test'):
with self.mocked_dataset(split=split) as (dataset, data):
num_images = sum([data["num_images_in_split"][part] for part in split.split("+")])
self.generic_classification_dataset_test(dataset, num_images=num_images)
def test_folds(self):
for fold in range(10):
with self.mocked_dataset(split="train", folds=fold) as (dataset, data):
num_images = data["num_images_in_folds"][fold]
self.assertEqual(len(dataset), num_images)
def test_invalid_folds1(self):
with self.assertRaises(ValueError):
with self.mocked_dataset(folds=10):
pass
def test_invalid_folds2(self):
with self.assertRaises(ValueError):
with self.mocked_dataset(folds="0"):
pass
def test_transforms(self):
expected_image = "image"
expected_target = "target"
def transform(image):
return expected_image
def target_transform(target):
return expected_target
with self.mocked_dataset(transform=transform, target_transform=target_transform) as (dataset, _):
actual_image, actual_target = dataset[0]
self.assertEqual(actual_image, expected_image)
self.assertEqual(actual_target, expected_target)
def test_unlabeled(self):
with self.mocked_dataset(split="unlabeled") as (dataset, _):
labels = [dataset[idx][1] for idx in range(len(dataset))]
self.assertTrue(all([label == -1 for label in labels]))
@unittest.mock.patch("torchvision.datasets.stl10.download_and_extract_archive")
def test_download_preexisting(self, mock):
with self.mocked_dataset(pre_extract=True) as (dataset, data):
mock.assert_not_called()
def test_repr_smoke(self):
with self.mocked_dataset() as (dataset, _):
self.assertIsInstance(repr(dataset), str)
class Caltech101TestCase(datasets_utils.ImageDatasetTestCase):
DATASET_CLASS = datasets.Caltech101
FEATURE_TYPES = (PIL.Image.Image, (int, np.ndarray, tuple))
ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(
target_type=("category", "annotation", ["category", "annotation"])
)
REQUIRED_PACKAGES = ("scipy",)
def inject_fake_data(self, tmpdir, config):
root = pathlib.Path(tmpdir) / "caltech101"
images = root / "101_ObjectCategories"
annotations = root / "Annotations"
categories = (("Faces", "Faces_2"), ("helicopter", "helicopter"), ("ying_yang", "ying_yang"))
num_images_per_category = 2
for image_category, annotation_category in categories:
datasets_utils.create_image_folder(
root=images,
name=image_category,
file_name_fn=lambda idx: f"image_{idx + 1:04d}.jpg",
num_examples=num_images_per_category,
)
self._create_annotation_folder(
root=annotations,
name=annotation_category,
file_name_fn=lambda idx: f"annotation_{idx + 1:04d}.mat",
num_examples=num_images_per_category,
)
os.makedirs(images / "BACKGROUND_Google")
return num_images_per_category * len(categories)
def _create_annotation_folder(self, root, name, file_name_fn, num_examples):
root = pathlib.Path(root) / name
os.makedirs(root)
for idx in range(num_examples):
self._create_annotation_file(root, file_name_fn(idx))
def _create_annotation_file(self, root, name):
mdict = dict(obj_contour=torch.rand((2, torch.randint(3, 6, size=())), dtype=torch.float64).numpy())
datasets_utils.lazy_importer.scipy.io.savemat(str(pathlib.Path(root) / name), mdict)
def test_combined_targets(self):
target_types = ["category", "annotation"]
individual_targets = []
for target_type in target_types:
with self.create_dataset(target_type=target_type) as (dataset, _):
_, target = dataset[0]
individual_targets.append(target)
with self.create_dataset(target_type=target_types) as (dataset, _):
_, combined_targets = dataset[0]
actual = len(individual_targets)
expected = len(combined_targets)
self.assertEqual(
actual,
expected,
f"The number of the returned combined targets does not match the the number targets if requested "
f"individually: {actual} != {expected}",
)
for target_type, combined_target, individual_target in zip(target_types, combined_targets, individual_targets):
with self.subTest(target_type=target_type):
actual = type(combined_target)
expected = type(individual_target)
self.assertIs(
actual,
expected,
f"Type of the combined target does not match the type of the corresponding individual target: "
f"{actual} is not {expected}",
)
class Caltech256TestCase(datasets_utils.ImageDatasetTestCase):
DATASET_CLASS = datasets.Caltech256
def inject_fake_data(self, tmpdir, config):
tmpdir = pathlib.Path(tmpdir) / "caltech256" / "256_ObjectCategories"
categories = ((1, "ak47"), (127, "laptop-101"), (257, "clutter"))
num_images_per_category = 2
for idx, category in categories:
datasets_utils.create_image_folder(
tmpdir,
name=f"{idx:03d}.{category}",
file_name_fn=lambda image_idx: f"{idx:03d}_{image_idx + 1:04d}.jpg",
num_examples=num_images_per_category,
)
return num_images_per_category * len(categories)
class WIDERFaceTestCase(datasets_utils.ImageDatasetTestCase):
DATASET_CLASS = datasets.WIDERFace
FEATURE_TYPES = (PIL.Image.Image, (dict, type(None)))
ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(split=('train', 'val', 'test'))
def inject_fake_data(self, tmpdir, config):
widerface_dir = pathlib.Path(tmpdir) / 'widerface'
annotations_dir = widerface_dir / 'wider_face_split'
os.makedirs(annotations_dir)
split_to_idx = split_to_num_examples = {
"train": 1,
"val": 2,
"test": 3,
}
for split in ('train', 'val', 'test'):
split_idx = split_to_idx[split]
num_examples = split_to_num_examples[split]
datasets_utils.create_image_folder(
root=tmpdir,
name=widerface_dir / f'WIDER_{split}' / 'images' / '0--Parade',
file_name_fn=lambda image_idx: f"0_Parade_marchingband_1_{split_idx + image_idx}.jpg",
num_examples=num_examples,
)
annotation_file_name = {
'train': annotations_dir / 'wider_face_train_bbx_gt.txt',
'val': annotations_dir / 'wider_face_val_bbx_gt.txt',
'test': annotations_dir / 'wider_face_test_filelist.txt',
}[split]
annotation_content = {
"train": "".join(
f"0--Parade/0_Parade_marchingband_1_{split_idx + image_idx}.jpg\n1\n449 330 122 149 0 0 0 0 0 0\n"
for image_idx in range(num_examples)
),
"val": "".join(
f"0--Parade/0_Parade_marchingband_1_{split_idx + image_idx}.jpg\n1\n501 160 285 443 0 0 0 0 0 0\n"
for image_idx in range(num_examples)
),
"test": "".join(
f"0--Parade/0_Parade_marchingband_1_{split_idx + image_idx}.jpg\n"
for image_idx in range(num_examples)
),
}[split]
with open(annotation_file_name, "w") as annotation_file:
annotation_file.write(annotation_content)
return split_to_num_examples[config["split"]]
class CityScapesTestCase(datasets_utils.ImageDatasetTestCase):
DATASET_CLASS = datasets.Cityscapes
TARGET_TYPES = (
"instance",
"semantic",
"polygon",
"color",
)
ADDITIONAL_CONFIGS = (
*datasets_utils.combinations_grid(
mode=("fine",), split=("train", "test", "val"), target_type=TARGET_TYPES
),
*datasets_utils.combinations_grid(
mode=("coarse",),
split=("train", "train_extra", "val"),
target_type=TARGET_TYPES,
),
)
FEATURE_TYPES = (PIL.Image.Image, (dict, PIL.Image.Image))
def inject_fake_data(self, tmpdir, config):
tmpdir = pathlib.Path(tmpdir)
mode_to_splits = {
"Coarse": ["train", "train_extra", "val"],
"Fine": ["train", "test", "val"],
}
if config["split"] == "train":
cities = ["bochum", "bremen"]
else:
cities = ["bochum"]
polygon_target = {
"imgHeight": 1024,
"imgWidth": 2048,
"objects": [
{
"label": "sky",
"polygon": [
[1241, 0],
[1234, 156],
[1478, 197],
[1611, 172],
[1606, 0],
],
},
{
"label": "road",
"polygon": [
[0, 448],
[1331, 274],
[1473, 265],
[2047, 605],
[2047, 1023],
[0, 1023],
],
},
],
}
for mode in ["Coarse", "Fine"]:
gt_dir = tmpdir / f"gt{mode}"
for split in mode_to_splits[mode]:
for city in cities:
def make_image(name, size=10):
datasets_utils.create_image_folder(
root=gt_dir / split,
name=city,
file_name_fn=lambda _: name,
size=size,
num_examples=1,
)
make_image(f"{city}_000000_000000_gt{mode}_instanceIds.png")
make_image(f"{city}_000000_000000_gt{mode}_labelIds.png")
make_image(f"{city}_000000_000000_gt{mode}_color.png", size=(4, 10, 10))
polygon_target_name = gt_dir / split / city / f"{city}_000000_000000_gt{mode}_polygons.json"
with open(polygon_target_name, "w") as outfile:
json.dump(polygon_target, outfile)
for split in ['test', 'train_extra', 'train', 'val']:
for city in cities:
datasets_utils.create_image_folder(
root=tmpdir / "leftImg8bit" / split,
name=city,
file_name_fn=lambda _: f"{city}_000000_000000_leftImg8bit.png",
num_examples=1,
)
info = {'num_examples': len(cities)}
if config['target_type'] == 'polygon':
info['expected_polygon_target'] = polygon_target
return info
def test_combined_targets(self):
target_types = ['semantic', 'polygon', 'color']
with self.create_dataset(target_type=target_types) as (dataset, _):
output = dataset[0]
self.assertTrue(isinstance(output, tuple))
self.assertTrue(len(output) == 2)
self.assertTrue(isinstance(output[0], PIL.Image.Image))
self.assertTrue(isinstance(output[1], tuple))
self.assertTrue(len(output[1]) == 3)
self.assertTrue(isinstance(output[1][0], PIL.Image.Image))
self.assertTrue(isinstance(output[1][1], dict))
self.assertTrue(isinstance(output[1][2], PIL.Image.Image))
def test_feature_types_target_color(self):
with self.create_dataset(target_type='color') as (dataset, _):
color_img, color_target = dataset[0]
self.assertTrue(isinstance(color_img, PIL.Image.Image))
self.assertTrue(np.array(color_target).shape[2] == 4)
def test_feature_types_target_polygon(self):
with self.create_dataset(target_type='polygon') as (dataset, info):
polygon_img, polygon_target = dataset[0]
self.assertTrue(isinstance(polygon_img, PIL.Image.Image))
self.assertEqual(polygon_target, info['expected_polygon_target'])
class ImageNetTestCase(datasets_utils.ImageDatasetTestCase):
    """Tests for ``datasets.ImageNet`` with a fake train/val layout and meta.bin."""

    DATASET_CLASS = datasets.ImageNet
    REQUIRED_PACKAGES = ('scipy',)
    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(split=('train', 'val'))

    def inject_fake_data(self, tmpdir, config):
        tmpdir = pathlib.Path(tmpdir)

        wnid = 'n01234567'
        if config['split'] == 'train':
            num_examples = 3
            datasets_utils.create_image_folder(
                root=tmpdir,
                name=tmpdir / 'train' / wnid / wnid,
                file_name_fn=lambda image_idx: f"{wnid}_{image_idx}.JPEG",
                num_examples=num_examples,
            )
        else:
            num_examples = 1
            datasets_utils.create_image_folder(
                root=tmpdir,
                name=tmpdir / 'val' / wnid,
                # BUG FIX: the original lambda took ``image_ifx`` and its string was missing
                # the ``f`` prefix, so every fake val image was literally named
                # "ILSVRC2012_val_0000000{image_idx}.JPEG" (identical names, never varying).
                file_name_fn=lambda image_idx: f"ILSVRC2012_val_0000000{image_idx}.JPEG",
                num_examples=num_examples,
            )

        # ImageNet expects a pre-parsed devkit file mapping wnids to class names.
        wnid_to_classes = {wnid: [1]}
        torch.save((wnid_to_classes, None), tmpdir / 'meta.bin')

        return num_examples
class CIFAR10TestCase(datasets_utils.ImageDatasetTestCase):
DATASET_CLASS = datasets.CIFAR10
ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(train=(True, False))
_VERSION_CONFIG = dict(
base_folder="cifar-10-batches-py",
train_files=tuple(f"data_batch_{idx}" for idx in range(1, 6)),
test_files=("test_batch",),
labels_key="labels",
meta_file="batches.meta",
num_categories=10,
categories_key="label_names",
)
def inject_fake_data(self, tmpdir, config):
tmpdir = pathlib.Path(tmpdir) / self._VERSION_CONFIG["base_folder"]
os.makedirs(tmpdir)
num_images_per_file = 1
for name in itertools.chain(self._VERSION_CONFIG["train_files"], self._VERSION_CONFIG["test_files"]):
self._create_batch_file(tmpdir, name, num_images_per_file)
categories = self._create_meta_file(tmpdir)
return dict(
num_examples=num_images_per_file
* len(self._VERSION_CONFIG["train_files"] if config["train"] else self._VERSION_CONFIG["test_files"]),
categories=categories,
)
def _create_batch_file(self, root, name, num_images):
data = datasets_utils.create_image_or_video_tensor((num_images, 32 * 32 * 3))
labels = np.random.randint(0, self._VERSION_CONFIG["num_categories"], size=num_images).tolist()
self._create_binary_file(root, name, {"data": data, self._VERSION_CONFIG["labels_key"]: labels})
def _create_meta_file(self, root):
categories = [
f"{idx:0{len(str(self._VERSION_CONFIG['num_categories'] - 1))}d}"
for idx in range(self._VERSION_CONFIG["num_categories"])
]
self._create_binary_file(
root, self._VERSION_CONFIG["meta_file"], {self._VERSION_CONFIG["categories_key"]: categories}
)
return categories
def _create_binary_file(self, root, name, content):
with open(pathlib.Path(root) / name, "wb") as fh:
pickle.dump(content, fh)
def test_class_to_idx(self):
with self.create_dataset() as (dataset, info):
expected = {category: label for label, category in enumerate(info["categories"])}
actual = dataset.class_to_idx
self.assertEqual(actual, expected)
class CIFAR100(CIFAR10TestCase):
    """Reuses the CIFAR-10 test logic, parameterized for the CIFAR-100 archive layout."""

    DATASET_CLASS = datasets.CIFAR100

    _VERSION_CONFIG = dict(
        base_folder="cifar-100-python",
        train_files=("train",),          # single train batch, unlike CIFAR-10's five
        test_files=("test",),
        labels_key="fine_labels",
        meta_file="meta",
        num_categories=100,
        categories_key="fine_label_names",
    )
class CelebATestCase(datasets_utils.ImageDatasetTestCase):
DATASET_CLASS = datasets.CelebA
FEATURE_TYPES = (PIL.Image.Image, (torch.Tensor, int, tuple, type(None)))
ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(
split=("train", "valid", "test", "all"),
target_type=("attr", "identity", "bbox", "landmarks", ["attr", "identity"]),
)
REQUIRED_PACKAGES = ("pandas",)
_SPLIT_TO_IDX = dict(train=0, valid=1, test=2)
def inject_fake_data(self, tmpdir, config):
base_folder = pathlib.Path(tmpdir) / "celeba"
os.makedirs(base_folder)
num_images, num_images_per_split = self._create_split_txt(base_folder)
datasets_utils.create_image_folder(
base_folder, "img_align_celeba", lambda idx: f"{idx + 1:06d}.jpg", num_images
)
attr_names = self._create_attr_txt(base_folder, num_images)
self._create_identity_txt(base_folder, num_images)
self._create_bbox_txt(base_folder, num_images)
self._create_landmarks_txt(base_folder, num_images)
return dict(num_examples=num_images_per_split[config["split"]], attr_names=attr_names)
def _create_split_txt(self, root):
num_images_per_split = dict(train=3, valid=2, test=1)
data = [
[self._SPLIT_TO_IDX[split]] for split, num_images in num_images_per_split.items() for _ in range(num_images)
]
self._create_txt(root, "list_eval_partition.txt", data)
num_images_per_split["all"] = num_images = sum(num_images_per_split.values())
return num_images, num_images_per_split
def _create_attr_txt(self, root, num_images):
header = ("5_o_Clock_Shadow", "Young")
data = torch.rand((num_images, len(header))).ge(0.5).int().mul(2).sub(1).tolist()
self._create_txt(root, "list_attr_celeba.txt", data, header=header, add_num_examples=True)
return header
def _create_identity_txt(self, root, num_images):
data = torch.randint(1, 4, size=(num_images, 1)).tolist()
self._create_txt(root, "identity_CelebA.txt", data)
def _create_bbox_txt(self, root, num_images):
header = ("x_1", "y_1", "width", "height")
data = torch.randint(10, size=(num_images, len(header))).tolist()
self._create_txt(
root, "list_bbox_celeba.txt", data, header=header, add_num_examples=True, add_image_id_to_header=True
)
def _create_landmarks_txt(self, root, num_images):
header = ("lefteye_x", "rightmouth_y")
data = torch.randint(10, size=(num_images, len(header))).tolist()
self._create_txt(root, "list_landmarks_align_celeba.txt", data, header=header, add_num_examples=True)
def _create_txt(self, root, name, data, header=None, add_num_examples=False, add_image_id_to_header=False):
with open(pathlib.Path(root) / name, "w") as fh:
if add_num_examples:
fh.write(f"{len(data)}\n")
if header:
if add_image_id_to_header:
header = ("image_id", *header)
fh.write(f"{' '.join(header)}\n")
for idx, line in enumerate(data, 1):
fh.write(f"{' '.join((f'{idx:06d}.jpg', *[str(value) for value in line]))}\n")
def test_combined_targets(self):
target_types = ["attr", "identity", "bbox", "landmarks"]
individual_targets = []
for target_type in target_types:
with self.create_dataset(target_type=target_type) as (dataset, _):
_, target = dataset[0]
individual_targets.append(target)
with self.create_dataset(target_type=target_types) as (dataset, _):
_, combined_targets = dataset[0]
actual = len(individual_targets)
expected = len(combined_targets)
self.assertEqual(
actual,
expected,
f"The number of the returned combined targets does not match the the number targets if requested "
f"individually: {actual} != {expected}",
)
for target_type, combined_target, individual_target in zip(target_types, combined_targets, individual_targets):
with self.subTest(target_type=target_type):
actual = type(combined_target)
expected = type(individual_target)
self.assertIs(
actual,
expected,
f"Type of the combined target does not match the type of the corresponding individual target: "
f"{actual} is not {expected}",
)
def test_no_target(self):
with self.create_dataset(target_type=[]) as (dataset, _):
_, target = dataset[0]
self.assertIsNone(target)
def test_attr_names(self):
with self.create_dataset() as (dataset, info):
self.assertEqual(tuple(dataset.attr_names), info["attr_names"])
class VOCSegmentationTestCase(datasets_utils.ImageDatasetTestCase):
DATASET_CLASS = datasets.VOCSegmentation
FEATURE_TYPES = (PIL.Image.Image, PIL.Image.Image)
ADDITIONAL_CONFIGS = (
*datasets_utils.combinations_grid(
year=[f"20{year:02d}" for year in range(7, 13)], image_set=("train", "val", "trainval")
),
dict(year="2007", image_set="test"),
dict(year="2007-test", image_set="test"),
)
def inject_fake_data(self, tmpdir, config):
year, is_test_set = (
("2007", True)
if config["year"] == "2007-test" or config["image_set"] == "test"
else (config["year"], False)
)
image_set = config["image_set"]
base_dir = pathlib.Path(tmpdir)
if year == "2011":
base_dir /= "TrainVal"
base_dir = base_dir / "VOCdevkit" / f"VOC{year}"
os.makedirs(base_dir)
num_images, num_images_per_image_set = self._create_image_set_files(base_dir, "ImageSets", is_test_set)
datasets_utils.create_image_folder(base_dir, "JPEGImages", lambda idx: f"{idx:06d}.jpg", num_images)
datasets_utils.create_image_folder(base_dir, "SegmentationClass", lambda idx: f"{idx:06d}.png", num_images)
annotation = self._create_annotation_files(base_dir, "Annotations", num_images)
return dict(num_examples=num_images_per_image_set[image_set], annotation=annotation)
def _create_image_set_files(self, root, name, is_test_set):
root = pathlib.Path(root) / name
src = pathlib.Path(root) / "Main"
os.makedirs(src, exist_ok=True)
idcs = dict(train=(0, 1, 2), val=(3, 4), test=(5,))
idcs["trainval"] = (*idcs["train"], *idcs["val"])
for image_set in ("test",) if is_test_set else ("train", "val", "trainval"):
self._create_image_set_file(src, image_set, idcs[image_set])
shutil.copytree(src, root / "Segmentation")
num_images = max(itertools.chain(*idcs.values())) + 1
num_images_per_image_set = dict([(image_set, len(idcs_)) for image_set, idcs_ in idcs.items()])
return num_images, num_images_per_image_set
def _create_image_set_file(self, root, image_set, idcs):
with open(pathlib.Path(root) / f"{image_set}.txt", "w") as fh:
fh.writelines([f"{idx:06d}\n" for idx in idcs])
def _create_annotation_files(self, root, name, num_images):
root = pathlib.Path(root) / name
os.makedirs(root)
for idx in range(num_images):
annotation = self._create_annotation_file(root, f"{idx:06d}.xml")
return annotation
def _create_annotation_file(self, root, name):
def add_child(parent, name, text=None):
child = ET.SubElement(parent, name)
child.text = text
return child
def add_name(obj, name="dog"):
add_child(obj, "name", name)
return name
def add_bndbox(obj, bndbox=None):
if bndbox is None:
bndbox = {"xmin": "1", "xmax": "2", "ymin": "3", "ymax": "4"}
obj = add_child(obj, "bndbox")
for name, text in bndbox.items():
add_child(obj, name, text)
return bndbox
annotation = ET.Element("annotation")
obj = add_child(annotation, "object")
data = dict(name=add_name(obj), bndbox=add_bndbox(obj))
with open(pathlib.Path(root) / name, "wb") as fh:
fh.write(ET.tostring(annotation))
return data
class VOCDetectionTestCase(VOCSegmentationTestCase):
DATASET_CLASS = datasets.VOCDetection
FEATURE_TYPES = (PIL.Image.Image, dict)
def test_annotations(self):
with self.create_dataset() as (dataset, info):
_, target = dataset[0]
self.assertIn("annotation", target)
annotation = target["annotation"]
self.assertIn("object", annotation)
objects = annotation["object"]
self.assertEqual(len(objects), 1)
object = objects[0]
self.assertEqual(object, info["annotation"])
class CocoDetectionTestCase(datasets_utils.ImageDatasetTestCase):
DATASET_CLASS = datasets.CocoDetection
FEATURE_TYPES = (PIL.Image.Image, list)
REQUIRED_PACKAGES = ("pycocotools",)
_IMAGE_FOLDER = "images"
_ANNOTATIONS_FOLDER = "annotations"
_ANNOTATIONS_FILE = "annotations.json"
def dataset_args(self, tmpdir, config):
tmpdir = pathlib.Path(tmpdir)
root = tmpdir / self._IMAGE_FOLDER
annotation_file = tmpdir / self._ANNOTATIONS_FOLDER / self._ANNOTATIONS_FILE
return root, annotation_file
def inject_fake_data(self, tmpdir, config):
tmpdir = pathlib.Path(tmpdir)
num_images = 3
num_annotations_per_image = 2
files = datasets_utils.create_image_folder(
tmpdir, name=self._IMAGE_FOLDER, file_name_fn=lambda idx: f"{idx:012d}.jpg", num_examples=num_images
)
file_names = [file.relative_to(tmpdir / self._IMAGE_FOLDER) for file in files]
annotation_folder = tmpdir / self._ANNOTATIONS_FOLDER
os.makedirs(annotation_folder)
info = self._create_annotation_file(
annotation_folder, self._ANNOTATIONS_FILE, file_names, num_annotations_per_image
)
info["num_examples"] = num_images
return info
def _create_annotation_file(self, root, name, file_names, num_annotations_per_image):
image_ids = [int(file_name.stem) for file_name in file_names]
images = [dict(file_name=str(file_name), id=id) for file_name, id in zip(file_names, image_ids)]
annotations, info = self._create_annotations(image_ids, num_annotations_per_image)
self._create_json(root, name, dict(images=images, annotations=annotations))
return info
def _create_annotations(self, image_ids, num_annotations_per_image):
annotations = datasets_utils.combinations_grid(
image_id=image_ids, bbox=([1.0, 2.0, 3.0, 4.0],) * num_annotations_per_image
)
for id, annotation in enumerate(annotations):
annotation["id"] = id
return annotations, dict()
def _create_json(self, root, name, content):
file = pathlib.Path(root) / name
with open(file, "w") as fh:
json.dump(content, fh)
return file
class CocoCaptionsTestCase(CocoDetectionTestCase):
DATASET_CLASS = datasets.CocoCaptions
def _create_annotations(self, image_ids, num_annotations_per_image):
captions = [str(idx) for idx in range(num_annotations_per_image)]
annotations = datasets_utils.combinations_grid(image_id=image_ids, caption=captions)
for id, annotation in enumerate(annotations):
annotation["id"] = id
return annotations, dict(captions=captions)
def test_captions(self):
with self.create_dataset() as (dataset, info):
_, captions = dataset[0]
self.assertEqual(tuple(captions), tuple(info["captions"]))
class UCF101TestCase(datasets_utils.VideoDatasetTestCase):
DATASET_CLASS = datasets.UCF101
ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(fold=(1, 2, 3), train=(True, False))
_VIDEO_FOLDER = "videos"
_ANNOTATIONS_FOLDER = "annotations"
def dataset_args(self, tmpdir, config):
tmpdir = pathlib.Path(tmpdir)
root = tmpdir / self._VIDEO_FOLDER
annotation_path = tmpdir / self._ANNOTATIONS_FOLDER
return root, annotation_path
def inject_fake_data(self, tmpdir, config):
tmpdir = pathlib.Path(tmpdir)
video_folder = tmpdir / self._VIDEO_FOLDER
os.makedirs(video_folder)
video_files = self._create_videos(video_folder)
annotations_folder = tmpdir / self._ANNOTATIONS_FOLDER
os.makedirs(annotations_folder)
num_examples = self._create_annotation_files(annotations_folder, video_files, config["fold"], config["train"])
return num_examples
def _create_videos(self, root, num_examples_per_class=3):
def file_name_fn(cls, idx, clips_per_group=2):
return f"v_{cls}_g{(idx // clips_per_group) + 1:02d}_c{(idx % clips_per_group) + 1:02d}.avi"
video_files = [
datasets_utils.create_video_folder(root, cls, lambda idx: file_name_fn(cls, idx), num_examples_per_class)
for cls in ("ApplyEyeMakeup", "YoYo")
]
return [path.relative_to(root) for path in itertools.chain(*video_files)]
def _create_annotation_files(self, root, video_files, fold, train):
current_videos = random.sample(video_files, random.randrange(1, len(video_files) - 1))
current_annotation = self._annotation_file_name(fold, train)
self._create_annotation_file(root, current_annotation, current_videos)
other_videos = set(video_files) - set(current_videos)
other_annotations = [
self._annotation_file_name(fold, train) for fold, train in itertools.product((1, 2, 3), (True, False))
]
other_annotations.remove(current_annotation)
for name in other_annotations:
self._create_annotation_file(root, name, other_videos)
return len(current_videos)
def _annotation_file_name(self, fold, train):
return f"{'train' if train else 'test'}list{fold:02d}.txt"
def _create_annotation_file(self, root, name, video_files):
with open(pathlib.Path(root) / name, "w") as fh:
fh.writelines(f"{file}\n" for file in sorted(video_files))
class LSUNTestCase(datasets_utils.ImageDatasetTestCase):
    """Fake-data tests for ``datasets.LSUN``, backed by generated LMDB files."""

    DATASET_CLASS = datasets.LSUN

    # LSUN stores its images inside LMDB databases.
    REQUIRED_PACKAGES = ("lmdb",)
    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(
        classes=("train", "test", "val", ["bedroom_train", "church_outdoor_train"])
    )

    # Scene categories; split names expand to "<category>_<split>".
    _CATEGORIES = (
        "bedroom",
        "bridge",
        "church_outdoor",
        "classroom",
        "conference_room",
        "dining_room",
        "kitchen",
        "living_room",
        "restaurant",
        "tower",
    )

    def inject_fake_data(self, tmpdir, config):
        """Create one LMDB database per requested class; return the image total."""
        root = pathlib.Path(tmpdir)

        num_images = 0
        for cls in self._parse_classes(config["classes"]):
            num_images += self._create_lmdb(root, cls)

        return num_images

    @contextlib.contextmanager
    def create_dataset(
        self,
        *args, **kwargs
    ):
        with super().create_dataset(*args, **kwargs) as output:
            yield output
            # Remove "_cache_*" files left in the current working directory
            # (presumably LSUN's key cache — verify against the dataset impl)
            # so they do not leak between tests.
            for file in os.listdir(os.getcwd()):
                if file.startswith("_cache_"):
                    os.remove(file)

    def _parse_classes(self, classes):
        """Expand a split name ("train"/"val") into per-category class names."""
        if not isinstance(classes, str):
            # Already an explicit list of class names.
            return classes

        split = classes
        if split == "test":
            # The test split is a single database, not organized by category.
            return [split]

        return [f"{category}_{split}" for category in self._CATEGORIES]

    def _create_lmdb(self, root, cls):
        """Fill a fresh ``<cls>_lmdb`` database with 1-3 random PNG images."""
        lmdb = datasets_utils.lazy_importer.lmdb
        hexdigits_lowercase = string.digits + string.ascii_lowercase[:6]

        folder = f"{cls}_lmdb"

        num_images = torch.randint(1, 4, size=()).item()
        format = "png"
        files = datasets_utils.create_image_folder(root, folder, lambda idx: f"{idx}.{format}", num_images)

        with lmdb.open(str(root / folder)) as env, env.begin(write=True) as txn:
            for file in files:
                # Keys are 40-character lowercase hex strings.
                key = "".join(random.choice(hexdigits_lowercase) for _ in range(40)).encode()

                # Store the encoded image bytes as the value.
                buffer = io.BytesIO()
                Image.open(file).save(buffer, format)
                buffer.seek(0)
                value = buffer.read()

                txn.put(key, value)

                # The image now only lives inside the database, not on disk.
                os.remove(file)

        return num_images

    def test_not_found_or_corrupted(self):
        # lmdb raises its own error type rather than the usual one checked by
        # the base class implementation.
        with self.assertRaises(datasets_utils.lazy_importer.lmdb.Error):
            super().test_not_found_or_corrupted()
class Kinetics400TestCase(datasets_utils.VideoDatasetTestCase):
    """Fake-data tests for ``datasets.Kinetics400``."""

    DATASET_CLASS = datasets.Kinetics400

    def inject_fake_data(self, tmpdir, config):
        """Create two fake action classes with two clips each; return the clip total."""
        class_names = ("Abseiling", "Zumba")
        clips_per_class = 2
        # File stems mimic 11-character YouTube-style ids.
        alphabet = string.ascii_letters + string.digits + "-_"

        def clip_name(_):
            return f"{datasets_utils.create_random_string(11, alphabet)}.avi"

        for name in class_names:
            datasets_utils.create_video_folder(tmpdir, name, clip_name, clips_per_class)

        return clips_per_class * len(class_names)
class HMDB51TestCase(datasets_utils.VideoDatasetTestCase):
    """Fake-data tests for ``datasets.HMDB51``."""

    DATASET_CLASS = datasets.HMDB51

    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(fold=(1, 2, 3), train=(True, False))

    _VIDEO_FOLDER = "videos"
    _SPLITS_FOLDER = "splits"
    _CLASSES = ("brush_hair", "wave")

    def dataset_args(self, tmpdir, config):
        """Return the positional dataset arguments (video root, annotation path)."""
        tmpdir = pathlib.Path(tmpdir)
        root = tmpdir / self._VIDEO_FOLDER
        annotation_path = tmpdir / self._SPLITS_FOLDER
        return root, annotation_path

    def inject_fake_data(self, tmpdir, config):
        """Create fake videos plus split files; return the example count for the config."""
        tmpdir = pathlib.Path(tmpdir)

        video_folder = tmpdir / self._VIDEO_FOLDER
        os.makedirs(video_folder)
        video_files = self._create_videos(video_folder)

        splits_folder = tmpdir / self._SPLITS_FOLDER
        os.makedirs(splits_folder)
        num_examples = self._create_split_files(splits_folder, video_files, config["fold"], config["train"])

        return num_examples

    def _create_videos(self, root, num_examples_per_class=3):
        """Create clips for every class; return a list of (class, files) pairs."""
        def file_name_fn(cls, idx, clips_per_group=2):
            # HMDB51 names encode a 1-based group index and a 1-based clip
            # index within that group.
            return f"{cls}_{(idx // clips_per_group) + 1:d}_{(idx % clips_per_group) + 1:d}.avi"

        return [
            (
                cls,
                datasets_utils.create_video_folder(
                    root,
                    cls,
                    lambda idx: file_name_fn(cls, idx),
                    num_examples_per_class,
                ),
            )
            for cls in self._CLASSES
        ]

    def _create_split_files(self, root, video_files, fold, train):
        """Write one ``<cls>_test_split<fold>.txt`` per class.

        Each line is "<file> 1" for train videos and "<file> 2" for test
        videos.  Returns the number of examples for the requested ``train``
        flag.
        """
        num_videos = num_train_videos = 0

        for cls, videos in video_files:
            num_videos += len(videos)

            # Pick a random non-empty strict subset as the train videos.
            train_videos = set(random.sample(videos, random.randrange(1, len(videos) - 1)))
            num_train_videos += len(train_videos)

            with open(pathlib.Path(root) / f"{cls}_test_split{fold}.txt", "w") as fh:
                fh.writelines(f"{file.name} {1 if file in train_videos else 2}\n" for file in videos)

        return num_train_videos if train else (num_videos - num_train_videos)
class OmniglotTestCase(datasets_utils.ImageDatasetTestCase):
    """Fake-data tests for ``datasets.Omniglot``."""

    DATASET_CLASS = datasets.Omniglot
    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(background=(True, False))

    def inject_fake_data(self, tmpdir, config):
        """Create two fake alphabets under the configured split; return the image total."""
        split = "background" if config["background"] else "evaluation"
        target_folder = pathlib.Path(tmpdir) / "omniglot-py" / f"images_{split}"
        os.makedirs(target_folder)

        return sum(
            self._create_alphabet_folder(target_folder, alphabet)
            for alphabet in ("Alphabet_of_the_Magi", "Tifinagh")
        )

    def _create_alphabet_folder(self, root, name):
        """Create 1-3 character folders with 1-3 images each; return the image total."""
        total = 0
        num_characters = torch.randint(1, 4, size=()).item()
        for char_idx in range(num_characters):
            num_images = torch.randint(1, 4, size=()).item()
            total += num_images
            datasets_utils.create_image_folder(
                root / name,
                f"character{char_idx:02d}",
                lambda image_idx: f"{image_idx:02d}.png",
                num_images,
            )
        return total
class SBUTestCase(datasets_utils.ImageDatasetTestCase):
    """Fake-data tests for ``datasets.SBU``."""

    DATASET_CLASS = datasets.SBU
    # Each example is an image together with its caption string.
    FEATURE_TYPES = (PIL.Image.Image, str)

    def inject_fake_data(self, tmpdir, config):
        """Create images plus matching URL and caption lists; return the image count."""
        num_images = 3

        dataset_folder = pathlib.Path(tmpdir) / "dataset"
        images = datasets_utils.create_image_folder(tmpdir, "dataset", self._create_file_name, num_images)

        self._create_urls_txt(dataset_folder, images)
        self._create_captions_txt(dataset_folder, num_images)

        return num_images

    def _create_file_name(self, idx):
        """Return a Flickr-style file name ``<digits>_<hexdigits>.jpg``."""
        part1 = datasets_utils.create_random_string(10, string.digits)
        part2 = datasets_utils.create_random_string(10, string.ascii_lowercase, string.digits[:6])
        return f"{part1}_{part2}.jpg"

    def _create_urls_txt(self, root, images):
        """Write one fake Flickr URL per image into the dataset's URL list."""
        with open(root / "SBU_captioned_photo_dataset_urls.txt", "w") as fh:
            for image in images:
                fh.write(
                    f"http://static.flickr.com/{datasets_utils.create_random_string(4, string.digits)}/{image.name}\n"
                )

    def _create_captions_txt(self, root, num_images):
        """Write one random caption per image into the dataset's caption list."""
        with open(root / "SBU_captioned_photo_dataset_captions.txt", "w") as fh:
            for _ in range(num_images):
                fh.write(f"{datasets_utils.create_random_string(10)}\n")
class SEMEIONTestCase(datasets_utils.ImageDatasetTestCase):
    """Fake-data tests for ``datasets.SEMEION``."""

    DATASET_CLASS = datasets.SEMEION

    def inject_fake_data(self, tmpdir, config):
        """Write ``semeion.data``: 256 pixel columns plus a one-hot label per row."""
        num_images = 3

        images = torch.rand(num_images, 256)
        # One-hot encoding over the 10 digit classes.
        labels = F.one_hot(torch.randint(10, size=(num_images,)))
        with open(pathlib.Path(tmpdir) / "semeion.data", "w") as fh:
            for image, one_hot_labels in zip(images, labels):
                image_columns = " ".join([f"{pixel.item():.4f}" for pixel in image])
                labels_columns = " ".join([str(label.item()) for label in one_hot_labels])
                fh.write(f"{image_columns} {labels_columns}\n")

        return num_images
class USPSTestCase(datasets_utils.ImageDatasetTestCase):
    """Fake-data tests for ``datasets.USPS``."""

    DATASET_CLASS = datasets.USPS

    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(train=(True, False))

    def inject_fake_data(self, tmpdir, config):
        """Write a bzip2-compressed libsvm-style data file; return the row count."""
        num_images = 2 if config["train"] else 1

        # Pixels are scaled into [-1, 1]; labels are 1-based (1..10).
        images = torch.rand(num_images, 256) * 2 - 1
        labels = torch.randint(1, 11, size=(num_images,))

        # The test split uses the ".t" suffix in the archive name.
        with bz2.open(pathlib.Path(tmpdir) / f"usps{'.t' if not config['train'] else ''}.bz2", "w") as fh:
            for image, label in zip(images, labels):
                # libsvm format: "<label> 1:<pixel> 2:<pixel> ..." (1-based indices).
                line = " ".join((str(label.item()), *[f"{idx}:{pixel:.6f}" for idx, pixel in enumerate(image, 1)]))
                fh.write(f"{line}\n".encode())

        return num_images
class SBDatasetTestCase(datasets_utils.ImageDatasetTestCase):
    """Fake-data tests for ``datasets.SBDataset``."""

    DATASET_CLASS = datasets.SBDataset
    # The target is a numpy array in "boundaries" mode and a PIL image in
    # "segmentation" mode (presumably — verify against the dataset impl).
    FEATURE_TYPES = (PIL.Image.Image, (np.ndarray, PIL.Image.Image))

    REQUIRED_PACKAGES = ("scipy.io", "scipy.sparse")

    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(
        image_set=("train", "val", "train_noval"), mode=("boundaries", "segmentation")
    )

    _NUM_CLASSES = 20

    def inject_fake_data(self, tmpdir, config):
        """Create split files, .mat targets, and size-matched images."""
        num_images, num_images_per_image_set = self._create_split_files(tmpdir)

        sizes = self._create_target_folder(tmpdir, "cls", num_images)

        datasets_utils.create_image_folder(
            tmpdir, "img", lambda idx: f"{self._file_stem(idx)}.jpg", num_images, size=lambda idx: sizes[idx]
        )

        return num_images_per_image_set[config["image_set"]]

    def _create_split_files(self, root):
        """Write the three split lists; return (total images, per-split counts)."""
        root = pathlib.Path(root)

        splits = dict(train=(0, 1, 2), train_noval=(0, 2), val=(3,))

        for split, idcs in splits.items():
            self._create_split_file(root, split, idcs)

        # The highest index used by any split determines how many images exist.
        num_images = max(itertools.chain(*splits.values())) + 1
        num_images_per_split = dict([(split, len(idcs)) for split, idcs in splits.items()])
        return num_images, num_images_per_split

    def _create_split_file(self, root, name, idcs):
        """Write one file stem per line into ``<name>.txt``."""
        with open(root / f"{name}.txt", "w") as fh:
            fh.writelines(f"{self._file_stem(idx)}\n" for idx in idcs)

    def _create_target_folder(self, root, name, num_images):
        """Write one ``GTcls`` .mat target per image; return the random image sizes."""
        io = datasets_utils.lazy_importer.scipy.io

        target_folder = pathlib.Path(root) / name
        os.makedirs(target_folder)

        sizes = [torch.randint(1, 4, size=(2,)).tolist() for _ in range(num_images)]
        for idx, size in enumerate(sizes):
            content = dict(
                GTcls=dict(Boundaries=self._create_boundaries(size), Segmentation=self._create_segmentation(size))
            )
            io.savemat(target_folder / f"{self._file_stem(idx)}.mat", content)

        return sizes

    def _create_boundaries(self, size):
        """Return one sparse binary boundary map per class."""
        sparse = datasets_utils.lazy_importer.scipy.sparse
        return [
            [sparse.csc_matrix(torch.randint(0, 2, size=size, dtype=torch.uint8).numpy())]
            for _ in range(self._NUM_CLASSES)
        ]

    def _create_segmentation(self, size):
        """Return a random segmentation map over background + 20 classes."""
        return torch.randint(0, self._NUM_CLASSES + 1, size=size, dtype=torch.uint8).numpy()

    def _file_stem(self, idx):
        # File names follow the VOC2008 image naming scheme, e.g. "2008_000003".
        return f"2008_{idx:06d}"
class FakeDataTestCase(datasets_utils.ImageDatasetTestCase):
    """Tests for ``datasets.FakeData``, which synthesizes its own samples."""

    DATASET_CLASS = datasets.FakeData
    FEATURE_TYPES = (PIL.Image.Image, int)

    def dataset_args(self, tmpdir, config):
        # FakeData takes no root directory argument.
        return ()

    def inject_fake_data(self, tmpdir, config):
        # Nothing is written to disk; the dataset length is the configured size.
        return config["size"]

    def test_not_found_or_corrupted(self):
        """Skip: there are no on-disk files that could be missing or corrupted."""
        self.skipTest("The data is generated at creation and thus cannot be non-existent or corrupted.")
class PhotoTourTestCase(datasets_utils.ImageDatasetTestCase):
    """Fake-data tests for ``datasets.PhotoTour``.

    An example is a single patch tensor in train mode and a
    ``(patch1, patch2, matches)`` triple in test mode, so ``FEATURE_TYPES``
    is swapped per config in :meth:`test_feature_types`.
    """

    DATASET_CLASS = datasets.PhotoTour

    # The effective feature types are selected in test_feature_types() below.
    FEATURE_TYPES = ()
    _TRAIN_FEATURE_TYPES = (torch.Tensor,)
    _TEST_FEATURE_TYPES = (torch.Tensor, torch.Tensor, torch.Tensor)

    # BUG FIX: the grid was computed as a bare expression and discarded, so the
    # suite silently ran with the default config only; it must be assigned.
    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(train=(True, False))

    _NAME = "liberty"

    def dataset_args(self, tmpdir, config):
        return tmpdir, self._NAME

    def inject_fake_data(self, tmpdir, config):
        """Create a zipped PhotoTour archive with patches, info, and matches files."""
        tmpdir = pathlib.Path(tmpdir)

        num_patches = 5

        image_files = self._create_images(tmpdir, self._NAME, num_patches)
        point_ids, info_file = self._create_info_file(tmpdir / self._NAME, num_patches)
        num_matches, matches_file = self._create_matches_file(tmpdir / self._NAME, num_patches, point_ids)

        self._create_archive(tmpdir, self._NAME, *image_files, info_file, matches_file)

        # Training iterates individual patches; testing iterates matches.
        return num_patches if config["train"] else num_matches

    def _create_images(self, root, name, num_images):
        """Create single-channel 64x64 BMP tiles named ``patchesNNNN.bmp``."""
        return datasets_utils.create_image_folder(
            root, name, lambda idx: f"patches{idx:04d}.bmp", num_images, size=(1, 64, 64)
        )

    def _create_info_file(self, root, num_images):
        """Write ``info.txt`` mapping each patch to a random point id."""
        point_ids = torch.randint(num_images, size=(num_images,)).tolist()

        file = root / "info.txt"
        with open(file, "w") as fh:
            fh.writelines([f"{point_id} 0\n" for point_id in point_ids])

        return point_ids, file

    def _create_matches_file(self, root, num_patches, point_ids):
        """Write one line per patch pair; return (number of matches, file path)."""
        lines = [
            f"{patch_id1} {point_ids[patch_id1]} 0 {patch_id2} {point_ids[patch_id2]} 0\n"
            for patch_id1, patch_id2 in itertools.combinations(range(num_patches), 2)
        ]

        file = root / "m50_100000_100000_0.txt"
        with open(file, "w") as fh:
            fh.writelines(lines)

        return len(lines), file

    def _create_archive(self, root, name, *files):
        """Zip the given files under ``<name>.zip`` with root-relative arcnames."""
        archive = root / f"{name}.zip"
        with zipfile.ZipFile(archive, "w") as zip:
            for file in files:
                zip.write(file, arcname=file.relative_to(root))

        return archive

    @datasets_utils.test_all_configs
    def test_feature_types(self, config):
        """Check feature types with the per-mode expectations swapped in."""
        feature_types = self.FEATURE_TYPES
        self.FEATURE_TYPES = self._TRAIN_FEATURE_TYPES if config["train"] else self._TEST_FEATURE_TYPES
        try:
            super().test_feature_types.__wrapped__(self, config)
        finally:
            # Always restore the class-level value, even on failure.
            self.FEATURE_TYPES = feature_types
class Flickr8kTestCase(datasets_utils.ImageDatasetTestCase):
    """Fake-data tests for ``datasets.Flickr8k``."""

    DATASET_CLASS = datasets.Flickr8k
    # Each example is an image and its list of caption strings.
    FEATURE_TYPES = (PIL.Image.Image, list)

    _IMAGES_FOLDER = "images"
    _ANNOTATIONS_FILE = "captions.html"

    def dataset_args(self, tmpdir, config):
        """Return the positional dataset arguments (image root, annotation file)."""
        tmpdir = pathlib.Path(tmpdir)
        root = tmpdir / self._IMAGES_FOLDER
        ann_file = tmpdir / self._ANNOTATIONS_FILE
        return str(root), str(ann_file)

    def inject_fake_data(self, tmpdir, config):
        """Create images and the HTML annotation file; report counts and captions."""
        num_images = 3
        num_captions_per_image = 3

        tmpdir = pathlib.Path(tmpdir)

        images = self._create_images(tmpdir, self._IMAGES_FOLDER, num_images)
        self._create_annotations_file(tmpdir, self._ANNOTATIONS_FILE, images, num_captions_per_image)

        return dict(num_examples=num_images, captions=self._create_captions(num_captions_per_image))

    def _create_images(self, root, name, num_images):
        return datasets_utils.create_image_folder(root, name, self._image_file_name, num_images)

    def _image_file_name(self, idx):
        """Return a Flickr-style name ``<id>_<checksum>_<size>.jpg``."""
        id = datasets_utils.create_random_string(10, string.digits)
        checksum = datasets_utils.create_random_string(10, string.digits, string.ascii_lowercase[:6])
        size = datasets_utils.create_random_string(1, "qwcko")
        return f"{id}_{checksum}_{size}.jpg"

    def _create_annotations_file(self, root, name, images, num_captions_per_image):
        """Write an HTML table of captions, including one image-less entry."""
        with open(root / name, "w") as fh:
            fh.write("<table>")
            # The leading None produces an "Image Not Found" entry.
            for image in (None, *images):
                self._add_image(fh, image, num_captions_per_image)
            fh.write("</table>")

    def _add_image(self, fh, image, num_captions_per_image):
        """Write the header row and caption list for one image."""
        fh.write("<tr>")
        self._add_image_header(fh, image)
        fh.write("</tr><tr><td><ul>")
        self._add_image_captions(fh, num_captions_per_image)
        fh.write("</ul></td></tr>")

    def _add_image_header(self, fh, image=None):
        if image:
            url = f"http://www.flickr.com/photos/user/{image.name.split('_')[0]}/"
            data = f'<a href="{url}">{url}</a>'
        else:
            data = "Image Not Found"
        fh.write(f"<td>{data}</td>")

    def _add_image_captions(self, fh, num_captions_per_image):
        for caption in self._create_captions(num_captions_per_image):
            fh.write(f"<li>{caption}")

    def _create_captions(self, num_captions_per_image):
        # Deterministic captions so test_captions() can compare against them.
        return [str(idx) for idx in range(num_captions_per_image)]

    def test_captions(self):
        """The captions returned by the dataset match the injected ones."""
        with self.create_dataset() as (dataset, info):
            _, captions = dataset[0]
            self.assertSequenceEqual(captions, info["captions"])
class Flickr30kTestCase(Flickr8kTestCase):
    """Fake-data tests for ``datasets.Flickr30k`` (token-file annotations)."""

    DATASET_CLASS = datasets.Flickr30k

    FEATURE_TYPES = (PIL.Image.Image, list)

    _ANNOTATIONS_FILE = "captions.token"

    def _image_file_name(self, idx):
        # Flickr30k images are simply numbered.
        return f"{idx}.jpg"

    def _create_annotations_file(self, root, name, images, num_captions_per_image):
        """Write one ``<image>#<caption idx>\\t<caption>`` line per image/caption pair."""
        with open(root / name, "w") as fh:
            for image in images:
                for idx, caption in enumerate(self._create_captions(num_captions_per_image)):
                    fh.write(f"{image.name}#{idx}\t{caption}\n")
class MNISTTestCase(datasets_utils.ImageDatasetTestCase):
    """Fake-data tests for ``datasets.MNIST`` based on hand-written IDX files."""

    DATASET_CLASS = datasets.MNIST

    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(train=(True, False))

    # The IDX magic number encodes the dtype; see _magic() below.
    _MAGIC_DTYPES = {
        torch.uint8: 8,
        torch.int8: 9,
        torch.int16: 11,
        torch.int32: 12,
        torch.float32: 13,
        torch.float64: 14,
    }

    _IMAGES_SIZE = (28, 28)
    _IMAGES_DTYPE = torch.uint8

    _LABELS_SIZE = ()
    _LABELS_DTYPE = torch.uint8

    def inject_fake_data(self, tmpdir, config):
        """Write the raw IDX image and label files; return the image count."""
        raw_dir = pathlib.Path(tmpdir) / self.DATASET_CLASS.__name__ / "raw"
        os.makedirs(raw_dir, exist_ok=True)

        num_images = self._num_images(config)
        self._create_binary_file(
            raw_dir, self._images_file(config), (num_images, *self._IMAGES_SIZE), self._IMAGES_DTYPE
        )
        self._create_binary_file(
            raw_dir, self._labels_file(config), (num_images, *self._LABELS_SIZE), self._LABELS_DTYPE
        )

        return num_images

    def _num_images(self, config):
        return 2 if config["train"] else 1

    def _images_file(self, config):
        return f"{self._prefix(config)}-images-idx3-ubyte"

    def _labels_file(self, config):
        return f"{self._prefix(config)}-labels-idx1-ubyte"

    def _prefix(self, config):
        # MNIST names its test files "t10k-...".
        return "train" if config["train"] else "t10k"

    def _create_binary_file(self, root, filename, size, dtype):
        """Write an IDX file: magic number, dimension sizes, then raw data."""
        with open(pathlib.Path(root) / filename, "wb") as fh:
            for meta in (self._magic(dtype, len(size)), *size):
                fh.write(self._encode(meta))

            data = torch.randint(0, torch.iinfo(dtype).max + 1, size, dtype=dtype)
            fh.write(data.numpy().tobytes())

    def _magic(self, dtype, dims):
        # Magic number = <dtype code> * 256 + <number of dimensions>.
        return self._MAGIC_DTYPES[dtype] * 256 + dims

    def _encode(self, v):
        # Serialize as a 32-bit integer and reverse the native byte order
        # (assumes a little-endian host, producing big-endian as IDX expects —
        # TODO confirm on big-endian platforms).
        return torch.tensor(v, dtype=torch.int32).numpy().tobytes()[::-1]
class FashionMNISTTestCase(MNISTTestCase):
    # Same IDX layout as MNIST; only the dataset class differs.
    DATASET_CLASS = datasets.FashionMNIST
class KMNISTTestCase(MNISTTestCase):
    # Same IDX layout as MNIST; only the dataset class differs.
    DATASET_CLASS = datasets.KMNIST
class EMNISTTestCase(MNISTTestCase):
    """Fake-data tests for ``datasets.EMNIST``."""

    DATASET_CLASS = datasets.EMNIST

    DEFAULT_CONFIG = dict(split="byclass")
    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(
        split=("byclass", "bymerge", "balanced", "letters", "digits", "mnist"), train=(True, False)
    )

    def _prefix(self, config):
        # EMNIST files are named "emnist-<split>-<train|test>-...".
        phase = "train" if config["train"] else "test"
        return f"emnist-{config['split']}-{phase}"
class QMNISTTestCase(MNISTTestCase):
    """Fake-data tests for ``datasets.QMNIST``."""

    DATASET_CLASS = datasets.QMNIST

    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(what=("train", "test", "test10k", "nist"))

    # QMNIST stores 8 int32 values per label instead of a single byte.
    _LABELS_SIZE = (8,)
    _LABELS_DTYPE = torch.int32

    def _num_images(self, config):
        if config["what"] == "nist":
            return 3
        elif config["what"] == "train":
            return 2
        elif config["what"] == "test50k":
            # test50k skips the first 10000 test images (see
            # test_num_examples_test50k), so create 10001 images to leave
            # exactly one example in the split.
            return 10001
        else:
            return 1

    def _labels_file(self, config):
        return f"{self._prefix(config)}-labels-idx2-int"

    def _prefix(self, config):
        if config["what"] == "nist":
            return "xnist"

        if config["what"] is None:
            what = "train" if config["train"] else "test"
        elif config["what"].startswith("test"):
            # "test10k"/"test50k" are subsets of the plain test files.
            what = "test"
        else:
            what = config["what"]

        return f"qmnist-{what}"

    def test_num_examples_test50k(self):
        """test50k excludes the first 10000 images of the test files."""
        with self.create_dataset(what="test50k") as (dataset, info):
            self.assertEqual(len(dataset), info["num_examples"] - 10000)
class DatasetFolderTestCase(datasets_utils.ImageDatasetTestCase):
    """Fake-data tests for the generic ``datasets.DatasetFolder``."""

    DATASET_CLASS = datasets.DatasetFolder

    # dataset_args() supplies an identity loader, so a sample is its path string.
    FEATURE_TYPES = (str, int)

    _IMAGE_EXTENSIONS = ("jpg", "png")
    _VIDEO_EXTENSIONS = ("avi", "mp4")
    _EXTENSIONS = (*_IMAGE_EXTENSIONS, *_VIDEO_EXTENSIONS)

    # Exercise every single extension as well as the image-only and
    # video-only groups.
    DEFAULT_CONFIG = dict(extensions=_EXTENSIONS)
    ADDITIONAL_CONFIGS = (
        *datasets_utils.combinations_grid(extensions=[(ext,) for ext in _IMAGE_EXTENSIONS]),
        dict(extensions=_IMAGE_EXTENSIONS),
        *datasets_utils.combinations_grid(extensions=[(ext,) for ext in _VIDEO_EXTENSIONS]),
        dict(extensions=_VIDEO_EXTENSIONS),
    )

    def dataset_args(self, tmpdir, config):
        # Identity loader: samples are returned as their file paths.
        return tmpdir, lambda x: x

    def inject_fake_data(self, tmpdir, config):
        """Create one class folder per selected extension; report counts and classes."""
        # When extensions is None the config carries an is_valid_file predicate
        # instead (see test_is_valid_file); recover the extension set from it.
        extensions = config["extensions"] or self._is_valid_file_to_extensions(config["is_valid_file"])

        num_examples_total = 0
        classes = []
        # Class names are single letters, one per known extension.
        for ext, cls in zip(self._EXTENSIONS, string.ascii_letters):
            if ext not in extensions:
                continue

            create_example_folder = (
                datasets_utils.create_image_folder
                if ext in self._IMAGE_EXTENSIONS
                else datasets_utils.create_video_folder
            )

            num_examples = torch.randint(1, 3, size=()).item()
            create_example_folder(tmpdir, cls, lambda idx: self._file_name_fn(cls, ext, idx), num_examples)

            num_examples_total += num_examples
            classes.append(cls)

        return dict(num_examples=num_examples_total, classes=classes)

    def _file_name_fn(self, cls, ext, idx):
        return f"{cls}_{idx}.{ext}"

    def _is_valid_file_to_extensions(self, is_valid_file):
        """Recover the set of extensions an ``is_valid_file`` predicate accepts."""
        return {ext for ext in self._EXTENSIONS if is_valid_file(f"foo.{ext}")}

    @datasets_utils.test_all_configs
    def test_is_valid_file(self, config):
        """Passing an equivalent ``is_valid_file`` instead of ``extensions`` works."""
        extensions = config.pop("extensions")
        with self.create_dataset(
            config, extensions=None, is_valid_file=lambda file: pathlib.Path(file).suffix[1:] in extensions
        ) as (dataset, info):
            self.assertEqual(len(dataset), info["num_examples"])

    @datasets_utils.test_all_configs
    def test_classes(self, config):
        """The discovered classes match the injected folder names."""
        with self.create_dataset(config) as (dataset, info):
            self.assertSequenceEqual(dataset.classes, info["classes"])
class ImageFolderTestCase(datasets_utils.ImageDatasetTestCase):
    """Fake-data tests for ``datasets.ImageFolder``."""

    DATASET_CLASS = datasets.ImageFolder

    def inject_fake_data(self, tmpdir, config):
        """Create folders ``a``/``b`` with 1-2 images each; report counts and classes."""
        classes = ("a", "b")
        total = 0
        for label in classes:
            count = torch.randint(1, 3, size=()).item()
            datasets_utils.create_image_folder(
                tmpdir, label, lambda idx, label=label: f"{label}_{idx}.png", count
            )
            total += count
        return dict(num_examples=total, classes=classes)

    @datasets_utils.test_all_configs
    def test_classes(self, config):
        """The discovered classes match the injected folder names."""
        with self.create_dataset(config) as (dataset, info):
            self.assertSequenceEqual(dataset.classes, info["classes"])
class KittiTestCase(datasets_utils.ImageDatasetTestCase):
    """Fake-data tests for ``datasets.Kitti``."""

    DATASET_CLASS = datasets.Kitti
    # The target is a list of annotations for training and None for testing.
    FEATURE_TYPES = (PIL.Image.Image, (list, type(None)))
    ADDITIONAL_CONFIGS = datasets_utils.combinations_grid(train=(True, False))

    def inject_fake_data(self, tmpdir, config):
        """Create training and testing images, plus label files for training."""
        kitti_dir = os.path.join(tmpdir, "Kitti", "raw")
        os.makedirs(kitti_dir)

        split_to_num_examples = {
            True: 1,
            False: 2,
        }

        # Both splits must exist on disk regardless of the requested config.
        for is_training in (True, False):
            num_examples = split_to_num_examples[is_training]

            datasets_utils.create_image_folder(
                root=kitti_dir,
                name=os.path.join("training" if is_training else "testing", "image_2"),
                file_name_fn=lambda image_idx: f"{image_idx:06d}.png",
                num_examples=num_examples,
            )
            if is_training:
                # BUG FIX: os.makedirs was previously called inside the
                # per-image loop and would raise FileExistsError for any
                # training split with more than one example.
                target_file_dir = os.path.join(kitti_dir, "training", "label_2")
                os.makedirs(target_file_dir, exist_ok=True)
                for image_idx in range(num_examples):
                    # One annotation line in the KITTI label format.
                    target_file_name = os.path.join(target_file_dir, f"{image_idx:06d}.txt")
                    target_contents = "Pedestrian 0.00 0 -0.20 712.40 143.00 810.73 307.92 1.89 0.48 1.20 1.84 1.47 8.41 0.01\n"
                    with open(target_file_name, "w") as target_file:
                        target_file.write(target_contents)

        return split_to_num_examples[config["train"]]
# Run the full dataset test suite when this module is executed directly.
if __name__ == "__main__":
    unittest.main()
| true | true |
1c45af2d6128c89098abeaec9ca933517547a304 | 2,864 | py | Python | tests/functional/test_email_address.py | AutumnalDream/tartiflette-plugin-scalars | 2c73b20eac93b364a97b2192956e5fd4034ec35a | [
"MIT"
] | 8 | 2019-10-02T12:47:15.000Z | 2021-12-15T14:29:37.000Z | tests/functional/test_email_address.py | AutumnalDream/tartiflette-plugin-scalars | 2c73b20eac93b364a97b2192956e5fd4034ec35a | [
"MIT"
] | 109 | 2019-09-19T13:37:43.000Z | 2022-03-28T07:08:50.000Z | tests/functional/test_email_address.py | AutumnalDream/tartiflette-plugin-scalars | 2c73b20eac93b364a97b2192956e5fd4034ec35a | [
"MIT"
] | 4 | 2019-10-26T19:57:20.000Z | 2021-06-24T14:32:37.000Z | import pytest
from tartiflette import Resolver, create_engine
@pytest.mark.asyncio
async def test_email_address_ok():
    """The EmailAddress output scalar passes a well-formed address through."""

    @Resolver("Query.email", schema_name="test_email_address_ok")
    async def resolve_email(*_args, **_kwargs):
        return "alice.girardguittard@dm.com"

    sdl = """
    type Query {
        email: EmailAddress
    }
    """

    engine = await create_engine(
        sdl=sdl,
        modules=[{"name": "tartiflette_plugin_scalars", "config": {}}],
        schema_name="test_email_address_ok",
    )

    result = await engine.execute("query email { email }")
    assert result == {"data": {"email": "alice.girardguittard@dm.com"}}
@pytest.mark.asyncio
async def test_email_address_nok():
    """An invalid address from a resolver yields a null field plus one error."""

    @Resolver("Query.email", schema_name="test_email_address_nok")
    async def resolve_email(*_args, **_kwargs):
        return "nope"

    sdl = """
    type Query {
        email: EmailAddress
    }
    """

    engine = await create_engine(
        sdl=sdl,
        modules=[{"name": "tartiflette_plugin_scalars", "config": {}}],
        schema_name="test_email_address_nok",
    )

    result = await engine.execute("query email { email }")
    assert result["data"]["email"] is None
    errors = result["errors"]
    assert len(errors) == 1
    assert errors[0]["message"] == "Value is not a valid email address: < nope >"
@pytest.mark.asyncio
async def test_email_address_mutation_ok():
    """The EmailAddress input scalar accepts a well-formed address."""

    @Resolver("Mutation.email", schema_name="test_email_address_mutation_ok")
    async def resolve_email(*_args, **_kwargs):
        return True

    sdl = """
    type Query {
        email: EmailAddress
    }

    type Mutation {
        email(input: EmailAddress): Boolean
    }
    """

    engine = await create_engine(
        sdl=sdl,
        modules=[{"name": "tartiflette_plugin_scalars", "config": {}}],
        schema_name="test_email_address_mutation_ok",
    )

    result = await engine.execute(
        'mutation email { email(input:"alice.girardguittard@dailymotion.com") }'
    )
    assert result == {"data": {"email": True}}
@pytest.mark.asyncio
async def test_email_address_mutation_nok():
    """The EmailAddress input scalar rejects a malformed address at coercion."""

    @Resolver("Mutation.email", schema_name="test_email_address_mutation_nok")
    async def resolve_email(*_args, **_kwargs):
        return True

    sdl = """
    type Query {
        email: EmailAddress
    }

    type Mutation {
        email(input: EmailAddress): Boolean
    }
    """

    engine = await create_engine(
        sdl=sdl,
        modules=[{"name": "tartiflette_plugin_scalars", "config": {}}],
        schema_name="test_email_address_mutation_nok",
    )

    result = await engine.execute('mutation email { email(input:"nok") }')
    assert result["data"] is None
    errors = result["errors"]
    assert len(errors) == 1
    assert errors[0]["message"] == "Value nok is not of correct type EmailAddress"
| 25.571429 | 80 | 0.623953 | import pytest
from tartiflette import Resolver, create_engine
@pytest.mark.asyncio
async def test_email_address_ok():
    """A resolver returning a valid address is serialized unchanged."""

    @Resolver("Query.email", schema_name="test_email_address_ok")
    async def email_resolver(*_args, **_kwargs):
        return "alice.girardguittard@dm.com"

    sdl = """
    type Query {
        email: EmailAddress
    }
    """

    engine = await create_engine(
        sdl=sdl,
        modules=[{"name": "tartiflette_plugin_scalars", "config": {}}],
        schema_name="test_email_address_ok",
    )

    assert await engine.execute("query email { email }") == {
        "data": {"email": "alice.girardguittard@dm.com"}
    }
@pytest.mark.asyncio
async def test_email_address_nok():
    """A resolver returning an invalid address yields a null field and one error."""

    @Resolver("Query.email", schema_name="test_email_address_nok")
    async def email_resolver(*_args, **_kwargs):
        return "nope"

    sdl = """
    type Query {
        email: EmailAddress
    }
    """

    engine = await create_engine(
        sdl=sdl,
        modules=[{"name": "tartiflette_plugin_scalars", "config": {}}],
        schema_name="test_email_address_nok",
    )

    result = await engine.execute("query email { email }")
    assert result["data"]["email"] is None
    assert len(result["errors"]) == 1
    assert (
        result["errors"][0]["message"]
        == "Value is not a valid email address: < nope >"
    )
@pytest.mark.asyncio
async def test_email_address_mutation_ok():
    """A well-formed address is accepted as an EmailAddress mutation input."""

    @Resolver("Mutation.email", schema_name="test_email_address_mutation_ok")
    async def email_resolver(*_args, **_kwargs):
        return True

    sdl = """
    type Query {
        email: EmailAddress
    }

    type Mutation {
        email(input: EmailAddress): Boolean
    }
    """

    engine = await create_engine(
        sdl=sdl,
        modules=[{"name": "tartiflette_plugin_scalars", "config": {}}],
        schema_name="test_email_address_mutation_ok",
    )

    assert await engine.execute(
        'mutation email { email(input:"alice.girardguittard@dailymotion.com") }'
    ) == {"data": {"email": True}}
@pytest.mark.asyncio
async def test_email_address_mutation_nok():
    """A malformed address is rejected during input coercion with one error."""

    @Resolver("Mutation.email", schema_name="test_email_address_mutation_nok")
    async def email_resolver(*_args, **_kwargs):
        return True

    sdl = """
    type Query {
        email: EmailAddress
    }

    type Mutation {
        email(input: EmailAddress): Boolean
    }
    """

    engine = await create_engine(
        sdl=sdl,
        modules=[{"name": "tartiflette_plugin_scalars", "config": {}}],
        schema_name="test_email_address_mutation_nok",
    )

    result = await engine.execute('mutation email { email(input:"nok") }')
    assert result["data"] is None
    assert len(result["errors"]) == 1
    assert (
        result["errors"][0]["message"]
        == "Value nok is not of correct type EmailAddress"
    )
| true | true |
1c45af5f2860e383958cbd656df2e212b922f313 | 3,327 | py | Python | tests/parse/test_parse_reference.py | wbknez/breakdb | f783820425c8cb70d8caedc6f5839a72de7c945e | [
"Apache-2.0"
] | 1 | 2020-02-03T18:31:20.000Z | 2020-02-03T18:31:20.000Z | tests/parse/test_parse_reference.py | wbknez/breakdb | f783820425c8cb70d8caedc6f5839a72de7c945e | [
"Apache-2.0"
] | null | null | null | tests/parse/test_parse_reference.py | wbknez/breakdb | f783820425c8cb70d8caedc6f5839a72de7c945e | [
"Apache-2.0"
] | null | null | null | """
Contains unit tests to ensure that all functions involved in parsing DICOM
references work as intended.
"""
import pytest
from breakdb.parse import has_reference, parse_reference
from breakdb.tag import ReferenceTag, get_tag_at, MalformedSequence, \
MissingSequence, MissingTag
from tests.helpers.tag import match
class TestParseReference:
    """
    Test suite for :function: 'has_reference' and :function: 'parse_reference'.
    """

    def test_has_reference_is_false_when_reference_is_missing(self,
                                                              create_dataset):
        """No reference is detected when the reference sequence tag is absent."""
        ds = create_dataset(excludes=[ReferenceTag.SEQUENCE])

        assert not has_reference(ds)

    def test_has_reference_is_false_when_no_references_exist(self,
                                                             create_dataset):
        """No reference is detected when the sequence is present but empty."""
        ds = create_dataset()

        del ds[ReferenceTag.SEQUENCE.value].value[0]

        assert not has_reference(ds)

    def test_has_reference_succeeds(self, create_dataset):
        """A fully populated dataset is recognized as carrying a reference."""
        ds = create_dataset()

        assert has_reference(ds)

    def test_parse_reference_succeeds(self, create_dataset):
        """Parsing copies the SOP class, SOP instance, and series tags."""
        ds = create_dataset()

        seq = get_tag_at(ds, 0, ReferenceTag.SEQUENCE)
        obj = get_tag_at(seq, 0, ReferenceTag.OBJECT)

        parsed = parse_reference(ds)

        match(obj, parsed[ReferenceTag.SEQUENCE.value], ReferenceTag.SOP_CLASS)
        match(obj, parsed[ReferenceTag.SEQUENCE.value], ReferenceTag.SOP_INSTANCE)
        match(seq, parsed[ReferenceTag.SEQUENCE.value], ReferenceTag.SERIES)

    def test_parse_reference_throws_when_sequence_is_missing(self,
                                                             create_dataset):
        """A missing reference sequence raises MissingSequence."""
        ds = create_dataset()

        del ds[ReferenceTag.SEQUENCE.value]

        with pytest.raises(MissingSequence):
            parse_reference(ds)

    def test_parse_reference_throws_when_object_is_missing(self,
                                                           create_dataset):
        """An empty reference sequence raises MalformedSequence."""
        ds = create_dataset()

        del ds[ReferenceTag.SEQUENCE.value].value[0]

        with pytest.raises(MalformedSequence):
            parse_reference(ds)

    def test_parse_reference_throws_when_class_is_missing(self,
                                                          create_dataset):
        """A reference object without a SOP class raises MissingTag."""
        ds = create_dataset()

        seq = get_tag_at(ds, 0, ReferenceTag.SEQUENCE)
        obj = get_tag_at(seq, 0, ReferenceTag.OBJECT)

        del obj[ReferenceTag.SOP_CLASS.value]

        with pytest.raises(MissingTag):
            parse_reference(ds)

    def test_parse_reference_throws_when_instance_is_missing(self,
                                                             create_dataset):
        """A reference object without a SOP instance raises MissingTag."""
        ds = create_dataset()

        seq = get_tag_at(ds, 0, ReferenceTag.SEQUENCE)
        obj = get_tag_at(seq, 0, ReferenceTag.OBJECT)

        del obj[ReferenceTag.SOP_INSTANCE.value]

        with pytest.raises(MissingTag):
            parse_reference(ds)

    def test_parse_reference_throws_when_series_is_missing(self,
                                                           create_dataset):
        """A reference sequence without a series tag raises MissingTag."""
        ds = create_dataset()

        seq = get_tag_at(ds, 0, ReferenceTag.SEQUENCE)

        del seq[ReferenceTag.SERIES.value]

        with pytest.raises(MissingTag):
            parse_reference(ds)
| 32.940594 | 82 | 0.62098 | import pytest
from breakdb.parse import has_reference, parse_reference
from breakdb.tag import ReferenceTag, get_tag_at, MalformedSequence, \
MissingSequence, MissingTag
from tests.helpers.tag import match
class TestParseReference:
    """Tests for :func:`has_reference` and :func:`parse_reference`."""

    def test_has_reference_is_false_when_reference_is_missing(self, create_dataset):
        """No reference is detected when the reference sequence tag is absent."""
        dataset = create_dataset(excludes=[ReferenceTag.SEQUENCE])
        assert not has_reference(dataset)

    def test_has_reference_is_false_when_no_references_exist(self, create_dataset):
        """No reference is detected when the sequence is present but empty."""
        dataset = create_dataset()
        del dataset[ReferenceTag.SEQUENCE.value].value[0]
        assert not has_reference(dataset)

    def test_has_reference_succeeds(self, create_dataset):
        """A fully populated dataset is recognized as carrying a reference."""
        assert has_reference(create_dataset())

    def test_parse_reference_succeeds(self, create_dataset):
        """Parsing copies the SOP class, SOP instance, and series tags."""
        dataset = create_dataset()
        sequence = get_tag_at(dataset, 0, ReferenceTag.SEQUENCE)
        obj = get_tag_at(sequence, 0, ReferenceTag.OBJECT)

        parsed_sequence = parse_reference(dataset)[ReferenceTag.SEQUENCE.value]

        match(obj, parsed_sequence, ReferenceTag.SOP_CLASS)
        match(obj, parsed_sequence, ReferenceTag.SOP_INSTANCE)
        match(sequence, parsed_sequence, ReferenceTag.SERIES)

    def test_parse_reference_throws_when_sequence_is_missing(self, create_dataset):
        """A missing reference sequence raises MissingSequence."""
        dataset = create_dataset()
        del dataset[ReferenceTag.SEQUENCE.value]
        with pytest.raises(MissingSequence):
            parse_reference(dataset)

    def test_parse_reference_throws_when_object_is_missing(self, create_dataset):
        """An empty reference sequence raises MalformedSequence."""
        dataset = create_dataset()
        del dataset[ReferenceTag.SEQUENCE.value].value[0]
        with pytest.raises(MalformedSequence):
            parse_reference(dataset)

    def test_parse_reference_throws_when_class_is_missing(self, create_dataset):
        """A reference object without a SOP class raises MissingTag."""
        dataset = create_dataset()
        sequence = get_tag_at(dataset, 0, ReferenceTag.SEQUENCE)
        obj = get_tag_at(sequence, 0, ReferenceTag.OBJECT)
        del obj[ReferenceTag.SOP_CLASS.value]
        with pytest.raises(MissingTag):
            parse_reference(dataset)

    def test_parse_reference_throws_when_instance_is_missing(self, create_dataset):
        """A reference object without a SOP instance raises MissingTag."""
        dataset = create_dataset()
        sequence = get_tag_at(dataset, 0, ReferenceTag.SEQUENCE)
        obj = get_tag_at(sequence, 0, ReferenceTag.OBJECT)
        del obj[ReferenceTag.SOP_INSTANCE.value]
        with pytest.raises(MissingTag):
            parse_reference(dataset)

    def test_parse_reference_throws_when_series_is_missing(self, create_dataset):
        """A reference sequence without a series tag raises MissingTag."""
        dataset = create_dataset()
        sequence = get_tag_at(dataset, 0, ReferenceTag.SEQUENCE)
        del sequence[ReferenceTag.SERIES.value]
        with pytest.raises(MissingTag):
            parse_reference(dataset)
| true | true |
1c45b05c5d250ea77c37d28b3bab75d2b9cf9824 | 143,725 | py | Python | corehq/apps/accounting/models.py | satyaakam/commcare-hq | 233f255ff20ab3a16013e9fdfdb9c1dcf632e415 | [
"BSD-3-Clause"
] | null | null | null | corehq/apps/accounting/models.py | satyaakam/commcare-hq | 233f255ff20ab3a16013e9fdfdb9c1dcf632e415 | [
"BSD-3-Clause"
] | null | null | null | corehq/apps/accounting/models.py | satyaakam/commcare-hq | 233f255ff20ab3a16013e9fdfdb9c1dcf632e415 | [
"BSD-3-Clause"
] | null | null | null | import datetime
import itertools
from decimal import Decimal
from io import BytesIO
from tempfile import NamedTemporaryFile
from django.conf import settings
from django.contrib.postgres.fields import ArrayField
from django.core.exceptions import ValidationError
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models, transaction
from django.db.models import F, Q
from django.db.models.manager import Manager
from django.template.loader import render_to_string
from django.utils.html import strip_tags
from django.utils.translation import ugettext_lazy as _
import jsonfield
import stripe
from django_prbac.models import Role
from memoized import memoized
from corehq.apps.domain.shortcuts import publish_domain_saved
from dimagi.ext.couchdbkit import (
BooleanProperty,
DateTimeProperty,
SafeSaveDocument,
StringProperty,
)
from dimagi.utils.web import get_site_domain
from corehq.apps.accounting.emails import send_subscription_change_alert
from corehq.apps.accounting.exceptions import (
AccountingError,
CreditLineError,
InvoiceEmailThrottledError,
NewSubscriptionError,
ProductPlanNotFoundError,
SubscriptionAdjustmentError,
SubscriptionChangeError,
SubscriptionReminderError,
SubscriptionRenewalError,
)
from corehq.apps.accounting.invoice_pdf import InvoiceTemplate
from corehq.apps.accounting.signals import subscription_upgrade_or_downgrade
from corehq.apps.accounting.subscription_changes import (
DomainDowngradeActionHandler,
DomainUpgradeActionHandler,
)
from corehq.apps.accounting.utils import (
EXCHANGE_RATE_DECIMAL_PLACES,
ensure_domain_instance,
fmt_dollar_amount,
get_account_name_from_default_name,
get_address_from_invoice,
get_change_status,
get_dimagi_from_email,
get_privileges,
is_active_subscription,
log_accounting_error,
log_accounting_info,
quantize_accounting_decimal,
)
from corehq.apps.domain import UNKNOWN_DOMAIN
from corehq.apps.domain.models import Domain
from corehq.apps.hqwebapp.tasks import send_html_email_async
from corehq.apps.users.models import WebUser
from corehq.blobs.mixin import CODES, BlobMixin
from corehq.const import USER_DATE_FORMAT
from corehq.privileges import REPORT_BUILDER_ADD_ON_PRIVS
from corehq.util.dates import get_first_last_days
from corehq.util.mixin import ValidateModelMixin
from corehq.util.quickcache import quickcache
from corehq.util.soft_assert import soft_assert
from corehq.util.view_utils import absolute_reverse
# Bounds of a signed 32-bit integer, matching Postgres IntegerField columns.
integer_field_validators = [MaxValueValidator(2147483647), MinValueValidator(-2147483648)]

# Cap on repeated invoice communications — presumably per invoice; verify at call sites.
MAX_INVOICE_COMMUNICATIONS = 5
# Dollar threshold below which an invoice is considered "small" — verify usage downstream.
SMALL_INVOICE_THRESHOLD = 100

# Sentinel value for FeatureRate.monthly_limit meaning "no usage cap".
UNLIMITED_FEATURE_USAGE = -1

# Minimum subscription length — presumably in days; confirm against callers.
MINIMUM_SUBSCRIPTION_LENGTH = 30

# Soft-assert that emails the accounts/billing teams when contact emails are missing.
_soft_assert_contact_emails_missing = soft_assert(
    to=['{}@{}'.format(email, 'dimagi.com') for email in [
        'accounts',
        'billing-dev',
    ]],
    exponential_backoff=False,
)
class BillingAccountType(object):
    """Choice constants recording how a BillingAccount came to exist."""
    CONTRACT = "CONTRACT"
    USER_CREATED = "USER_CREATED"
    GLOBAL_SERVICES = "GLOBAL_SERVICES"
    INVOICE_GENERATED = "INVOICE_GENERATED"
    TRIAL = "TRIAL"
    CHOICES = (
        (CONTRACT, "Created by contract"),
        (USER_CREATED, "Created by user"),
        (GLOBAL_SERVICES, "Created by Global Services"),
        (INVOICE_GENERATED, "Generated by an invoice"),
        (TRIAL, "Is trial account"),
    )
class InvoicingPlan(object):
    """Choice constants for how frequently an account is invoiced."""
    MONTHLY = "MONTHLY"
    QUARTERLY = "QUARTERLY"
    YEARLY = "YEARLY"
    CHOICES = (
        (MONTHLY, "Monthly"),
        (QUARTERLY, "Quarterly"),
        (YEARLY, "Yearly")
    )
class FeatureType(object):
    """Choice constants for the kinds of billable features (see the Feature model)."""
    USER = "User"
    SMS = "SMS"
    CHOICES = (
        (USER, USER),
        (SMS, SMS),
    )
class SoftwarePlanEdition(object):
    """Choice constants for software plan editions."""
    COMMUNITY = "Community"
    STANDARD = "Standard"
    PRO = "Pro"
    ADVANCED = "Advanced"
    ENTERPRISE = "Enterprise"
    RESELLER = "Reseller"
    MANAGED_HOSTING = "Managed Hosting"
    PAUSED = "Paused"
    CHOICES = (
        (COMMUNITY, COMMUNITY),
        (STANDARD, STANDARD),
        (PRO, PRO),
        (ADVANCED, ADVANCED),
        (ENTERPRISE, ENTERPRISE),
        (PAUSED, PAUSED),
        (RESELLER, RESELLER),
        (MANAGED_HOSTING, MANAGED_HOSTING),
    )
    # Scanned in order by DefaultProductPlan.get_lowest_edition, which returns the
    # first edition whose privileges cover the request — so order matters here.
    SELF_SERVICE_ORDER = [
        PAUSED,
        COMMUNITY,
        STANDARD,
        PRO,
        ADVANCED,
    ]
class SoftwarePlanVisibility(object):
    """Choice constants controlling who may subscribe to a SoftwarePlan."""
    PUBLIC = "PUBLIC"
    INTERNAL = "INTERNAL"
    TRIAL = "TRIAL"
    CHOICES = (
        (PUBLIC, "Anyone can subscribe"),
        (INTERNAL, "Dimagi must create subscription"),
        (TRIAL, "This is a Trial Plan"),
    )
class CreditAdjustmentReason(object):
    """Choice constants explaining why a credit line was adjusted."""
    DIRECT_PAYMENT = "DIRECT_PAYMENT"
    SALESFORCE = "SALESFORCE"
    INVOICE = "INVOICE"
    LINE_ITEM = "LINE_ITEM"
    TRANSFER = "TRANSFER"
    MANUAL = "MANUAL"
    CHOICES = (
        (MANUAL, "manual"),
        (SALESFORCE, "via Salesforce"),
        (INVOICE, "invoice generated"),
        (LINE_ITEM, "line item generated"),
        (TRANSFER, "transfer from another credit line"),
        (DIRECT_PAYMENT, "payment from client received"),
    )
class SubscriptionAdjustmentReason(object):
    """Choice constants describing the kind of change recorded for a subscription."""
    CREATE = "CREATE"
    MODIFY = "MODIFY"
    CANCEL = "CANCEL"
    UPGRADE = "UPGRADE"
    DOWNGRADE = "DOWNGRADE"
    SWITCH = "SWITCH"
    REACTIVATE = "REACTIVATE"
    RENEW = "RENEW"
    CHOICES = (
        (CREATE, "A new subscription created from scratch."),
        (MODIFY, "Some part of the subscription was modified...likely a date."),
        (CANCEL, "The subscription was cancelled with no followup subscription."),
        (UPGRADE, "The subscription was upgraded to the related subscription."),
        (DOWNGRADE, "The subscription was downgraded to the related subscription."),
        (SWITCH, "The plan was changed to the related subscription and "
                 "was neither an upgrade or downgrade."),
        (REACTIVATE, "The subscription was reactivated."),
        (RENEW, "The subscription was renewed."),
    )
class SubscriptionAdjustmentMethod(object):
    """Choice constants for the actor/mechanism that made a subscription adjustment."""
    USER = "USER"
    INTERNAL = "INTERNAL"
    TASK = "TASK"
    TRIAL = "TRIAL"
    AUTOMATIC_DOWNGRADE = 'AUTOMATIC_DOWNGRADE'
    DEFAULT_COMMUNITY = 'DEFAULT_COMMUNITY'
    INVOICING = 'INVOICING'
    CHOICES = (
        (USER, "User"),
        (INTERNAL, "Ops"),
        (TASK, "[Deprecated] Task (Invoicing)"),
        (TRIAL, "30 Day Trial"),
        (AUTOMATIC_DOWNGRADE, "Automatic Downgrade"),
        (DEFAULT_COMMUNITY, 'Default to Community'),
        (INVOICING, 'Invoicing')
    )
class PaymentMethodType(object):
    """Choice constants for supported payment processors (currently only Stripe)."""
    STRIPE = "Stripe"
    CHOICES = (
        (STRIPE, STRIPE),
    )
class SubscriptionType(object):
    """Choice constants for a subscription's service type."""
    IMPLEMENTATION = "IMPLEMENTATION"
    PRODUCT = "PRODUCT"
    TRIAL = "TRIAL"
    EXTENDED_TRIAL = "EXTENDED_TRIAL"
    SANDBOX = "SANDBOX"
    INTERNAL = "INTERNAL"
    # NOT_SET is used as a model default (Subscription.service_type) but is
    # deliberately not offered as a selectable choice below.
    NOT_SET = "NOT_SET"
    CHOICES = (
        (IMPLEMENTATION, "Implementation"),
        (PRODUCT, "Product"),
        (TRIAL, "Trial"),
        (EXTENDED_TRIAL, "Extended Trial"),
        (SANDBOX, "Sandbox"),
        (INTERNAL, "Internal"),
    )
class ProBonoStatus(object):
    """Choice constants for a subscription's pro-bono / discount status."""
    YES = "PRO_BONO"
    NO = "FULL_PRICE"
    DISCOUNTED = "DISCOUNTED"
    CHOICES = (
        (NO, "Full Price"),
        (DISCOUNTED, "Discounted"),
        (YES, "Pro Bono"),
    )
class FundingSource(object):
    """Choice constants for who funds a subscription."""
    DIMAGI = "DIMAGI"
    CLIENT = "CLIENT"
    EXTERNAL = "EXTERNAL"
    CHOICES = (
        (DIMAGI, "Dimagi"),
        (CLIENT, "Client Funding"),
        (EXTERNAL, "External Funding"),
    )
class EntryPoint(object):
    """Choice constants for how an account entered the system."""
    CONTRACTED = "CONTRACTED"
    SELF_STARTED = "SELF_STARTED"
    NOT_SET = "NOT_SET"
    CHOICES = (
        (CONTRACTED, "Contracted"),
        (SELF_STARTED, "Self-started"),
        (NOT_SET, "Not Set"),
    )
class LastPayment(object):
    """Choice constants for the method of an account's most recent payment."""
    CC_ONE_TIME = "CC_ONE_TIME"
    CC_AUTO = "CC_AUTO"
    WIRE = "WIRE"
    ACH = "ACH"
    OTHER = "OTHER"
    BU_PAYMENT = "BU_PAYMENT"
    NONE = "NONE"
    CHOICES = (
        (CC_ONE_TIME, "Credit Card - One Time"),
        (CC_AUTO, "Credit Card - Autopay"),
        (WIRE, "Wire"),
        (ACH, "ACH"),
        (OTHER, "Other"),
        (BU_PAYMENT, "Payment to local BU"),
        (NONE, "None"),
    )
class PreOrPostPay(object):
    """Choice constants for whether an account pays before or after service."""
    PREPAY = "PREPAY"
    POSTPAY = "POSTPAY"
    NOT_SET = "NOT_SET"
    CHOICES = (
        (PREPAY, "Prepay"),
        (POSTPAY, "Postpay"),
        (NOT_SET, "Not Set"),
    )
class Currency(models.Model):
    """
    Keeps track of the current conversion rates so that we don't have to poll the free, but rate limited API
    from Open Exchange Rates. Necessary for billing things like MACH SMS.
    """
    code = models.CharField(max_length=3, unique=True)
    name = models.CharField(max_length=25, db_index=True)
    symbol = models.CharField(max_length=10)
    # Conversion rate from this currency to the default currency (defaults to 1.0).
    rate_to_default = models.DecimalField(
        default=Decimal('1.0'), max_digits=20,
        decimal_places=EXCHANGE_RATE_DECIMAL_PLACES,
    )
    date_updated = models.DateField(auto_now=True)

    class Meta(object):
        app_label = 'accounting'

    @classmethod
    def get_default(cls):
        """Return (creating it if necessary) the Currency row for settings.DEFAULT_CURRENCY."""
        default, _ = cls.objects.get_or_create(code=settings.DEFAULT_CURRENCY)
        return default
# Template for deriving a default billing-account name from a domain name
# (see BillingAccount.create_account_for_domain).
DEFAULT_ACCOUNT_FORMAT = 'Account for Project %s'
class BillingAccount(ValidateModelMixin, models.Model):
    """
    The key model that links a Subscription to its financial source and methods of payment.
    """
    name = models.CharField(max_length=200, db_index=True, unique=True)
    salesforce_account_id = models.CharField(
        db_index=True,
        max_length=80,
        blank=True,
        null=True,
        help_text="This is how we link to the salesforce account",
    )
    created_by = models.CharField(max_length=80, blank=True)
    created_by_domain = models.CharField(max_length=256, null=True, blank=True)
    date_created = models.DateTimeField(auto_now_add=True)
    dimagi_contact = models.EmailField(blank=True)
    currency = models.ForeignKey(Currency, on_delete=models.PROTECT)
    is_auto_invoiceable = models.BooleanField(default=False)
    date_confirmed_extra_charges = models.DateTimeField(null=True, blank=True)
    account_type = models.CharField(
        max_length=25,
        default=BillingAccountType.CONTRACT,
        choices=BillingAccountType.CHOICES,
    )
    is_active = models.BooleanField(default=True)
    is_customer_billing_account = models.BooleanField(default=False, db_index=True)
    enterprise_admin_emails = ArrayField(models.EmailField(), default=list, blank=True)
    enterprise_restricted_signup_domains = ArrayField(models.CharField(max_length=128), default=list, blank=True)
    invoicing_plan = models.CharField(
        max_length=25,
        default=InvoicingPlan.MONTHLY,
        choices=InvoicingPlan.CHOICES
    )
    entry_point = models.CharField(
        max_length=25,
        default=EntryPoint.NOT_SET,
        choices=EntryPoint.CHOICES,
    )
    # Username of the web user whose card auto-pays this account; None means autopay is off.
    auto_pay_user = models.CharField(max_length=80, null=True, blank=True)
    last_modified = models.DateTimeField(auto_now=True)
    last_payment_method = models.CharField(
        max_length=25,
        default=LastPayment.NONE,
        choices=LastPayment.CHOICES,
    )
    pre_or_post_pay = models.CharField(
        max_length=25,
        default=PreOrPostPay.NOT_SET,
        choices=PreOrPostPay.CHOICES,
    )

    # Settings visible to external users
    restrict_domain_creation = models.BooleanField(default=False)
    restrict_signup = models.BooleanField(default=False, db_index=True)
    restrict_signup_message = models.CharField(max_length=512, null=True, blank=True)

    class Meta(object):
        app_label = 'accounting'

    @property
    def auto_pay_enabled(self):
        """Autopay is on iff a user has been designated as the autopayer."""
        return self.auto_pay_user is not None

    @classmethod
    def create_account_for_domain(cls, domain,
                                  created_by=None, account_type=None,
                                  entry_point=None, last_payment_method=None,
                                  pre_or_post_pay=None):
        """Create a new BillingAccount for ``domain``, filling sensible defaults for unset args."""
        account_type = account_type or BillingAccountType.INVOICE_GENERATED
        entry_point = entry_point or EntryPoint.NOT_SET
        last_payment_method = last_payment_method or LastPayment.NONE
        pre_or_post_pay = pre_or_post_pay or PreOrPostPay.POSTPAY
        default_name = DEFAULT_ACCOUNT_FORMAT % domain
        # presumably de-duplicates the default name against existing accounts — see helper
        name = get_account_name_from_default_name(default_name)
        return BillingAccount.objects.create(
            name=name,
            created_by=created_by,
            created_by_domain=domain,
            currency=Currency.get_default(),
            account_type=account_type,
            entry_point=entry_point,
            last_payment_method=last_payment_method,
            pre_or_post_pay=pre_or_post_pay
        )

    @classmethod
    def get_or_create_account_by_domain(cls, domain,
                                        created_by=None, account_type=None,
                                        entry_point=None, last_payment_method=None,
                                        pre_or_post_pay=None):
        """
        First try to grab the account used for the last subscription.
        If an account is not found, create it.

        Returns an (account, created) tuple, like Django's get_or_create.
        """
        account = cls.get_account_by_domain(domain)
        if account:
            return account, False
        return cls.create_account_for_domain(
            domain,
            created_by=created_by,
            account_type=account_type,
            entry_point=entry_point,
            last_payment_method=last_payment_method,
            pre_or_post_pay=pre_or_post_pay,
        ), True

    @classmethod
    def get_account_by_domain(cls, domain):
        """Return the account of the domain's active subscription, falling back to
        the account that was created by this domain, or None."""
        current_subscription = Subscription.get_active_subscription_by_domain(domain)
        if current_subscription is not None:
            return current_subscription.account
        else:
            return cls._get_account_by_created_by_domain(domain)

    @classmethod
    def _get_account_by_created_by_domain(cls, domain):
        """Return the unique account created by ``domain``; on duplicates, log and
        serve the most recently created one; None if there is no such account."""
        try:
            return cls.objects.get(created_by_domain=domain)
        except cls.DoesNotExist:
            return None
        except cls.MultipleObjectsReturned:
            log_accounting_error(
                f"Multiple billing accounts showed up for the domain '{domain}'. The "
                "latest one was served, but you should reconcile very soon.",
                show_stack_trace=True,
            )
            return cls.objects.filter(created_by_domain=domain).latest('date_created')
        # NOTE(review): unreachable — every branch above returns or raises.
        return None

    @classmethod
    @quickcache([], timeout=60 * 60)
    def get_enterprise_restricted_signup_accounts(cls):
        """Customer accounts with signup restriction enabled. Cached for one hour."""
        return BillingAccount.objects.filter(is_customer_billing_account=True, restrict_signup=True)

    @property
    def autopay_card(self):
        """The autopay card registered with Stripe, or None when autopay is disabled."""
        if not self.auto_pay_enabled:
            return None

        return StripePaymentMethod.objects.get(web_user=self.auto_pay_user).get_autopay_card(self)

    def has_enterprise_admin(self, email):
        """True if this is a customer billing account and ``email`` is an enterprise admin."""
        return self.is_customer_billing_account and email in self.enterprise_admin_emails

    def update_autopay_user(self, new_user, domain):
        """Designate ``new_user`` as autopayer, emailing both the old and new autopayers."""
        if self.auto_pay_enabled and new_user != self.auto_pay_user:
            self._send_autopay_card_removed_email(new_user=new_user, domain=domain)

        self.auto_pay_user = new_user
        self.save()
        self._send_autopay_card_added_email(domain)

    def remove_autopay_user(self):
        """Disable autopay for this account."""
        self.auto_pay_user = None
        self.save()

    def _send_autopay_card_removed_email(self, new_user, domain):
        """Sends an email to the old autopayer for this account telling them {new_user} is now the autopayer"""
        from corehq.apps.domain.views.accounting import EditExistingBillingAccountView
        old_user = self.auto_pay_user
        subject = _("Your card is no longer being used to auto-pay for {billing_account}").format(
            billing_account=self.name)
        old_web_user = WebUser.get_by_username(old_user)
        if old_web_user:
            old_user_name = old_web_user.first_name
        else:
            # fall back to the raw username when no WebUser record exists
            old_user_name = old_user

        context = {
            'new_user': new_user,
            'old_user_name': old_user_name,
            'billing_account_name': self.name,
            'billing_info_url': absolute_reverse(EditExistingBillingAccountView.urlname,
                                                 args=[domain]),
            'invoicing_contact_email': settings.INVOICING_CONTACT_EMAIL,
        }

        send_html_email_async(
            subject,
            old_user,
            render_to_string('accounting/email/autopay_card_removed.html', context),
            text_content=strip_tags(render_to_string('accounting/email/autopay_card_removed.html', context)),
        )

    def _send_autopay_card_added_email(self, domain):
        """Sends an email to the new autopayer for this account telling them they are now the autopayer"""
        from corehq.apps.domain.views.accounting import EditExistingBillingAccountView
        subject = _("Your card is being used to auto-pay for {billing_account}").format(
            billing_account=self.name)
        web_user = WebUser.get_by_username(self.auto_pay_user)
        new_user_name = web_user.first_name if web_user else self.auto_pay_user
        try:
            last_4 = self.autopay_card.last4
        except StripePaymentMethod.DoesNotExist:
            last_4 = None

        context = {
            'name': new_user_name,
            'email': self.auto_pay_user,
            'domain': domain,
            'last_4': last_4,
            'billing_account_name': self.name,
            'billing_info_url': absolute_reverse(EditExistingBillingAccountView.urlname,
                                                 args=[domain]),
            'invoicing_contact_email': settings.INVOICING_CONTACT_EMAIL,
        }

        send_html_email_async(
            subject,
            self.auto_pay_user,
            render_to_string('accounting/email/invoice_autopay_setup.html', context),
            text_content=strip_tags(render_to_string('accounting/email/invoice_autopay_setup.html', context)),
        )
class BillingContactInfo(models.Model):
    """Contact person, emails, and mailing address for a BillingAccount (one-to-one)."""

    account = models.OneToOneField(BillingAccount, primary_key=True, null=False, on_delete=models.CASCADE)
    first_name = models.CharField(
        max_length=50, null=True, blank=True, verbose_name=_("First Name")
    )
    last_name = models.CharField(
        max_length=50, null=True, blank=True, verbose_name=_("Last Name")
    )
    # TODO - replace with models.ArrayField once django >= 1.9
    email_list = jsonfield.JSONField(
        default=list,
        verbose_name=_("Contact Emails"),
        help_text=_("We will email communications regarding your account "
                    "to the emails specified here.")
    )
    phone_number = models.CharField(
        max_length=20, null=True, blank=True, verbose_name=_("Phone Number")
    )
    company_name = models.CharField(
        max_length=50, null=True, blank=True,
        verbose_name=_("Company / Organization")
    )
    first_line = models.CharField(
        max_length=50, null=False,
        verbose_name=_("Address First Line")
    )
    second_line = models.CharField(
        max_length=50, null=True, blank=True,
        verbose_name=_("Address Second Line")
    )
    city = models.CharField(
        max_length=50, null=False, verbose_name=_("City")
    )
    state_province_region = models.CharField(
        max_length=50, null=False,
        verbose_name=_("State / Province / Region"),
    )
    postal_code = models.CharField(
        max_length=20, null=False, verbose_name=_("Postal Code")
    )
    country = models.CharField(
        max_length=50, null=False, verbose_name=_("Country")
    )
    last_modified = models.DateTimeField(auto_now=True)

    class Meta(object):
        app_label = 'accounting'

    def __init__(self, *args, **kwargs):
        super(BillingContactInfo, self).__init__(*args, **kwargs)
        # Normalize a legacy serialized empty-list value into a real list.
        if self.email_list == '[]':
            self.email_list = []

    @property
    def full_name(self):
        """First and last name joined; whichever part exists when the other is empty."""
        if not self.first_name:
            return self.last_name
        elif not self.last_name:
            return self.first_name
        else:
            return "%s %s" % (self.first_name, self.last_name)
class SoftwareProductRate(models.Model):
    """
    Represents the monthly fixed fee for a software product.
    Once created, SoftwareProductRates cannot be modified. Instead, a new SoftwareProductRate must be created.
    """
    name = models.CharField(max_length=40)
    monthly_fee = models.DecimalField(default=Decimal('0.00'), max_digits=10, decimal_places=2)
    date_created = models.DateTimeField(auto_now_add=True)
    is_active = models.BooleanField(default=True)
    last_modified = models.DateTimeField(auto_now=True)

    class Meta(object):
        app_label = 'accounting'

    def __str__(self):
        return '%s @ $%s /month' % (self.name, self.monthly_fee)

    # NOTE(review): defining __eq__ without __hash__ makes instances unhashable in
    # Python 3 (__hash__ is set to None) — confirm nothing puts these in sets/dicts.
    def __eq__(self, other):
        # Equal when same class, same name, and same fee/active flags (pk is ignored).
        if not isinstance(other, self.__class__) or not self.name == other.name:
            return False
        for field in ['monthly_fee', 'is_active']:
            if not getattr(self, field) == getattr(other, field):
                return False
        return True

    @classmethod
    def new_rate(cls, product_name, monthly_fee, save=True):
        """Build (and by default persist) a new rate for ``product_name``."""
        rate = SoftwareProductRate(name=product_name, monthly_fee=monthly_fee)
        if save:
            rate.save()
        return rate
class Feature(models.Model):
    """
    This is what will link a feature type (USER, API, etc.) to a name (Users Pro, API Standard, etc.)
    and will be what the FeatureRate references to provide a monthly fee, limit and per-excess fee.
    """
    name = models.CharField(max_length=40, unique=True)
    feature_type = models.CharField(max_length=10, db_index=True, choices=FeatureType.CHOICES)
    last_modified = models.DateTimeField(auto_now=True)

    class Meta(object):
        app_label = 'accounting'

    def __str__(self):
        return "Feature '%s' of type '%s'" % (self.name, self.feature_type)

    def get_rate(self, default_instance=True):
        """Return the latest active FeatureRate for this feature; if none exists,
        return a default-valued (unsaved) FeatureRate, or None when
        ``default_instance`` is False."""
        try:
            return self.featurerate_set.filter(is_active=True).latest('date_created')
        except FeatureRate.DoesNotExist:
            return FeatureRate() if default_instance else None  # the defaults
class FeatureRate(models.Model):
    """
    Links a feature to a monthly fee, monthly limit, and a per-excess fee for exceeding the monthly limit.
    Once created, Feature Rates cannot be modified. Instead, a new Feature Rate must be created.
    """
    feature = models.ForeignKey(Feature, on_delete=models.PROTECT)
    monthly_fee = models.DecimalField(default=Decimal('0.00'), max_digits=10, decimal_places=2,
                                      verbose_name="Monthly Fee")
    # Included usage before per-excess fees kick in; UNLIMITED_FEATURE_USAGE (-1) = no cap.
    monthly_limit = models.IntegerField(default=0,
                                        verbose_name="Monthly Included Limit",
                                        validators=integer_field_validators)
    per_excess_fee = models.DecimalField(default=Decimal('0.00'), max_digits=10, decimal_places=2,
                                         verbose_name="Fee Per Excess of Limit")
    date_created = models.DateTimeField(auto_now_add=True)
    is_active = models.BooleanField(default=True)
    last_modified = models.DateTimeField(auto_now=True)

    class Meta(object):
        app_label = 'accounting'

    def __str__(self):
        return '%s @ $%s /month, $%s /excess, limit: %d' % (
            self.feature.name, self.monthly_fee, self.per_excess_fee, self.monthly_limit
        )

    # NOTE(review): defining __eq__ without __hash__ makes instances unhashable in
    # Python 3 (__hash__ is set to None) — confirm nothing puts these in sets/dicts.
    def __eq__(self, other):
        # Equal when same class, same feature row, and same fee/limit/active values.
        if not isinstance(other, self.__class__) or not self.feature.pk == other.feature.pk:
            return False
        for field in ['monthly_fee', 'monthly_limit', 'per_excess_fee', 'is_active']:
            if not getattr(self, field) == getattr(other, field):
                return False
        return True

    @classmethod
    def new_rate(cls, feature_name, feature_type,
                 monthly_fee=None, monthly_limit=None, per_excess_fee=None, save=True):
        """Build (and by default persist) a rate for the feature, creating the
        Feature row if needed; unspecified amounts keep the field defaults."""
        feature, _ = Feature.objects.get_or_create(name=feature_name, feature_type=feature_type)
        rate = FeatureRate(feature=feature)
        if monthly_fee is not None:
            rate.monthly_fee = monthly_fee
        if monthly_limit is not None:
            rate.monthly_limit = monthly_limit
        if per_excess_fee is not None:
            rate.per_excess_fee = per_excess_fee
        if save:
            rate.save()
        return rate
class SoftwarePlan(models.Model):
    """
    Subscriptions are created for Software Plans. Software Plans can have many Software Plan Versions, which
    link the Software Plan to a set of permissions roles.
    """
    name = models.CharField(max_length=80, unique=True)
    description = models.TextField(blank=True,
                                   help_text="If the visibility is INTERNAL, this description field will be used.")
    edition = models.CharField(
        max_length=25,
        default=SoftwarePlanEdition.ENTERPRISE,
        choices=SoftwarePlanEdition.CHOICES,
    )
    visibility = models.CharField(
        max_length=10,
        default=SoftwarePlanVisibility.INTERNAL,
        choices=SoftwarePlanVisibility.CHOICES,
    )
    last_modified = models.DateTimeField(auto_now=True)
    is_customer_software_plan = models.BooleanField(default=False)
    # Optional cap on domains subscribed across this plan's versions; null/0 = unlimited.
    max_domains = models.IntegerField(blank=True, null=True)
    is_annual_plan = models.BooleanField(default=False)

    class Meta(object):
        app_label = 'accounting'

    @quickcache(vary_on=['self.pk'], timeout=10)
    def get_version(self):
        """Latest active SoftwarePlanVersion for this plan, or None. Briefly cached;
        invalidated by SoftwarePlanVersion.save()."""
        try:
            return self.softwareplanversion_set.filter(is_active=True).latest('date_created')
        except SoftwarePlanVersion.DoesNotExist:
            return None

    def at_max_domains(self):
        """True when active, visible subscriptions across all versions reach max_domains."""
        if not self.max_domains:
            return False
        subscription_count = 0
        for version in self.softwareplanversion_set.all():
            subscription_count += Subscription.visible_objects.filter(plan_version=version, is_active=True).count()
        return subscription_count >= self.max_domains
class DefaultProductPlan(models.Model):
    """
    This links a product type to its default SoftwarePlan (i.e. the Community Plan).
    The latest SoftwarePlanVersion that's linked to this plan will be the one used to create a new subscription if
    nothing is found for that domain.
    """
    edition = models.CharField(
        default=SoftwarePlanEdition.COMMUNITY,
        choices=SoftwarePlanEdition.CHOICES,
        max_length=25,
    )
    plan = models.ForeignKey(SoftwarePlan, on_delete=models.PROTECT)
    is_trial = models.BooleanField(default=False)
    is_report_builder_enabled = models.BooleanField(default=False)
    last_modified = models.DateTimeField(auto_now=True)

    class Meta(object):
        app_label = 'accounting'
        unique_together = ('edition', 'is_trial', 'is_report_builder_enabled')

    @classmethod
    # Caching is skipped outside ENTERPRISE_MODE and during unit tests.
    @quickcache(['edition', 'is_trial', 'is_report_builder_enabled'],
                skip_arg=lambda *args, **kwargs: not settings.ENTERPRISE_MODE or settings.UNIT_TESTING)
    def get_default_plan_version(cls, edition=None, is_trial=False,
                                 is_report_builder_enabled=False):
        """Return the latest version of the default plan for ``edition`` (Enterprise
        in ENTERPRISE_MODE, otherwise Community, when unset).

        Raises AccountingError when no default plan row exists.
        """
        if not edition:
            edition = (SoftwarePlanEdition.ENTERPRISE if settings.ENTERPRISE_MODE
                       else SoftwarePlanEdition.COMMUNITY)
        try:
            default_product_plan = DefaultProductPlan.objects.select_related('plan').get(
                edition=edition, is_trial=is_trial,
                is_report_builder_enabled=is_report_builder_enabled
            )
            return default_product_plan.plan.get_version()
        except DefaultProductPlan.DoesNotExist:
            raise AccountingError(
                "No default product plan was set up, did you forget to run migrations?"
            )

    @classmethod
    def get_lowest_edition(cls, requested_privileges, return_plan=False):
        """Return the first self-service edition (or its plan version, when
        ``return_plan``) whose privileges cover ``requested_privileges``;
        fall back to Enterprise (or None) when none does."""
        for edition in SoftwarePlanEdition.SELF_SERVICE_ORDER:
            plan_version = cls.get_default_plan_version(edition)
            # Report-builder add-on privileges are sold separately and ignored here.
            privileges = get_privileges(plan_version) - REPORT_BUILDER_ADD_ON_PRIVS
            if privileges.issuperset(requested_privileges):
                return (plan_version if return_plan
                        else plan_version.plan.edition)
        return None if return_plan else SoftwarePlanEdition.ENTERPRISE
class SoftwarePlanVersion(models.Model):
    """
    Links a plan to its rates and provides versioning information.
    Once a new SoftwarePlanVersion is created, it cannot be modified. Instead, a new SoftwarePlanVersion
    must be created.
    """
    plan = models.ForeignKey(SoftwarePlan, on_delete=models.PROTECT)
    product_rate = models.ForeignKey(SoftwareProductRate, on_delete=models.CASCADE)
    feature_rates = models.ManyToManyField(FeatureRate, blank=True)
    date_created = models.DateTimeField(auto_now_add=True)
    is_active = models.BooleanField(default=True)
    role = models.ForeignKey(Role, on_delete=models.CASCADE)
    last_modified = models.DateTimeField(auto_now=True)

    class Meta(object):
        app_label = 'accounting'

    def __str__(self):
        return "%(plan_name)s (v%(version_num)d)" % {
            'plan_name': self.plan.name,
            'version_num': self.version,
        }

    def save(self, *args, **kwargs):
        super(SoftwarePlanVersion, self).save(*args, **kwargs)
        # Saving a version invalidates the plan's cached latest version.
        SoftwarePlan.get_version.clear(self.plan)

    @property
    def version(self):
        # 1-based position of this version among the plan's versions, by creation date.
        return (self.plan.softwareplanversion_set.count() -
                self.plan.softwareplanversion_set.filter(
                    date_created__gt=self.date_created).count())

    @property
    def user_facing_description(self):
        """Dict of name/description/fee/rates/edition for display to end users."""
        from corehq.apps.accounting.user_text import DESC_BY_EDITION, FEATURE_TYPE_TO_NAME

        def _default_description(plan, monthly_limit):
            # Only these editions have a user-count placeholder in their description.
            if plan.edition in [
                SoftwarePlanEdition.COMMUNITY,
                SoftwarePlanEdition.STANDARD,
                SoftwarePlanEdition.PRO,
                SoftwarePlanEdition.ADVANCED,
            ]:
                return DESC_BY_EDITION[plan.edition]['description'].format(monthly_limit)
            else:
                return DESC_BY_EDITION[plan.edition]['description']

        desc = {
            'name': self.plan.name,
        }
        # Public/trial plans (or plans without a description) use the canned text.
        if (
            self.plan.visibility == SoftwarePlanVisibility.PUBLIC
            or self.plan.visibility == SoftwarePlanVisibility.TRIAL
        ) or not self.plan.description:
            desc['description'] = _default_description(self.plan, self.user_feature.monthly_limit)
        else:
            desc['description'] = self.plan.description
        desc.update({
            'monthly_fee': 'USD %s' % self.product_rate.monthly_fee,
            'rates': [{'name': FEATURE_TYPE_TO_NAME[r.feature.feature_type],
                       'included': 'Infinite' if r.monthly_limit == UNLIMITED_FEATURE_USAGE else r.monthly_limit}
                      for r in self.feature_rates.all()],
            'edition': self.plan.edition,
        })
        return desc

    @property
    @memoized
    def user_feature(self):
        """The USER feature rate for this version: prefers an unlimited rate if the
        smallest limit is unlimited, else the largest-limit rate. Implicitly
        returns None when there are no USER rates."""
        user_features = self.feature_rates.filter(feature__feature_type=FeatureType.USER)
        try:
            user_feature = user_features.order_by('monthly_limit')[0]
            if not user_feature.monthly_limit == UNLIMITED_FEATURE_USAGE:
                user_feature = user_features.order_by('-monthly_limit')[0]
            return user_feature
        except IndexError:
            pass

    @property
    def user_limit(self):
        """Included monthly user count; unlimited when no USER rate exists."""
        if self.user_feature is not None:
            return self.user_feature.monthly_limit
        return UNLIMITED_FEATURE_USAGE

    @property
    def user_fee(self):
        """Per-excess-user fee string, or implicitly None when no USER rate exists."""
        if self.user_feature is not None:
            return "USD %d" % self.user_feature.per_excess_fee

    def feature_charges_exist_for_domain(self, domain, start_date=None, end_date=None):
        """True when the domain's usage exceeds the limit of any capped feature rate."""
        domain_obj = ensure_domain_instance(domain)
        if domain_obj is None:
            return False
        from corehq.apps.accounting.usage import FeatureUsageCalculator
        for feature_rate in self.feature_rates.all():
            if feature_rate.monthly_limit != UNLIMITED_FEATURE_USAGE:
                calc = FeatureUsageCalculator(
                    feature_rate, domain_obj.name, start_date=start_date,
                    end_date=end_date
                )
                if calc.get_usage() > feature_rate.monthly_limit:
                    return True
        return False

    @property
    def is_paused(self):
        return self.plan.edition == SoftwarePlanEdition.PAUSED
class SubscriberManager(models.Manager):
    """Manager adding a non-raising variant of ``get``."""

    def safe_get(self, *args, **kwargs):
        """Like ``get(...)`` but returns None instead of raising DoesNotExist."""
        try:
            match = self.get(*args, **kwargs)
        except Subscriber.DoesNotExist:
            return None
        return match
class Subscriber(models.Model):
    """
    The objects that can be subscribed to a Subscription.
    """
    domain = models.CharField(max_length=256, unique=True, db_index=True)
    last_modified = models.DateTimeField(auto_now=True)

    objects = SubscriberManager()

    class Meta(object):
        app_label = 'accounting'

    def __str__(self):
        return "DOMAIN %s" % self.domain

    def create_subscription(self, new_plan_version, new_subscription, is_internal_change):
        """Apply privilege changes for a brand-new subscription on this domain."""
        assert new_plan_version
        assert new_subscription
        return self._apply_upgrades_and_downgrades(
            new_plan_version=new_plan_version,
            new_subscription=new_subscription,
            internal_change=is_internal_change,
        )

    def change_subscription(self, downgraded_privileges, upgraded_privileges, new_plan_version,
                            old_subscription, new_subscription, internal_change):
        """Apply privilege changes when switching from one subscription to another."""
        return self._apply_upgrades_and_downgrades(
            downgraded_privileges=downgraded_privileges,
            upgraded_privileges=upgraded_privileges,
            new_plan_version=new_plan_version,
            old_subscription=old_subscription,
            new_subscription=new_subscription,
            internal_change=internal_change,
        )

    def activate_subscription(self, upgraded_privileges, subscription):
        """Apply only the privilege upgrades for an activating subscription."""
        return self._apply_upgrades_and_downgrades(
            upgraded_privileges=upgraded_privileges,
            new_subscription=subscription,
        )

    def deactivate_subscription(self, downgraded_privileges, upgraded_privileges,
                                old_subscription, new_subscription):
        """Apply privilege changes when a subscription deactivates (possibly into a new one)."""
        return self._apply_upgrades_and_downgrades(
            downgraded_privileges=downgraded_privileges,
            upgraded_privileges=upgraded_privileges,
            old_subscription=old_subscription,
            new_subscription=new_subscription,
        )

    def reactivate_subscription(self, new_plan_version, subscription):
        """Re-apply a subscription's privileges; old and new subscription are the same."""
        return self._apply_upgrades_and_downgrades(
            new_plan_version=new_plan_version,
            old_subscription=subscription,
            new_subscription=subscription,
        )

    def _apply_upgrades_and_downgrades(self, new_plan_version=None,
                                       downgraded_privileges=None,
                                       upgraded_privileges=None,
                                       old_subscription=None,
                                       new_subscription=None,
                                       internal_change=False):
        """
        downgraded_privileges is the list of privileges that should be removed
        upgraded_privileges is the list of privileges that should be added

        Runs downgrade and upgrade handlers for the domain, optionally sends a
        change-alert email, and fires the upgrade-or-downgrade signal.
        """
        if new_plan_version is None:
            new_plan_version = DefaultProductPlan.get_default_plan_version()

        # When either privilege set is unspecified, derive both from the new plan.
        if downgraded_privileges is None or upgraded_privileges is None:
            change_status_result = get_change_status(None, new_plan_version)
            downgraded_privileges = downgraded_privileges or change_status_result.downgraded_privs
            upgraded_privileges = upgraded_privileges or change_status_result.upgraded_privs

        if downgraded_privileges:
            Subscriber._process_downgrade(self.domain, downgraded_privileges, new_plan_version)
        if upgraded_privileges:
            Subscriber._process_upgrade(self.domain, upgraded_privileges, new_plan_version)

        if Subscriber.should_send_subscription_notification(old_subscription, new_subscription):
            send_subscription_change_alert(self.domain, new_subscription, old_subscription, internal_change)

        subscription_upgrade_or_downgrade.send_robust(None, domain=self.domain)

    @staticmethod
    def should_send_subscription_notification(old_subscription, new_subscription):
        """Notify only for real changes: skip brand-new subscriptions, new trials,
        and trials that simply expired with no follow-up."""
        if not old_subscription:
            return False
        is_new_trial = new_subscription and new_subscription.is_trial
        expired_trial = old_subscription.is_trial and not new_subscription
        return not is_new_trial and not expired_trial

    @staticmethod
    def _process_downgrade(domain, downgraded_privileges, new_plan_version):
        """Run the downgrade handler; raise SubscriptionChangeError on failure."""
        downgrade_handler = DomainDowngradeActionHandler(
            domain, new_plan_version, downgraded_privileges,
        )
        if not downgrade_handler.get_response():
            raise SubscriptionChangeError("The downgrade was not successful.")

    @staticmethod
    def _process_upgrade(domain, upgraded_privileges, new_plan_version):
        """Run the upgrade handler; raise SubscriptionChangeError on failure."""
        upgrade_handler = DomainUpgradeActionHandler(
            domain, new_plan_version, upgraded_privileges,
        )
        if not upgrade_handler.get_response():
            raise SubscriptionChangeError("The upgrade was not successful.")
class VisibleSubscriptionManager(models.Manager):
    """Manager that excludes subscriptions hidden from ops (is_hidden_to_ops=True)."""

    # Keep this manager usable from within migrations.
    use_in_migrations = True

    def get_queryset(self):
        return super(VisibleSubscriptionManager, self).get_queryset().filter(is_hidden_to_ops=False)
class DisabledManager(models.Manager):
    """Manager that refuses all queries. Assigned to ``Subscription.objects`` so
    callers must explicitly pick ``visible_objects`` or
    ``visible_and_suppressed_objects``."""

    def get_queryset(self):
        raise NotImplementedError
class Subscription(models.Model):
    """
    Links a Subscriber to a SoftwarePlan and BillingAccount, necessary for invoicing.
    """
    account = models.ForeignKey(BillingAccount, on_delete=models.PROTECT)
    plan_version = models.ForeignKey(SoftwarePlanVersion, on_delete=models.PROTECT)
    subscriber = models.ForeignKey(Subscriber, on_delete=models.PROTECT)
    salesforce_contract_id = models.CharField(blank=True, max_length=80)
    # Subscription term; date_end is NULL for open-ended subscriptions.
    date_start = models.DateField()
    date_end = models.DateField(blank=True, null=True)
    date_created = models.DateTimeField(auto_now_add=True)
    is_active = models.BooleanField(default=False)
    # Invoicing / email controls.
    do_not_invoice = models.BooleanField(default=False)
    no_invoice_reason = models.CharField(blank=True, max_length=256)
    do_not_email_invoice = models.BooleanField(default=False)
    do_not_email_reminder = models.BooleanField(default=False)
    auto_generate_credits = models.BooleanField(default=False)
    is_trial = models.BooleanField(default=False)
    skip_invoicing_if_no_feature_charges = models.BooleanField(default=False)
    service_type = models.CharField(
        max_length=25,
        choices=SubscriptionType.CHOICES,
        default=SubscriptionType.NOT_SET
    )
    pro_bono_status = models.CharField(
        max_length=25,
        choices=ProBonoStatus.CHOICES,
        default=ProBonoStatus.NO,
    )
    funding_source = models.CharField(
        max_length=25,
        choices=FundingSource.CHOICES,
        default=FundingSource.CLIENT
    )
    last_modified = models.DateTimeField(auto_now=True)
    # Rows with is_hidden_to_ops=True are excluded by visible_objects below.
    is_hidden_to_ops = models.BooleanField(default=False)
    skip_auto_downgrade = models.BooleanField(default=False)
    skip_auto_downgrade_reason = models.CharField(blank=True, max_length=256)
    visible_objects = VisibleSubscriptionManager()
    visible_and_suppressed_objects = models.Manager()
    # Plain `objects` access is disabled; use one of the managers above.
    objects = DisabledManager()
    class Meta(object):
        app_label = 'accounting'
def __str__(self):
return ("Subscription to %(plan_version)s for %(subscriber)s. "
"[%(date_start)s - %(date_end)s]" % {
'plan_version': self.plan_version,
'subscriber': self.subscriber,
'date_start': self.date_start.strftime(USER_DATE_FORMAT),
'date_end': (self.date_end.strftime(USER_DATE_FORMAT)
if self.date_end is not None else "--"),
})
def __eq__(self, other):
return (
other is not None
and other.__class__.__name__ == self.__class__.__name__
and other.plan_version.pk == self.plan_version.pk
and other.date_start == self.date_start
and other.date_end == self.date_end
and other.subscriber.pk == self.subscriber.pk
and other.account.pk == self.account.pk
)
    def save(self, *args, **kwargs):
        """
        Overloaded to update domain pillow with subscription information
        """
        from corehq.apps.accounting.mixins import get_overdue_invoice
        super(Subscription, self).save(*args, **kwargs)
        # Invalidate the quickcached active-subscription and overdue-invoice
        # lookups so subsequent reads see the new state.
        Subscription._get_active_subscription_by_domain.clear(Subscription, self.subscriber.domain)
        get_overdue_invoice.clear(self.subscriber.domain)
        domain = Domain.get_by_name(self.subscriber.domain)
        # If a subscriber doesn't have a valid domain associated with it
        # we don't care the pillow won't be updated
        if domain:
            publish_domain_saved(domain)
    def delete(self, *args, **kwargs):
        """Delete and drop the cached active-subscription lookup for the domain."""
        super(Subscription, self).delete(*args, **kwargs)
        Subscription._get_active_subscription_by_domain.clear(Subscription, self.subscriber.domain)
@property
def is_community(self):
return self.plan_version.plan.edition == SoftwarePlanEdition.COMMUNITY
@property
def allowed_attr_changes(self):
"""
These are the attributes of a Subscription that can always be
changed while the subscription is active (or reactivated)
"""
return ['do_not_invoice', 'no_invoice_reason',
'salesforce_contract_id', 'skip_auto_downgrade']
    @property
    def next_subscription_filter(self):
        """Queryset of later visible subscriptions for the same subscriber,
        excluding zero-length (date_start == date_end) subscriptions."""
        return (Subscription.visible_objects.
                filter(subscriber=self.subscriber, date_start__gt=self.date_start).
                exclude(pk=self.pk).
                filter(Q(date_end__isnull=True) | ~Q(date_start=F('date_end'))))
    @property
    def previous_subscription_filter(self):
        """Queryset of visible subscriptions (same subscriber) starting more
        than one day before this one."""
        return Subscription.visible_objects.filter(
            subscriber=self.subscriber,
            date_start__lt=self.date_start - datetime.timedelta(days=1)
        ).exclude(pk=self.pk)
    @property
    def is_renewed(self):
        """
        Checks to see if there's another Subscription for this subscriber
        that starts after this subscription.
        """
        return self.next_subscription_filter.exists()
    @property
    def next_subscription(self):
        """Earliest later subscription for the subscriber, or None."""
        try:
            return self.next_subscription_filter.order_by('date_start')[0]
        except (Subscription.DoesNotExist, IndexError):
            return None
    @property
    def previous_subscription(self):
        """Most recently ended earlier subscription, or None."""
        try:
            return self.previous_subscription_filter.order_by('-date_end')[0]
        except (Subscription.DoesNotExist, IndexError):
            return None
def raise_conflicting_dates(self, date_start, date_end):
"""Raises a subscription Adjustment error if the specified date range
conflicts with other subscriptions related to this subscriber.
"""
assert date_start is not None
for sub in Subscription.visible_objects.filter(
Q(date_end__isnull=True) | Q(date_end__gt=F('date_start')),
subscriber=self.subscriber,
).exclude(
id=self.id,
):
related_has_no_end = sub.date_end is None
current_has_no_end = date_end is None
start_before_related_end = sub.date_end is not None and date_start < sub.date_end
start_before_related_start = date_start < sub.date_start
start_after_related_start = date_start > sub.date_start
end_before_related_end = (
date_end is not None and sub.date_end is not None
and date_end < sub.date_end
)
end_after_related_end = (
date_end is not None and sub.date_end is not None
and date_end > sub.date_end
)
end_after_related_start = date_end is not None and date_end > sub.date_start
if (
(start_before_related_end and start_after_related_start)
or (start_after_related_start and related_has_no_end)
or (end_after_related_start and end_before_related_end)
or (end_after_related_start and related_has_no_end)
or (start_before_related_start and end_after_related_end)
or (start_before_related_end and current_has_no_end)
or (current_has_no_end and related_has_no_end)
):
raise SubscriptionAdjustmentError(
"The start date of %(start_date)s conflicts with the "
"subscription dates to %(related_sub)s." % {
'start_date': self.date_start.strftime(USER_DATE_FORMAT),
'related_sub': sub,
}
)
    def update_subscription(self, date_start, date_end,
                            do_not_invoice=None,
                            no_invoice_reason=None, do_not_email_invoice=None,
                            do_not_email_reminder=None, salesforce_contract_id=None,
                            auto_generate_credits=None,
                            web_user=None, note=None, adjustment_method=None,
                            service_type=None, pro_bono_status=None, funding_source=None,
                            skip_invoicing_if_no_feature_charges=None, skip_auto_downgrade=None,
                            skip_auto_downgrade_reason=None):
        """Update dates and mutable properties in place, then record a
        MODIFY SubscriptionAdjustment. A None value leaves the corresponding
        property unchanged.
        """
        adjustment_method = adjustment_method or SubscriptionAdjustmentMethod.INTERNAL
        self._update_dates(date_start, date_end)
        self._update_properties(
            do_not_invoice=do_not_invoice,
            no_invoice_reason=no_invoice_reason,
            skip_invoicing_if_no_feature_charges=skip_invoicing_if_no_feature_charges,
            do_not_email_invoice=do_not_email_invoice,
            do_not_email_reminder=do_not_email_reminder,
            auto_generate_credits=auto_generate_credits,
            salesforce_contract_id=salesforce_contract_id,
            service_type=service_type,
            pro_bono_status=pro_bono_status,
            funding_source=funding_source,
            skip_auto_downgrade=skip_auto_downgrade,
            skip_auto_downgrade_reason=skip_auto_downgrade_reason,
        )
        self.save()
        SubscriptionAdjustment.record_adjustment(
            self, method=adjustment_method, note=note, web_user=web_user,
            reason=SubscriptionAdjustmentReason.MODIFY
        )
    def _update_dates(self, date_start, date_end):
        """Validate and apply new start/end dates.

        Conflict checks run *before* the instance is mutated. May activate an
        inactive subscription whose new dates are current, but refuses to
        deactivate (cancellation must be used for that).
        """
        if not date_start:
            raise SubscriptionAdjustmentError('Start date must be provided')
        if date_end is not None and date_start > date_end:
            raise SubscriptionAdjustmentError(
                "Can't have a subscription start after the end date."
            )
        self.raise_conflicting_dates(date_start, date_end)
        self.date_start = date_start
        self.date_end = date_end
        is_active_dates = is_active_subscription(self.date_start, self.date_end)
        if self.is_active != is_active_dates:
            if is_active_dates:
                self.is_active = True
                self.subscriber.activate_subscription(get_privileges(self.plan_version), self)
            else:
                raise SubscriptionAdjustmentError(
                    'Cannot deactivate a subscription here. Cancel subscription instead.'
                )
def _update_properties(self, **kwargs):
property_names = {
'do_not_invoice',
'no_invoice_reason',
'skip_invoicing_if_no_feature_charges',
'do_not_email_invoice',
'do_not_email_reminder',
'auto_generate_credits',
'salesforce_contract_id',
'service_type',
'pro_bono_status',
'funding_source',
'skip_auto_downgrade',
'skip_auto_downgrade_reason',
}
assert property_names >= set(kwargs.keys())
for property_name, property_value in kwargs.items():
if property_value is not None:
setattr(self, property_name, property_value)
    @transaction.atomic
    def change_plan(self, new_plan_version, date_end=None,
                    note=None, web_user=None, adjustment_method=None,
                    service_type=None, pro_bono_status=None, funding_source=None,
                    transfer_credits=True, internal_change=False, account=None,
                    do_not_invoice=None, no_invoice_reason=None,
                    auto_generate_credits=False, is_trial=False):
        """
        Changing a plan TERMINATES the current subscription and
        creates a NEW SUBSCRIPTION where the old plan left off.
        This is not the same thing as simply updating the subscription.
        """
        from corehq.apps.analytics.tasks import track_workflow
        adjustment_method = adjustment_method or SubscriptionAdjustmentMethod.INTERNAL
        today = datetime.date.today()
        assert self.is_active
        assert date_end is None or date_end >= today
        if new_plan_version.plan.at_max_domains() and self.plan_version.plan != new_plan_version.plan:
            raise SubscriptionAdjustmentError(
                'The maximum number of project spaces has been reached for %(new_plan_version)s. ' % {
                    'new_plan_version': new_plan_version,
                }
            )
        # Terminate the current subscription as of today.
        self.date_end = today
        self.is_active = False
        self.save()
        # Create the replacement subscription starting where this one ends.
        new_subscription = Subscription(
            account=account if account else self.account,
            plan_version=new_plan_version,
            subscriber=self.subscriber,
            salesforce_contract_id=self.salesforce_contract_id,
            date_start=today,
            date_end=date_end,
            is_active=True,
            do_not_invoice=do_not_invoice if do_not_invoice is not None else self.do_not_invoice,
            no_invoice_reason=no_invoice_reason if no_invoice_reason is not None else self.no_invoice_reason,
            auto_generate_credits=auto_generate_credits,
            is_trial=is_trial,
            service_type=(service_type or SubscriptionType.NOT_SET),
            pro_bono_status=(pro_bono_status or ProBonoStatus.NO),
            funding_source=(funding_source or FundingSource.CLIENT),
            skip_auto_downgrade=False,
            skip_auto_downgrade_reason='',
        )
        new_subscription.save()
        new_subscription.raise_conflicting_dates(new_subscription.date_start, new_subscription.date_end)
        new_subscription.set_billing_account_entry_point()
        # Swap the subscriber's privileges to match the new plan.
        change_status_result = get_change_status(self.plan_version, new_plan_version)
        self.subscriber.change_subscription(
            downgraded_privileges=change_status_result.downgraded_privs,
            upgraded_privileges=change_status_result.upgraded_privs,
            new_plan_version=new_plan_version,
            old_subscription=self,
            new_subscription=new_subscription,
            internal_change=internal_change,
        )
        # transfer existing credit lines to the new subscription
        if transfer_credits:
            self.transfer_credits(new_subscription)
        # record transfer from old subscription
        SubscriptionAdjustment.record_adjustment(
            self, method=adjustment_method, note=note, web_user=web_user,
            reason=change_status_result.adjustment_reason, related_subscription=new_subscription
        )
        SubscriptionAdjustment.record_adjustment(
            new_subscription, method=adjustment_method, note=note, web_user=web_user,
            reason=SubscriptionAdjustmentReason.CREATE
        )
        # Analytics tracking: only user-initiated changes are recorded.
        upgrade_reasons = [SubscriptionAdjustmentReason.UPGRADE, SubscriptionAdjustmentReason.CREATE]
        if web_user and adjustment_method == SubscriptionAdjustmentMethod.USER:
            if change_status_result.adjustment_reason in upgrade_reasons:
                track_workflow(web_user, 'Changed Plan: Upgrade')
            if change_status_result.adjustment_reason == SubscriptionAdjustmentReason.DOWNGRADE:
                track_workflow(web_user, 'Changed Plan: Downgrade')
        return new_subscription
    def reactivate_subscription(self, date_end=None, note=None, web_user=None,
                                adjustment_method=None, **kwargs):
        """
        This assumes that a subscription was cancelled then recreated with the
        same date_start as the last subscription's date_end (with no other subscriptions
        created in between).
        """
        adjustment_method = adjustment_method or SubscriptionAdjustmentMethod.INTERNAL
        self.date_end = date_end
        self.is_active = True
        # Only whitelisted attributes may be changed during reactivation.
        for allowed_attr in self.allowed_attr_changes:
            if allowed_attr in kwargs:
                setattr(self, allowed_attr, kwargs[allowed_attr])
        self.save()
        self.subscriber.reactivate_subscription(
            new_plan_version=self.plan_version,
            subscription=self,
        )
        SubscriptionAdjustment.record_adjustment(
            self, reason=SubscriptionAdjustmentReason.REACTIVATE,
            method=adjustment_method, note=note, web_user=web_user,
        )
    def renew_subscription(self, note=None, web_user=None,
                           adjustment_method=None,
                           service_type=None, pro_bono_status=None,
                           funding_source=None, new_version=None):
        """
        This creates a new subscription with a date_start that is
        equivalent to the current subscription's date_end.
        - The date_end is left None.
        - The plan_version is the cheapest self-subscribable plan with the
          same set of privileges that the current plan has.
        """
        adjustment_method = adjustment_method or SubscriptionAdjustmentMethod.INTERNAL
        if self.date_end is None:
            raise SubscriptionRenewalError(
                "Cannot renew a subscription with no date_end set."
            )
        if new_version is None:
            # Pick the lowest edition that still grants the same privileges.
            current_privileges = get_privileges(self.plan_version)
            new_version = DefaultProductPlan.get_lowest_edition(
                current_privileges, return_plan=True,
            )
        if new_version is None:
            # this should NEVER happen, but on the off-chance that it does...
            raise SubscriptionRenewalError(
                "There was an issue renewing your subscription. Someone "
                "from Dimagi will get back to you shortly."
            )
        renewed_subscription = Subscription(
            account=self.account,
            plan_version=new_version,
            subscriber=self.subscriber,
            salesforce_contract_id=self.salesforce_contract_id,
            date_start=self.date_end,
            date_end=None,
        )
        if service_type is not None:
            renewed_subscription.service_type = service_type
        if pro_bono_status is not None:
            renewed_subscription.pro_bono_status = pro_bono_status
        if funding_source is not None:
            renewed_subscription.funding_source = funding_source
        # Activate immediately when the renewal starts today.
        if datetime.date.today() == self.date_end:
            renewed_subscription.is_active = True
        renewed_subscription.save()
        # record renewal from old subscription
        SubscriptionAdjustment.record_adjustment(
            self, method=adjustment_method, note=note, web_user=web_user,
            reason=SubscriptionAdjustmentReason.RENEW,
        )
        return renewed_subscription
    def transfer_credits(self, subscription=None):
        """Transfers all credit balances related to an account or subscription
        (if specified).
        """
        if subscription is not None and self.account.pk != subscription.account.pk:
            raise CreditLineError(
                "Can only transfer subscription credits under the same "
                "Billing Account."
            )
        source_credits = CreditLine.objects.filter(
            account=self.account,
            subscription=self,
        ).all()
        for credit_line in source_credits:
            # Mirror each balance onto the destination credit line...
            transferred_credit = CreditLine.add_credit(
                credit_line.balance,
                account=self.account,
                subscription=subscription,
                feature_type=credit_line.feature_type,
                is_product=credit_line.is_product,
                related_credit=credit_line
            )
            # ...then zero out and deactivate the source credit line.
            credit_line.is_active = False
            credit_line.adjust_credit_balance(
                credit_line.balance * Decimal('-1'),
                related_credit=transferred_credit,
            )
    def send_ending_reminder_email(self):
        """
        Sends a reminder email to the emails specified in the accounting
        contacts that the subscription will end on the specified end date.
        """
        if self.date_end is None:
            raise SubscriptionReminderError(
                "This subscription has no end date."
            )
        today = datetime.date.today()
        num_days_left = (self.date_end - today).days
        domain_name = self.subscriber.domain
        context = self.ending_reminder_context
        subject = context['subject']
        template = self.ending_reminder_email_html
        template_plaintext = self.ending_reminder_email_text
        email_html = render_to_string(template, context)
        email_plaintext = render_to_string(template_plaintext, context)
        # BCC accounting (and the Dimagi contact, if set) on non-trial reminders.
        bcc = [settings.ACCOUNTS_EMAIL] if not self.is_trial else []
        if self.account.dimagi_contact is not None:
            bcc.append(self.account.dimagi_contact)
        for email in self._reminder_email_contacts(domain_name):
            send_html_email_async.delay(
                subject, email, email_html,
                text_content=email_plaintext,
                email_from=get_dimagi_from_email(),
                bcc=bcc,
            )
            log_accounting_info(
                "Sent %(days_left)s-day subscription reminder "
                "email for %(domain)s to %(email)s." % {
                    'days_left': num_days_left,
                    'domain': domain_name,
                    'email': email,
                }
            )
@property
def ending_reminder_email_html(self):
if self.account.is_customer_billing_account:
return 'accounting/email/customer_subscription_ending_reminder.html'
elif self.is_trial:
return 'accounting/email/trial_ending_reminder.html'
else:
return 'accounting/email/subscription_ending_reminder.html'
@property
def ending_reminder_email_text(self):
if self.account.is_customer_billing_account:
return 'accounting/email/customer_subscription_ending_reminder.txt'
elif self.is_trial:
return 'accounting/email/trial_ending_reminder.txt'
else:
return 'accounting/email/subscription_ending_reminder.txt'
    @property
    def ending_reminder_context(self):
        """Template context (including the computed 'subject' key) for the
        client-facing ending-reminder email."""
        from corehq.apps.domain.views.accounting import DomainSubscriptionView
        today = datetime.date.today()
        num_days_left = (self.date_end - today).days
        if num_days_left == 1:
            ending_on = _("tomorrow!")
        else:
            ending_on = _("on %s." % self.date_end.strftime(USER_DATE_FORMAT))
        user_desc = self.plan_version.user_facing_description
        plan_name = user_desc['name']
        domain_name = self.subscriber.domain
        context = {
            'domain': domain_name,
            'plan_name': plan_name,
            'account': self.account.name,
            'ending_on': ending_on,
            'subscription_url': absolute_reverse(
                DomainSubscriptionView.urlname, args=[self.subscriber.domain]),
            'base_url': get_site_domain(),
            'invoicing_contact_email': settings.INVOICING_CONTACT_EMAIL,
            'sales_email': settings.SALES_EMAIL,
        }
        # Subject wording varies: customer-level account / trial / standard.
        if self.account.is_customer_billing_account:
            subject = _(
                "CommCare Alert: %(account_name)s's subscription to "
                "%(plan_name)s ends %(ending_on)s"
            ) % {
                'account_name': self.account.name,
                'plan_name': plan_name,
                'ending_on': ending_on,
            }
        elif self.is_trial:
            subject = _("CommCare Alert: 30 day trial for '%(domain)s' "
                        "ends %(ending_on)s") % {
                'domain': domain_name,
                'ending_on': ending_on,
            }
        else:
            subject = _(
                "CommCare Alert: %(domain)s's subscription to "
                "%(plan_name)s ends %(ending_on)s"
            ) % {
                'plan_name': plan_name,
                'domain': domain_name,
                'ending_on': ending_on,
            }
        context.update({'subject': subject})
        return context
    def send_dimagi_ending_reminder_email(self):
        """Email the account's Dimagi contact that this subscription ends soon.

        Raises SubscriptionReminderError when there is no end date or no
        Dimagi contact on the account.
        """
        if self.date_end is None:
            raise SubscriptionReminderError(
                "This subscription has no end date."
            )
        if self.account.dimagi_contact is None:
            raise SubscriptionReminderError(
                "This subscription has no Dimagi contact."
            )
        subject = self.dimagi_ending_reminder_subject
        context = self.dimagi_ending_reminder_context
        email_html = render_to_string(self.dimagi_ending_reminder_email_html, context)
        email_plaintext = render_to_string(self.dimagi_ending_reminder_email_text, context)
        send_html_email_async.delay(
            subject, self.account.dimagi_contact, email_html,
            text_content=email_plaintext,
            email_from=settings.DEFAULT_FROM_EMAIL,
        )
@property
def dimagi_ending_reminder_email_html(self):
if self.account.is_customer_billing_account:
return 'accounting/email/customer_subscription_ending_reminder_dimagi.html'
else:
return 'accounting/email/subscription_ending_reminder_dimagi.html'
@property
def dimagi_ending_reminder_email_text(self):
if self.account.is_customer_billing_account:
return 'accounting/email/customer_subscription_ending_reminder_dimagi.txt'
else:
return 'accounting/email/subscription_ending_reminder_dimagi.txt'
@property
def dimagi_ending_reminder_subject(self):
if self.account.is_customer_billing_account:
return "Alert: {account}'s subscriptions are ending on {end_date}".format(
account=self.account.name,
end_date=self.date_end.strftime(USER_DATE_FORMAT))
else:
return "Alert: {domain}'s subscription is ending on {end_date}".format(
domain=self.subscriber.domain,
end_date=self.date_end.strftime(USER_DATE_FORMAT))
    @property
    def dimagi_ending_reminder_context(self):
        """Template context for the internal (Dimagi) ending-reminder email."""
        end_date = self.date_end.strftime(USER_DATE_FORMAT)
        email = self.account.dimagi_contact
        if self.account.is_customer_billing_account:
            account = self.account.name
            plan = self.plan_version.plan.edition
            context = {
                'account': account,
                'plan': plan,
                'end_date': end_date,
                # Date the client-facing reminder goes out (30 days before end).
                'client_reminder_email_date': (self.date_end - datetime.timedelta(days=30)).strftime(
                    USER_DATE_FORMAT),
                'contacts': ', '.join(self._reminder_email_contacts(self.subscriber.domain)),
                'dimagi_contact': email,
                'accounts_email': settings.ACCOUNTS_EMAIL
            }
        else:
            domain = self.subscriber.domain
            context = {
                'domain': domain,
                'end_date': end_date,
                'client_reminder_email_date': (self.date_end - datetime.timedelta(days=30)).strftime(
                    USER_DATE_FORMAT),
                'contacts': ', '.join(self._reminder_email_contacts(domain)),
                'dimagi_contact': email,
            }
        return context
def _reminder_email_contacts(self, domain_name):
emails = {a.username for a in WebUser.get_admins_by_domain(domain_name)}
emails |= {e for e in WebUser.get_dimagi_emails_by_domain(domain_name)}
if not self.is_trial:
billing_contact_emails = (
self.account.billingcontactinfo.email_list
if BillingContactInfo.objects.filter(account=self.account).exists() else []
)
if not billing_contact_emails:
from corehq.apps.accounting.views import ManageBillingAccountView
_soft_assert_contact_emails_missing(
False,
'Billing Account for project %s is missing client contact emails: %s' % (
domain_name,
absolute_reverse(ManageBillingAccountView.urlname, args=[self.account.id])
)
)
emails |= {billing_contact_email for billing_contact_email in billing_contact_emails}
if self.account.is_customer_billing_account:
enterprise_admin_emails = self.account.enterprise_admin_emails
emails |= {enterprise_admin_email for enterprise_admin_email in enterprise_admin_emails}
return emails
def set_billing_account_entry_point(self):
no_current_entry_point = self.account.entry_point == EntryPoint.NOT_SET
self_serve = self.service_type == SubscriptionType.PRODUCT
if no_current_entry_point and self_serve and not self.is_trial:
self.account.entry_point = EntryPoint.SELF_STARTED
self.account.save()
    @classmethod
    def get_active_subscription_by_domain(cls, domain_name_or_obj):
        """Return the active visible subscription for a domain (name or
        Domain instance), or None."""
        if settings.ENTERPRISE_MODE:
            # Use the default plan, which is Enterprise when in ENTERPRISE_MODE
            return None
        if isinstance(domain_name_or_obj, Domain):
            return cls._get_active_subscription_by_domain(domain_name_or_obj.name)
        return cls._get_active_subscription_by_domain(domain_name_or_obj)
    @classmethod
    @quickcache(['domain_name'], timeout=60 * 60)
    def _get_active_subscription_by_domain(cls, domain_name):
        """Cached (1 hour) lookup of the active subscription for the domain;
        save()/delete() clear this cache."""
        try:
            return cls.visible_objects.select_related(
                'plan_version__role'
            ).get(
                is_active=True,
                subscriber__domain=domain_name,
            )
        except cls.DoesNotExist:
            return None
    @classmethod
    def get_subscribed_plan_by_domain(cls, domain):
        """
        Returns SoftwarePlanVersion for the given domain.
        """
        domain_obj = ensure_domain_instance(domain)
        if domain_obj is None:
            try:
                return DefaultProductPlan.get_default_plan_version()
            except DefaultProductPlan.DoesNotExist:
                raise ProductPlanNotFoundError
        else:
            active_subscription = cls.get_active_subscription_by_domain(domain_obj.name)
            if active_subscription is not None:
                return active_subscription.plan_version
            else:
                # NOTE(review): unlike the branch above, DoesNotExist is not
                # translated to ProductPlanNotFoundError here -- confirm the
                # asymmetry is intentional.
                return DefaultProductPlan.get_default_plan_version()
    @classmethod
    def new_domain_subscription(cls, account, domain, plan_version,
                                date_start=None, date_end=None, note=None,
                                web_user=None, adjustment_method=None, internal_change=False,
                                **kwargs):
        """Create (or reactivate) a subscription for a domain.

        Validates plan capacity, that the plan and account agree on being
        customer-level, and that the dates don't conflict with existing
        subscriptions. Raises NewSubscriptionError on any validation failure.
        """
        if plan_version.plan.at_max_domains():
            raise NewSubscriptionError(
                'The maximum number of project spaces has been reached for %(plan_version)s. ' % {
                    'plan_version': plan_version,
                }
            )
        # Customer-level plans may only attach to customer billing accounts
        # (and vice versa).
        if plan_version.plan.is_customer_software_plan != account.is_customer_billing_account:
            if plan_version.plan.is_customer_software_plan:
                raise NewSubscriptionError(
                    'You are trying to add a Customer Software Plan to a regular Billing Account. '
                    'Both or neither must be customer-level.'
                )
            else:
                raise NewSubscriptionError(
                    'You are trying to add a regular Software Plan to a Customer Billing Account. '
                    'Both or neither must be customer-level.'
                )
        subscriber = Subscriber.objects.get_or_create(domain=domain)[0]
        today = datetime.date.today()
        date_start = date_start or today
        # find subscriptions that end in the future / after this subscription
        available_subs = Subscription.visible_objects.filter(
            subscriber=subscriber,
        )
        future_subscription_no_end = available_subs.filter(
            date_end__exact=None,
        )
        if date_end is not None:
            future_subscription_no_end = future_subscription_no_end.filter(date_start__lt=date_end)
        if future_subscription_no_end.count() > 0:
            raise NewSubscriptionError(_(
                "There is already a subscription '%s' with no end date "
                "that conflicts with the start and end dates of this "
                "subscription.") %
                future_subscription_no_end.latest('date_created')
            )
        future_subscriptions = available_subs.filter(
            date_end__gt=date_start
        )
        if date_end is not None:
            future_subscriptions = future_subscriptions.filter(date_start__lt=date_end)
        if future_subscriptions.count() > 0:
            raise NewSubscriptionError(str(
                _(
                    "There is already a subscription '%(sub)s' that has an end date "
                    "that conflicts with the start and end dates of this "
                    "subscription %(start)s - %(end)s."
                ) % {
                    'sub': future_subscriptions.latest('date_created'),
                    'start': date_start,
                    'end': date_end
                }
            ))
        # Reuse (reactivate) the previous subscription when it ended exactly
        # where this one starts, on the same account and plan version.
        can_reactivate, last_subscription = cls.can_reactivate_domain_subscription(
            account, domain, plan_version, date_start=date_start
        )
        if can_reactivate:
            last_subscription.reactivate_subscription(
                date_end=date_end, note=note, web_user=web_user,
                adjustment_method=adjustment_method,
                **kwargs
            )
            return last_subscription
        adjustment_method = adjustment_method or SubscriptionAdjustmentMethod.INTERNAL
        subscription = Subscription.visible_objects.create(
            account=account,
            plan_version=plan_version,
            subscriber=subscriber,
            date_start=date_start,
            date_end=date_end,
            **kwargs
        )
        subscription.is_active = is_active_subscription(date_start, date_end)
        if subscription.is_active:
            subscriber.create_subscription(
                new_plan_version=plan_version,
                new_subscription=subscription,
                is_internal_change=internal_change,
            )
        SubscriptionAdjustment.record_adjustment(
            subscription, method=adjustment_method, note=note,
            web_user=web_user
        )
        subscription.save()
        subscription.set_billing_account_entry_point()
        return subscription
    @classmethod
    def can_reactivate_domain_subscription(cls, account, domain, plan_version,
                                           date_start=None):
        """
        Whether the most recent subscription that ended exactly on date_start
        can simply be reactivated (same account and plan version).
        Returns a (can_reactivate, last_subscription_or_None) tuple.
        """
        subscriber = Subscriber.objects.get_or_create(domain=domain)[0]
        date_start = date_start or datetime.date.today()
        last_subscription = Subscription.visible_objects.filter(
            subscriber=subscriber, date_end=date_start
        )
        if not last_subscription.exists():
            return False, None
        last_subscription = last_subscription.latest('date_created')
        return (
            last_subscription.account.pk == account.pk and
            last_subscription.plan_version.pk == plan_version.pk
        ), last_subscription
@property
def is_below_minimum_subscription(self):
if self.is_trial:
return False
elif self.date_start < datetime.date(2018, 9, 5):
# Only block upgrades for subscriptions created after the date we launched the 30-Day Minimum
return False
elif self.date_start + datetime.timedelta(days=MINIMUM_SUBSCRIPTION_LENGTH) >= datetime.date.today():
return True
else:
return False
def user_can_change_subscription(self, user):
if user.is_superuser:
return True
elif self.account.is_customer_billing_account:
return self.account.has_enterprise_admin(user.email)
else:
return True
class InvoiceBaseManager(models.Manager):
    """Manager that excludes invoices hidden from operations staff."""

    def get_queryset(self):
        base_queryset = super(InvoiceBaseManager, self).get_queryset()
        return base_queryset.filter(is_hidden_to_ops=False)
class InvoiceBase(models.Model):
    """Abstract base for invoice models: balance, tax, billing period, due dates."""
    date_created = models.DateTimeField(auto_now_add=True)
    is_hidden = models.BooleanField(default=False)
    tax_rate = models.DecimalField(default=Decimal('0.0000'), max_digits=10, decimal_places=4)
    balance = models.DecimalField(default=Decimal('0.0000'), max_digits=10, decimal_places=4)
    date_due = models.DateField(db_index=True, null=True)
    date_paid = models.DateField(blank=True, null=True)
    # Billing period covered by the invoice.
    date_start = models.DateField()
    date_end = models.DateField()
    # If set to True invoice will not appear in invoice report. There is no UI to
    # control this filter
    is_hidden_to_ops = models.BooleanField(default=False)
    last_modified = models.DateTimeField(auto_now=True)
    objects = InvoiceBaseManager()
    # Unfiltered manager (includes ops-hidden invoices), used by the API.
    api_objects = Manager()
    class Meta(object):
        abstract = True
    @property
    def is_customer_invoice(self):
        # Overridden to True on CustomerInvoice.
        return False
    @property
    def invoice_number(self):
        """User-facing invoice number: prefix + (starting number + pk)."""
        ops_num = settings.INVOICE_STARTING_NUMBER + self.id
        return "%s%d" % (settings.INVOICE_PREFIX, ops_num)
    @property
    def is_wire(self):
        # Overridden to True on WireInvoice.
        return False
    def get_domain(self):
        # Subclasses must say which domain the invoice belongs to.
        raise NotImplementedError()
    @property
    def account(self):
        # Subclasses must resolve their BillingAccount.
        raise NotImplementedError()
    @property
    def is_paid(self):
        return bool(self.date_paid)
    @property
    def email_recipients(self):
        # Subclasses define who receives invoice emails.
        raise NotImplementedError
class WireInvoice(InvoiceBase):
    """Invoice paid by wire transfer, tracked per-domain with a flat balance."""
    # WireInvoice is tied to a domain, rather than a subscription
    domain = models.CharField(max_length=100)
    class Meta(object):
        app_label = 'accounting'
    @property
    @memoized
    def account(self):
        # Resolved from the domain since there is no subscription link.
        return BillingAccount.get_account_by_domain(self.domain)
    @property
    def subtotal(self):
        # No line items: the stored balance is the subtotal.
        return self.balance
    @property
    def is_wire(self):
        return True
    @property
    def is_prepayment(self):
        # Overridden to True on WirePrepaymentInvoice.
        return False
    def get_domain(self):
        return self.domain
    def get_total(self):
        return self.balance
    @property
    def email_recipients(self):
        """Addresses the most recent billing record for this invoice was emailed to."""
        try:
            original_record = WireBillingRecord.objects.filter(invoice=self).order_by('-date_created')[0]
            return original_record.emailed_to_list
        except IndexError:
            log_accounting_error(
                "Strange that WireInvoice %d has no associated WireBillingRecord. "
                "Should investigate."
                % self.id
            )
            return []
class WirePrepaymentInvoice(WireInvoice):
    """Proxy of WireInvoice representing a prepayment (proxy = True: no own table)."""
    class Meta(object):
        app_label = 'accounting'
        proxy = True
    # NOTE(review): mutable class-level attribute shared by all instances;
    # presumably reassigned per-instance before use -- confirm callers never
    # mutate it in place.
    items = []
    @property
    def is_prepayment(self):
        return True
class Invoice(InvoiceBase):
    """
    This is what we'll use to calculate the balance on the accounts based on the current balance
    held by the Invoice. Balance updates will be tied to CreditAdjustmentTriggers which are tied
    to CreditAdjustments.
    """
    subscription = models.ForeignKey(Subscription, on_delete=models.PROTECT)
    class Meta(object):
        app_label = 'accounting'
    def save(self, *args, **kwargs):
        """Save, then invalidate the cached overdue-invoice lookup for the domain."""
        from corehq.apps.accounting.mixins import get_overdue_invoice
        super(Invoice, self).save(*args, **kwargs)
        get_overdue_invoice.clear(self.subscription.subscriber.domain)
@property
def email_recipients(self):
if self.subscription.service_type == SubscriptionType.IMPLEMENTATION:
return [settings.ACCOUNTS_EMAIL]
else:
return self.contact_emails
    @property
    def contact_emails(self):
        """Billing contact emails for the account, falling back to domain
        admins (with a soft assert outside unit tests) when none exist."""
        try:
            billing_contact_info = BillingContactInfo.objects.get(account=self.account)
            contact_emails = billing_contact_info.email_list
        except BillingContactInfo.DoesNotExist:
            contact_emails = []
        if not contact_emails:
            from corehq.apps.accounting.views import ManageBillingAccountView
            admins = WebUser.get_admins_by_domain(self.get_domain())
            contact_emails = [admin.email if admin.email else admin.username for admin in admins]
            if not settings.UNIT_TESTING:
                # Alert ops that this account is missing contact info, but
                # still send to the fallback recipients.
                _soft_assert_contact_emails_missing(
                    False,
                    "Could not find an email to send the invoice "
                    "email to for the domain %s. Sending to domain admins instead: %s."
                    " Add client contact emails here: %s" % (
                        self.get_domain(),
                        ', '.join(contact_emails),
                        absolute_reverse(ManageBillingAccountView.urlname, args=[self.account.id]),
                    )
                )
        return contact_emails
@property
def subtotal(self):
"""
This will be inserted in the subtotal field on the printed invoice.
"""
if self.lineitem_set.count() == 0:
return Decimal('0.0000')
return sum([line_item.total for line_item in self.lineitem_set.all()])
    @property
    def applied_tax(self):
        """Tax amount (tax_rate * subtotal), rounded to 4 decimal places."""
        return Decimal('%.4f' % round(self.tax_rate * self.subtotal, 4))
    @property
    @memoized
    def account(self):
        # The invoice's account is the one attached to its subscription.
        return self.subscription.account
@property
def applied_credit(self):
if self.creditadjustment_set.count() == 0:
return Decimal('0.0000')
return sum([credit.amount for credit in self.creditadjustment_set.all()])
def get_total(self):
"""
This will be inserted in the total field on the printed invoice.
"""
return self.subtotal + self.applied_tax + self.applied_credit
def update_balance(self):
self.balance = self.get_total()
if self.balance <= 0:
self.date_paid = datetime.date.today()
else:
self.date_paid = None
    def calculate_credit_adjustments(self):
        """
        This goes through all credit lines that:
        - do not have feature/product rates, but specify the related subscription and billing account
        - do not have feature/product rates or a subscription, but specify the related billing account

        Order matters: line-item-level credits are applied before general
        credits are applied to the remaining invoice balance.
        """
        # first apply credits to all the line items
        for line_item in self.lineitem_set.all():
            line_item.calculate_credit_adjustments()
        # finally, apply credits to the leftover invoice balance
        current_total = self.get_total()
        credit_lines = CreditLine.get_credits_for_invoice(self)
        CreditLine.apply_credits_toward_balance(credit_lines, current_total, invoice=self)
@classmethod
def exists_for_domain(cls, domain):
return cls.objects.filter(
subscription__subscriber__domain=domain, is_hidden=False
).count() > 0
def get_domain(self):
return self.subscription.subscriber.domain
@classmethod
def autopayable_invoices(cls, date_due):
""" Invoices that can be auto paid on date_due """
invoices = cls.objects.select_related('subscription__account').filter(
date_due=date_due,
is_hidden=False,
subscription__account__auto_pay_user__isnull=False,
)
return invoices
def pay_invoice(self, payment_record):
CreditLine.make_payment_towards_invoice(
invoice=self,
payment_record=payment_record,
)
self.update_balance()
self.save()
class CustomerInvoice(InvoiceBase):
    """An invoice issued against a customer-level billing account.

    Unlike Invoice, which bills a single subscription, a CustomerInvoice is
    tied directly to a BillingAccount and may cover many subscriptions.
    """
    account = models.ForeignKey(BillingAccount, on_delete=models.PROTECT)
    subscriptions = models.ManyToManyField(Subscription, default=list, blank=True)

    class Meta(object):
        app_label = 'accounting'

    @property
    def is_customer_invoice(self):
        return True

    def get_domain(self):
        # A customer invoice spans multiple domains, so there is no single domain.
        return None

    @property
    def email_recipients(self):
        """Billing contact emails for the account (empty list when unconfigured)."""
        try:
            billing_contact_info = BillingContactInfo.objects.get(account=self.account)
            contact_emails = billing_contact_info.email_list
        except BillingContactInfo.DoesNotExist:
            contact_emails = []
        return contact_emails

    @property
    def contact_emails(self):
        return self.account.enterprise_admin_emails

    @property
    def subtotal(self):
        """
        This will be inserted in the subtotal field on the printed invoice.
        """
        if self.lineitem_set.count() == 0:
            return Decimal('0.0000')
        return sum(line_item.total for line_item in self.lineitem_set.all())

    @property
    def applied_tax(self):
        # Quantized to 4 decimal places to match the DecimalField precision.
        return Decimal('%.4f' % round(self.tax_rate * self.subtotal, 4))

    @property
    def applied_credit(self):
        if self.creditadjustment_set.count() == 0:
            return Decimal('0.0000')
        return sum(credit.amount for credit in self.creditadjustment_set.all())

    def get_total(self):
        """
        This will be inserted in the total field on the printed invoice.
        """
        return self.subtotal + self.applied_tax + self.applied_credit

    def update_balance(self):
        """Recompute the balance; stamp date_paid when nothing is owed."""
        self.balance = self.get_total()
        if self.balance <= 0:
            self.date_paid = datetime.date.today()
        else:
            self.date_paid = None

    def calculate_credit_adjustments(self):
        # Apply credits to each line item first, then apply account/subscription
        # level credits toward whatever invoice balance remains.
        for line_item in self.lineitem_set.all():
            line_item.calculate_credit_adjustments()
        current_total = self.get_total()
        credit_lines = CreditLine.get_credits_for_customer_invoice(self)
        CreditLine.apply_credits_toward_balance(credit_lines, current_total, customer_invoice=self)

    def pay_invoice(self, payment_record):
        """Record a payment against this invoice, then refresh and persist its balance."""
        CreditLine.make_payment_towards_invoice(
            invoice=self,
            payment_record=payment_record,
        )
        self.update_balance()
        self.save()

    @classmethod
    def exists_for_domain(cls, domain):
        """Return True if any visible customer invoice covers a subscription
        whose subscriber is `domain`.

        Bug fix: the previous implementation accessed `.subscriptions` on the
        invoice QuerySet itself (QuerySets have no such attribute, so this
        raised AttributeError). Iterate each invoice's subscriptions instead.
        """
        for invoice in cls.objects.filter(is_hidden=False):
            for subscription in invoice.subscriptions.filter(is_hidden=False):
                if subscription.subscriber.domain == domain:
                    return True
        return False

    @classmethod
    def autopayable_invoices(cls, date_due):
        """ Invoices that can be auto paid on date_due """
        return cls.objects.select_related('account').filter(
            date_due=date_due,
            is_hidden=False,
            account__auto_pay_user__isnull=False
        )
class SubscriptionAdjustment(models.Model):
    """
    A record of any adjustments made to a subscription, so we always have a paper trail.

    Things that cannot be modified after a subscription is created:
    - account
    - plan
    - subscriber
    Things that have limited modification abilities:
    - dates if the current date is today or earlier
    All other modifications require cancelling the current subscription and creating a new one.

    Note: related_subscription is the subscription to be filled in when the subscription is upgraded / downgraded.
    """
    subscription = models.ForeignKey(Subscription, on_delete=models.PROTECT)
    # Why the adjustment happened (create, modify, upgrade, ...).
    reason = models.CharField(max_length=50, default=SubscriptionAdjustmentReason.CREATE,
                              choices=SubscriptionAdjustmentReason.CHOICES)
    # How the adjustment was made (internal tooling, user action, ...).
    method = models.CharField(max_length=50, default=SubscriptionAdjustmentMethod.INTERNAL,
                              choices=SubscriptionAdjustmentMethod.CHOICES)
    note = models.TextField(null=True)
    # Username of the web user who made the change, when applicable.
    web_user = models.CharField(max_length=80, null=True)
    invoice = models.ForeignKey(Invoice, on_delete=models.PROTECT, null=True)
    related_subscription = models.ForeignKey(Subscription, on_delete=models.PROTECT, null=True,
                                             related_name='subscriptionadjustment_related')
    date_created = models.DateTimeField(auto_now_add=True)
    # Snapshot of the subscription's dates/contract id at adjustment time.
    new_date_start = models.DateField()
    new_date_end = models.DateField(blank=True, null=True)
    new_date_delay_invoicing = models.DateField(blank=True, null=True)
    new_salesforce_contract_id = models.CharField(blank=True, null=True, max_length=80)
    last_modified = models.DateTimeField(auto_now=True)

    class Meta(object):
        app_label = 'accounting'

    @classmethod
    def record_adjustment(cls, subscription, **kwargs):
        """Create and save an adjustment snapshotting the subscription's current
        dates and contract id; extra fields (reason, method, ...) come via kwargs.
        """
        adjustment = SubscriptionAdjustment(
            subscription=subscription,
            new_date_start=subscription.date_start,
            new_date_end=subscription.date_end,
            new_salesforce_contract_id=subscription.salesforce_contract_id,
            **kwargs
        )
        adjustment.save()
        return adjustment
class BillingRecordBase(models.Model):
    """
    This stores any interaction we have with the client in sending a physical / pdf invoice to their contact email.

    Abstract base: subclasses supply the `invoice` foreign key, email templates,
    `should_send_email`, `email_subject`, `email_from`, and `can_view_statement`.
    """
    date_created = models.DateTimeField(auto_now_add=True, db_index=True)
    # All addresses this record's invoice email was actually sent/cc'd to.
    emailed_to_list = ArrayField(models.EmailField(), default=list)
    skipped_email = models.BooleanField(default=False)
    # Couch document id of the associated InvoicePdf.
    pdf_data_id = models.CharField(max_length=48)
    last_modified = models.DateTimeField(auto_now=True)

    INVOICE_HTML_TEMPLATE = 'accounting/email/invoice.html'
    INVOICE_TEXT_TEMPLATE = 'accounting/email/invoice.txt'

    class Meta(object):
        abstract = True

    # In-memory cache of the InvoicePdf, set by generate_record(); when unset,
    # the `pdf` property fetches the document by pdf_data_id.
    _pdf = None

    @property
    def pdf(self):
        """The InvoicePdf for this record, fetched by id unless cached."""
        if self._pdf is None:
            return InvoicePdf.get(self.pdf_data_id)
        return self._pdf

    @property
    def html_template(self):
        return self.INVOICE_HTML_TEMPLATE

    @property
    def text_template(self):
        return self.INVOICE_TEXT_TEMPLATE

    @property
    def should_send_email(self):
        raise NotImplementedError("should_send_email is required")

    @classmethod
    def generate_record(cls, invoice):
        """Create a billing record for `invoice`, generating and attaching its PDF."""
        record = cls(invoice=invoice)
        invoice_pdf = InvoicePdf()
        invoice_pdf.generate_pdf(record.invoice)
        record.pdf_data_id = invoice_pdf._id
        record._pdf = invoice_pdf
        record.save()
        return record

    def handle_throttled_email(self, contact_emails):
        """Mark this record as skipped and raise InvoiceEmailThrottledError.

        Called when the monthly communication limit for the subscriber has
        been exceeded; always raises after logging.
        """
        self.skipped_email = True
        month_name = self.invoice.date_start.strftime("%B")
        self.save()
        log_accounting_info(
            "Throttled billing statements for domain %(domain)s "
            "to %(emails)s." % {
                'domain': self.invoice.get_domain(),
                'emails': ', '.join(contact_emails),
            }
        )
        raise InvoiceEmailThrottledError(
            "Invoice communications exceeded the maximum limit of "
            "%(max_limit)d for domain %(domain)s for the month of "
            "%(month_name)s." % {
                'max_limit': MAX_INVOICE_COMMUNICATIONS,
                'domain': self.invoice.get_domain(),
                'month_name': month_name,
            })

    def email_context(self):
        """Base template context shared by all invoice emails; subclasses extend it."""
        from corehq.apps.domain.views.accounting import DomainBillingStatementsView
        from corehq.apps.domain.views.settings import DefaultProjectSettingsView
        month_name = self.invoice.date_start.strftime("%B")
        domain = self.invoice.get_domain()
        context = {
            'month_name': month_name,
            'domain': domain,
            'domain_url': absolute_reverse(DefaultProjectSettingsView.urlname,
                                           args=[domain]),
            'statement_number': self.invoice.invoice_number,
            'payment_status': (_("Paid") if self.invoice.is_paid
                               else _("Payment Required")),
            'amount_due': fmt_dollar_amount(self.invoice.balance),
            'statements_url': absolute_reverse(
                DomainBillingStatementsView.urlname, args=[domain]),
            'invoicing_contact_email': settings.INVOICING_CONTACT_EMAIL,
            'accounts_email': settings.ACCOUNTS_EMAIL,
        }
        return context

    def email_subject(self):
        raise NotImplementedError()

    def can_view_statement(self, web_user):
        raise NotImplementedError()

    def send_email(self, contact_email=None, cc_emails=None):
        """Render and asynchronously send the invoice email (PDF attached) to
        `contact_email` (cc `cc_emails`), then record the recipients.

        Greeting is personalized when `contact_email` matches a WebUser.
        """
        pdf_attachment = {
            'title': self.pdf.get_filename(self.invoice),
            'file_obj': BytesIO(self.pdf.get_data(self.invoice)),
            'mimetype': 'application/pdf',
        }
        domain = self.invoice.get_domain()
        subject = self.email_subject()
        context = self.email_context()
        # email_from() is provided by subclasses.
        email_from = self.email_from()
        greeting = _("Hello,")
        can_view_statement = False
        web_user = WebUser.get_by_username(contact_email)
        if web_user is not None:
            if web_user.first_name:
                greeting = _("Dear %s,") % web_user.first_name
            can_view_statement = self.can_view_statement(web_user)
        context['greeting'] = greeting
        context['can_view_statement'] = can_view_statement
        email_html = render_to_string(self.html_template, context)
        email_plaintext = render_to_string(self.text_template, context)
        send_html_email_async.delay(
            subject, contact_email, email_html,
            text_content=email_plaintext,
            email_from=email_from,
            file_attachments=[pdf_attachment],
            cc=cc_emails
        )
        # Track everyone the statement went to for auditing.
        self.emailed_to_list.extend([contact_email])
        if cc_emails:
            self.emailed_to_list.extend(cc_emails)
        self.save()
        if self.invoice.is_customer_invoice:
            log_message = "Sent billing statements for account %(account)s to %(emails)s." % {
                'account': self.invoice.account,
                'emails': contact_email,
            }
        else:
            log_message = "Sent billing statements for domain %(domain)s to %(emails)s." % {
                'domain': domain,
                'emails': contact_email,
            }
        log_accounting_info(log_message)
class WireBillingRecord(BillingRecordBase):
    """Record of emailing a wire-transfer (bulk) invoice to a client."""
    invoice = models.ForeignKey(WireInvoice, on_delete=models.PROTECT)

    INVOICE_HTML_TEMPLATE = 'accounting/email/wire_invoice.html'
    INVOICE_TEXT_TEMPLATE = 'accounting/email/wire_invoice.txt'

    class Meta(object):
        app_label = 'accounting'

    @property
    def should_send_email(self):
        # Wire invoices are always emailed unless the invoice is hidden.
        return not self.invoice.is_hidden

    @staticmethod
    def is_email_throttled():
        # Wire billing statements are never throttled.
        return False

    def email_subject(self):
        return "Your %(month)s Bulk Billing Statement for Project Space %(domain)s" % {
            'month': self.invoice.date_start.strftime("%B"),
            'domain': self.invoice.get_domain(),
        }

    @staticmethod
    def email_from():
        return "Dimagi Accounting <{email}>".format(email=settings.INVOICING_CONTACT_EMAIL)

    def can_view_statement(self, web_user):
        return web_user.is_domain_admin(self.invoice.get_domain())
class WirePrepaymentBillingRecord(WireBillingRecord):
    """Proxy of WireBillingRecord used for prepayment (credit purchase) invoices."""

    class Meta(object):
        app_label = 'accounting'
        proxy = True

    def email_subject(self):
        subject = _("Your prepayment invoice")
        return subject

    def can_view_statement(self, web_user):
        domain = self.invoice.get_domain()
        return web_user.is_domain_admin(domain)
class BillingRecord(BillingRecordBase):
    """Record of emailing a subscription Invoice; picks templates based on the
    subscription's service type and auto-pay status, and summarizes available
    credits for the email body.
    """
    invoice = models.ForeignKey(Invoice, on_delete=models.PROTECT)
    INVOICE_CONTRACTED_HTML_TEMPLATE = 'accounting/email/invoice_contracted.html'
    INVOICE_CONTRACTED_TEXT_TEMPLATE = 'accounting/email/invoice_contracted.txt'
    INVOICE_AUTOPAY_HTML_TEMPLATE = 'accounting/email/invoice_autopayment.html'
    INVOICE_AUTOPAY_TEXT_TEMPLATE = 'accounting/email/invoice_autopayment.txt'

    class Meta(object):
        app_label = 'accounting'

    @property
    def html_template(self):
        # Implementation subscriptions take precedence over auto-pay.
        if self.invoice.subscription.service_type == SubscriptionType.IMPLEMENTATION:
            return self.INVOICE_CONTRACTED_HTML_TEMPLATE
        if self.invoice.subscription.account.auto_pay_enabled:
            return self.INVOICE_AUTOPAY_HTML_TEMPLATE
        return self.INVOICE_HTML_TEMPLATE

    @property
    def text_template(self):
        # Mirrors html_template's precedence.
        if self.invoice.subscription.service_type == SubscriptionType.IMPLEMENTATION:
            return self.INVOICE_CONTRACTED_TEXT_TEMPLATE
        if self.invoice.subscription.account.auto_pay_enabled:
            return self.INVOICE_AUTOPAY_TEXT_TEMPLATE
        return self.INVOICE_TEXT_TEMPLATE

    @property
    def should_send_email(self):
        """Suppress the email for auto-generated zero-balance invoices, small
        contracted invoices, hidden invoices, or opted-out subscriptions.
        """
        subscription = self.invoice.subscription
        autogenerate = (subscription.auto_generate_credits and not self.invoice.balance)
        small_contracted = (self.invoice.balance <= SMALL_INVOICE_THRESHOLD and
                            subscription.service_type == SubscriptionType.IMPLEMENTATION)
        hidden = self.invoice.is_hidden
        do_not_email_invoice = self.invoice.subscription.do_not_email_invoice
        return not (autogenerate or small_contracted or hidden or do_not_email_invoice)

    def is_email_throttled(self):
        """True if this subscriber already received more than the allowed number
        of invoice communications overlapping this invoice's month.
        """
        month = self.invoice.date_start.month
        year = self.invoice.date_start.year
        date_start, date_end = get_first_last_days(year, month)
        return self.__class__.objects.filter(
            invoice__date_start__lte=date_end, invoice__date_end__gte=date_start,
            invoice__subscription__subscriber=self.invoice.subscription.subscriber,
            invoice__is_hidden_to_ops=False,
        ).count() > MAX_INVOICE_COMMUNICATIONS

    def email_context(self):
        """Extend the base context with plan, balance, and credit details;
        implementation subscriptions and auto-pay accounts get extra fields.
        """
        context = super(BillingRecord, self).email_context()
        # Outstanding balance across all of the domain's visible invoices.
        total_balance = sum(invoice.balance for invoice in Invoice.objects.filter(
            is_hidden=False,
            subscription__subscriber__domain=self.invoice.get_domain(),
        ))
        is_small_invoice = self.invoice.balance < SMALL_INVOICE_THRESHOLD
        payment_status = (_("Paid")
                          if self.invoice.is_paid or total_balance == 0
                          else _("Payment Required"))
        context.update({
            'plan_name': self.invoice.subscription.plan_version.plan.name,
            'date_due': self.invoice.date_due,
            'is_small_invoice': is_small_invoice,
            'total_balance': total_balance,
            'is_total_balance_due': total_balance >= SMALL_INVOICE_THRESHOLD,
            'payment_status': payment_status,
        })
        if self.invoice.subscription.service_type == SubscriptionType.IMPLEMENTATION:
            from corehq.apps.accounting.dispatcher import AccountingAdminInterfaceDispatcher
            context.update({
                'salesforce_contract_id': self.invoice.subscription.salesforce_contract_id,
                'billing_account': self.invoice.subscription.account.name,
                'billing_contacts': self.invoice.contact_emails,
                'admin_invoices_url': "{url}?subscriber={domain}".format(
                    url=absolute_reverse(AccountingAdminInterfaceDispatcher.name(), args=['invoices']),
                    domain=self.invoice.get_domain()
                )
            })
        if self.invoice.subscription.account.auto_pay_enabled:
            try:
                last_4 = getattr(self.invoice.subscription.account.autopay_card, 'last4', None)
            except StripePaymentMethod.DoesNotExist:
                last_4 = None
            context.update({
                'auto_pay_user': self.invoice.subscription.account.auto_pay_user,
                'last_4': last_4,
            })
        # NOTE(review): self.credits is a bound method, not a call; Django
        # templates invoke callables on access — confirm the templates rely on this.
        context.update({
            'credits': self.credits,
        })
        return context

    def credits(self):
        """Collect subscription- and account-level credit summaries by category."""
        credits = {
            'account': {},
            'subscription': {},
        }
        # Each helper mutates `credits` in place (the returned dict is the same object).
        self._add_product_credits(credits)
        self._add_user_credits(credits)
        self._add_sms_credits(credits)
        self._add_general_credits(credits)
        return credits

    def _add_product_credits(self, credits):
        """Add product-credit balances to `credits` when a balance exists or a
        product credit adjustment touched this invoice.
        """
        credit_adjustments = CreditAdjustment.objects.filter(
            invoice=self.invoice,
            line_item__product_rate__isnull=False,
        )
        subscription_credits = BillingRecord._get_total_balance(
            CreditLine.get_credits_by_subscription_and_features(
                self.invoice.subscription,
                is_product=True,
            )
        )
        if subscription_credits or credit_adjustments.filter(
            credit_line__subscription=self.invoice.subscription,
        ):
            credits['subscription'].update({
                'product': {
                    'amount': quantize_accounting_decimal(subscription_credits),
                }
            })
        account_credits = BillingRecord._get_total_balance(
            CreditLine.get_credits_for_account(
                self.invoice.subscription.account,
                is_product=True,
            )
        )
        if account_credits or credit_adjustments.filter(
            credit_line__subscription=None,
        ):
            credits['account'].update({
                'product': {
                    'amount': quantize_accounting_decimal(account_credits),
                }
            })
        return credits

    def _add_user_credits(self, credits):
        """Add user-feature credit balances; same structure as _add_product_credits."""
        credit_adjustments = CreditAdjustment.objects.filter(
            invoice=self.invoice,
            line_item__feature_rate__feature__feature_type=FeatureType.USER,
        )
        subscription_credits = BillingRecord._get_total_balance(
            CreditLine.get_credits_by_subscription_and_features(
                self.invoice.subscription,
                feature_type=FeatureType.USER,
            )
        )
        if subscription_credits or credit_adjustments.filter(
            credit_line__subscription=self.invoice.subscription,
        ):
            credits['subscription'].update({
                'user': {
                    'amount': quantize_accounting_decimal(subscription_credits),
                }
            })
        account_credits = BillingRecord._get_total_balance(
            CreditLine.get_credits_for_account(
                self.invoice.subscription.account,
                feature_type=FeatureType.USER,
            )
        )
        if account_credits or credit_adjustments.filter(
            credit_line__subscription=None,
        ):
            credits['account'].update({
                'user': {
                    'amount': quantize_accounting_decimal(account_credits),
                }
            })
        return credits

    def _add_sms_credits(self, credits):
        """Add SMS-feature credit balances; same structure as _add_product_credits."""
        credit_adjustments = CreditAdjustment.objects.filter(
            invoice=self.invoice,
            line_item__feature_rate__feature__feature_type=FeatureType.SMS,
        )
        subscription_credits = BillingRecord._get_total_balance(
            CreditLine.get_credits_by_subscription_and_features(
                self.invoice.subscription,
                feature_type=FeatureType.SMS,
            )
        )
        if subscription_credits or credit_adjustments.filter(
            credit_line__subscription=self.invoice.subscription,
        ):
            credits['subscription'].update({
                'sms': {
                    'amount': quantize_accounting_decimal(subscription_credits),
                }
            })
        account_credits = BillingRecord._get_total_balance(
            CreditLine.get_credits_for_account(
                self.invoice.subscription.account,
                feature_type=FeatureType.SMS,
            )
        )
        if account_credits or credit_adjustments.filter(
            credit_line__subscription=None,
        ):
            credits['account'].update({
                'sms': {
                    'amount': quantize_accounting_decimal(account_credits),
                }
            })
        return credits

    def _add_general_credits(self, credits):
        """Add general (no feature/product rate) credit balances."""
        credit_adjustments = CreditAdjustment.objects.filter(
            invoice=self.invoice,
            line_item__feature_rate=None,
            line_item__product_rate=None,
        )
        subscription_credits = BillingRecord._get_total_balance(
            CreditLine.get_credits_by_subscription_and_features(
                self.invoice.subscription,
            )
        )
        if subscription_credits or credit_adjustments.filter(
            credit_line__subscription=self.invoice.subscription,
        ):
            credits['subscription'].update({
                'general': {
                    'amount': quantize_accounting_decimal(subscription_credits),
                }
            })
        account_credits = BillingRecord._get_total_balance(
            CreditLine.get_credits_for_account(
                self.invoice.subscription.account,
            )
        )
        if account_credits or credit_adjustments.filter(
            credit_line__subscription=None,
        ):
            credits['account'].update({
                'general': {
                    'amount': quantize_accounting_decimal(account_credits),
                }
            })
        return credits

    def email_subject(self):
        month_name = self.invoice.date_start.strftime("%B")
        return "Your %(month)s CommCare Billing Statement for Project Space %(domain)s" % {
            'month': month_name,
            'domain': self.invoice.subscription.subscriber.domain,
        }

    def email_from(self):
        return get_dimagi_from_email()

    @staticmethod
    def _get_total_balance(credit_lines):
        """Sum the balances of the given credit lines (Decimal('0.0') when empty)."""
        return (
            sum([credit_line.balance for credit_line in credit_lines])
            if credit_lines else Decimal('0.0')
        )

    def can_view_statement(self, web_user):
        return web_user.is_domain_admin(self.invoice.get_domain())
class CustomerBillingRecord(BillingRecordBase):
    """Record of emailing a CustomerInvoice; mirrors BillingRecord but works at
    the customer-account level, where an invoice may span many subscriptions.
    """
    invoice = models.ForeignKey(CustomerInvoice, on_delete=models.PROTECT)
    INVOICE_AUTOPAY_HTML_TEMPLATE = 'accounting/email/invoice_autopayment.html'
    INVOICE_AUTOPAY_TEXT_TEMPLATE = 'accounting/email/invoice_autopayment.txt'
    INVOICE_HTML_TEMPLATE = 'accounting/email/customer_invoice.html'
    INVOICE_TEXT_TEMPLATE = 'accounting/email/customer_invoice.txt'

    class Meta(object):
        app_label = 'accounting'

    @property
    def html_template(self):
        if self.invoice.account.auto_pay_enabled:
            return self.INVOICE_AUTOPAY_HTML_TEMPLATE
        return self.INVOICE_HTML_TEMPLATE

    @property
    def text_template(self):
        if self.invoice.account.auto_pay_enabled:
            return self.INVOICE_AUTOPAY_TEXT_TEMPLATE
        return self.INVOICE_TEXT_TEMPLATE

    @property
    def should_send_email(self):
        return not self.invoice.is_hidden

    def email_context(self):
        """Extend the base context with account, balance, and credit details."""
        from corehq.apps.accounting.views import EnterpriseBillingStatementsView
        context = super(CustomerBillingRecord, self).email_context()
        is_small_invoice = self.invoice.balance < SMALL_INVOICE_THRESHOLD
        payment_status = (_("Paid")
                          if self.invoice.is_paid or self.invoice.balance == 0
                          else _("Payment Required"))
        # Random domain, because all subscriptions on a customer account link to the same Enterprise Dashboard
        domain = self.invoice.subscriptions.first().subscriber.domain
        context.update({
            'account_name': self.invoice.account.name,
            'date_due': self.invoice.date_due,
            'is_small_invoice': is_small_invoice,
            'total_balance': '{:.2f}'.format(self.invoice.balance),
            'is_total_balance_due': self.invoice.balance >= SMALL_INVOICE_THRESHOLD,
            'payment_status': payment_status,
            'statements_url': absolute_reverse(
                EnterpriseBillingStatementsView.urlname, args=[domain]),
        })
        if self.invoice.account.auto_pay_enabled:
            try:
                last_4 = getattr(self.invoice.account.autopay_card, 'last4', None)
            except StripePaymentMethod.DoesNotExist:
                last_4 = None
            context.update({
                'auto_pay_user': self.invoice.account.auto_pay_user,
                'last_4': last_4,
            })
        # NOTE(review): self.credits is a bound method, not a call; Django
        # templates invoke callables on access — confirm templates rely on this.
        context.update({
            'credits': self.credits,
        })
        return context

    def credits(self):
        """Collect subscription- and account-level credit summaries by category."""
        credits = {
            'account': {},
            'subscription': {},
        }
        # Each helper mutates `credits` in place.
        self._add_product_credits(credits)
        self._add_user_credits(credits)
        self._add_sms_credits(credits)
        self._add_general_credits(credits)
        return credits

    def _add_product_credits(self, credits):
        """Add product-credit balances to `credits`.

        Bug fix: previously updated `credit_adjustments['subscription']` — a
        QuerySet, which does not support item access — instead of
        `credits['subscription']` as the sibling _add_*_credits methods do.
        """
        credit_adjustments = CreditAdjustment.objects.filter(
            customer_invoice=self.invoice,
            line_item__product_rate__isnull=False
        )
        subscription_credits = CustomerBillingRecord._get_total_balance(
            CreditLine.get_credits_for_subscriptions(
                self.invoice.subscriptions,
                is_product=True
            )
        )
        if subscription_credits or self._subscriptions_in_credit_adjustments(credit_adjustments):
            credits['subscription'].update({
                'product': {
                    'amount': quantize_accounting_decimal(subscription_credits)
                }
            })
        account_credits = CustomerBillingRecord._get_total_balance(
            CreditLine.get_credits_for_account(
                self.invoice.account,
                is_product=True
            )
        )
        if account_credits or credit_adjustments.filter(credit_line__subscription=None):
            credits['account'].update({
                'product': {
                    'amount': quantize_accounting_decimal(account_credits)
                }
            })
        return credits

    def _add_user_credits(self, credits):
        """Add user-feature credit balances; same structure as _add_product_credits."""
        credit_adjustments = CreditAdjustment.objects.filter(
            customer_invoice=self.invoice,
            line_item__feature_rate__feature__feature_type=FeatureType.USER
        )
        subscription_credits = CustomerBillingRecord._get_total_balance(
            CreditLine.get_credits_for_subscriptions(
                self.invoice.subscriptions,
                feature_type=FeatureType.USER
            )
        )
        if subscription_credits or self._subscriptions_in_credit_adjustments(credit_adjustments):
            credits['subscription'].update({
                'user': {
                    'amount': quantize_accounting_decimal(subscription_credits)
                }
            })
        account_credits = CustomerBillingRecord._get_total_balance(
            CreditLine.get_credits_for_account(
                self.invoice.account,
                feature_type=FeatureType.USER
            )
        )
        if account_credits or credit_adjustments.filter(credit_line__subscription=None):
            credits['account'].update({
                'user': {
                    'amount': quantize_accounting_decimal(account_credits)
                }
            })
        return credits

    def _add_sms_credits(self, credits):
        """Add SMS-feature credit balances; same structure as _add_product_credits."""
        credit_adjustments = CreditAdjustment.objects.filter(
            customer_invoice=self.invoice,
            line_item__feature_rate__feature__feature_type=FeatureType.SMS
        )
        subscription_credits = CustomerBillingRecord._get_total_balance(
            CreditLine.get_credits_for_subscriptions(
                self.invoice.subscriptions,
                feature_type=FeatureType.SMS
            )
        )
        if subscription_credits or self._subscriptions_in_credit_adjustments(credit_adjustments):
            credits['subscription'].update({
                'sms': {
                    'amount': quantize_accounting_decimal(subscription_credits)
                }
            })
        account_credits = CustomerBillingRecord._get_total_balance(
            CreditLine.get_credits_for_account(
                self.invoice.account,
                feature_type=FeatureType.SMS
            )
        )
        if account_credits or credit_adjustments.filter(credit_line__subscription=None):
            credits['account'].update({
                'sms': {
                    'amount': quantize_accounting_decimal(account_credits)
                }
            })
        return credits

    def _add_general_credits(self, credits):
        """Add general (no feature/product rate) credit balances."""
        credit_adjustments = CreditAdjustment.objects.filter(
            customer_invoice=self.invoice,
            line_item__feature_rate=None,
            line_item__product_rate=None
        )
        subscription_credits = CustomerBillingRecord._get_total_balance(
            CreditLine.get_credits_for_subscriptions(
                self.invoice.subscriptions
            )
        )
        if subscription_credits or self._subscriptions_in_credit_adjustments(credit_adjustments):
            credits['subscription'].update({
                'general': {
                    'amount': quantize_accounting_decimal(subscription_credits)
                }
            })
        account_credits = CustomerBillingRecord._get_total_balance(
            CreditLine.get_credits_for_account(
                self.invoice.account
            )
        )
        if account_credits or credit_adjustments.filter(credit_line__subscription=None):
            credits['account'].update({
                'general': {
                    'amount': quantize_accounting_decimal(account_credits)
                }
            })
        return credits

    def _subscriptions_in_credit_adjustments(self, credit_adjustments):
        """True if any of the invoice's subscriptions has an adjustment in the set."""
        for subscription in self.invoice.subscriptions.all():
            if credit_adjustments.filter(
                credit_line__subscription=subscription
            ):
                return True
        return False

    def email_subject(self):
        month_name = self.invoice.date_start.strftime("%B")
        return "Your %(month)s CommCare Billing Statement for Customer Account %(account_name)s" % {
            'month': month_name,
            'account_name': self.invoice.account.name,
        }

    def email_from(self):
        return get_dimagi_from_email()

    @staticmethod
    def _get_total_balance(credit_lines):
        """Sum the balances of the given credit lines (Decimal('0.0') when empty)."""
        return (
            sum([credit_line.balance for credit_line in credit_lines])
            if credit_lines else Decimal('0.0')
        )

    def can_view_statement(self, web_user):
        # Admin of any covered domain may view the statement.
        for subscription in self.invoice.subscriptions.all():
            if web_user.is_domain_admin(subscription.subscriber.domain):
                return True
        return False
class InvoicePdf(BlobMixin, SafeSaveDocument):
    """Couch document holding the rendered PDF of an invoice as a blob attachment."""
    invoice_id = StringProperty()
    date_created = DateTimeProperty()
    is_wire = BooleanProperty(default=False)
    is_customer = BooleanProperty(default=False)
    _blobdb_type_code = CODES.invoice

    def generate_pdf(self, invoice):
        """Render `invoice` into a PDF and attach it to this document.

        Handles three invoice shapes: regular subscription invoices and
        customer invoices (rendered from LineItems), and wire prepayment
        invoices (rendered from invoice.items).
        """
        # Initial save so the document has an _id before attaching the blob.
        self.save()
        domain = invoice.get_domain()
        pdf_data = NamedTemporaryFile()
        account_name = ''
        if invoice.is_customer_invoice:
            account_name = invoice.account.name
        template = InvoiceTemplate(
            pdf_data.name,
            invoice_number=invoice.invoice_number,
            to_address=get_address_from_invoice(invoice),
            project_name=domain,
            invoice_date=invoice.date_created.date(),
            due_date=invoice.date_due,
            date_start=invoice.date_start,
            date_end=invoice.date_end,
            subtotal=invoice.subtotal,
            tax_rate=invoice.tax_rate,
            # Wire invoices may lack these attributes, hence getattr defaults.
            applied_tax=getattr(invoice, 'applied_tax', Decimal('0.000')),
            applied_credit=getattr(invoice, 'applied_credit', Decimal('0.000')),
            total=invoice.get_total(),
            is_wire=invoice.is_wire,
            is_customer=invoice.is_customer_invoice,
            is_prepayment=invoice.is_wire and invoice.is_prepayment,
            account_name=account_name
        )
        if not invoice.is_wire:
            if invoice.is_customer_invoice:
                line_items = LineItem.objects.filter(customer_invoice=invoice)
            else:
                line_items = LineItem.objects.filter(subscription_invoice=invoice)
            for line_item in line_items:
                is_unit = line_item.unit_description is not None
                # Quarterly/annual customer invoices show the monthly fee per unit.
                is_quarterly = line_item.invoice.is_customer_invoice and \
                    line_item.invoice.account.invoicing_plan != InvoicingPlan.MONTHLY
                unit_cost = line_item.subtotal
                if is_unit:
                    unit_cost = line_item.unit_cost
                if is_quarterly and line_item.base_description is not None:
                    unit_cost = line_item.product_rate.monthly_fee
                description = line_item.base_description or line_item.unit_description
                if line_item.quantity > 0:
                    template.add_item(
                        description,
                        line_item.quantity if is_unit or is_quarterly else 1,
                        unit_cost,
                        line_item.subtotal,
                        line_item.applied_credit,
                        line_item.total
                    )
        if invoice.is_wire and invoice.is_prepayment:
            # Prepayments are rendered one row per credit type at unit cost 1.
            unit_cost = 1
            applied_credit = 0
            for item in invoice.items:
                template.add_item(item['type'],
                                  item['amount'],
                                  unit_cost,
                                  item['amount'],
                                  applied_credit,
                                  item['amount'])
        template.get_pdf()
        filename = self.get_filename(invoice)
        blob_domain = domain or UNKNOWN_DOMAIN
        # this is slow and not unit tested
        # best to just skip during unit tests for speed
        if not settings.UNIT_TESTING:
            self.put_attachment(pdf_data, filename, 'application/pdf', domain=blob_domain)
        else:
            self.put_attachment('', filename, 'application/pdf', domain=blob_domain)
        pdf_data.close()
        self.invoice_id = str(invoice.id)
        self.date_created = datetime.datetime.utcnow()
        self.is_wire = invoice.is_wire
        self.is_customer = invoice.is_customer_invoice
        self.save()

    @staticmethod
    def get_filename(invoice):
        """Attachment filename keyed by the invoice's start year and month."""
        return "statement_%(year)d_%(month)d.pdf" % {
            'year': invoice.date_start.year,
            'month': invoice.date_start.month,
        }

    def get_data(self, invoice):
        """Return the raw PDF bytes for the given invoice's attachment."""
        with self.fetch_attachment(self.get_filename(invoice), stream=True) as fh:
            return fh.read()
class LineItemManager(models.Manager):
    """Manager helpers for splitting line items into product vs. feature charges."""

    def get_products(self):
        # Product line items carry no feature rate.
        return self.get_queryset().filter(feature_rate__exact=None)

    def get_features(self):
        # Feature line items carry no product rate.
        return self.get_queryset().filter(product_rate__exact=None)

    def get_feature_by_type(self, feature_type):
        # Line items billed for a specific feature type (e.g. USER, SMS).
        return self.get_queryset().filter(feature_rate__feature__feature_type=feature_type)
class LineItem(models.Model):
    """One charge on an invoice: either a product (plan) fee or a feature fee.

    Exactly one of subscription_invoice / customer_invoice is expected to be
    set; the `invoice` property abstracts over the two.
    """
    subscription_invoice = models.ForeignKey(Invoice, on_delete=models.PROTECT, null=True)
    customer_invoice = models.ForeignKey(CustomerInvoice, on_delete=models.PROTECT, null=True)
    feature_rate = models.ForeignKey(FeatureRate, on_delete=models.PROTECT, null=True)
    product_rate = models.ForeignKey(SoftwareProductRate, on_delete=models.PROTECT, null=True)
    base_description = models.TextField(blank=True, null=True)
    base_cost = models.DecimalField(default=Decimal('0.0000'), max_digits=10, decimal_places=4)
    unit_description = models.TextField(blank=True, null=True)
    unit_cost = models.DecimalField(default=Decimal('0.0000'), max_digits=10, decimal_places=4)
    quantity = models.IntegerField(default=1, validators=integer_field_validators)
    last_modified = models.DateTimeField(auto_now=True)

    objects = LineItemManager()

    class Meta(object):
        app_label = 'accounting'

    @property
    def invoice(self):
        """Whichever invoice this line item belongs to (subscription or customer)."""
        if self.subscription_invoice:
            return self.subscription_invoice
        else:
            return self.customer_invoice

    @invoice.setter
    def invoice(self, invoice):
        # Route to the correct FK based on the invoice's type.
        if invoice.is_customer_invoice:
            self.customer_invoice = invoice
        else:
            self.subscription_invoice = invoice

    @property
    def subtotal(self):
        # Non-monthly (e.g. quarterly) customer invoices multiply the base cost
        # by quantity as well; monthly invoices charge the base cost once.
        if self.customer_invoice and self.customer_invoice.account.invoicing_plan != InvoicingPlan.MONTHLY:
            return self.base_cost * self.quantity + self.unit_cost * self.quantity
        return self.base_cost + self.unit_cost * self.quantity

    @property
    def applied_credit(self):
        """
        The total amount of credit applied specifically to this LineItem.
        """
        if self.creditadjustment_set.count() == 0:
            return Decimal('0.0000')
        return sum([credit.amount for credit in self.creditadjustment_set.all()])

    @property
    def total(self):
        return self.subtotal + self.applied_credit

    def calculate_credit_adjustments(self):
        """
        This goes through all credit lines that:
        - specify the related feature or product rate that generated this line item
        """
        current_total = self.total
        credit_lines = CreditLine.get_credits_for_line_item(self)
        CreditLine.apply_credits_toward_balance(credit_lines, current_total, line_item=self)
class CreditLine(models.Model):
    """
    The amount of money in USD that exists and can be applied toward a specific
    account, a specific subscription, or specific rates in that subscription.

    A credit line with neither is_product nor feature_type set is "general";
    otherwise it is earmarked for product charges or one feature type only.
    """
    account = models.ForeignKey(BillingAccount, on_delete=models.PROTECT)
    # Null subscription => the credit is account-level rather than tied to
    # one subscription.
    subscription = models.ForeignKey(Subscription, on_delete=models.PROTECT, null=True, blank=True)
    # Earmarks the credit for product (software plan) charges only.
    is_product = models.BooleanField(default=False)
    # Earmarks the credit for a single feature type (User/SMS) only.
    feature_type = models.CharField(max_length=10, null=True, blank=True,
                                    choices=FeatureType.CHOICES)
    date_created = models.DateTimeField(auto_now_add=True)
    # Remaining credit; only modified through adjust_credit_balance().
    balance = models.DecimalField(default=Decimal('0.0000'), max_digits=10, decimal_places=4)
    is_active = models.BooleanField(default=True)
    last_modified = models.DateTimeField(auto_now=True)
    class Meta(object):
        app_label = 'accounting'
    def __str__(self):
        credit_level = ("Account-Level" if self.subscription is None
                        else "Subscription-Level")
        return ("%(level)s credit [Account %(account_id)d]%(feature)s"
                "%(product)s, balance %(balance)s" % {
            'level': credit_level,
            'account_id': self.account.id,
            'feature': (' for Feature %s' % self.feature_type
                        if self.feature_type is not None else ""),
            'product': (' for Product'
                        if self.is_product else ""),
            'balance': self.balance,
        })
    def save(self, *args, **kwargs):
        from corehq.apps.accounting.mixins import (
            get_credits_available_for_product_in_account,
            get_credits_available_for_product_in_subscription,
        )
        super(CreditLine, self).save(*args, **kwargs)
        # Any change to a credit line invalidates the cached product-credit
        # lookups for its account and subscription.
        if self.account:
            get_credits_available_for_product_in_account.clear(self.account)
        if self.subscription:
            get_credits_available_for_product_in_subscription.clear(self.subscription)
    def adjust_credit_balance(self, amount, is_new=False, note=None,
                              line_item=None, invoice=None, customer_invoice=None,
                              payment_record=None, related_credit=None,
                              reason=None, web_user=None):
        """
        Record a CreditAdjustment of ``amount`` (positive or negative)
        against this credit line and update its balance.

        The adjustment reason is derived from whichever related object is
        supplied (payment record -> DIRECT_PAYMENT, related credit ->
        TRANSFER, invoice / customer invoice -> INVOICE, line item ->
        LINE_ITEM), defaulting to MANUAL.

        Raises CreditLineError if a line item is combined with an invoice
        or customer invoice.
        """
        note = note or ""
        if line_item is not None and (invoice is not None or customer_invoice is not None):
            raise CreditLineError("You may only have an invoice OR a line item making this adjustment.")
        if reason is None:
            reason = CreditAdjustmentReason.MANUAL
        # NOTE(review): an explicitly passed `reason` is overridden below
        # whenever a related object is supplied — confirm this is intended.
        if payment_record is not None:
            reason = CreditAdjustmentReason.DIRECT_PAYMENT
        elif related_credit is not None:
            reason = CreditAdjustmentReason.TRANSFER
        elif invoice is not None:
            reason = CreditAdjustmentReason.INVOICE
        elif customer_invoice is not None:
            reason = CreditAdjustmentReason.INVOICE
        elif line_item is not None:
            reason = CreditAdjustmentReason.LINE_ITEM
        if is_new:
            note = "Initialization of credit line. %s" % note
        credit_adjustment = CreditAdjustment(
            credit_line=self,
            note=note,
            amount=amount,
            reason=reason,
            payment_record=payment_record,
            line_item=line_item,
            invoice=invoice,
            customer_invoice=customer_invoice,
            related_credit=related_credit,
            web_user=web_user,
        )
        credit_adjustment.save()
        # The F() expression makes the balance increment atomic at the DB
        # level; refresh_from_db() then replaces the F() expression on this
        # instance with the resulting concrete value.
        self.balance = F('balance') + amount
        self.save()
        self.refresh_from_db()
    @classmethod
    def get_credits_for_line_item(cls, line_item):
        """
        Return the credit lines applicable to ``line_item``
        (subscription-level credits chained before account-level credits).
        """
        is_product = line_item.product_rate is not None
        feature_type = (
            line_item.feature_rate.feature.feature_type
            if line_item.feature_rate is not None else None
        )
        # A line item must come from exactly one of a product rate or a
        # feature rate.
        assert is_product or feature_type
        assert not (is_product and feature_type)
        if line_item.invoice.is_customer_invoice:
            return cls.get_credits_for_line_item_in_customer_invoice(line_item, feature_type, is_product)
        else:
            return cls.get_credits_for_line_item_in_invoice(line_item, feature_type, is_product)
    @classmethod
    def get_credits_for_line_item_in_invoice(cls, line_item, feature_type, is_product):
        # Subscription-level credits are yielded before account-level ones.
        # Exactly one of feature_type / is_product is set (asserted by the
        # caller); otherwise this would implicitly return None.
        if feature_type:
            return itertools.chain(
                cls.get_credits_by_subscription_and_features(
                    line_item.invoice.subscription,
                    feature_type=feature_type,
                ),
                cls.get_credits_for_account(
                    line_item.invoice.subscription.account,
                    feature_type=feature_type,
                )
            )
        if is_product:
            return itertools.chain(
                cls.get_credits_by_subscription_and_features(
                    line_item.invoice.subscription,
                    is_product=True,
                ),
                cls.get_credits_for_account(
                    line_item.invoice.subscription.account,
                    is_product=True,
                )
            )
    @classmethod
    def get_credits_for_line_item_in_customer_invoice(cls, line_item, feature_type, is_product):
        # Same ordering as the plain-invoice variant, but a customer invoice
        # spans multiple subscriptions and links to the account directly.
        if feature_type:
            return itertools.chain(
                cls.get_credits_for_subscriptions(
                    subscriptions=line_item.invoice.subscriptions.all(),
                    feature_type=feature_type
                ),
                cls.get_credits_for_account(
                    account=line_item.invoice.account,
                    feature_type=feature_type
                )
            )
        if is_product:
            return itertools.chain(
                cls.get_credits_for_subscriptions(
                    subscriptions=line_item.invoice.subscriptions.all(),
                    is_product=is_product
                ),
                cls.get_credits_for_account(
                    account=line_item.invoice.account,
                    is_product=is_product
                )
            )
    @classmethod
    def get_credits_for_invoice(cls, invoice):
        """
        Return the general (non-earmarked) credits applicable to ``invoice``:
        the invoice's subscription credits, the account's credits, and —
        on upgrade — credits already moved to the follow-on subscription.
        """
        relevant_credits = [
            cls.get_credits_by_subscription_and_features(invoice.subscription),
            cls.get_credits_for_account(invoice.subscription.account)
        ]
        if invoice.subscription.next_subscription:
            # check for a transfer of subscription credits due to upgrades by
            # looking first at the active subscription or the "next" subscription
            # if the accounts don't match with the active subscription.
            active_sub = Subscription.get_active_subscription_by_domain(
                invoice.subscription.subscriber.domain
            )
            if active_sub.account == invoice.subscription.account:
                relevant_credits.append(
                    cls.get_credits_by_subscription_and_features(active_sub)
                )
            elif (invoice.subscription.next_subscription.account
                  == invoice.subscription.account):
                relevant_credits.append(
                    cls.get_credits_by_subscription_and_features(
                        invoice.subscription.next_subscription
                    )
                )
        return itertools.chain(*relevant_credits)
    @classmethod
    def get_credits_for_customer_invoice(cls, invoice):
        """General credits for a customer invoice: all of its subscriptions'
        credits chained before the account's credits."""
        return itertools.chain(
            cls.get_credits_for_subscriptions(invoice.subscriptions.all()),
            cls.get_credits_for_account(invoice.account)
        )
    @classmethod
    def get_credits_for_subscriptions(cls, subscriptions, feature_type=None, is_product=False):
        """Union of the matching credit lines across ``subscriptions``."""
        credit_list = cls.objects.none()
        for subscription in subscriptions.all():
            # QuerySet.union() combines (and de-duplicates) the per-
            # subscription result sets.
            credit_list = credit_list.union(cls.get_credits_by_subscription_and_features(
                subscription,
                feature_type=feature_type,
                is_product=is_product
            ))
        return credit_list
    @classmethod
    def get_credits_for_account(cls, account, feature_type=None, is_product=False):
        """Active account-level credit lines (subscription is null) matching
        the given earmarks."""
        assert not (feature_type and is_product)
        return cls.objects.filter(
            account=account, subscription__exact=None, is_active=True
        ).filter(
            is_product=is_product, feature_type__exact=feature_type
        ).all()
    @classmethod
    def get_credits_by_subscription_and_features(cls, subscription,
                                                 feature_type=None,
                                                 is_product=False):
        """Active credit lines for ``subscription`` matching the given
        earmarks (general credits when both earmarks are unset)."""
        assert not (feature_type and is_product)
        return cls.objects.filter(
            subscription=subscription,
            feature_type__exact=feature_type,
            is_product=is_product,
            is_active=True
        ).all()
    @classmethod
    def get_non_general_credits_by_subscription(cls, subscription):
        """Active subscription credits earmarked for a product or a feature
        (i.e. everything except general credits)."""
        return cls.objects.filter(subscription=subscription, is_active=True).filter(
            Q(is_product=True) |
            Q(feature_type__in=[f[0] for f in FeatureType.CHOICES])
        ).all()
    @classmethod
    def add_credit(cls, amount, account=None, subscription=None,
                   is_product=False, feature_type=None, payment_record=None,
                   invoice=None, customer_invoice=None, line_item=None, related_credit=None,
                   note=None, reason=None, web_user=None, permit_inactive=False):
        """
        Add ``amount`` (may be negative) to the matching credit line,
        creating the credit line first if none exists.

        Raises CreditLineError when neither account nor subscription is
        given, when both a feature type and product are specified, or when
        multiple matching credit lines are found.
        """
        if account is None and subscription is None:
            raise CreditLineError(
                "You must specify either a subscription "
                "or account to add this credit to."
            )
        if feature_type is not None and is_product:
            raise CreditLineError(
                "Can only add credit for a product OR a feature, but not both."
            )
        account = account or subscription.account
        try:
            credit_line = cls.objects.get(
                account__exact=account,
                subscription__exact=subscription,
                is_product=is_product,
                feature_type__exact=feature_type,
                is_active=True
            )
            # NOTE(review): the get() above already filters is_active=True,
            # so this inactive check appears unreachable — confirm whether
            # the filter or the check is the intended behavior.
            if not permit_inactive and not credit_line.is_active and not invoice:
                raise CreditLineError(
                    "Could not add credit to CreditLine %s because it is "
                    "inactive." % str(credit_line)
                )
            is_new = False
        except cls.MultipleObjectsReturned as e:
            raise CreditLineError(
                "Could not find a unique credit line for %(account)s"
                "%(subscription)s%(feature)s%(product)s. %(error)s"
                "instead." % {
                    'account': "Account ID %d" % account.id,
                    'subscription': (" | Subscription ID %d" % subscription.id
                                     if subscription is not None else ""),
                    'feature': (" | Feature %s" % feature_type
                                if feature_type is not None else ""),
                    'product': (" | Product" if is_product else ""),
                    'error': str(e),
                }
            )
        except cls.DoesNotExist:
            credit_line = cls.objects.create(
                account=account,
                subscription=subscription,
                is_product=is_product,
                feature_type=feature_type,
            )
            is_new = True
        credit_line.adjust_credit_balance(amount, is_new=is_new, note=note,
                                          payment_record=payment_record,
                                          invoice=invoice, customer_invoice=customer_invoice, line_item=line_item,
                                          related_credit=related_credit,
                                          reason=reason, web_user=web_user)
        return credit_line
    @classmethod
    def apply_credits_toward_balance(cls, credit_lines, balance, **kwargs):
        """
        Draw down ``credit_lines`` in order until ``balance`` is exhausted,
        recording a negative adjustment for each line used (``kwargs`` are
        forwarded to adjust_credit_balance).

        Raises CreditLineError if the running balance ever goes negative.
        """
        for credit_line in credit_lines:
            if balance == Decimal('0.0000'):
                return
            if balance <= Decimal('0.0000'):
                raise CreditLineError(
                    "A balance went below zero dollars when applying credits "
                    "to credit line %d." % credit_line.pk
                )
            # Never draw more than the line holds or more than is owed.
            adjustment_amount = min(credit_line.balance, balance)
            if adjustment_amount > Decimal('0.0000'):
                credit_line.adjust_credit_balance(-adjustment_amount, **kwargs)
                balance -= adjustment_amount
    @classmethod
    def make_payment_towards_invoice(cls, invoice, payment_record):
        """ Make a payment for a billing account towards an invoice """
        if invoice.is_customer_invoice:
            billing_account = invoice.account
        else:
            billing_account = invoice.subscription.account
        # Two offsetting entries: record the payment received on the
        # account, then apply that amount against the invoice.
        cls.add_credit(
            payment_record.amount,
            account=billing_account,
            payment_record=payment_record,
        )
        cls.add_credit(
            -payment_record.amount,
            account=billing_account,
            invoice=invoice,
        )
class PaymentMethod(models.Model):
    """A link to a particular payment method for an account.
    Right now the only payment methods are via Stripe, but leaving that
    open for future changes.
    :customer_id: is used by the API of the payment method we're using that
    uniquely identifies the payer on their end.
    """
    # Username (email) of the web user who owns this payment method.
    web_user = models.CharField(max_length=80, db_index=True)
    method_type = models.CharField(max_length=50,
                                   default=PaymentMethodType.STRIPE,
                                   choices=PaymentMethodType.CHOICES,
                                   db_index=True)
    # Identifier assigned by the external processor (e.g. the Stripe
    # customer id); null until a remote customer has been created.
    customer_id = models.CharField(max_length=255, null=True, blank=True)
    date_created = models.DateTimeField(auto_now_add=True)
    last_modified = models.DateTimeField(auto_now=True)
    class Meta(object):
        app_label = 'accounting'
        # One payment method per (user, processor type) pair.
        unique_together = ('web_user', 'method_type')
class StripePaymentMethod(PaymentMethod):
    """Proxy for PaymentMethod that talks to the Stripe API: it manages the
    remote Stripe Customer, its cards, per-account autopay flags, and charges.
    """
    class Meta(object):
        proxy = True
        app_label = 'accounting'

    # Errors any Stripe API call may raise, for callers that want to handle
    # them as a group.
    STRIPE_GENERIC_ERROR = (stripe.error.AuthenticationError,
                            stripe.error.InvalidRequestError,
                            stripe.error.APIConnectionError,
                            stripe.error.StripeError,)

    @property
    def customer(self):
        """The Stripe Customer for this web user, created remotely if needed."""
        return self._get_or_create_stripe_customer()

    def _get_or_create_stripe_customer(self):
        # Look up the remote customer for the stored id; if the id is stale
        # (e.g. the customer was deleted on Stripe's side), create a new one.
        customer = None
        if self.customer_id is not None:
            try:
                customer = self._get_stripe_customer()
            except stripe.error.InvalidRequestError:
                # Fix: was `stripe.InvalidRequestError`, which is not defined
                # at module level in the stripe versions this file otherwise
                # targets (every other reference here uses `stripe.error.*`).
                pass
        if customer is None:
            customer = self._create_stripe_customer()
        return customer

    def _create_stripe_customer(self):
        """Create a Stripe Customer for this web user and persist its id."""
        customer = stripe.Customer.create(
            description="{}'s cards".format(self.web_user),
            email=self.web_user,
        )
        self.customer_id = customer.id
        self.save()
        return customer

    def _get_stripe_customer(self):
        return stripe.Customer.retrieve(self.customer_id)

    @property
    def all_cards(self):
        """All cards on the Stripe customer; an empty list when Stripe
        credentials are not configured."""
        try:
            return [card for card in self.customer.cards.data if card is not None]
        except stripe.error.AuthenticationError:
            if not settings.STRIPE_PRIVATE_KEY:
                # Expected in environments without Stripe configured.
                log_accounting_info("Private key is not defined in settings")
                return []
            else:
                raise

    def all_cards_serialized(self, billing_account):
        """JSON-friendly card summaries, flagging the account's autopay card."""
        return [{
            'brand': card.brand,
            'last4': card.last4,
            'exp_month': card.exp_month,
            'exp_year': card.exp_year,
            'token': card.id,
            'is_autopay': self._is_autopay(card, billing_account),
        } for card in self.all_cards]

    def get_card(self, card_token):
        return self.customer.cards.retrieve(card_token)

    def get_autopay_card(self, billing_account):
        """The card marked for autopay on ``billing_account``, or None."""
        return next((
            card for card in self.all_cards
            if self._is_autopay(card, billing_account)
        ), None)

    def remove_card(self, card_token):
        """Delete a card from Stripe, clearing any autopay use of it first."""
        card = self.get_card(card_token)
        self._remove_card_from_all_accounts(card)
        card.delete()

    def _remove_card_from_all_accounts(self, card):
        # Unlink this card from every billing account that autopays with it.
        accounts = BillingAccount.objects.filter(auto_pay_user=self.web_user)
        for account in accounts:
            if account.autopay_card == card:
                account.remove_autopay_user()

    def create_card(self, stripe_token, billing_account, domain, autopay=False):
        """Attach a new card (from a Stripe token), make it the default, and
        optionally register it as the account's autopay card."""
        customer = self.customer
        card = customer.cards.create(card=stripe_token)
        self.set_default_card(card)
        if autopay:
            self.set_autopay(card, billing_account, domain)
        return card

    def set_default_card(self, card):
        self.customer.default_card = card
        self.customer.save()
        return card

    def set_autopay(self, card, billing_account, domain):
        """
        Sets the auto_pay status on the card for a billing account.
        If there are other cards that auto_pay for that billing account, remove them.
        """
        if billing_account.auto_pay_enabled:
            self._remove_other_auto_pay_cards(billing_account)
        self._update_autopay_status(card, billing_account, autopay=True)
        billing_account.update_autopay_user(self.web_user, domain)

    def unset_autopay(self, card, billing_account):
        """
        Unsets the auto_pay status for this card, and removes it from the billing account.
        """
        if self._is_autopay(card, billing_account):
            self._update_autopay_status(card, billing_account, autopay=False)
            billing_account.remove_autopay_user()

    def _update_autopay_status(self, card, billing_account, autopay):
        # Card metadata is replaced wholesale, so copy-update-assign-save.
        metadata = card.metadata.copy()
        metadata.update({self._auto_pay_card_metadata_key(billing_account): autopay})
        card.metadata = metadata
        card.save()

    def _remove_autopay_card(self, billing_account):
        autopay_card = self.get_autopay_card(billing_account)
        if autopay_card is not None:
            self._update_autopay_status(autopay_card, billing_account, autopay=False)

    @staticmethod
    def _remove_other_auto_pay_cards(billing_account):
        # The account's current autopay card may live on another web user's
        # payment method; find it via the account's autopay user.
        user = billing_account.auto_pay_user
        try:
            other_payment_method = StripePaymentMethod.objects.get(web_user=user)
            other_payment_method._remove_autopay_card(billing_account)
        except StripePaymentMethod.DoesNotExist:
            pass

    @staticmethod
    def _is_autopay(card, billing_account):
        # Stripe metadata values round-trip as strings, hence == 'True'.
        return card.metadata.get(StripePaymentMethod._auto_pay_card_metadata_key(billing_account)) == 'True'

    @staticmethod
    def _auto_pay_card_metadata_key(billing_account):
        """
        Returns the autopay key for the billing account.
        Cards can be used to autopay for multiple billing accounts. This is stored in the `metadata` property
        on the card: {metadata: {auto_pay_{billing_account_id_1}: True, auto_pay_{billing_account_id_2}: False}}
        """
        return 'auto_pay_{billing_account_id}'.format(billing_account_id=billing_account.id)

    def create_charge(self, card, amount_in_dollars, description):
        """ Charges a stripe card and returns a transaction id """
        # Stripe bills in integer cents: quantize to a whole number (the
        # exponent of Decimal(10) is 0) before converting.
        amount_in_cents = int((amount_in_dollars * Decimal('100')).quantize(Decimal(10)))
        transaction_record = stripe.Charge.create(
            card=card,
            customer=self.customer,
            amount=amount_in_cents,
            currency=settings.DEFAULT_CURRENCY,
            description=description,
        )
        return transaction_record.id
class PaymentRecord(models.Model):
    """Records the transaction with external payment APIs.
    """
    payment_method = models.ForeignKey(PaymentMethod, on_delete=models.PROTECT,
                                       db_index=True)
    date_created = models.DateTimeField(auto_now_add=True)
    # Identifier returned by the external processor (e.g. a Stripe charge id).
    transaction_id = models.CharField(max_length=255, unique=True)
    # Amount paid, in USD.
    amount = models.DecimalField(default=Decimal('0.0000'),
                                 max_digits=10, decimal_places=4)
    last_modified = models.DateTimeField(auto_now=True)
    class Meta(object):
        app_label = 'accounting'
    @property
    def public_transaction_id(self):
        """Customer-facing id: prefixed and offset so raw DB pks are not
        exposed, e.g. "<prefix>P-<number>"."""
        ops_num = settings.INVOICE_STARTING_NUMBER + self.id
        return "%sP-%d" % (settings.INVOICE_PREFIX, ops_num)
    @classmethod
    def create_record(cls, payment_method, transaction_id, amount):
        """Persist a record of a completed external payment."""
        return cls.objects.create(
            payment_method=payment_method,
            transaction_id=transaction_id,
            amount=amount,
        )
class CreditAdjustment(ValidateModelMixin, models.Model):
    """
    A record of any additions (positive amounts) or deductions (negative amounts) that contributed to the
    current balance of the associated CreditLine.
    """
    credit_line = models.ForeignKey(CreditLine, on_delete=models.PROTECT)
    reason = models.CharField(max_length=25, default=CreditAdjustmentReason.MANUAL,
                              choices=CreditAdjustmentReason.CHOICES)
    note = models.TextField(blank=True)
    amount = models.DecimalField(default=Decimal('0.0000'), max_digits=10, decimal_places=4)
    # At most one of line_item / invoice may be set (enforced in clean()).
    line_item = models.ForeignKey(LineItem, on_delete=models.PROTECT, null=True, blank=True)
    invoice = models.ForeignKey(Invoice, on_delete=models.PROTECT, null=True, blank=True)
    customer_invoice = models.ForeignKey(CustomerInvoice, on_delete=models.PROTECT, null=True, blank=True)
    # Set when the adjustment came from a direct payment.
    payment_record = models.ForeignKey(PaymentRecord,
                                       on_delete=models.PROTECT, null=True, blank=True)
    # Set when the adjustment is a transfer from another credit line.
    related_credit = models.ForeignKey(CreditLine, on_delete=models.PROTECT,
                                       null=True, blank=True, related_name='creditadjustment_related')
    date_created = models.DateTimeField(auto_now_add=True)
    web_user = models.CharField(max_length=80, null=True, blank=True)
    last_modified = models.DateTimeField(auto_now=True)
    class Meta(object):
        app_label = 'accounting'
    def clean(self):
        """
        Only one of either a line item or invoice may be specified as the adjuster.
        """
        # NOTE(review): customer_invoice is not part of this exclusivity
        # check even though it is a separate FK — confirm that is intended.
        if self.line_item and self.invoice:
            raise ValidationError(_("You can't specify both an invoice and a line item."))
class DomainUserHistory(models.Model):
    """
    A record of the number of users in a domain at the record_date.
    Created by task calculate_users_and_sms_in_all_domains on the first of every month.
    Used to bill clients for the appropriate number of users
    """
    domain = models.CharField(max_length=256)
    # Date the user count was recorded.
    record_date = models.DateField()
    num_users = models.IntegerField(default=0)
    class Meta:
        # One snapshot per domain per date.
        unique_together = ('domain', 'record_date')
| 38.552843 | 115 | 0.641621 | import datetime
import itertools
from decimal import Decimal
from io import BytesIO
from tempfile import NamedTemporaryFile
from django.conf import settings
from django.contrib.postgres.fields import ArrayField
from django.core.exceptions import ValidationError
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models, transaction
from django.db.models import F, Q
from django.db.models.manager import Manager
from django.template.loader import render_to_string
from django.utils.html import strip_tags
from django.utils.translation import ugettext_lazy as _
import jsonfield
import stripe
from django_prbac.models import Role
from memoized import memoized
from corehq.apps.domain.shortcuts import publish_domain_saved
from dimagi.ext.couchdbkit import (
BooleanProperty,
DateTimeProperty,
SafeSaveDocument,
StringProperty,
)
from dimagi.utils.web import get_site_domain
from corehq.apps.accounting.emails import send_subscription_change_alert
from corehq.apps.accounting.exceptions import (
AccountingError,
CreditLineError,
InvoiceEmailThrottledError,
NewSubscriptionError,
ProductPlanNotFoundError,
SubscriptionAdjustmentError,
SubscriptionChangeError,
SubscriptionReminderError,
SubscriptionRenewalError,
)
from corehq.apps.accounting.invoice_pdf import InvoiceTemplate
from corehq.apps.accounting.signals import subscription_upgrade_or_downgrade
from corehq.apps.accounting.subscription_changes import (
DomainDowngradeActionHandler,
DomainUpgradeActionHandler,
)
from corehq.apps.accounting.utils import (
EXCHANGE_RATE_DECIMAL_PLACES,
ensure_domain_instance,
fmt_dollar_amount,
get_account_name_from_default_name,
get_address_from_invoice,
get_change_status,
get_dimagi_from_email,
get_privileges,
is_active_subscription,
log_accounting_error,
log_accounting_info,
quantize_accounting_decimal,
)
from corehq.apps.domain import UNKNOWN_DOMAIN
from corehq.apps.domain.models import Domain
from corehq.apps.hqwebapp.tasks import send_html_email_async
from corehq.apps.users.models import WebUser
from corehq.blobs.mixin import CODES, BlobMixin
from corehq.const import USER_DATE_FORMAT
from corehq.privileges import REPORT_BUILDER_ADD_ON_PRIVS
from corehq.util.dates import get_first_last_days
from corehq.util.mixin import ValidateModelMixin
from corehq.util.quickcache import quickcache
from corehq.util.soft_assert import soft_assert
from corehq.util.view_utils import absolute_reverse
integer_field_validators = [MaxValueValidator(2147483647), MinValueValidator(-2147483648)]
MAX_INVOICE_COMMUNICATIONS = 5
SMALL_INVOICE_THRESHOLD = 100
UNLIMITED_FEATURE_USAGE = -1
MINIMUM_SUBSCRIPTION_LENGTH = 30
_soft_assert_contact_emails_missing = soft_assert(
to=['{}@{}'.format(email, 'dimagi.com') for email in [
'accounts',
'billing-dev',
]],
exponential_backoff=False,
)
class BillingAccountType(object):
CONTRACT = "CONTRACT"
USER_CREATED = "USER_CREATED"
GLOBAL_SERVICES = "GLOBAL_SERVICES"
INVOICE_GENERATED = "INVOICE_GENERATED"
TRIAL = "TRIAL"
CHOICES = (
(CONTRACT, "Created by contract"),
(USER_CREATED, "Created by user"),
(GLOBAL_SERVICES, "Created by Global Services"),
(INVOICE_GENERATED, "Generated by an invoice"),
(TRIAL, "Is trial account"),
)
class InvoicingPlan(object):
MONTHLY = "MONTHLY"
QUARTERLY = "QUARTERLY"
YEARLY = "YEARLY"
CHOICES = (
(MONTHLY, "Monthly"),
(QUARTERLY, "Quarterly"),
(YEARLY, "Yearly")
)
class FeatureType(object):
USER = "User"
SMS = "SMS"
CHOICES = (
(USER, USER),
(SMS, SMS),
)
class SoftwarePlanEdition(object):
COMMUNITY = "Community"
STANDARD = "Standard"
PRO = "Pro"
ADVANCED = "Advanced"
ENTERPRISE = "Enterprise"
RESELLER = "Reseller"
MANAGED_HOSTING = "Managed Hosting"
PAUSED = "Paused"
CHOICES = (
(COMMUNITY, COMMUNITY),
(STANDARD, STANDARD),
(PRO, PRO),
(ADVANCED, ADVANCED),
(ENTERPRISE, ENTERPRISE),
(PAUSED, PAUSED),
(RESELLER, RESELLER),
(MANAGED_HOSTING, MANAGED_HOSTING),
)
SELF_SERVICE_ORDER = [
PAUSED,
COMMUNITY,
STANDARD,
PRO,
ADVANCED,
]
class SoftwarePlanVisibility(object):
PUBLIC = "PUBLIC"
INTERNAL = "INTERNAL"
TRIAL = "TRIAL"
CHOICES = (
(PUBLIC, "Anyone can subscribe"),
(INTERNAL, "Dimagi must create subscription"),
(TRIAL, "This is a Trial Plan"),
)
class CreditAdjustmentReason(object):
DIRECT_PAYMENT = "DIRECT_PAYMENT"
SALESFORCE = "SALESFORCE"
INVOICE = "INVOICE"
LINE_ITEM = "LINE_ITEM"
TRANSFER = "TRANSFER"
MANUAL = "MANUAL"
CHOICES = (
(MANUAL, "manual"),
(SALESFORCE, "via Salesforce"),
(INVOICE, "invoice generated"),
(LINE_ITEM, "line item generated"),
(TRANSFER, "transfer from another credit line"),
(DIRECT_PAYMENT, "payment from client received"),
)
class SubscriptionAdjustmentReason(object):
CREATE = "CREATE"
MODIFY = "MODIFY"
CANCEL = "CANCEL"
UPGRADE = "UPGRADE"
DOWNGRADE = "DOWNGRADE"
SWITCH = "SWITCH"
REACTIVATE = "REACTIVATE"
RENEW = "RENEW"
CHOICES = (
(CREATE, "A new subscription created from scratch."),
(MODIFY, "Some part of the subscription was modified...likely a date."),
(CANCEL, "The subscription was cancelled with no followup subscription."),
(UPGRADE, "The subscription was upgraded to the related subscription."),
(DOWNGRADE, "The subscription was downgraded to the related subscription."),
(SWITCH, "The plan was changed to the related subscription and "
"was neither an upgrade or downgrade."),
(REACTIVATE, "The subscription was reactivated."),
(RENEW, "The subscription was renewed."),
)
class SubscriptionAdjustmentMethod(object):
USER = "USER"
INTERNAL = "INTERNAL"
TASK = "TASK"
TRIAL = "TRIAL"
AUTOMATIC_DOWNGRADE = 'AUTOMATIC_DOWNGRADE'
DEFAULT_COMMUNITY = 'DEFAULT_COMMUNITY'
INVOICING = 'INVOICING'
CHOICES = (
(USER, "User"),
(INTERNAL, "Ops"),
(TASK, "[Deprecated] Task (Invoicing)"),
(TRIAL, "30 Day Trial"),
(AUTOMATIC_DOWNGRADE, "Automatic Downgrade"),
(DEFAULT_COMMUNITY, 'Default to Community'),
(INVOICING, 'Invoicing')
)
class PaymentMethodType(object):
STRIPE = "Stripe"
CHOICES = (
(STRIPE, STRIPE),
)
class SubscriptionType(object):
IMPLEMENTATION = "IMPLEMENTATION"
PRODUCT = "PRODUCT"
TRIAL = "TRIAL"
EXTENDED_TRIAL = "EXTENDED_TRIAL"
SANDBOX = "SANDBOX"
INTERNAL = "INTERNAL"
NOT_SET = "NOT_SET"
CHOICES = (
(IMPLEMENTATION, "Implementation"),
(PRODUCT, "Product"),
(TRIAL, "Trial"),
(EXTENDED_TRIAL, "Extended Trial"),
(SANDBOX, "Sandbox"),
(INTERNAL, "Internal"),
)
class ProBonoStatus(object):
YES = "PRO_BONO"
NO = "FULL_PRICE"
DISCOUNTED = "DISCOUNTED"
CHOICES = (
(NO, "Full Price"),
(DISCOUNTED, "Discounted"),
(YES, "Pro Bono"),
)
class FundingSource(object):
DIMAGI = "DIMAGI"
CLIENT = "CLIENT"
EXTERNAL = "EXTERNAL"
CHOICES = (
(DIMAGI, "Dimagi"),
(CLIENT, "Client Funding"),
(EXTERNAL, "External Funding"),
)
class EntryPoint(object):
CONTRACTED = "CONTRACTED"
SELF_STARTED = "SELF_STARTED"
NOT_SET = "NOT_SET"
CHOICES = (
(CONTRACTED, "Contracted"),
(SELF_STARTED, "Self-started"),
(NOT_SET, "Not Set"),
)
class LastPayment(object):
CC_ONE_TIME = "CC_ONE_TIME"
CC_AUTO = "CC_AUTO"
WIRE = "WIRE"
ACH = "ACH"
OTHER = "OTHER"
BU_PAYMENT = "BU_PAYMENT"
NONE = "NONE"
CHOICES = (
(CC_ONE_TIME, "Credit Card - One Time"),
(CC_AUTO, "Credit Card - Autopay"),
(WIRE, "Wire"),
(ACH, "ACH"),
(OTHER, "Other"),
(BU_PAYMENT, "Payment to local BU"),
(NONE, "None"),
)
class PreOrPostPay(object):
PREPAY = "PREPAY"
POSTPAY = "POSTPAY"
NOT_SET = "NOT_SET"
CHOICES = (
(PREPAY, "Prepay"),
(POSTPAY, "Postpay"),
(NOT_SET, "Not Set"),
)
class Currency(models.Model):
code = models.CharField(max_length=3, unique=True)
name = models.CharField(max_length=25, db_index=True)
symbol = models.CharField(max_length=10)
rate_to_default = models.DecimalField(
default=Decimal('1.0'), max_digits=20,
decimal_places=EXCHANGE_RATE_DECIMAL_PLACES,
)
date_updated = models.DateField(auto_now=True)
class Meta(object):
app_label = 'accounting'
@classmethod
def get_default(cls):
default, _ = cls.objects.get_or_create(code=settings.DEFAULT_CURRENCY)
return default
DEFAULT_ACCOUNT_FORMAT = 'Account for Project %s'
class BillingAccount(ValidateModelMixin, models.Model):
name = models.CharField(max_length=200, db_index=True, unique=True)
salesforce_account_id = models.CharField(
db_index=True,
max_length=80,
blank=True,
null=True,
help_text="This is how we link to the salesforce account",
)
created_by = models.CharField(max_length=80, blank=True)
created_by_domain = models.CharField(max_length=256, null=True, blank=True)
date_created = models.DateTimeField(auto_now_add=True)
dimagi_contact = models.EmailField(blank=True)
currency = models.ForeignKey(Currency, on_delete=models.PROTECT)
is_auto_invoiceable = models.BooleanField(default=False)
date_confirmed_extra_charges = models.DateTimeField(null=True, blank=True)
account_type = models.CharField(
max_length=25,
default=BillingAccountType.CONTRACT,
choices=BillingAccountType.CHOICES,
)
is_active = models.BooleanField(default=True)
is_customer_billing_account = models.BooleanField(default=False, db_index=True)
enterprise_admin_emails = ArrayField(models.EmailField(), default=list, blank=True)
enterprise_restricted_signup_domains = ArrayField(models.CharField(max_length=128), default=list, blank=True)
invoicing_plan = models.CharField(
max_length=25,
default=InvoicingPlan.MONTHLY,
choices=InvoicingPlan.CHOICES
)
entry_point = models.CharField(
max_length=25,
default=EntryPoint.NOT_SET,
choices=EntryPoint.CHOICES,
)
auto_pay_user = models.CharField(max_length=80, null=True, blank=True)
last_modified = models.DateTimeField(auto_now=True)
last_payment_method = models.CharField(
max_length=25,
default=LastPayment.NONE,
choices=LastPayment.CHOICES,
)
pre_or_post_pay = models.CharField(
max_length=25,
default=PreOrPostPay.NOT_SET,
choices=PreOrPostPay.CHOICES,
)
restrict_domain_creation = models.BooleanField(default=False)
restrict_signup = models.BooleanField(default=False, db_index=True)
restrict_signup_message = models.CharField(max_length=512, null=True, blank=True)
class Meta(object):
app_label = 'accounting'
@property
def auto_pay_enabled(self):
return self.auto_pay_user is not None
@classmethod
def create_account_for_domain(cls, domain,
created_by=None, account_type=None,
entry_point=None, last_payment_method=None,
pre_or_post_pay=None):
account_type = account_type or BillingAccountType.INVOICE_GENERATED
entry_point = entry_point or EntryPoint.NOT_SET
last_payment_method = last_payment_method or LastPayment.NONE
pre_or_post_pay = pre_or_post_pay or PreOrPostPay.POSTPAY
default_name = DEFAULT_ACCOUNT_FORMAT % domain
name = get_account_name_from_default_name(default_name)
return BillingAccount.objects.create(
name=name,
created_by=created_by,
created_by_domain=domain,
currency=Currency.get_default(),
account_type=account_type,
entry_point=entry_point,
last_payment_method=last_payment_method,
pre_or_post_pay=pre_or_post_pay
)
@classmethod
def get_or_create_account_by_domain(cls, domain,
created_by=None, account_type=None,
entry_point=None, last_payment_method=None,
pre_or_post_pay=None):
account = cls.get_account_by_domain(domain)
if account:
return account, False
return cls.create_account_for_domain(
domain,
created_by=created_by,
account_type=account_type,
entry_point=entry_point,
last_payment_method=last_payment_method,
pre_or_post_pay=pre_or_post_pay,
), True
@classmethod
def get_account_by_domain(cls, domain):
current_subscription = Subscription.get_active_subscription_by_domain(domain)
if current_subscription is not None:
return current_subscription.account
else:
return cls._get_account_by_created_by_domain(domain)
@classmethod
def _get_account_by_created_by_domain(cls, domain):
try:
return cls.objects.get(created_by_domain=domain)
except cls.DoesNotExist:
return None
except cls.MultipleObjectsReturned:
log_accounting_error(
f"Multiple billing accounts showed up for the domain '{domain}'. The "
"latest one was served, but you should reconcile very soon.",
show_stack_trace=True,
)
return cls.objects.filter(created_by_domain=domain).latest('date_created')
return None
@classmethod
@quickcache([], timeout=60 * 60)
def get_enterprise_restricted_signup_accounts(cls):
return BillingAccount.objects.filter(is_customer_billing_account=True, restrict_signup=True)
@property
def autopay_card(self):
if not self.auto_pay_enabled:
return None
return StripePaymentMethod.objects.get(web_user=self.auto_pay_user).get_autopay_card(self)
def has_enterprise_admin(self, email):
return self.is_customer_billing_account and email in self.enterprise_admin_emails
def update_autopay_user(self, new_user, domain):
if self.auto_pay_enabled and new_user != self.auto_pay_user:
self._send_autopay_card_removed_email(new_user=new_user, domain=domain)
self.auto_pay_user = new_user
self.save()
self._send_autopay_card_added_email(domain)
def remove_autopay_user(self):
self.auto_pay_user = None
self.save()
def _send_autopay_card_removed_email(self, new_user, domain):
from corehq.apps.domain.views.accounting import EditExistingBillingAccountView
old_user = self.auto_pay_user
subject = _("Your card is no longer being used to auto-pay for {billing_account}").format(
billing_account=self.name)
old_web_user = WebUser.get_by_username(old_user)
if old_web_user:
old_user_name = old_web_user.first_name
else:
old_user_name = old_user
context = {
'new_user': new_user,
'old_user_name': old_user_name,
'billing_account_name': self.name,
'billing_info_url': absolute_reverse(EditExistingBillingAccountView.urlname,
args=[domain]),
'invoicing_contact_email': settings.INVOICING_CONTACT_EMAIL,
}
send_html_email_async(
subject,
old_user,
render_to_string('accounting/email/autopay_card_removed.html', context),
text_content=strip_tags(render_to_string('accounting/email/autopay_card_removed.html', context)),
)
def _send_autopay_card_added_email(self, domain):
from corehq.apps.domain.views.accounting import EditExistingBillingAccountView
subject = _("Your card is being used to auto-pay for {billing_account}").format(
billing_account=self.name)
web_user = WebUser.get_by_username(self.auto_pay_user)
new_user_name = web_user.first_name if web_user else self.auto_pay_user
try:
last_4 = self.autopay_card.last4
except StripePaymentMethod.DoesNotExist:
last_4 = None
context = {
'name': new_user_name,
'email': self.auto_pay_user,
'domain': domain,
'last_4': last_4,
'billing_account_name': self.name,
'billing_info_url': absolute_reverse(EditExistingBillingAccountView.urlname,
args=[domain]),
'invoicing_contact_email': settings.INVOICING_CONTACT_EMAIL,
}
send_html_email_async(
subject,
self.auto_pay_user,
render_to_string('accounting/email/invoice_autopay_setup.html', context),
text_content=strip_tags(render_to_string('accounting/email/invoice_autopay_setup.html', context)),
)
class BillingContactInfo(models.Model):
    """Billing contact person and mailing address for a BillingAccount."""
    account = models.OneToOneField(BillingAccount, primary_key=True, null=False, on_delete=models.CASCADE)
    first_name = models.CharField(
        max_length=50, null=True, blank=True, verbose_name=_("First Name")
    )
    last_name = models.CharField(
        max_length=50, null=True, blank=True, verbose_name=_("Last Name")
    )
    # JSON list of email addresses that receive billing communications.
    email_list = jsonfield.JSONField(
        default=list,
        verbose_name=_("Contact Emails"),
        help_text=_("We will email communications regarding your account "
                    "to the emails specified here.")
    )
    phone_number = models.CharField(
        max_length=20, null=True, blank=True, verbose_name=_("Phone Number")
    )
    company_name = models.CharField(
        max_length=50, null=True, blank=True,
        verbose_name=_("Company / Organization")
    )
    first_line = models.CharField(
        max_length=50, null=False,
        verbose_name=_("Address First Line")
    )
    second_line = models.CharField(
        max_length=50, null=True, blank=True,
        verbose_name=_("Address Second Line")
    )
    city = models.CharField(
        max_length=50, null=False, verbose_name=_("City")
    )
    state_province_region = models.CharField(
        max_length=50, null=False,
        verbose_name=_("State / Province / Region"),
    )
    postal_code = models.CharField(
        max_length=20, null=False, verbose_name=_("Postal Code")
    )
    country = models.CharField(
        max_length=50, null=False, verbose_name=_("Country")
    )
    last_modified = models.DateTimeField(auto_now=True)

    class Meta(object):
        app_label = 'accounting'

    def __init__(self, *args, **kwargs):
        super(BillingContactInfo, self).__init__(*args, **kwargs)
        # Normalize a legacy serialized-empty-list value ('[]' as a string)
        # to a real Python list.
        if self.email_list == '[]':
            self.email_list = []

    @property
    def full_name(self):
        """Return "first last", or whichever part is present."""
        if not self.first_name:
            return self.last_name
        elif not self.last_name:
            return self.first_name
        else:
            return "%s %s" % (self.first_name, self.last_name)
class SoftwareProductRate(models.Model):
    """Monthly base fee for a software plan (the 'product' part of pricing)."""
    name = models.CharField(max_length=40)
    monthly_fee = models.DecimalField(default=Decimal('0.00'), max_digits=10, decimal_places=2)
    date_created = models.DateTimeField(auto_now_add=True)
    is_active = models.BooleanField(default=True)
    last_modified = models.DateTimeField(auto_now=True)

    class Meta(object):
        app_label = 'accounting'

    def __str__(self):
        return '%s @ $%s /month' % (self.name, self.monthly_fee)

    def __eq__(self, other):
        """Rates are equal when name, fee, and active flag match (pk ignored)."""
        if not isinstance(other, self.__class__) or not self.name == other.name:
            return False
        for field in ['monthly_fee', 'is_active']:
            if not getattr(self, field) == getattr(other, field):
                return False
        return True

    # Defining __eq__ implicitly sets __hash__ to None in Python 3, making
    # instances unhashable; restore Django's default pk-based hash so rates
    # keep working in sets and as dict keys.
    __hash__ = models.Model.__hash__

    @classmethod
    def new_rate(cls, product_name, monthly_fee, save=True):
        """Create (and by default persist) a new rate for ``product_name``."""
        # Use cls rather than hard-coding the class so subclasses work.
        rate = cls(name=product_name, monthly_fee=monthly_fee)
        if save:
            rate.save()
        return rate
class Feature(models.Model):
    """A billable feature (e.g. users, SMS) priced via FeatureRate entries."""
    name = models.CharField(max_length=40, unique=True)
    feature_type = models.CharField(max_length=10, db_index=True, choices=FeatureType.CHOICES)
    last_modified = models.DateTimeField(auto_now=True)

    class Meta(object):
        app_label = 'accounting'

    def __str__(self):
        return "Feature '%s' of type '%s'" % (self.name, self.feature_type)

    def get_rate(self, default_instance=True):
        """Return the most recently created active rate for this feature.

        When no active rate exists, return a fresh unsaved FeatureRate if
        ``default_instance`` is True, otherwise None.
        """
        active_rates = self.featurerate_set.filter(is_active=True)
        try:
            return active_rates.latest('date_created')
        except FeatureRate.DoesNotExist:
            if default_instance:
                return FeatureRate()
            return None
class FeatureRate(models.Model):
    """Pricing for a Feature: monthly fee, included limit, and overage fee."""
    feature = models.ForeignKey(Feature, on_delete=models.PROTECT)
    monthly_fee = models.DecimalField(default=Decimal('0.00'), max_digits=10, decimal_places=2,
                                      verbose_name="Monthly Fee")
    monthly_limit = models.IntegerField(default=0,
                                        verbose_name="Monthly Included Limit",
                                        validators=integer_field_validators)
    per_excess_fee = models.DecimalField(default=Decimal('0.00'), max_digits=10, decimal_places=2,
                                         verbose_name="Fee Per Excess of Limit")
    date_created = models.DateTimeField(auto_now_add=True)
    is_active = models.BooleanField(default=True)
    last_modified = models.DateTimeField(auto_now=True)

    class Meta(object):
        app_label = 'accounting'

    def __str__(self):
        return '%s @ $%s /month, $%s /excess, limit: %d' % (
            self.feature.name, self.monthly_fee, self.per_excess_fee, self.monthly_limit
        )

    def __eq__(self, other):
        """Rates are equal when they price the same feature identically (pk ignored)."""
        if not isinstance(other, self.__class__) or not self.feature.pk == other.feature.pk:
            return False
        for field in ['monthly_fee', 'monthly_limit', 'per_excess_fee', 'is_active']:
            if not getattr(self, field) == getattr(other, field):
                return False
        return True

    # Defining __eq__ implicitly sets __hash__ to None in Python 3, making
    # instances unhashable; restore Django's default pk-based hash so rates
    # keep working in sets and as dict keys.
    __hash__ = models.Model.__hash__

    @classmethod
    def new_rate(cls, feature_name, feature_type,
                 monthly_fee=None, monthly_limit=None, per_excess_fee=None, save=True):
        """Create a rate for (feature_name, feature_type), creating the
        Feature row if needed. None values keep the model-field defaults.
        """
        # Avoid shadowing the gettext alias `_` with the created flag.
        feature, _created = Feature.objects.get_or_create(name=feature_name, feature_type=feature_type)
        rate = cls(feature=feature)
        if monthly_fee is not None:
            rate.monthly_fee = monthly_fee
        if monthly_limit is not None:
            rate.monthly_limit = monthly_limit
        if per_excess_fee is not None:
            rate.per_excess_fee = per_excess_fee
        if save:
            rate.save()
        return rate
class SoftwarePlan(models.Model):
    """A named software plan; concrete pricing lives on SoftwarePlanVersion
    snapshots created whenever the plan's pricing changes."""
    name = models.CharField(max_length=80, unique=True)
    description = models.TextField(blank=True,
                                   help_text="If the visibility is INTERNAL, this description field will be used.")
    edition = models.CharField(
        max_length=25,
        default=SoftwarePlanEdition.ENTERPRISE,
        choices=SoftwarePlanEdition.CHOICES,
    )
    visibility = models.CharField(
        max_length=10,
        default=SoftwarePlanVisibility.INTERNAL,
        choices=SoftwarePlanVisibility.CHOICES,
    )
    last_modified = models.DateTimeField(auto_now=True)
    is_customer_software_plan = models.BooleanField(default=False)
    max_domains = models.IntegerField(blank=True, null=True)
    is_annual_plan = models.BooleanField(default=False)

    class Meta(object):
        app_label = 'accounting'

    @quickcache(vary_on=['self.pk'], timeout=10)
    def get_version(self):
        """Return the newest active SoftwarePlanVersion, or None."""
        try:
            return self.softwareplanversion_set.filter(is_active=True).latest('date_created')
        except SoftwarePlanVersion.DoesNotExist:
            return None

    def at_max_domains(self):
        """True when max_domains is set and already reached by active
        subscriptions across all versions of this plan."""
        if not self.max_domains:
            return False
        # Count in a single query instead of issuing one COUNT per version.
        subscription_count = Subscription.visible_objects.filter(
            plan_version__in=self.softwareplanversion_set.all(), is_active=True,
        ).count()
        return subscription_count >= self.max_domains
class DefaultProductPlan(models.Model):
    """Maps (edition, is_trial, is_report_builder_enabled) to the SoftwarePlan
    used by default when no explicit plan is chosen."""
    edition = models.CharField(
        default=SoftwarePlanEdition.COMMUNITY,
        choices=SoftwarePlanEdition.CHOICES,
        max_length=25,
    )
    plan = models.ForeignKey(SoftwarePlan, on_delete=models.PROTECT)
    is_trial = models.BooleanField(default=False)
    is_report_builder_enabled = models.BooleanField(default=False)
    last_modified = models.DateTimeField(auto_now=True)

    class Meta(object):
        app_label = 'accounting'
        unique_together = ('edition', 'is_trial', 'is_report_builder_enabled')

    @classmethod
    # Cached only on enterprise deployments outside of unit tests.
    @quickcache(['edition', 'is_trial', 'is_report_builder_enabled'],
                skip_arg=lambda *args, **kwargs: not settings.ENTERPRISE_MODE or settings.UNIT_TESTING)
    def get_default_plan_version(cls, edition=None, is_trial=False,
                                 is_report_builder_enabled=False):
        """Return the current plan version of the default plan for the given
        edition/flags; edition defaults by deployment type.

        Raises AccountingError when the mapping row is missing.
        """
        if not edition:
            edition = (SoftwarePlanEdition.ENTERPRISE if settings.ENTERPRISE_MODE
                       else SoftwarePlanEdition.COMMUNITY)
        try:
            default_product_plan = DefaultProductPlan.objects.select_related('plan').get(
                edition=edition, is_trial=is_trial,
                is_report_builder_enabled=is_report_builder_enabled
            )
            return default_product_plan.plan.get_version()
        except DefaultProductPlan.DoesNotExist:
            raise AccountingError(
                "No default product plan was set up, did you forget to run migrations?"
            )

    @classmethod
    def get_lowest_edition(cls, requested_privileges, return_plan=False):
        """Return the cheapest self-service edition (or its plan version when
        return_plan=True) whose privileges cover requested_privileges.

        Falls back to ENTERPRISE (or None when return_plan=True) if no
        self-service edition suffices.
        """
        for edition in SoftwarePlanEdition.SELF_SERVICE_ORDER:
            plan_version = cls.get_default_plan_version(edition)
            # Report-builder privileges are add-ons, not part of the base plan.
            privileges = get_privileges(plan_version) - REPORT_BUILDER_ADD_ON_PRIVS
            if privileges.issuperset(requested_privileges):
                return (plan_version if return_plan
                        else plan_version.plan.edition)
        return None if return_plan else SoftwarePlanEdition.ENTERPRISE
class SoftwarePlanVersion(models.Model):
    """Snapshot of a SoftwarePlan's pricing: one product rate plus feature
    rates and the privilege Role in effect for that version."""
    plan = models.ForeignKey(SoftwarePlan, on_delete=models.PROTECT)
    product_rate = models.ForeignKey(SoftwareProductRate, on_delete=models.CASCADE)
    feature_rates = models.ManyToManyField(FeatureRate, blank=True)
    date_created = models.DateTimeField(auto_now_add=True)
    is_active = models.BooleanField(default=True)
    role = models.ForeignKey(Role, on_delete=models.CASCADE)
    last_modified = models.DateTimeField(auto_now=True)

    class Meta(object):
        app_label = 'accounting'

    def __str__(self):
        return "%(plan_name)s (v%(version_num)d)" % {
            'plan_name': self.plan.name,
            'version_num': self.version,
        }

    def save(self, *args, **kwargs):
        """Persist and invalidate the plan's cached current version."""
        super(SoftwarePlanVersion, self).save(*args, **kwargs)
        SoftwarePlan.get_version.clear(self.plan)

    @property
    def version(self):
        # 1-based position of this version among the plan's versions,
        # ordered by creation date (total minus how many were created later).
        return (self.plan.softwareplanversion_set.count() -
                self.plan.softwareplanversion_set.filter(
                    date_created__gt=self.date_created).count())

    @property
    def user_facing_description(self):
        """Dict describing this version for display: name, description,
        monthly fee, feature limits, and edition."""
        from corehq.apps.accounting.user_text import DESC_BY_EDITION, FEATURE_TYPE_TO_NAME

        def _default_description(plan, monthly_limit):
            # These editions' descriptions take the user limit as a format
            # argument; other editions use their static text.
            if plan.edition in [
                SoftwarePlanEdition.COMMUNITY,
                SoftwarePlanEdition.STANDARD,
                SoftwarePlanEdition.PRO,
                SoftwarePlanEdition.ADVANCED,
            ]:
                return DESC_BY_EDITION[plan.edition]['description'].format(monthly_limit)
            else:
                return DESC_BY_EDITION[plan.edition]['description']

        desc = {
            'name': self.plan.name,
        }
        # Public/trial plans -- and any plan without a custom description --
        # use the generated default text.
        if (
            self.plan.visibility == SoftwarePlanVisibility.PUBLIC
            or self.plan.visibility == SoftwarePlanVisibility.TRIAL
        ) or not self.plan.description:
            desc['description'] = _default_description(self.plan, self.user_feature.monthly_limit)
        else:
            desc['description'] = self.plan.description
        desc.update({
            'monthly_fee': 'USD %s' % self.product_rate.monthly_fee,
            'rates': [{'name': FEATURE_TYPE_TO_NAME[r.feature.feature_type],
                       'included': 'Infinite' if r.monthly_limit == UNLIMITED_FEATURE_USAGE else r.monthly_limit}
                      for r in self.feature_rates.all()],
            'edition': self.plan.edition,
        })
        return desc

    @property
    @memoized
    def user_feature(self):
        """The USER-type feature rate for this version, or None if absent.

        Takes the smallest monthly_limit; if that is not the UNLIMITED
        sentinel, the largest limit is used instead (i.e. a concrete limit
        wins over the sentinel only when the sentinel sorts first).
        """
        user_features = self.feature_rates.filter(feature__feature_type=FeatureType.USER)
        try:
            user_feature = user_features.order_by('monthly_limit')[0]
            if not user_feature.monthly_limit == UNLIMITED_FEATURE_USAGE:
                user_feature = user_features.order_by('-monthly_limit')[0]
            return user_feature
        except IndexError:
            # No USER rate configured -> implicitly returns None.
            pass

    @property
    def user_limit(self):
        """Monthly user limit, or UNLIMITED_FEATURE_USAGE with no USER rate."""
        if self.user_feature is not None:
            return self.user_feature.monthly_limit
        return UNLIMITED_FEATURE_USAGE

    @property
    def user_fee(self):
        """Formatted per-excess-user fee, or None when there is no USER rate."""
        if self.user_feature is not None:
            return "USD %d" % self.user_feature.per_excess_fee

    def feature_charges_exist_for_domain(self, domain, start_date=None, end_date=None):
        """True if the domain's usage exceeds any limited feature rate's
        monthly limit within the optional date range."""
        domain_obj = ensure_domain_instance(domain)
        if domain_obj is None:
            return False
        from corehq.apps.accounting.usage import FeatureUsageCalculator
        for feature_rate in self.feature_rates.all():
            if feature_rate.monthly_limit != UNLIMITED_FEATURE_USAGE:
                calc = FeatureUsageCalculator(
                    feature_rate, domain_obj.name, start_date=start_date,
                    end_date=end_date
                )
                if calc.get_usage() > feature_rate.monthly_limit:
                    return True
        return False

    @property
    def is_paused(self):
        """True when this version belongs to the special PAUSED edition."""
        return self.plan.edition == SoftwarePlanEdition.PAUSED
class SubscriberManager(models.Manager):
    """Manager adding a lookup that returns None instead of raising."""

    def safe_get(self, *args, **kwargs):
        """Like get(), but return None when no Subscriber matches."""
        try:
            return self.get(*args, **kwargs)
        except Subscriber.DoesNotExist:
            return None
class Subscriber(models.Model):
    """The subscribing entity (a project-space domain); coordinates privilege
    upgrades/downgrades when its subscriptions change."""
    domain = models.CharField(max_length=256, unique=True, db_index=True)
    last_modified = models.DateTimeField(auto_now=True)

    objects = SubscriberManager()

    class Meta(object):
        app_label = 'accounting'

    def __str__(self):
        return "DOMAIN %s" % self.domain

    def create_subscription(self, new_plan_version, new_subscription, is_internal_change):
        """Apply privilege changes for a newly created subscription."""
        assert new_plan_version
        assert new_subscription
        return self._apply_upgrades_and_downgrades(
            new_plan_version=new_plan_version,
            new_subscription=new_subscription,
            internal_change=is_internal_change,
        )

    def change_subscription(self, downgraded_privileges, upgraded_privileges, new_plan_version,
                            old_subscription, new_subscription, internal_change):
        """Apply privilege changes when switching between two subscriptions."""
        return self._apply_upgrades_and_downgrades(
            downgraded_privileges=downgraded_privileges,
            upgraded_privileges=upgraded_privileges,
            new_plan_version=new_plan_version,
            old_subscription=old_subscription,
            new_subscription=new_subscription,
            internal_change=internal_change,
        )

    def activate_subscription(self, upgraded_privileges, subscription):
        """Grant privileges when a subscription becomes active."""
        return self._apply_upgrades_and_downgrades(
            upgraded_privileges=upgraded_privileges,
            new_subscription=subscription,
        )

    def deactivate_subscription(self, downgraded_privileges, upgraded_privileges,
                                old_subscription, new_subscription):
        """Apply privilege changes when a subscription is deactivated."""
        return self._apply_upgrades_and_downgrades(
            downgraded_privileges=downgraded_privileges,
            upgraded_privileges=upgraded_privileges,
            old_subscription=old_subscription,
            new_subscription=new_subscription,
        )

    def reactivate_subscription(self, new_plan_version, subscription):
        """Re-apply privileges for a reactivated subscription."""
        return self._apply_upgrades_and_downgrades(
            new_plan_version=new_plan_version,
            old_subscription=subscription,
            new_subscription=subscription,
        )

    def _apply_upgrades_and_downgrades(self, new_plan_version=None,
                                       downgraded_privileges=None,
                                       upgraded_privileges=None,
                                       old_subscription=None,
                                       new_subscription=None,
                                       internal_change=False):
        """Run downgrade/upgrade handlers for this domain, send the
        subscription-change alert when appropriate, and fire the
        upgrade-or-downgrade signal.

        Missing privilege sets are computed against the (possibly defaulted)
        new plan version.
        """
        if new_plan_version is None:
            new_plan_version = DefaultProductPlan.get_default_plan_version()

        if downgraded_privileges is None or upgraded_privileges is None:
            change_status_result = get_change_status(None, new_plan_version)
            downgraded_privileges = downgraded_privileges or change_status_result.downgraded_privs
            upgraded_privileges = upgraded_privileges or change_status_result.upgraded_privs

        # Downgrades run before upgrades.
        if downgraded_privileges:
            Subscriber._process_downgrade(self.domain, downgraded_privileges, new_plan_version)
        if upgraded_privileges:
            Subscriber._process_upgrade(self.domain, upgraded_privileges, new_plan_version)

        if Subscriber.should_send_subscription_notification(old_subscription, new_subscription):
            send_subscription_change_alert(self.domain, new_subscription, old_subscription, internal_change)

        subscription_upgrade_or_downgrade.send_robust(None, domain=self.domain)

    @staticmethod
    def should_send_subscription_notification(old_subscription, new_subscription):
        """Notify only for real changes: skip first-ever subscriptions, new
        trials, and expired trials."""
        if not old_subscription:
            return False
        is_new_trial = new_subscription and new_subscription.is_trial
        expired_trial = old_subscription.is_trial and not new_subscription
        return not is_new_trial and not expired_trial

    @staticmethod
    def _process_downgrade(domain, downgraded_privileges, new_plan_version):
        """Run the domain downgrade handler; raise on failure."""
        downgrade_handler = DomainDowngradeActionHandler(
            domain, new_plan_version, downgraded_privileges,
        )
        if not downgrade_handler.get_response():
            raise SubscriptionChangeError("The downgrade was not successful.")

    @staticmethod
    def _process_upgrade(domain, upgraded_privileges, new_plan_version):
        """Run the domain upgrade handler; raise on failure."""
        upgrade_handler = DomainUpgradeActionHandler(
            domain, new_plan_version, upgraded_privileges,
        )
        if not upgrade_handler.get_response():
            raise SubscriptionChangeError("The upgrade was not successful.")
class VisibleSubscriptionManager(models.Manager):
    """Manager that excludes subscriptions hidden from the ops team."""
    use_in_migrations = True

    def get_queryset(self):
        return super(VisibleSubscriptionManager, self).get_queryset().filter(is_hidden_to_ops=False)
class DisabledManager(models.Manager):
    """Deliberately unusable manager: forces callers to pick an explicit
    manager (e.g. visible_objects) instead of the default `objects`."""

    def get_queryset(self):
        raise NotImplementedError
class Subscription(models.Model):
    """A Subscriber's subscription to a SoftwarePlanVersion over a date range,
    with invoicing, provenance, and ops-visibility flags."""
    account = models.ForeignKey(BillingAccount, on_delete=models.PROTECT)
    plan_version = models.ForeignKey(SoftwarePlanVersion, on_delete=models.PROTECT)
    subscriber = models.ForeignKey(Subscriber, on_delete=models.PROTECT)
    salesforce_contract_id = models.CharField(blank=True, max_length=80)
    date_start = models.DateField()
    # Null date_end means open-ended.
    date_end = models.DateField(blank=True, null=True)
    date_created = models.DateTimeField(auto_now_add=True)
    is_active = models.BooleanField(default=False)
    # Invoicing opt-outs and their human-readable reasons.
    do_not_invoice = models.BooleanField(default=False)
    no_invoice_reason = models.CharField(blank=True, max_length=256)
    do_not_email_invoice = models.BooleanField(default=False)
    do_not_email_reminder = models.BooleanField(default=False)
    auto_generate_credits = models.BooleanField(default=False)
    is_trial = models.BooleanField(default=False)
    skip_invoicing_if_no_feature_charges = models.BooleanField(default=False)
    service_type = models.CharField(
        max_length=25,
        choices=SubscriptionType.CHOICES,
        default=SubscriptionType.NOT_SET
    )
    pro_bono_status = models.CharField(
        max_length=25,
        choices=ProBonoStatus.CHOICES,
        default=ProBonoStatus.NO,
    )
    funding_source = models.CharField(
        max_length=25,
        choices=FundingSource.CHOICES,
        default=FundingSource.CLIENT
    )
    last_modified = models.DateTimeField(auto_now=True)
    is_hidden_to_ops = models.BooleanField(default=False)
    skip_auto_downgrade = models.BooleanField(default=False)
    skip_auto_downgrade_reason = models.CharField(blank=True, max_length=256)

    # The default `objects` manager is disabled on purpose: callers must
    # choose visible_objects (hides is_hidden_to_ops rows) or
    # visible_and_suppressed_objects (everything).
    visible_objects = VisibleSubscriptionManager()
    visible_and_suppressed_objects = models.Manager()
    objects = DisabledManager()

    class Meta(object):
        app_label = 'accounting'
def __str__(self):
return ("Subscription to %(plan_version)s for %(subscriber)s. "
"[%(date_start)s - %(date_end)s]" % {
'plan_version': self.plan_version,
'subscriber': self.subscriber,
'date_start': self.date_start.strftime(USER_DATE_FORMAT),
'date_end': (self.date_end.strftime(USER_DATE_FORMAT)
if self.date_end is not None else "--"),
})
    def __eq__(self, other):
        """Equality by plan version, dates, subscriber, and account (pk ignored).

        NOTE(review): classes are compared by name rather than isinstance
        (presumably to tolerate proxy/duplicate class objects -- confirm),
        and no __hash__ is defined alongside, which makes instances
        unhashable in Python 3 -- confirm nothing hashes Subscriptions.
        """
        return (
            other is not None
            and other.__class__.__name__ == self.__class__.__name__
            and other.plan_version.pk == self.plan_version.pk
            and other.date_start == self.date_start
            and other.date_end == self.date_end
            and other.subscriber.pk == self.subscriber.pk
            and other.account.pk == self.account.pk
        )
    def save(self, *args, **kwargs):
        """Persist, then invalidate domain-keyed caches and republish the
        domain so downstream consumers see the subscription change."""
        from corehq.apps.accounting.mixins import get_overdue_invoice
        super(Subscription, self).save(*args, **kwargs)
        Subscription._get_active_subscription_by_domain.clear(Subscription, self.subscriber.domain)
        get_overdue_invoice.clear(self.subscriber.domain)
        domain = Domain.get_by_name(self.subscriber.domain)
        # we don't care the pillow won't be updated
        if domain:
            publish_domain_saved(domain)
    def delete(self, *args, **kwargs):
        """Delete and clear the active-subscription cache for the domain."""
        super(Subscription, self).delete(*args, **kwargs)
        Subscription._get_active_subscription_by_domain.clear(Subscription, self.subscriber.domain)
@property
def is_community(self):
return self.plan_version.plan.edition == SoftwarePlanEdition.COMMUNITY
@property
def allowed_attr_changes(self):
return ['do_not_invoice', 'no_invoice_reason',
'salesforce_contract_id', 'skip_auto_downgrade']
    @property
    def next_subscription_filter(self):
        """Queryset of later subscriptions for the same subscriber, excluding
        this one and zero-length subscriptions (date_start == date_end)."""
        return (Subscription.visible_objects.
                filter(subscriber=self.subscriber, date_start__gt=self.date_start).
                exclude(pk=self.pk).
                filter(Q(date_end__isnull=True) | ~Q(date_start=F('date_end'))))
    @property
    def previous_subscription_filter(self):
        """Queryset of this subscriber's subscriptions that started more than
        one day before this one (excluding this one)."""
        return Subscription.visible_objects.filter(
            subscriber=self.subscriber,
            date_start__lt=self.date_start - datetime.timedelta(days=1)
        ).exclude(pk=self.pk)
    @property
    def is_renewed(self):
        """True when a later subscription exists for this subscriber."""
        return self.next_subscription_filter.exists()
@property
def next_subscription(self):
try:
return self.next_subscription_filter.order_by('date_start')[0]
except (Subscription.DoesNotExist, IndexError):
return None
@property
def previous_subscription(self):
try:
return self.previous_subscription_filter.order_by('-date_end')[0]
except (Subscription.DoesNotExist, IndexError):
return None
def raise_conflicting_dates(self, date_start, date_end):
assert date_start is not None
for sub in Subscription.visible_objects.filter(
Q(date_end__isnull=True) | Q(date_end__gt=F('date_start')),
subscriber=self.subscriber,
).exclude(
id=self.id,
):
related_has_no_end = sub.date_end is None
current_has_no_end = date_end is None
start_before_related_end = sub.date_end is not None and date_start < sub.date_end
start_before_related_start = date_start < sub.date_start
start_after_related_start = date_start > sub.date_start
end_before_related_end = (
date_end is not None and sub.date_end is not None
and date_end < sub.date_end
)
end_after_related_end = (
date_end is not None and sub.date_end is not None
and date_end > sub.date_end
)
end_after_related_start = date_end is not None and date_end > sub.date_start
if (
(start_before_related_end and start_after_related_start)
or (start_after_related_start and related_has_no_end)
or (end_after_related_start and end_before_related_end)
or (end_after_related_start and related_has_no_end)
or (start_before_related_start and end_after_related_end)
or (start_before_related_end and current_has_no_end)
or (current_has_no_end and related_has_no_end)
):
raise SubscriptionAdjustmentError(
"The start date of %(start_date)s conflicts with the "
"subscription dates to %(related_sub)s." % {
'start_date': self.date_start.strftime(USER_DATE_FORMAT),
'related_sub': sub,
}
)
    def update_subscription(self, date_start, date_end,
                            do_not_invoice=None,
                            no_invoice_reason=None, do_not_email_invoice=None,
                            do_not_email_reminder=None, salesforce_contract_id=None,
                            auto_generate_credits=None,
                            web_user=None, note=None, adjustment_method=None,
                            service_type=None, pro_bono_status=None, funding_source=None,
                            skip_invoicing_if_no_feature_charges=None, skip_auto_downgrade=None,
                            skip_auto_downgrade_reason=None):
        """Modify this subscription's dates and optional properties in place.

        None-valued properties are left untouched. Saves the model and
        records a MODIFY SubscriptionAdjustment for the audit trail.
        """
        adjustment_method = adjustment_method or SubscriptionAdjustmentMethod.INTERNAL

        self._update_dates(date_start, date_end)
        self._update_properties(
            do_not_invoice=do_not_invoice,
            no_invoice_reason=no_invoice_reason,
            skip_invoicing_if_no_feature_charges=skip_invoicing_if_no_feature_charges,
            do_not_email_invoice=do_not_email_invoice,
            do_not_email_reminder=do_not_email_reminder,
            auto_generate_credits=auto_generate_credits,
            salesforce_contract_id=salesforce_contract_id,
            service_type=service_type,
            pro_bono_status=pro_bono_status,
            funding_source=funding_source,
            skip_auto_downgrade=skip_auto_downgrade,
            skip_auto_downgrade_reason=skip_auto_downgrade_reason,
        )
        self.save()

        SubscriptionAdjustment.record_adjustment(
            self, method=adjustment_method, note=note, web_user=web_user,
            reason=SubscriptionAdjustmentReason.MODIFY
        )
    def _update_dates(self, date_start, date_end):
        """Validate and set new start/end dates (without saving).

        Activates the subscription (and grants privileges) if the new dates
        make it current; deactivating via a date change is rejected --
        cancellation must be used instead.
        """
        if not date_start:
            raise SubscriptionAdjustmentError('Start date must be provided')
        if date_end is not None and date_start > date_end:
            raise SubscriptionAdjustmentError(
                "Can't have a subscription start after the end date."
            )
        self.raise_conflicting_dates(date_start, date_end)

        self.date_start = date_start
        self.date_end = date_end

        is_active_dates = is_active_subscription(self.date_start, self.date_end)
        if self.is_active != is_active_dates:
            if is_active_dates:
                self.is_active = True
                self.subscriber.activate_subscription(get_privileges(self.plan_version), self)
            else:
                raise SubscriptionAdjustmentError(
                    'Cannot deactivate a subscription here. Cancel subscription instead.'
                )
def _update_properties(self, **kwargs):
property_names = {
'do_not_invoice',
'no_invoice_reason',
'skip_invoicing_if_no_feature_charges',
'do_not_email_invoice',
'do_not_email_reminder',
'auto_generate_credits',
'salesforce_contract_id',
'service_type',
'pro_bono_status',
'funding_source',
'skip_auto_downgrade',
'skip_auto_downgrade_reason',
}
assert property_names >= set(kwargs.keys())
for property_name, property_value in kwargs.items():
if property_value is not None:
setattr(self, property_name, property_value)
    @transaction.atomic
    def change_plan(self, new_plan_version, date_end=None,
                    note=None, web_user=None, adjustment_method=None,
                    service_type=None, pro_bono_status=None, funding_source=None,
                    transfer_credits=True, internal_change=False, account=None,
                    do_not_invoice=None, no_invoice_reason=None,
                    auto_generate_credits=False, is_trial=False):
        """End this subscription today and start a replacement on
        new_plan_version; return the new Subscription.

        Applies privilege upgrades/downgrades, optionally transfers credits,
        and records adjustment history for both subscriptions. Runs inside a
        single transaction so any failure rolls the whole change back.
        """
        from corehq.apps.analytics.tasks import track_workflow
        adjustment_method = adjustment_method or SubscriptionAdjustmentMethod.INTERNAL
        today = datetime.date.today()
        assert self.is_active
        assert date_end is None or date_end >= today
        if new_plan_version.plan.at_max_domains() and self.plan_version.plan != new_plan_version.plan:
            raise SubscriptionAdjustmentError(
                'The maximum number of project spaces has been reached for %(new_plan_version)s. ' % {
                    'new_plan_version': new_plan_version,
                }
            )

        # Close out the current subscription as of today.
        self.date_end = today
        self.is_active = False
        self.save()

        new_subscription = Subscription(
            account=account if account else self.account,
            plan_version=new_plan_version,
            subscriber=self.subscriber,
            salesforce_contract_id=self.salesforce_contract_id,
            date_start=today,
            date_end=date_end,
            is_active=True,
            do_not_invoice=do_not_invoice if do_not_invoice is not None else self.do_not_invoice,
            no_invoice_reason=no_invoice_reason if no_invoice_reason is not None else self.no_invoice_reason,
            auto_generate_credits=auto_generate_credits,
            is_trial=is_trial,
            service_type=(service_type or SubscriptionType.NOT_SET),
            pro_bono_status=(pro_bono_status or ProBonoStatus.NO),
            funding_source=(funding_source or FundingSource.CLIENT),
            skip_auto_downgrade=False,
            skip_auto_downgrade_reason='',
        )
        new_subscription.save()
        # Validated after save; the surrounding atomic transaction rolls
        # everything back if a conflict is found.
        new_subscription.raise_conflicting_dates(new_subscription.date_start, new_subscription.date_end)
        new_subscription.set_billing_account_entry_point()

        # Apply privilege changes between the old and new plan versions.
        change_status_result = get_change_status(self.plan_version, new_plan_version)
        self.subscriber.change_subscription(
            downgraded_privileges=change_status_result.downgraded_privs,
            upgraded_privileges=change_status_result.upgraded_privs,
            new_plan_version=new_plan_version,
            old_subscription=self,
            new_subscription=new_subscription,
            internal_change=internal_change,
        )

        if transfer_credits:
            self.transfer_credits(new_subscription)

        SubscriptionAdjustment.record_adjustment(
            self, method=adjustment_method, note=note, web_user=web_user,
            reason=change_status_result.adjustment_reason, related_subscription=new_subscription
        )
        SubscriptionAdjustment.record_adjustment(
            new_subscription, method=adjustment_method, note=note, web_user=web_user,
            reason=SubscriptionAdjustmentReason.CREATE
        )

        # Analytics tracking for user-initiated plan changes only.
        upgrade_reasons = [SubscriptionAdjustmentReason.UPGRADE, SubscriptionAdjustmentReason.CREATE]
        if web_user and adjustment_method == SubscriptionAdjustmentMethod.USER:
            if change_status_result.adjustment_reason in upgrade_reasons:
                track_workflow(web_user, 'Changed Plan: Upgrade')
            if change_status_result.adjustment_reason == SubscriptionAdjustmentReason.DOWNGRADE:
                track_workflow(web_user, 'Changed Plan: Downgrade')
        return new_subscription
    def reactivate_subscription(self, date_end=None, note=None, web_user=None,
                                adjustment_method=None, **kwargs):
        """Re-activate this subscription, optionally with a new end date.

        Only attributes listed in allowed_attr_changes may be overridden via
        kwargs. Re-applies privileges and records a REACTIVATE adjustment.
        """
        adjustment_method = adjustment_method or SubscriptionAdjustmentMethod.INTERNAL
        self.date_end = date_end
        self.is_active = True
        for allowed_attr in self.allowed_attr_changes:
            if allowed_attr in kwargs:
                setattr(self, allowed_attr, kwargs[allowed_attr])
        self.save()
        self.subscriber.reactivate_subscription(
            new_plan_version=self.plan_version,
            subscription=self,
        )
        SubscriptionAdjustment.record_adjustment(
            self, reason=SubscriptionAdjustmentReason.REACTIVATE,
            method=adjustment_method, note=note, web_user=web_user,
        )
    def renew_subscription(self, note=None, web_user=None,
                           adjustment_method=None,
                           service_type=None, pro_bono_status=None,
                           funding_source=None, new_version=None):
        """Create and return the follow-on subscription starting at this
        one's end date.

        When new_version is not supplied, the cheapest self-service edition
        covering the current privileges is chosen. The renewal is activated
        immediately if it starts today. Records a RENEW adjustment.

        Raises SubscriptionRenewalError when there is no end date or no
        suitable plan version can be found.
        """
        adjustment_method = adjustment_method or SubscriptionAdjustmentMethod.INTERNAL
        if self.date_end is None:
            raise SubscriptionRenewalError(
                "Cannot renew a subscription with no date_end set."
            )
        if new_version is None:
            current_privileges = get_privileges(self.plan_version)
            new_version = DefaultProductPlan.get_lowest_edition(
                current_privileges, return_plan=True,
            )
        if new_version is None:
            raise SubscriptionRenewalError(
                "There was an issue renewing your subscription. Someone "
                "from Dimagi will get back to you shortly."
            )

        renewed_subscription = Subscription(
            account=self.account,
            plan_version=new_version,
            subscriber=self.subscriber,
            salesforce_contract_id=self.salesforce_contract_id,
            date_start=self.date_end,
            date_end=None,
        )
        # None values keep the model-field defaults.
        if service_type is not None:
            renewed_subscription.service_type = service_type
        if pro_bono_status is not None:
            renewed_subscription.pro_bono_status = pro_bono_status
        if funding_source is not None:
            renewed_subscription.funding_source = funding_source
        if datetime.date.today() == self.date_end:
            renewed_subscription.is_active = True
        renewed_subscription.save()

        SubscriptionAdjustment.record_adjustment(
            self, method=adjustment_method, note=note, web_user=web_user,
            reason=SubscriptionAdjustmentReason.RENEW,
        )
        return renewed_subscription
    def transfer_credits(self, subscription=None):
        """Move this subscription's credit lines to ``subscription`` (or to
        the account level when None), zeroing and deactivating the sources.

        Raises CreditLineError when the target subscription belongs to a
        different billing account.
        """
        if subscription is not None and self.account.pk != subscription.account.pk:
            raise CreditLineError(
                "Can only transfer subscription credits under the same "
                "Billing Account."
            )
        source_credits = CreditLine.objects.filter(
            account=self.account,
            subscription=self,
        ).all()
        for credit_line in source_credits:
            transferred_credit = CreditLine.add_credit(
                credit_line.balance,
                account=self.account,
                subscription=subscription,
                feature_type=credit_line.feature_type,
                is_product=credit_line.is_product,
                related_credit=credit_line
            )
            # Deactivate and zero the source line, linking it to the new one
            # for traceability.
            credit_line.is_active = False
            credit_line.adjust_credit_balance(
                credit_line.balance * Decimal('-1'),
                related_credit=transferred_credit,
            )
    def send_ending_reminder_email(self):
        """Email billing contacts that this subscription is about to end.

        BCCs the accounts mailbox (non-trial only) and the account's Dimagi
        contact when set. Raises SubscriptionReminderError when there is no
        end date.
        """
        if self.date_end is None:
            raise SubscriptionReminderError(
                "This subscription has no end date."
            )

        today = datetime.date.today()
        num_days_left = (self.date_end - today).days  # used for logging only

        domain_name = self.subscriber.domain
        context = self.ending_reminder_context
        subject = context['subject']
        template = self.ending_reminder_email_html
        template_plaintext = self.ending_reminder_email_text

        email_html = render_to_string(template, context)
        email_plaintext = render_to_string(template_plaintext, context)
        bcc = [settings.ACCOUNTS_EMAIL] if not self.is_trial else []
        if self.account.dimagi_contact is not None:
            bcc.append(self.account.dimagi_contact)
        for email in self._reminder_email_contacts(domain_name):
            send_html_email_async.delay(
                subject, email, email_html,
                text_content=email_plaintext,
                email_from=get_dimagi_from_email(),
                bcc=bcc,
            )
            log_accounting_info(
                "Sent %(days_left)s-day subscription reminder "
                "email for %(domain)s to %(email)s." % {
                    'days_left': num_days_left,
                    'domain': domain_name,
                    'email': email,
                }
            )
@property
def ending_reminder_email_html(self):
if self.account.is_customer_billing_account:
return 'accounting/email/customer_subscription_ending_reminder.html'
elif self.is_trial:
return 'accounting/email/trial_ending_reminder.html'
else:
return 'accounting/email/subscription_ending_reminder.html'
@property
def ending_reminder_email_text(self):
if self.account.is_customer_billing_account:
return 'accounting/email/customer_subscription_ending_reminder.txt'
elif self.is_trial:
return 'accounting/email/trial_ending_reminder.txt'
else:
return 'accounting/email/subscription_ending_reminder.txt'
@property
def ending_reminder_context(self):
from corehq.apps.domain.views.accounting import DomainSubscriptionView
today = datetime.date.today()
num_days_left = (self.date_end - today).days
if num_days_left == 1:
ending_on = _("tomorrow!")
else:
ending_on = _("on %s." % self.date_end.strftime(USER_DATE_FORMAT))
user_desc = self.plan_version.user_facing_description
plan_name = user_desc['name']
domain_name = self.subscriber.domain
context = {
'domain': domain_name,
'plan_name': plan_name,
'account': self.account.name,
'ending_on': ending_on,
'subscription_url': absolute_reverse(
DomainSubscriptionView.urlname, args=[self.subscriber.domain]),
'base_url': get_site_domain(),
'invoicing_contact_email': settings.INVOICING_CONTACT_EMAIL,
'sales_email': settings.SALES_EMAIL,
}
if self.account.is_customer_billing_account:
subject = _(
"CommCare Alert: %(account_name)s's subscription to "
"%(plan_name)s ends %(ending_on)s"
) % {
'account_name': self.account.name,
'plan_name': plan_name,
'ending_on': ending_on,
}
elif self.is_trial:
subject = _("CommCare Alert: 30 day trial for '%(domain)s' "
"ends %(ending_on)s") % {
'domain': domain_name,
'ending_on': ending_on,
}
else:
subject = _(
"CommCare Alert: %(domain)s's subscription to "
"%(plan_name)s ends %(ending_on)s"
) % {
'plan_name': plan_name,
'domain': domain_name,
'ending_on': ending_on,
}
context.update({'subject': subject})
return context
    def send_dimagi_ending_reminder_email(self):
        """Email the account's Dimagi contact that this subscription is ending.

        Raises SubscriptionReminderError when there is no end date or no
        Dimagi contact on the account.
        """
        if self.date_end is None:
            raise SubscriptionReminderError(
                "This subscription has no end date."
            )
        if self.account.dimagi_contact is None:
            raise SubscriptionReminderError(
                "This subscription has no Dimagi contact."
            )

        subject = self.dimagi_ending_reminder_subject
        context = self.dimagi_ending_reminder_context
        email_html = render_to_string(self.dimagi_ending_reminder_email_html, context)
        email_plaintext = render_to_string(self.dimagi_ending_reminder_email_text, context)
        send_html_email_async.delay(
            subject, self.account.dimagi_contact, email_html,
            text_content=email_plaintext,
            email_from=settings.DEFAULT_FROM_EMAIL,
        )
@property
def dimagi_ending_reminder_email_html(self):
if self.account.is_customer_billing_account:
return 'accounting/email/customer_subscription_ending_reminder_dimagi.html'
else:
return 'accounting/email/subscription_ending_reminder_dimagi.html'
@property
def dimagi_ending_reminder_email_text(self):
if self.account.is_customer_billing_account:
return 'accounting/email/customer_subscription_ending_reminder_dimagi.txt'
else:
return 'accounting/email/subscription_ending_reminder_dimagi.txt'
@property
def dimagi_ending_reminder_subject(self):
if self.account.is_customer_billing_account:
return "Alert: {account}'s subscriptions are ending on {end_date}".format(
account=self.account.name,
end_date=self.date_end.strftime(USER_DATE_FORMAT))
else:
return "Alert: {domain}'s subscription is ending on {end_date}".format(
domain=self.subscriber.domain,
end_date=self.date_end.strftime(USER_DATE_FORMAT))
@property
def dimagi_ending_reminder_context(self):
end_date = self.date_end.strftime(USER_DATE_FORMAT)
email = self.account.dimagi_contact
if self.account.is_customer_billing_account:
account = self.account.name
plan = self.plan_version.plan.edition
context = {
'account': account,
'plan': plan,
'end_date': end_date,
'client_reminder_email_date': (self.date_end - datetime.timedelta(days=30)).strftime(
USER_DATE_FORMAT),
'contacts': ', '.join(self._reminder_email_contacts(self.subscriber.domain)),
'dimagi_contact': email,
'accounts_email': settings.ACCOUNTS_EMAIL
}
else:
domain = self.subscriber.domain
context = {
'domain': domain,
'end_date': end_date,
'client_reminder_email_date': (self.date_end - datetime.timedelta(days=30)).strftime(
USER_DATE_FORMAT),
'contacts': ', '.join(self._reminder_email_contacts(domain)),
'dimagi_contact': email,
}
return context
def _reminder_email_contacts(self, domain_name):
emails = {a.username for a in WebUser.get_admins_by_domain(domain_name)}
emails |= {e for e in WebUser.get_dimagi_emails_by_domain(domain_name)}
if not self.is_trial:
billing_contact_emails = (
self.account.billingcontactinfo.email_list
if BillingContactInfo.objects.filter(account=self.account).exists() else []
)
if not billing_contact_emails:
from corehq.apps.accounting.views import ManageBillingAccountView
_soft_assert_contact_emails_missing(
False,
'Billing Account for project %s is missing client contact emails: %s' % (
domain_name,
absolute_reverse(ManageBillingAccountView.urlname, args=[self.account.id])
)
)
emails |= {billing_contact_email for billing_contact_email in billing_contact_emails}
if self.account.is_customer_billing_account:
enterprise_admin_emails = self.account.enterprise_admin_emails
emails |= {enterprise_admin_email for enterprise_admin_email in enterprise_admin_emails}
return emails
def set_billing_account_entry_point(self):
no_current_entry_point = self.account.entry_point == EntryPoint.NOT_SET
self_serve = self.service_type == SubscriptionType.PRODUCT
if no_current_entry_point and self_serve and not self.is_trial:
self.account.entry_point = EntryPoint.SELF_STARTED
self.account.save()
@classmethod
def get_active_subscription_by_domain(cls, domain_name_or_obj):
if settings.ENTERPRISE_MODE:
return None
if isinstance(domain_name_or_obj, Domain):
return cls._get_active_subscription_by_domain(domain_name_or_obj.name)
return cls._get_active_subscription_by_domain(domain_name_or_obj)
    @classmethod
    @quickcache(['domain_name'], timeout=60 * 60)  # cached per domain for one hour
    def _get_active_subscription_by_domain(cls, domain_name):
        """Look up the single active, visible subscription for ``domain_name``.

        Returns None when the domain has no active subscription.
        """
        try:
            # select_related avoids a second query when the caller reads
            # the plan version's role.
            return cls.visible_objects.select_related(
                'plan_version__role'
            ).get(
                is_active=True,
                subscriber__domain=domain_name,
            )
        except cls.DoesNotExist:
            return None
@classmethod
def get_subscribed_plan_by_domain(cls, domain):
domain_obj = ensure_domain_instance(domain)
if domain_obj is None:
try:
return DefaultProductPlan.get_default_plan_version()
except DefaultProductPlan.DoesNotExist:
raise ProductPlanNotFoundError
else:
active_subscription = cls.get_active_subscription_by_domain(domain_obj.name)
if active_subscription is not None:
return active_subscription.plan_version
else:
return DefaultProductPlan.get_default_plan_version()
    @classmethod
    def new_domain_subscription(cls, account, domain, plan_version,
                                date_start=None, date_end=None, note=None,
                                web_user=None, adjustment_method=None, internal_change=False,
                                **kwargs):
        """Create (or reactivate) a subscription for ``domain`` under ``account``.

        Validates plan capacity, customer-level consistency, and date overlap
        with existing subscriptions, then either reactivates the immediately
        preceding subscription or creates a new one. Raises
        NewSubscriptionError on any validation failure. Returns the
        Subscription that is now in effect.
        """
        # A plan may cap how many project spaces can subscribe to it.
        if plan_version.plan.at_max_domains():
            raise NewSubscriptionError(
                'The maximum number of project spaces has been reached for %(plan_version)s. ' % {
                    'plan_version': plan_version,
                }
            )
        # Customer-level plans may only be attached to customer-level billing
        # accounts, and regular plans only to regular accounts.
        if plan_version.plan.is_customer_software_plan != account.is_customer_billing_account:
            if plan_version.plan.is_customer_software_plan:
                raise NewSubscriptionError(
                    'You are trying to add a Customer Software Plan to a regular Billing Account. '
                    'Both or neither must be customer-level.'
                )
            else:
                raise NewSubscriptionError(
                    'You are trying to add a regular Software Plan to a Customer Billing Account. '
                    'Both or neither must be customer-level.'
                )
        subscriber = Subscriber.objects.get_or_create(domain=domain)[0]
        today = datetime.date.today()
        date_start = date_start or today
        available_subs = Subscription.visible_objects.filter(
            subscriber=subscriber,
        )
        # Reject overlap with an existing open-ended subscription that starts
        # before this one would end.
        future_subscription_no_end = available_subs.filter(
            date_end__exact=None,
        )
        if date_end is not None:
            future_subscription_no_end = future_subscription_no_end.filter(date_start__lt=date_end)
        if future_subscription_no_end.count() > 0:
            raise NewSubscriptionError(_(
                "There is already a subscription '%s' with no end date "
                "that conflicts with the start and end dates of this "
                "subscription.") %
                future_subscription_no_end.latest('date_created')
            )
        # Reject overlap with an existing ended subscription whose date range
        # intersects [date_start, date_end).
        future_subscriptions = available_subs.filter(
            date_end__gt=date_start
        )
        if date_end is not None:
            future_subscriptions = future_subscriptions.filter(date_start__lt=date_end)
        if future_subscriptions.count() > 0:
            raise NewSubscriptionError(str(
                _(
                    "There is already a subscription '%(sub)s' that has an end date "
                    "that conflicts with the start and end dates of this "
                    "subscription %(start)s - %(end)s."
                ) % {
                    'sub': future_subscriptions.latest('date_created'),
                    'start': date_start,
                    'end': date_end
                }
            ))
        # If the previous subscription ended exactly on date_start with the
        # same account and plan, just reactivate it instead of creating a
        # new row.
        can_reactivate, last_subscription = cls.can_reactivate_domain_subscription(
            account, domain, plan_version, date_start=date_start
        )
        if can_reactivate:
            last_subscription.reactivate_subscription(
                date_end=date_end, note=note, web_user=web_user,
                adjustment_method=adjustment_method,
                **kwargs
            )
            return last_subscription
        adjustment_method = adjustment_method or SubscriptionAdjustmentMethod.INTERNAL
        subscription = Subscription.visible_objects.create(
            account=account,
            plan_version=plan_version,
            subscriber=subscriber,
            date_start=date_start,
            date_end=date_end,
            **kwargs
        )
        subscription.is_active = is_active_subscription(date_start, date_end)
        if subscription.is_active:
            # Notify the subscriber so domain-level plan state is updated.
            subscriber.create_subscription(
                new_plan_version=plan_version,
                new_subscription=subscription,
                is_internal_change=internal_change,
            )
        # Record an audit-trail entry for the creation.
        SubscriptionAdjustment.record_adjustment(
            subscription, method=adjustment_method, note=note,
            web_user=web_user
        )
        subscription.save()
        subscription.set_billing_account_entry_point()
        return subscription
@classmethod
def can_reactivate_domain_subscription(cls, account, domain, plan_version,
date_start=None):
subscriber = Subscriber.objects.get_or_create(domain=domain)[0]
date_start = date_start or datetime.date.today()
last_subscription = Subscription.visible_objects.filter(
subscriber=subscriber, date_end=date_start
)
if not last_subscription.exists():
return False, None
last_subscription = last_subscription.latest('date_created')
return (
last_subscription.account.pk == account.pk and
last_subscription.plan_version.pk == plan_version.pk
), last_subscription
@property
def is_below_minimum_subscription(self):
if self.is_trial:
return False
elif self.date_start < datetime.date(2018, 9, 5):
return False
elif self.date_start + datetime.timedelta(days=MINIMUM_SUBSCRIPTION_LENGTH) >= datetime.date.today():
return True
else:
return False
def user_can_change_subscription(self, user):
if user.is_superuser:
return True
elif self.account.is_customer_billing_account:
return self.account.has_enterprise_admin(user.email)
else:
return True
class InvoiceBaseManager(models.Manager):
    """Default manager that excludes invoices flagged as hidden to ops."""

    def get_queryset(self):
        base_qs = super(InvoiceBaseManager, self).get_queryset()
        return base_qs.filter(is_hidden_to_ops=False)
class InvoiceBase(models.Model):
    """Abstract base shared by subscription, customer, and wire invoices."""
    date_created = models.DateTimeField(auto_now_add=True)
    # Hidden from client-facing statement lists (distinct from is_hidden_to_ops).
    is_hidden = models.BooleanField(default=False)
    tax_rate = models.DecimalField(default=Decimal('0.0000'), max_digits=10, decimal_places=4)
    balance = models.DecimalField(default=Decimal('0.0000'), max_digits=10, decimal_places=4)
    date_due = models.DateField(db_index=True, null=True)
    date_paid = models.DateField(blank=True, null=True)
    # Billing period covered by this invoice.
    date_start = models.DateField()
    date_end = models.DateField()
    # Hidden from ops tooling; filtered out by the default manager below.
    is_hidden_to_ops = models.BooleanField(default=False)
    last_modified = models.DateTimeField(auto_now=True)

    # Default manager excludes ops-hidden invoices; api_objects is unfiltered.
    objects = InvoiceBaseManager()
    api_objects = Manager()

    class Meta(object):
        abstract = True

    @property
    def is_customer_invoice(self):
        # Overridden to True by CustomerInvoice.
        return False

    @property
    def invoice_number(self):
        """User-facing invoice number derived from the primary key."""
        ops_num = settings.INVOICE_STARTING_NUMBER + self.id
        return "%s%d" % (settings.INVOICE_PREFIX, ops_num)

    @property
    def is_wire(self):
        # Overridden to True by WireInvoice.
        return False

    def get_domain(self):
        # Subclasses must say which domain (if any) the invoice belongs to.
        raise NotImplementedError()

    @property
    def account(self):
        # Subclasses must resolve their billing account.
        raise NotImplementedError()

    @property
    def is_paid(self):
        return bool(self.date_paid)

    @property
    def email_recipients(self):
        # Subclasses must say who receives the statement email.
        raise NotImplementedError
class WireInvoice(InvoiceBase):
    """Invoice for a wire-transfer (bulk) payment against a single domain."""
    domain = models.CharField(max_length=100)

    class Meta(object):
        app_label = 'accounting'

    @property
    @memoized
    def account(self):
        """Billing account resolved from this invoice's domain."""
        return BillingAccount.get_account_by_domain(self.domain)

    @property
    def subtotal(self):
        # Wire invoices carry no line items; the balance is the full amount.
        return self.balance

    @property
    def is_wire(self):
        return True

    @property
    def is_prepayment(self):
        return False

    def get_domain(self):
        return self.domain

    def get_total(self):
        return self.balance

    @property
    def email_recipients(self):
        """Addresses the most recent billing record for this invoice went to."""
        latest_record = WireBillingRecord.objects.filter(
            invoice=self
        ).order_by('-date_created').first()
        if latest_record is None:
            log_accounting_error(
                "Strange that WireInvoice %d has no associated WireBillingRecord. "
                "Should investigate."
                % self.id
            )
            return []
        return latest_record.emailed_to_list
class WirePrepaymentInvoice(WireInvoice):
    """Proxy of WireInvoice representing a prepayment (credit purchase)."""

    class Meta(object):
        app_label = 'accounting'
        proxy = True

    # NOTE(review): class-level mutable list shared across instances; callers
    # presumably assign a per-instance item list before rendering — confirm.
    items = []

    @property
    def is_prepayment(self):
        return True
class Invoice(InvoiceBase):
    """Monthly invoice for a single (non-customer-level) subscription."""
    subscription = models.ForeignKey(Subscription, on_delete=models.PROTECT)

    class Meta(object):
        app_label = 'accounting'

    def save(self, *args, **kwargs):
        from corehq.apps.accounting.mixins import get_overdue_invoice
        super(Invoice, self).save(*args, **kwargs)
        # Invalidate the cached overdue-invoice lookup for this domain so
        # overdue banners reflect the new state immediately.
        get_overdue_invoice.clear(self.subscription.subscriber.domain)

    @property
    def email_recipients(self):
        # Implementation (contracted) invoices go to the accounts team only;
        # all other invoices go to the client's billing contacts.
        if self.subscription.service_type == SubscriptionType.IMPLEMENTATION:
            return [settings.ACCOUNTS_EMAIL]
        else:
            return self.contact_emails

    @property
    def contact_emails(self):
        """Billing contact emails, falling back to domain admin addresses.

        When no billing contacts are on file, falls back to domain admins
        and (outside unit tests) soft-asserts so ops can fix the account.
        """
        try:
            billing_contact_info = BillingContactInfo.objects.get(account=self.account)
            contact_emails = billing_contact_info.email_list
        except BillingContactInfo.DoesNotExist:
            contact_emails = []
        if not contact_emails:
            from corehq.apps.accounting.views import ManageBillingAccountView
            admins = WebUser.get_admins_by_domain(self.get_domain())
            contact_emails = [admin.email if admin.email else admin.username for admin in admins]
            if not settings.UNIT_TESTING:
                _soft_assert_contact_emails_missing(
                    False,
                    "Could not find an email to send the invoice "
                    "email to for the domain %s. Sending to domain admins instead: %s."
                    " Add client contact emails here: %s" % (
                        self.get_domain(),
                        ', '.join(contact_emails),
                        absolute_reverse(ManageBillingAccountView.urlname, args=[self.account.id]),
                    )
                )
        return contact_emails

    @property
    def subtotal(self):
        """Sum of all line item totals (Decimal zero when there are none)."""
        if self.lineitem_set.count() == 0:
            return Decimal('0.0000')
        return sum([line_item.total for line_item in self.lineitem_set.all()])

    @property
    def applied_tax(self):
        # Quantized to four decimal places to match the balance fields.
        return Decimal('%.4f' % round(self.tax_rate * self.subtotal, 4))

    @property
    @memoized
    def account(self):
        return self.subscription.account

    @property
    def applied_credit(self):
        """Sum of all credit adjustment amounts applied to this invoice."""
        if self.creditadjustment_set.count() == 0:
            return Decimal('0.0000')
        return sum([credit.amount for credit in self.creditadjustment_set.all()])

    def get_total(self):
        return self.subtotal + self.applied_tax + self.applied_credit

    def update_balance(self):
        """Recompute the balance; mark paid (today) iff nothing is owed."""
        self.balance = self.get_total()
        if self.balance <= 0:
            self.date_paid = datetime.date.today()
        else:
            self.date_paid = None

    def calculate_credit_adjustments(self):
        """Apply credit lines first to each line item, then to the remaining
        invoice total."""
        for line_item in self.lineitem_set.all():
            line_item.calculate_credit_adjustments()
        current_total = self.get_total()
        credit_lines = CreditLine.get_credits_for_invoice(self)
        CreditLine.apply_credits_toward_balance(credit_lines, current_total, invoice=self)

    @classmethod
    def exists_for_domain(cls, domain):
        """Whether the domain has any visible (non-hidden) invoice."""
        return cls.objects.filter(
            subscription__subscriber__domain=domain, is_hidden=False
        ).count() > 0

    def get_domain(self):
        return self.subscription.subscriber.domain

    @classmethod
    def autopayable_invoices(cls, date_due):
        """Unhidden invoices due on ``date_due`` whose account has an
        autopay user configured."""
        invoices = cls.objects.select_related('subscription__account').filter(
            date_due=date_due,
            is_hidden=False,
            subscription__account__auto_pay_user__isnull=False,
        )
        return invoices

    def pay_invoice(self, payment_record):
        """Record a payment against this invoice and refresh its balance."""
        CreditLine.make_payment_towards_invoice(
            invoice=self,
            payment_record=payment_record,
        )
        self.update_balance()
        self.save()
class CustomerInvoice(InvoiceBase):
    """Invoice billed at the customer (billing account) level, covering one
    or more subscriptions under that account."""
    account = models.ForeignKey(BillingAccount, on_delete=models.PROTECT)
    # NOTE(review): ``default`` has no effect on a ManyToManyField — confirm
    # whether it was intended here.
    subscriptions = models.ManyToManyField(Subscription, default=list, blank=True)

    class Meta(object):
        app_label = 'accounting'

    @property
    def is_customer_invoice(self):
        return True

    def get_domain(self):
        # A customer invoice spans multiple domains, so none applies.
        return None

    @property
    def email_recipients(self):
        """Billing contact emails for the account (empty when none on file)."""
        try:
            billing_contact_info = BillingContactInfo.objects.get(account=self.account)
            contact_emails = billing_contact_info.email_list
        except BillingContactInfo.DoesNotExist:
            contact_emails = []
        return contact_emails

    @property
    def contact_emails(self):
        return self.account.enterprise_admin_emails

    @property
    def subtotal(self):
        """Sum of all line item totals (Decimal zero when there are none)."""
        if self.lineitem_set.count() == 0:
            return Decimal('0.0000')
        return sum([line_item.total for line_item in self.lineitem_set.all()])

    @property
    def applied_tax(self):
        # Quantized to four decimal places to match the balance fields.
        return Decimal('%.4f' % round(self.tax_rate * self.subtotal, 4))

    @property
    def applied_credit(self):
        """Sum of all credit adjustment amounts applied to this invoice."""
        if self.creditadjustment_set.count() == 0:
            return Decimal('0.0000')
        return sum([credit.amount for credit in self.creditadjustment_set.all()])

    def get_total(self):
        return self.subtotal + self.applied_tax + self.applied_credit

    def update_balance(self):
        """Recompute the balance; mark paid (today) iff nothing is owed."""
        self.balance = self.get_total()
        if self.balance <= 0:
            self.date_paid = datetime.date.today()
        else:
            self.date_paid = None

    def calculate_credit_adjustments(self):
        """Apply credit lines first to each line item, then to the remaining
        invoice total."""
        for line_item in self.lineitem_set.all():
            line_item.calculate_credit_adjustments()
        current_total = self.get_total()
        credit_lines = CreditLine.get_credits_for_customer_invoice(self)
        CreditLine.apply_credits_toward_balance(credit_lines, current_total, customer_invoice=self)

    def pay_invoice(self, payment_record):
        """Record a payment against this invoice and refresh its balance."""
        CreditLine.make_payment_towards_invoice(
            invoice=self,
            payment_record=payment_record,
        )
        self.update_balance()
        self.save()

    @classmethod
    def exists_for_domain(cls, domain):
        """Whether any visible customer invoice covers ``domain``.

        Bug fix: the previous implementation called ``.subscriptions`` on a
        QuerySet (not an invoice instance), raising AttributeError whenever
        a non-hidden customer invoice existed. A single related-field query
        replaces the broken loop.
        """
        return cls.objects.filter(
            subscriptions__subscriber__domain=domain,
            is_hidden=False,
        ).exists()

    @classmethod
    def autopayable_invoices(cls, date_due):
        """Unhidden invoices due on ``date_due`` whose account has an
        autopay user configured."""
        invoices = cls.objects.select_related('account').filter(
            date_due=date_due,
            is_hidden=False,
            account__auto_pay_user__isnull=False
        )
        return invoices
class SubscriptionAdjustment(models.Model):
    """Audit-trail entry snapshotting a subscription each time it changes."""
    subscription = models.ForeignKey(Subscription, on_delete=models.PROTECT)
    # Why the adjustment happened (create, modify, etc.).
    reason = models.CharField(max_length=50, default=SubscriptionAdjustmentReason.CREATE,
                              choices=SubscriptionAdjustmentReason.CHOICES)
    # How the adjustment was made (internal tooling, user action, etc.).
    method = models.CharField(max_length=50, default=SubscriptionAdjustmentMethod.INTERNAL,
                              choices=SubscriptionAdjustmentMethod.CHOICES)
    note = models.TextField(null=True)
    # Username of the web user who triggered the change, when applicable.
    web_user = models.CharField(max_length=80, null=True)
    invoice = models.ForeignKey(Invoice, on_delete=models.PROTECT, null=True)
    # A second subscription involved in the change (e.g. an upgrade target).
    related_subscription = models.ForeignKey(Subscription, on_delete=models.PROTECT, null=True,
                                             related_name='subscriptionadjustment_related')
    date_created = models.DateTimeField(auto_now_add=True)
    # Snapshot of the subscription's dates/contract id at adjustment time.
    new_date_start = models.DateField()
    new_date_end = models.DateField(blank=True, null=True)
    new_date_delay_invoicing = models.DateField(blank=True, null=True)
    new_salesforce_contract_id = models.CharField(blank=True, null=True, max_length=80)
    last_modified = models.DateTimeField(auto_now=True)

    class Meta(object):
        app_label = 'accounting'

    @classmethod
    def record_adjustment(cls, subscription, **kwargs):
        """Persist and return an adjustment snapshot of ``subscription``.

        Extra keyword arguments (reason, method, note, web_user, ...) are
        passed straight through to the model constructor.
        """
        adjustment = SubscriptionAdjustment(
            subscription=subscription,
            new_date_start=subscription.date_start,
            new_date_end=subscription.date_end,
            new_salesforce_contract_id=subscription.salesforce_contract_id,
            **kwargs
        )
        adjustment.save()
        return adjustment
class BillingRecordBase(models.Model):
    """Abstract record of a sent (or skipped) invoice statement: the
    generated PDF plus a log of the addresses it was emailed to."""
    date_created = models.DateTimeField(auto_now_add=True, db_index=True)
    # Every address this statement has been emailed to.
    emailed_to_list = ArrayField(models.EmailField(), default=list)
    # Set when the statement email was throttled rather than sent.
    skipped_email = models.BooleanField(default=False)
    # Identifier of the stored InvoicePdf document.
    pdf_data_id = models.CharField(max_length=48)
    last_modified = models.DateTimeField(auto_now=True)

    INVOICE_HTML_TEMPLATE = 'accounting/email/invoice.html'
    INVOICE_TEXT_TEMPLATE = 'accounting/email/invoice.txt'

    class Meta(object):
        abstract = True

    # Transient cache of a freshly generated InvoicePdf (see generate_record).
    _pdf = None

    @property
    def pdf(self):
        # NOTE(review): when _pdf is unset, the fetched document is not cached
        # back onto self._pdf, so each access re-fetches — confirm intended.
        if self._pdf is None:
            return InvoicePdf.get(self.pdf_data_id)
        return self._pdf

    @property
    def html_template(self):
        # Subclasses override to pick contracted/autopay variants.
        return self.INVOICE_HTML_TEMPLATE

    @property
    def text_template(self):
        # Subclasses override to pick contracted/autopay variants.
        return self.INVOICE_TEXT_TEMPLATE

    @property
    def should_send_email(self):
        # Each concrete record type decides when statements go out.
        raise NotImplementedError("should_send_email is required")

    @classmethod
    def generate_record(cls, invoice):
        """Create a record for ``invoice``, generating and storing its PDF."""
        record = cls(invoice=invoice)
        invoice_pdf = InvoicePdf()
        invoice_pdf.generate_pdf(record.invoice)
        record.pdf_data_id = invoice_pdf._id
        # Keep the in-memory pdf so the caller avoids a refetch.
        record._pdf = invoice_pdf
        record.save()
        return record

    def handle_throttled_email(self, contact_emails):
        """Mark this record as skipped, log it, and raise
        InvoiceEmailThrottledError (always raises)."""
        self.skipped_email = True
        month_name = self.invoice.date_start.strftime("%B")
        self.save()
        log_accounting_info(
            "Throttled billing statements for domain %(domain)s "
            "to %(emails)s." % {
                'domain': self.invoice.get_domain(),
                'emails': ', '.join(contact_emails),
            }
        )
        raise InvoiceEmailThrottledError(
            "Invoice communications exceeded the maximum limit of "
            "%(max_limit)d for domain %(domain)s for the month of "
            "%(month_name)s." % {
                'max_limit': MAX_INVOICE_COMMUNICATIONS,
                'domain': self.invoice.get_domain(),
                'month_name': month_name,
            })

    def email_context(self):
        """Base template context for the statement email; subclasses extend."""
        from corehq.apps.domain.views.accounting import DomainBillingStatementsView
        from corehq.apps.domain.views.settings import DefaultProjectSettingsView
        month_name = self.invoice.date_start.strftime("%B")
        domain = self.invoice.get_domain()
        context = {
            'month_name': month_name,
            'domain': domain,
            'domain_url': absolute_reverse(DefaultProjectSettingsView.urlname,
                                           args=[domain]),
            'statement_number': self.invoice.invoice_number,
            'payment_status': (_("Paid") if self.invoice.is_paid
                               else _("Payment Required")),
            'amount_due': fmt_dollar_amount(self.invoice.balance),
            'statements_url': absolute_reverse(
                DomainBillingStatementsView.urlname, args=[domain]),
            'invoicing_contact_email': settings.INVOICING_CONTACT_EMAIL,
            'accounts_email': settings.ACCOUNTS_EMAIL,
        }
        return context

    def email_subject(self):
        # Subclasses supply the statement subject line.
        raise NotImplementedError()

    def can_view_statement(self, web_user):
        # Subclasses decide whether the recipient may view the statement.
        raise NotImplementedError()

    def send_email(self, contact_email=None, cc_emails=None):
        """Render and asynchronously send the statement email (with PDF
        attached) to ``contact_email``, then log the recipients on self."""
        pdf_attachment = {
            'title': self.pdf.get_filename(self.invoice),
            'file_obj': BytesIO(self.pdf.get_data(self.invoice)),
            'mimetype': 'application/pdf',
        }
        domain = self.invoice.get_domain()
        subject = self.email_subject()
        context = self.email_context()
        email_from = self.email_from()
        greeting = _("Hello,")
        can_view_statement = False
        # Personalize the greeting when the address maps to a web user.
        web_user = WebUser.get_by_username(contact_email)
        if web_user is not None:
            if web_user.first_name:
                greeting = _("Dear %s,") % web_user.first_name
            can_view_statement = self.can_view_statement(web_user)
        context['greeting'] = greeting
        context['can_view_statement'] = can_view_statement
        email_html = render_to_string(self.html_template, context)
        email_plaintext = render_to_string(self.text_template, context)
        send_html_email_async.delay(
            subject, contact_email, email_html,
            text_content=email_plaintext,
            email_from=email_from,
            file_attachments=[pdf_attachment],
            cc=cc_emails
        )
        # Record everyone the statement went to (including CCs).
        self.emailed_to_list.extend([contact_email])
        if cc_emails:
            self.emailed_to_list.extend(cc_emails)
        self.save()
        if self.invoice.is_customer_invoice:
            log_message = "Sent billing statements for account %(account)s to %(emails)s." % {
                'account': self.invoice.account,
                'emails': contact_email,
            }
        else:
            log_message = "Sent billing statements for domain %(domain)s to %(emails)s." % {
                'domain': domain,
                'emails': contact_email,
            }
        log_accounting_info(log_message)
class WireBillingRecord(BillingRecordBase):
    """Billing record (PDF + email log) for a WireInvoice."""
    invoice = models.ForeignKey(WireInvoice, on_delete=models.PROTECT)

    INVOICE_HTML_TEMPLATE = 'accounting/email/wire_invoice.html'
    INVOICE_TEXT_TEMPLATE = 'accounting/email/wire_invoice.txt'

    class Meta(object):
        app_label = 'accounting'

    @property
    def should_send_email(self):
        # Wire statements go out unless the invoice is hidden.
        return not self.invoice.is_hidden

    @staticmethod
    def is_email_throttled():
        # Wire statement emails are never throttled.
        return False

    def email_subject(self):
        return "Your %(month)s Bulk Billing Statement for Project Space %(domain)s" % {
            'month': self.invoice.date_start.strftime("%B"),
            'domain': self.invoice.get_domain(),
        }

    @staticmethod
    def email_from():
        return "Dimagi Accounting <{email}>".format(email=settings.INVOICING_CONTACT_EMAIL)

    def can_view_statement(self, web_user):
        return web_user.is_domain_admin(self.invoice.get_domain())
class WirePrepaymentBillingRecord(WireBillingRecord):
    """Billing record proxy for prepayment wire invoices."""

    class Meta(object):
        app_label = 'accounting'
        proxy = True

    def email_subject(self):
        return _("Your prepayment invoice")

    def can_view_statement(self, web_user):
        domain = self.invoice.get_domain()
        return web_user.is_domain_admin(domain)
class BillingRecord(BillingRecordBase):
    """Billing record (PDF + email log) for a subscription Invoice.

    Improvement: the four ``_add_*_credits`` methods were near-identical
    copy-paste; their shared logic now lives in ``_add_credit_section`` and
    each public-facing method delegates with its section-specific filters.
    """
    invoice = models.ForeignKey(Invoice, on_delete=models.PROTECT)

    INVOICE_CONTRACTED_HTML_TEMPLATE = 'accounting/email/invoice_contracted.html'
    INVOICE_CONTRACTED_TEXT_TEMPLATE = 'accounting/email/invoice_contracted.txt'
    INVOICE_AUTOPAY_HTML_TEMPLATE = 'accounting/email/invoice_autopayment.html'
    INVOICE_AUTOPAY_TEXT_TEMPLATE = 'accounting/email/invoice_autopayment.txt'

    class Meta(object):
        app_label = 'accounting'

    @property
    def html_template(self):
        """HTML template, preferring contracted then autopay variants."""
        if self.invoice.subscription.service_type == SubscriptionType.IMPLEMENTATION:
            return self.INVOICE_CONTRACTED_HTML_TEMPLATE
        if self.invoice.subscription.account.auto_pay_enabled:
            return self.INVOICE_AUTOPAY_HTML_TEMPLATE
        return self.INVOICE_HTML_TEMPLATE

    @property
    def text_template(self):
        """Plaintext template, preferring contracted then autopay variants."""
        if self.invoice.subscription.service_type == SubscriptionType.IMPLEMENTATION:
            return self.INVOICE_CONTRACTED_TEXT_TEMPLATE
        if self.invoice.subscription.account.auto_pay_enabled:
            return self.INVOICE_AUTOPAY_TEXT_TEMPLATE
        return self.INVOICE_TEXT_TEMPLATE

    @property
    def should_send_email(self):
        """Suppress the email for auto-credited zero balances, small
        contracted invoices, hidden invoices, and opted-out subscriptions."""
        subscription = self.invoice.subscription
        autogenerate = (subscription.auto_generate_credits and not self.invoice.balance)
        small_contracted = (self.invoice.balance <= SMALL_INVOICE_THRESHOLD and
                            subscription.service_type == SubscriptionType.IMPLEMENTATION)
        hidden = self.invoice.is_hidden
        do_not_email_invoice = self.invoice.subscription.do_not_email_invoice
        return not (autogenerate or small_contracted or hidden or do_not_email_invoice)

    def is_email_throttled(self):
        """True when more than MAX_INVOICE_COMMUNICATIONS records already
        exist for this subscriber overlapping the invoice's start month."""
        month = self.invoice.date_start.month
        year = self.invoice.date_start.year
        date_start, date_end = get_first_last_days(year, month)
        return self.__class__.objects.filter(
            invoice__date_start__lte=date_end, invoice__date_end__gte=date_start,
            invoice__subscription__subscriber=self.invoice.subscription.subscriber,
            invoice__is_hidden_to_ops=False,
        ).count() > MAX_INVOICE_COMMUNICATIONS

    def email_context(self):
        """Extend the base context with plan, balance, autopay, contracted
        and credit details."""
        context = super(BillingRecord, self).email_context()
        # Outstanding balance across ALL visible invoices for the domain,
        # not just this one.
        total_balance = sum(invoice.balance for invoice in Invoice.objects.filter(
            is_hidden=False,
            subscription__subscriber__domain=self.invoice.get_domain(),
        ))
        is_small_invoice = self.invoice.balance < SMALL_INVOICE_THRESHOLD
        payment_status = (_("Paid")
                          if self.invoice.is_paid or total_balance == 0
                          else _("Payment Required"))
        context.update({
            'plan_name': self.invoice.subscription.plan_version.plan.name,
            'date_due': self.invoice.date_due,
            'is_small_invoice': is_small_invoice,
            'total_balance': total_balance,
            'is_total_balance_due': total_balance >= SMALL_INVOICE_THRESHOLD,
            'payment_status': payment_status,
        })
        if self.invoice.subscription.service_type == SubscriptionType.IMPLEMENTATION:
            from corehq.apps.accounting.dispatcher import AccountingAdminInterfaceDispatcher
            context.update({
                'salesforce_contract_id': self.invoice.subscription.salesforce_contract_id,
                'billing_account': self.invoice.subscription.account.name,
                'billing_contacts': self.invoice.contact_emails,
                'admin_invoices_url': "{url}?subscriber={domain}".format(
                    url=absolute_reverse(AccountingAdminInterfaceDispatcher.name(), args=['invoices']),
                    domain=self.invoice.get_domain()
                )
            })
        if self.invoice.subscription.account.auto_pay_enabled:
            try:
                last_4 = getattr(self.invoice.subscription.account.autopay_card, 'last4', None)
            except StripePaymentMethod.DoesNotExist:
                last_4 = None
            context.update({
                'auto_pay_user': self.invoice.subscription.account.auto_pay_user,
                'last_4': last_4,
            })
        # NOTE(review): the bound method (not its result) is placed in the
        # context; presumably the template engine invokes it — confirm.
        context.update({
            'credits': self.credits,
        })
        return context

    def credits(self):
        """Credit balances grouped by scope ('account'/'subscription') and
        credit type ('product'/'user'/'sms'/'general')."""
        credits = {
            'account': {},
            'subscription': {},
        }
        self._add_product_credits(credits)
        self._add_user_credits(credits)
        self._add_sms_credits(credits)
        self._add_general_credits(credits)
        return credits

    def _add_credit_section(self, credits, section, adjustment_filters, credit_kwargs):
        """Shared worker for the _add_*_credits methods: record subscription-
        and account-level balances for one credit type when a non-zero
        balance or a matching credit adjustment on this invoice exists."""
        credit_adjustments = CreditAdjustment.objects.filter(
            invoice=self.invoice,
            **adjustment_filters
        )
        subscription_credits = BillingRecord._get_total_balance(
            CreditLine.get_credits_by_subscription_and_features(
                self.invoice.subscription,
                **credit_kwargs
            )
        )
        if subscription_credits or credit_adjustments.filter(
            credit_line__subscription=self.invoice.subscription,
        ):
            credits['subscription'].update({
                section: {
                    'amount': quantize_accounting_decimal(subscription_credits),
                }
            })
        account_credits = BillingRecord._get_total_balance(
            CreditLine.get_credits_for_account(
                self.invoice.subscription.account,
                **credit_kwargs
            )
        )
        if account_credits or credit_adjustments.filter(
            credit_line__subscription=None,
        ):
            credits['account'].update({
                section: {
                    'amount': quantize_accounting_decimal(account_credits),
                }
            })
        return credits

    def _add_product_credits(self, credits):
        """Add product (software plan) credit balances."""
        return self._add_credit_section(
            credits, 'product',
            {'line_item__product_rate__isnull': False},
            {'is_product': True},
        )

    def _add_user_credits(self, credits):
        """Add user-feature credit balances."""
        return self._add_credit_section(
            credits, 'user',
            {'line_item__feature_rate__feature__feature_type': FeatureType.USER},
            {'feature_type': FeatureType.USER},
        )

    def _add_sms_credits(self, credits):
        """Add SMS-feature credit balances."""
        return self._add_credit_section(
            credits, 'sms',
            {'line_item__feature_rate__feature__feature_type': FeatureType.SMS},
            {'feature_type': FeatureType.SMS},
        )

    def _add_general_credits(self, credits):
        """Add general (any-purpose) credit balances."""
        return self._add_credit_section(
            credits, 'general',
            {'line_item__feature_rate': None, 'line_item__product_rate': None},
            {},
        )

    def email_subject(self):
        month_name = self.invoice.date_start.strftime("%B")
        return "Your %(month)s CommCare Billing Statement for Project Space %(domain)s" % {
            'month': month_name,
            'domain': self.invoice.subscription.subscriber.domain,
        }

    def email_from(self):
        return get_dimagi_from_email()

    @staticmethod
    def _get_total_balance(credit_lines):
        """Sum the balances of ``credit_lines`` (Decimal zero when empty)."""
        return (
            sum([credit_line.balance for credit_line in credit_lines])
            if credit_lines else Decimal('0.0')
        )

    def can_view_statement(self, web_user):
        return web_user.is_domain_admin(self.invoice.get_domain())
class CustomerBillingRecord(BillingRecordBase):
invoice = models.ForeignKey(CustomerInvoice, on_delete=models.PROTECT)
INVOICE_AUTOPAY_HTML_TEMPLATE = 'accounting/email/invoice_autopayment.html'
INVOICE_AUTOPAY_TEXT_TEMPLATE = 'accounting/email/invoice_autopayment.txt'
INVOICE_HTML_TEMPLATE = 'accounting/email/customer_invoice.html'
INVOICE_TEXT_TEMPLATE = 'accounting/email/customer_invoice.txt'
class Meta(object):
app_label = 'accounting'
@property
def html_template(self):
if self.invoice.account.auto_pay_enabled:
return self.INVOICE_AUTOPAY_HTML_TEMPLATE
return self.INVOICE_HTML_TEMPLATE
@property
def text_template(self):
if self.invoice.account.auto_pay_enabled:
return self.INVOICE_AUTOPAY_TEXT_TEMPLATE
return self.INVOICE_TEXT_TEMPLATE
@property
def should_send_email(self):
return not self.invoice.is_hidden
    def email_context(self):
        """Extend the base context with account, balance, autopay and credit
        details for the customer-level statement email."""
        from corehq.apps.accounting.views import EnterpriseBillingStatementsView
        context = super(CustomerBillingRecord, self).email_context()
        is_small_invoice = self.invoice.balance < SMALL_INVOICE_THRESHOLD
        payment_status = (_("Paid")
                          if self.invoice.is_paid or self.invoice.balance == 0
                          else _("Payment Required"))
        # NOTE(review): assumes the invoice has at least one subscription;
        # ``first()`` returns None otherwise and this would raise — confirm.
        domain = self.invoice.subscriptions.first().subscriber.domain
        context.update({
            'account_name': self.invoice.account.name,
            'date_due': self.invoice.date_due,
            'is_small_invoice': is_small_invoice,
            'total_balance': '{:.2f}'.format(self.invoice.balance),
            'is_total_balance_due': self.invoice.balance >= SMALL_INVOICE_THRESHOLD,
            'payment_status': payment_status,
            'statements_url': absolute_reverse(
                EnterpriseBillingStatementsView.urlname, args=[domain]),
        })
        if self.invoice.account.auto_pay_enabled:
            try:
                last_4 = getattr(self.invoice.account.autopay_card, 'last4', None)
            except StripePaymentMethod.DoesNotExist:
                last_4 = None
            context.update({
                'auto_pay_user': self.invoice.account.auto_pay_user,
                'last_4': last_4,
            })
        # NOTE(review): the bound method (not its result) is placed in the
        # context; presumably the template engine invokes it — confirm.
        context.update({
            'credits': self.credits,
        })
        return context
def credits(self):
credits = {
'account': {},
'subscription': {},
}
self._add_product_credits(credits)
self._add_user_credits(credits)
self._add_sms_credits(credits)
self._add_general_credits(credits)
return credits
def _add_product_credits(self, credits):
    """Record remaining software-plan (product) credit in *credits*.

    Subscription-level and account-level balances go under
    ``credits['subscription']['product']`` and
    ``credits['account']['product']`` respectively.  An entry is added
    when there is a balance, or when a credit adjustment on this invoice
    shows product credit was applied.
    """
    credit_adjustments = CreditAdjustment.objects.filter(
        customer_invoice=self.invoice,
        line_item__product_rate__isnull=False
    )
    subscription_credits = CustomerBillingRecord._get_total_balance(
        CreditLine.get_credits_for_subscriptions(
            self.invoice.subscriptions,
            is_product=True
        )
    )
    if subscription_credits or self._subscriptions_in_credit_adjustments(credit_adjustments):
        # Bug fix: this previously did ``credit_adjustments['subscription']
        # .update(...)`` -- indexing the queryset (a TypeError) instead of
        # updating the ``credits`` dict, so the subscription-level product
        # balance was never reported (compare _add_user_credits).
        credits['subscription'].update({
            'product': {
                'amount': quantize_accounting_decimal(subscription_credits)
            }
        })
    account_credits = CustomerBillingRecord._get_total_balance(
        CreditLine.get_credits_for_account(
            self.invoice.account,
            is_product=True
        )
    )
    if account_credits or credit_adjustments.filter(credit_line__subscription=None):
        credits['account'].update({
            'product': {
                'amount': quantize_accounting_decimal(account_credits)
            }
        })
    return credits
def _add_user_credits(self, credits):
    """Record remaining user-feature credit in *credits* under 'user'."""
    adjustments = CreditAdjustment.objects.filter(
        customer_invoice=self.invoice,
        line_item__feature_rate__feature__feature_type=FeatureType.USER
    )
    sub_balance = CustomerBillingRecord._get_total_balance(
        CreditLine.get_credits_for_subscriptions(
            self.invoice.subscriptions,
            feature_type=FeatureType.USER
        )
    )
    if sub_balance or self._subscriptions_in_credit_adjustments(adjustments):
        credits['subscription']['user'] = {
            'amount': quantize_accounting_decimal(sub_balance)
        }
    acct_balance = CustomerBillingRecord._get_total_balance(
        CreditLine.get_credits_for_account(
            self.invoice.account,
            feature_type=FeatureType.USER
        )
    )
    if acct_balance or adjustments.filter(credit_line__subscription=None):
        credits['account']['user'] = {
            'amount': quantize_accounting_decimal(acct_balance)
        }
    return credits
def _add_sms_credits(self, credits):
    """Record remaining SMS-feature credit in *credits* under 'sms'."""
    adjustments = CreditAdjustment.objects.filter(
        customer_invoice=self.invoice,
        line_item__feature_rate__feature__feature_type=FeatureType.SMS
    )
    sub_balance = CustomerBillingRecord._get_total_balance(
        CreditLine.get_credits_for_subscriptions(
            self.invoice.subscriptions,
            feature_type=FeatureType.SMS
        )
    )
    if sub_balance or self._subscriptions_in_credit_adjustments(adjustments):
        credits['subscription']['sms'] = {
            'amount': quantize_accounting_decimal(sub_balance)
        }
    acct_balance = CustomerBillingRecord._get_total_balance(
        CreditLine.get_credits_for_account(
            self.invoice.account,
            feature_type=FeatureType.SMS
        )
    )
    if acct_balance or adjustments.filter(credit_line__subscription=None):
        credits['account']['sms'] = {
            'amount': quantize_accounting_decimal(acct_balance)
        }
    return credits
def _add_general_credits(self, credits):
    """Record remaining any-purpose credit in *credits* under 'general'.

    General credit lines are the ones tied to neither a feature rate nor
    a product rate.
    """
    adjustments = CreditAdjustment.objects.filter(
        customer_invoice=self.invoice,
        line_item__feature_rate=None,
        line_item__product_rate=None
    )
    sub_balance = CustomerBillingRecord._get_total_balance(
        CreditLine.get_credits_for_subscriptions(
            self.invoice.subscriptions
        )
    )
    if sub_balance or self._subscriptions_in_credit_adjustments(adjustments):
        credits['subscription']['general'] = {
            'amount': quantize_accounting_decimal(sub_balance)
        }
    acct_balance = CustomerBillingRecord._get_total_balance(
        CreditLine.get_credits_for_account(
            self.invoice.account
        )
    )
    if acct_balance or adjustments.filter(credit_line__subscription=None):
        credits['account']['general'] = {
            'amount': quantize_accounting_decimal(acct_balance)
        }
    return credits
def _subscriptions_in_credit_adjustments(self, credit_adjustments):
for subscription in self.invoice.subscriptions.all():
if credit_adjustments.filter(
credit_line__subscription=subscription
):
return True
return False
def email_subject(self):
    """Subject line for the statement email, e.g.
    "Your March CommCare Billing Statement for Customer Account Acme"."""
    month = self.invoice.date_start.strftime("%B")
    account = self.invoice.account.name
    return ("Your {month} CommCare Billing Statement "
            "for Customer Account {account}").format(month=month,
                                                     account=account)
def email_from(self):
    """Sender address for the statement email (standard Dimagi from-address)."""
    return get_dimagi_from_email()
@staticmethod
def _get_total_balance(credit_lines):
return (
sum([credit_line.balance for credit_line in credit_lines])
if credit_lines else Decimal('0.0')
)
def can_view_statement(self, web_user):
    """True if *web_user* administers any domain billed on this invoice."""
    return any(
        web_user.is_domain_admin(subscription.subscriber.domain)
        for subscription in self.invoice.subscriptions.all()
    )
class InvoicePdf(BlobMixin, SafeSaveDocument):
    """Couch document that stores a rendered invoice PDF as a blob.

    ``generate_pdf`` renders an Invoice / CustomerInvoice / wire invoice
    into a PDF attachment on this document; ``get_data`` reads the bytes
    back out.
    """
    # Stringified primary key of the invoice this PDF was rendered for.
    invoice_id = StringProperty()
    date_created = DateTimeProperty()
    is_wire = BooleanProperty(default=False)
    is_customer = BooleanProperty(default=False)

    _blobdb_type_code = CODES.invoice

    def generate_pdf(self, invoice):
        """Render *invoice* to PDF and attach it to this document."""
        # Save first so the document has an id for the blob attachment.
        self.save()
        domain = invoice.get_domain()
        pdf_data = NamedTemporaryFile()
        account_name = ''
        if invoice.is_customer_invoice:
            account_name = invoice.account.name
        template = InvoiceTemplate(
            pdf_data.name,
            invoice_number=invoice.invoice_number,
            to_address=get_address_from_invoice(invoice),
            project_name=domain,
            invoice_date=invoice.date_created.date(),
            due_date=invoice.date_due,
            date_start=invoice.date_start,
            date_end=invoice.date_end,
            subtotal=invoice.subtotal,
            tax_rate=invoice.tax_rate,
            # Wire invoices have no applied_tax / applied_credit attrs,
            # hence the getattr defaults.
            applied_tax=getattr(invoice, 'applied_tax', Decimal('0.000')),
            applied_credit=getattr(invoice, 'applied_credit', Decimal('0.000')),
            total=invoice.get_total(),
            is_wire=invoice.is_wire,
            is_customer=invoice.is_customer_invoice,
            is_prepayment=invoice.is_wire and invoice.is_prepayment,
            account_name=account_name
        )
        if not invoice.is_wire:
            if invoice.is_customer_invoice:
                line_items = LineItem.objects.filter(customer_invoice=invoice)
            else:
                line_items = LineItem.objects.filter(subscription_invoice=invoice)
            for line_item in line_items:
                is_unit = line_item.unit_description is not None
                is_quarterly = line_item.invoice.is_customer_invoice and \
                    line_item.invoice.account.invoicing_plan != InvoicingPlan.MONTHLY
                unit_cost = line_item.subtotal
                if is_unit:
                    unit_cost = line_item.unit_cost
                if is_quarterly and line_item.base_description is not None:
                    # Non-monthly customer invoices show the monthly plan
                    # fee as the per-unit cost for plan-fee line items.
                    unit_cost = line_item.product_rate.monthly_fee
                description = line_item.base_description or line_item.unit_description
                if line_item.quantity > 0:
                    template.add_item(
                        description,
                        line_item.quantity if is_unit or is_quarterly else 1,
                        unit_cost,
                        line_item.subtotal,
                        line_item.applied_credit,
                        line_item.total
                    )
        if invoice.is_wire and invoice.is_prepayment:
            # Prepayment wire invoices list their items directly.
            unit_cost = 1
            applied_credit = 0
            for item in invoice.items:
                template.add_item(item['type'],
                                  item['amount'],
                                  unit_cost,
                                  item['amount'],
                                  applied_credit,
                                  item['amount'])
        template.get_pdf()
        filename = self.get_filename(invoice)
        blob_domain = domain or UNKNOWN_DOMAIN
        if not settings.UNIT_TESTING:
            self.put_attachment(pdf_data, filename, 'application/pdf', domain=blob_domain)
        else:
            # Tests attach an empty payload to keep fixtures small.
            self.put_attachment('', filename, 'application/pdf', domain=blob_domain)
        pdf_data.close()
        self.invoice_id = str(invoice.id)
        self.date_created = datetime.datetime.utcnow()
        self.is_wire = invoice.is_wire
        self.is_customer = invoice.is_customer_invoice
        self.save()

    @staticmethod
    def get_filename(invoice):
        """Attachment name derived from the statement's start year/month."""
        return "statement_%(year)d_%(month)d.pdf" % {
            'year': invoice.date_start.year,
            'month': invoice.date_start.month,
        }

    def get_data(self, invoice):
        """Return the raw PDF bytes previously attached for *invoice*."""
        with self.fetch_attachment(self.get_filename(invoice), stream=True) as fh:
            return fh.read()
class LineItemManager(models.Manager):
    """Query helpers distinguishing product vs feature line items."""

    def get_products(self):
        # Product line items are those without a feature rate.
        return self.get_queryset().filter(feature_rate__exact=None)

    def get_features(self):
        # Feature line items are those without a product rate.
        return self.get_queryset().filter(product_rate__exact=None)

    def get_feature_by_type(self, feature_type):
        # Narrow feature line items to a single FeatureType value.
        return self.get_queryset().filter(feature_rate__feature__feature_type=feature_type)
class LineItem(models.Model):
    """A single billed item on an Invoice or CustomerInvoice.

    One of ``subscription_invoice`` / ``customer_invoice`` is set (see the
    ``invoice`` property), and the item bills either a feature rate or a
    product rate.  Totals combine a flat ``base_cost`` with a per-unit
    ``unit_cost * quantity``.
    """
    subscription_invoice = models.ForeignKey(Invoice, on_delete=models.PROTECT, null=True)
    customer_invoice = models.ForeignKey(CustomerInvoice, on_delete=models.PROTECT, null=True)
    feature_rate = models.ForeignKey(FeatureRate, on_delete=models.PROTECT, null=True)
    product_rate = models.ForeignKey(SoftwareProductRate, on_delete=models.PROTECT, null=True)
    base_description = models.TextField(blank=True, null=True)
    base_cost = models.DecimalField(default=Decimal('0.0000'), max_digits=10, decimal_places=4)
    unit_description = models.TextField(blank=True, null=True)
    unit_cost = models.DecimalField(default=Decimal('0.0000'), max_digits=10, decimal_places=4)
    quantity = models.IntegerField(default=1, validators=integer_field_validators)
    last_modified = models.DateTimeField(auto_now=True)

    objects = LineItemManager()

    class Meta(object):
        app_label = 'accounting'

    @property
    def invoice(self):
        """Whichever invoice FK is populated (subscription or customer)."""
        if self.subscription_invoice:
            return self.subscription_invoice
        else:
            return self.customer_invoice

    @invoice.setter
    def invoice(self, invoice):
        # Route the assignment to the matching FK based on invoice type.
        if invoice.is_customer_invoice:
            self.customer_invoice = invoice
        else:
            self.subscription_invoice = invoice

    @property
    def subtotal(self):
        # Non-monthly customer invoices multiply the base fee by
        # ``quantity`` as well (quantity then spans the billed period).
        if self.customer_invoice and self.customer_invoice.account.invoicing_plan != InvoicingPlan.MONTHLY:
            return self.base_cost * self.quantity + self.unit_cost * self.quantity
        return self.base_cost + self.unit_cost * self.quantity

    @property
    def applied_credit(self):
        """Sum of credit adjustments recorded against this line item."""
        if self.creditadjustment_set.count() == 0:
            return Decimal('0.0000')
        return sum([credit.amount for credit in self.creditadjustment_set.all()])

    @property
    def total(self):
        # applied_credit is added, not subtracted: applied amounts are
        # recorded with their sign on the adjustment.
        return self.subtotal + self.applied_credit

    def calculate_credit_adjustments(self):
        """Apply any available credit lines toward this line item's total."""
        current_total = self.total
        credit_lines = CreditLine.get_credits_for_line_item(self)
        CreditLine.apply_credits_toward_balance(credit_lines, current_total, line_item=self)
class CreditLine(models.Model):
    """A pool of credit that can be applied toward invoices.

    A credit line belongs to a BillingAccount and optionally to one
    Subscription (account-level vs subscription-level credit).  It can be
    restricted to software-plan ("product") charges or to one feature
    type; with neither set it is general-purpose credit.  ``balance`` is
    only changed through ``adjust_credit_balance``, which records a
    CreditAdjustment for every change.
    """
    account = models.ForeignKey(BillingAccount, on_delete=models.PROTECT)
    subscription = models.ForeignKey(Subscription, on_delete=models.PROTECT, null=True, blank=True)
    is_product = models.BooleanField(default=False)
    feature_type = models.CharField(max_length=10, null=True, blank=True,
                                    choices=FeatureType.CHOICES)
    date_created = models.DateTimeField(auto_now_add=True)
    balance = models.DecimalField(default=Decimal('0.0000'), max_digits=10, decimal_places=4)
    is_active = models.BooleanField(default=True)
    last_modified = models.DateTimeField(auto_now=True)

    class Meta(object):
        app_label = 'accounting'

    def __str__(self):
        # Human-readable summary used in admin pages and error messages.
        credit_level = ("Account-Level" if self.subscription is None
                        else "Subscription-Level")
        return ("%(level)s credit [Account %(account_id)d]%(feature)s"
                "%(product)s, balance %(balance)s" % {
                    'level': credit_level,
                    'account_id': self.account.id,
                    'feature': (' for Feature %s' % self.feature_type
                                if self.feature_type is not None else ""),
                    'product': (' for Product'
                                if self.is_product else ""),
                    'balance': self.balance,
                })

    def save(self, *args, **kwargs):
        # Saving invalidates the cached "credits available" lookups for
        # the affected account/subscription.  Imported lazily --
        # presumably to avoid a circular import with the mixins module.
        from corehq.apps.accounting.mixins import (
            get_credits_available_for_product_in_account,
            get_credits_available_for_product_in_subscription,
        )
        super(CreditLine, self).save(*args, **kwargs)
        if self.account:
            get_credits_available_for_product_in_account.clear(self.account)
        if self.subscription:
            get_credits_available_for_product_in_subscription.clear(self.subscription)

    def adjust_credit_balance(self, amount, is_new=False, note=None,
                              line_item=None, invoice=None, customer_invoice=None,
                              payment_record=None, related_credit=None,
                              reason=None, web_user=None):
        """Change ``balance`` by *amount* and record a CreditAdjustment.

        The adjustment reason is derived from whichever related object is
        supplied (payment record > credit transfer > invoice > line
        item), defaulting to MANUAL.  Raises CreditLineError when both a
        line item and an invoice are supplied.
        """
        note = note or ""
        if line_item is not None and (invoice is not None or customer_invoice is not None):
            raise CreditLineError("You may only have an invoice OR a line item making this adjustment.")
        if reason is None:
            reason = CreditAdjustmentReason.MANUAL
        if payment_record is not None:
            reason = CreditAdjustmentReason.DIRECT_PAYMENT
        elif related_credit is not None:
            reason = CreditAdjustmentReason.TRANSFER
        elif invoice is not None:
            reason = CreditAdjustmentReason.INVOICE
        elif customer_invoice is not None:
            reason = CreditAdjustmentReason.INVOICE
        elif line_item is not None:
            reason = CreditAdjustmentReason.LINE_ITEM
        if is_new:
            note = "Initialization of credit line. %s" % note
        credit_adjustment = CreditAdjustment(
            credit_line=self,
            note=note,
            amount=amount,
            reason=reason,
            payment_record=payment_record,
            line_item=line_item,
            invoice=invoice,
            customer_invoice=customer_invoice,
            related_credit=related_credit,
            web_user=web_user,
        )
        credit_adjustment.save()
        # F() expression keeps the balance update race-safe; refresh to
        # load the value written by the database.
        self.balance = F('balance') + amount
        self.save()
        self.refresh_from_db()

    @classmethod
    def get_credits_for_line_item(cls, line_item):
        """Credit lines applicable to *line_item*.

        Dispatches on whether the item bills a product or a feature, and
        on whether it sits on a customer (multi-subscription) or a plain
        subscription invoice.
        """
        is_product = line_item.product_rate is not None
        feature_type = (
            line_item.feature_rate.feature.feature_type
            if line_item.feature_rate is not None else None
        )
        # A line item must bill exactly one of product / feature.
        assert is_product or feature_type
        assert not (is_product and feature_type)
        if line_item.invoice.is_customer_invoice:
            return cls.get_credits_for_line_item_in_customer_invoice(line_item, feature_type, is_product)
        else:
            return cls.get_credits_for_line_item_in_invoice(line_item, feature_type, is_product)

    @classmethod
    def get_credits_for_line_item_in_invoice(cls, line_item, feature_type, is_product):
        """Subscription-level then account-level credits for the item.

        Ordering matters: subscription credit is consumed before account
        credit by apply_credits_toward_balance.
        """
        if feature_type:
            return itertools.chain(
                cls.get_credits_by_subscription_and_features(
                    line_item.invoice.subscription,
                    feature_type=feature_type,
                ),
                cls.get_credits_for_account(
                    line_item.invoice.subscription.account,
                    feature_type=feature_type,
                )
            )
        if is_product:
            return itertools.chain(
                cls.get_credits_by_subscription_and_features(
                    line_item.invoice.subscription,
                    is_product=True,
                ),
                cls.get_credits_for_account(
                    line_item.invoice.subscription.account,
                    is_product=True,
                )
            )

    @classmethod
    def get_credits_for_line_item_in_customer_invoice(cls, line_item, feature_type, is_product):
        """Same as above, but across all subscriptions on a customer invoice."""
        if feature_type:
            return itertools.chain(
                cls.get_credits_for_subscriptions(
                    subscriptions=line_item.invoice.subscriptions.all(),
                    feature_type=feature_type
                ),
                cls.get_credits_for_account(
                    account=line_item.invoice.account,
                    feature_type=feature_type
                )
            )
        if is_product:
            return itertools.chain(
                cls.get_credits_for_subscriptions(
                    subscriptions=line_item.invoice.subscriptions.all(),
                    is_product=is_product
                ),
                cls.get_credits_for_account(
                    account=line_item.invoice.account,
                    is_product=is_product
                )
            )

    @classmethod
    def get_credits_for_invoice(cls, invoice):
        """All general credit lines applicable to a subscription invoice.

        Also considers the follow-up subscription's credit when it
        belongs to the same account.
        """
        relevant_credits = [
            cls.get_credits_by_subscription_and_features(invoice.subscription),
            cls.get_credits_for_account(invoice.subscription.account)
        ]
        if invoice.subscription.next_subscription:
            active_sub = Subscription.get_active_subscription_by_domain(
                invoice.subscription.subscriber.domain
            )
            if active_sub.account == invoice.subscription.account:
                relevant_credits.append(
                    cls.get_credits_by_subscription_and_features(active_sub)
                )
            elif (invoice.subscription.next_subscription.account
                  == invoice.subscription.account):
                relevant_credits.append(
                    cls.get_credits_by_subscription_and_features(
                        invoice.subscription.next_subscription
                    )
                )
        return itertools.chain(*relevant_credits)

    @classmethod
    def get_credits_for_customer_invoice(cls, invoice):
        """General credit lines applicable to a customer invoice."""
        return itertools.chain(
            cls.get_credits_for_subscriptions(invoice.subscriptions.all()),
            cls.get_credits_for_account(invoice.account)
        )

    @classmethod
    def get_credits_for_subscriptions(cls, subscriptions, feature_type=None, is_product=False):
        """Union of matching credit lines across *subscriptions*."""
        credit_list = cls.objects.none()
        for subscription in subscriptions.all():
            credit_list = credit_list.union(cls.get_credits_by_subscription_and_features(
                subscription,
                feature_type=feature_type,
                is_product=is_product
            ))
        return credit_list

    @classmethod
    def get_credits_for_account(cls, account, feature_type=None, is_product=False):
        """Active account-level (no subscription) credit lines for *account*."""
        assert not (feature_type and is_product)
        return cls.objects.filter(
            account=account, subscription__exact=None, is_active=True
        ).filter(
            is_product=is_product, feature_type__exact=feature_type
        ).all()

    @classmethod
    def get_credits_by_subscription_and_features(cls, subscription,
                                                 feature_type=None,
                                                 is_product=False):
        """Active credit lines on *subscription* matching the type filters."""
        assert not (feature_type and is_product)
        return cls.objects.filter(
            subscription=subscription,
            feature_type__exact=feature_type,
            is_product=is_product,
            is_active=True
        ).all()

    @classmethod
    def get_non_general_credits_by_subscription(cls, subscription):
        """Active product- or feature-specific credit lines on *subscription*."""
        return cls.objects.filter(subscription=subscription, is_active=True).filter(
            Q(is_product=True) |
            Q(feature_type__in=[f[0] for f in FeatureType.CHOICES])
        ).all()

    @classmethod
    def add_credit(cls, amount, account=None, subscription=None,
                   is_product=False, feature_type=None, payment_record=None,
                   invoice=None, customer_invoice=None, line_item=None, related_credit=None,
                   note=None, reason=None, web_user=None, permit_inactive=False):
        """Add *amount* to the matching credit line, creating it if absent.

        A scope is required (account, or subscription -- the account is
        then derived), and at most one of product/feature may be given.
        Raises CreditLineError for invalid combinations, inactive credit
        lines (unless *permit_inactive* or an invoice is being applied),
        or when the matching credit line is not unique.
        """
        if account is None and subscription is None:
            raise CreditLineError(
                "You must specify either a subscription "
                "or account to add this credit to."
            )
        if feature_type is not None and is_product:
            raise CreditLineError(
                "Can only add credit for a product OR a feature, but not both."
            )
        account = account or subscription.account
        try:
            credit_line = cls.objects.get(
                account__exact=account,
                subscription__exact=subscription,
                is_product=is_product,
                feature_type__exact=feature_type,
                is_active=True
            )
            if not permit_inactive and not credit_line.is_active and not invoice:
                raise CreditLineError(
                    "Could not add credit to CreditLine %s because it is "
                    "inactive." % str(credit_line)
                )
            is_new = False
        except cls.MultipleObjectsReturned as e:
            raise CreditLineError(
                "Could not find a unique credit line for %(account)s"
                "%(subscription)s%(feature)s%(product)s. %(error)s"
                "instead." % {
                    'account': "Account ID %d" % account.id,
                    'subscription': (" | Subscription ID %d" % subscription.id
                                     if subscription is not None else ""),
                    'feature': (" | Feature %s" % feature_type
                                if feature_type is not None else ""),
                    'product': (" | Product" if is_product else ""),
                    'error': str(e),
                }
            )
        except cls.DoesNotExist:
            credit_line = cls.objects.create(
                account=account,
                subscription=subscription,
                is_product=is_product,
                feature_type=feature_type,
            )
            is_new = True
        credit_line.adjust_credit_balance(amount, is_new=is_new, note=note,
                                          payment_record=payment_record,
                                          invoice=invoice, customer_invoice=customer_invoice, line_item=line_item,
                                          related_credit=related_credit,
                                          reason=reason, web_user=web_user)
        return credit_line

    @classmethod
    def apply_credits_toward_balance(cls, credit_lines, balance, **kwargs):
        """Consume *credit_lines* in order until *balance* reaches zero.

        Each consumed amount is recorded via adjust_credit_balance with
        a negative sign; *kwargs* are forwarded to it (e.g. line_item=).
        Raises CreditLineError if the running balance ever goes negative.
        """
        for credit_line in credit_lines:
            if balance == Decimal('0.0000'):
                return
            if balance <= Decimal('0.0000'):
                raise CreditLineError(
                    "A balance went below zero dollars when applying credits "
                    "to credit line %d." % credit_line.pk
                )
            # Never draw more than the line holds or than is still owed.
            adjustment_amount = min(credit_line.balance, balance)
            if adjustment_amount > Decimal('0.0000'):
                credit_line.adjust_credit_balance(-adjustment_amount, **kwargs)
                balance -= adjustment_amount

    @classmethod
    def make_payment_towards_invoice(cls, invoice, payment_record):
        """Book a direct payment against *invoice*.

        Recorded as two adjustments on the account-level general credit
        line: a positive credit for the payment, immediately consumed by
        a matching negative credit attributed to the invoice.
        """
        if invoice.is_customer_invoice:
            billing_account = invoice.account
        else:
            billing_account = invoice.subscription.account
        cls.add_credit(
            payment_record.amount,
            account=billing_account,
            payment_record=payment_record,
        )
        cls.add_credit(
            -payment_record.amount,
            account=billing_account,
            invoice=invoice,
        )
class PaymentMethod(models.Model):
    """A payment method registered by a web user.

    ``customer_id`` holds the identifier in the external payment gateway
    (currently Stripe; see the StripePaymentMethod proxy below).
    """
    web_user = models.CharField(max_length=80, db_index=True)
    method_type = models.CharField(max_length=50,
                                   default=PaymentMethodType.STRIPE,
                                   choices=PaymentMethodType.CHOICES,
                                   db_index=True)
    customer_id = models.CharField(max_length=255, null=True, blank=True)
    date_created = models.DateTimeField(auto_now_add=True)
    last_modified = models.DateTimeField(auto_now=True)

    class Meta(object):
        app_label = 'accounting'
        # One record per (user, gateway) pair.
        unique_together = ('web_user', 'method_type')
class StripePaymentMethod(PaymentMethod):
    """Proxy model implementing PaymentMethod against the Stripe API.

    Wraps lazy Stripe Customer creation, card management, and the
    per-billing-account "autopay" flag, which is stored in each card's
    Stripe metadata (see ``_auto_pay_card_metadata_key``).
    """

    class Meta(object):
        proxy = True
        app_label = 'accounting'

    # Errors callers may want to treat uniformly as "Stripe failed".
    STRIPE_GENERIC_ERROR = (stripe.error.AuthenticationError,
                            stripe.error.InvalidRequestError,
                            stripe.error.APIConnectionError,
                            stripe.error.StripeError,)

    @property
    def customer(self):
        """The Stripe Customer for this user, created on first access."""
        return self._get_or_create_stripe_customer()

    def _get_or_create_stripe_customer(self):
        customer = None
        if self.customer_id is not None:
            try:
                customer = self._get_stripe_customer()
            except stripe.error.InvalidRequestError:
                # Bug fix: this previously caught ``stripe.InvalidRequestError``,
                # which is not exposed at the package top level in the stripe
                # versions this module targets (every other reference here
                # uses ``stripe.error.*``), so a stale/deleted customer id
                # raised AttributeError instead of falling through to
                # recreate the customer.
                pass
        if customer is None:
            customer = self._create_stripe_customer()
        return customer

    def _create_stripe_customer(self):
        """Create a Stripe Customer and remember its id locally."""
        customer = stripe.Customer.create(
            description="{}'s cards".format(self.web_user),
            email=self.web_user,
        )
        self.customer_id = customer.id
        self.save()
        return customer

    def _get_stripe_customer(self):
        return stripe.Customer.retrieve(self.customer_id)

    @property
    def all_cards(self):
        """All cards on the Stripe customer ([] when Stripe is unconfigured)."""
        try:
            return [card for card in self.customer.cards.data if card is not None]
        except stripe.error.AuthenticationError:
            if not settings.STRIPE_PRIVATE_KEY:
                log_accounting_info("Private key is not defined in settings")
                return []
            else:
                raise

    def all_cards_serialized(self, billing_account):
        """Cards as plain dicts for the payment UI."""
        return [{
            'brand': card.brand,
            'last4': card.last4,
            'exp_month': card.exp_month,
            'exp_year': card.exp_year,
            'token': card.id,
            'is_autopay': self._is_autopay(card, billing_account),
        } for card in self.all_cards]

    def get_card(self, card_token):
        return self.customer.cards.retrieve(card_token)

    def get_autopay_card(self, billing_account):
        """The card flagged for autopay on *billing_account*, or None."""
        return next((
            card for card in self.all_cards
            if self._is_autopay(card, billing_account)
        ), None)

    def remove_card(self, card_token):
        """Delete a card, clearing any autopay users that point at it."""
        card = self.get_card(card_token)
        self._remove_card_from_all_accounts(card)
        card.delete()

    def _remove_card_from_all_accounts(self, card):
        accounts = BillingAccount.objects.filter(auto_pay_user=self.web_user)
        for account in accounts:
            if account.autopay_card == card:
                account.remove_autopay_user()

    def create_card(self, stripe_token, billing_account, domain, autopay=False):
        """Register a new card; it becomes the default and optionally autopay."""
        customer = self.customer
        card = customer.cards.create(card=stripe_token)
        self.set_default_card(card)
        if autopay:
            self.set_autopay(card, billing_account, domain)
        return card

    def set_default_card(self, card):
        self.customer.default_card = card
        self.customer.save()
        return card

    def set_autopay(self, card, billing_account, domain):
        """Make *card* the (single) autopay card for *billing_account*."""
        if billing_account.auto_pay_enabled:
            # Only one autopay card per account: unflag any existing one.
            self._remove_other_auto_pay_cards(billing_account)
        self._update_autopay_status(card, billing_account, autopay=True)
        billing_account.update_autopay_user(self.web_user, domain)

    def unset_autopay(self, card, billing_account):
        if self._is_autopay(card, billing_account):
            self._update_autopay_status(card, billing_account, autopay=False)
            billing_account.remove_autopay_user()

    def _update_autopay_status(self, card, billing_account, autopay):
        # Stored in the card's Stripe metadata so the flag travels with
        # the card rather than living in our database.
        metadata = card.metadata.copy()
        metadata.update({self._auto_pay_card_metadata_key(billing_account): autopay})
        card.metadata = metadata
        card.save()

    def _remove_autopay_card(self, billing_account):
        autopay_card = self.get_autopay_card(billing_account)
        if autopay_card is not None:
            self._update_autopay_status(autopay_card, billing_account, autopay=False)

    @staticmethod
    def _remove_other_auto_pay_cards(billing_account):
        user = billing_account.auto_pay_user
        try:
            other_payment_method = StripePaymentMethod.objects.get(web_user=user)
            other_payment_method._remove_autopay_card(billing_account)
        except StripePaymentMethod.DoesNotExist:
            pass

    @staticmethod
    def _is_autopay(card, billing_account):
        # Stripe stringifies metadata values, hence the 'True' comparison.
        return card.metadata.get(StripePaymentMethod._auto_pay_card_metadata_key(billing_account)) == 'True'

    @staticmethod
    def _auto_pay_card_metadata_key(billing_account):
        return 'auto_pay_{billing_account_id}'.format(billing_account_id=billing_account.id)

    def create_charge(self, card, amount_in_dollars, description):
        """Charge *card* and return the Stripe transaction id."""
        # Stripe bills in whole cents.
        amount_in_cents = int((amount_in_dollars * Decimal('100')).quantize(Decimal(10)))
        transaction_record = stripe.Charge.create(
            card=card,
            customer=self.customer,
            amount=amount_in_cents,
            currency=settings.DEFAULT_CURRENCY,
            description=description,
        )
        return transaction_record.id
class PaymentRecord(models.Model):
    """Record of a payment made through a PaymentMethod."""
    payment_method = models.ForeignKey(PaymentMethod, on_delete=models.PROTECT,
                                       db_index=True)
    date_created = models.DateTimeField(auto_now_add=True)
    # Gateway-assigned transaction identifier (e.g. a Stripe charge id).
    transaction_id = models.CharField(max_length=255, unique=True)
    amount = models.DecimalField(default=Decimal('0.0000'),
                                 max_digits=10, decimal_places=4)
    last_modified = models.DateTimeField(auto_now=True)

    class Meta(object):
        app_label = 'accounting'

    @property
    def public_transaction_id(self):
        # Customer-facing id, offset by the invoice numbering base so it
        # lines up with the invoice number scheme.
        ops_num = settings.INVOICE_STARTING_NUMBER + self.id
        return "%sP-%d" % (settings.INVOICE_PREFIX, ops_num)

    @classmethod
    def create_record(cls, payment_method, transaction_id, amount):
        """Persist a new payment record."""
        return cls.objects.create(
            payment_method=payment_method,
            transaction_id=transaction_id,
            amount=amount,
        )
class CreditAdjustment(ValidateModelMixin, models.Model):
    """Audit-trail entry for every change to a CreditLine's balance.

    Created exclusively by ``CreditLine.adjust_credit_balance``; the
    optional foreign keys identify what caused the adjustment.
    """
    credit_line = models.ForeignKey(CreditLine, on_delete=models.PROTECT)
    reason = models.CharField(max_length=25, default=CreditAdjustmentReason.MANUAL,
                              choices=CreditAdjustmentReason.CHOICES)
    note = models.TextField(blank=True)
    amount = models.DecimalField(default=Decimal('0.0000'), max_digits=10, decimal_places=4)
    line_item = models.ForeignKey(LineItem, on_delete=models.PROTECT, null=True, blank=True)
    invoice = models.ForeignKey(Invoice, on_delete=models.PROTECT, null=True, blank=True)
    customer_invoice = models.ForeignKey(CustomerInvoice, on_delete=models.PROTECT, null=True, blank=True)
    payment_record = models.ForeignKey(PaymentRecord,
                                       on_delete=models.PROTECT, null=True, blank=True)
    # Source credit line when the adjustment is a transfer.
    related_credit = models.ForeignKey(CreditLine, on_delete=models.PROTECT,
                                       null=True, blank=True, related_name='creditadjustment_related')
    date_created = models.DateTimeField(auto_now_add=True)
    web_user = models.CharField(max_length=80, null=True, blank=True)
    last_modified = models.DateTimeField(auto_now=True)

    class Meta(object):
        app_label = 'accounting'

    def clean(self):
        # NOTE(review): only guards line_item vs ``invoice``; the
        # line_item + ``customer_invoice`` combination is rejected
        # earlier, in CreditLine.adjust_credit_balance.
        if self.line_item and self.invoice:
            raise ValidationError(_("You can't specify both an invoice and a line item."))
class DomainUserHistory(models.Model):
    """Per-domain user-count snapshot for a given date.

    NOTE(review): assumed to feed per-user billing calculations --
    confirm against the task that populates this table.
    """
    domain = models.CharField(max_length=256)
    record_date = models.DateField()
    num_users = models.IntegerField(default=0)

    class Meta:
        # One snapshot per domain per day.
        unique_together = ('domain', 'record_date')
| true | true |
1c45b1afd70b4b9e6a45db74c842c66174c7a49a | 2,103 | py | Python | guild/main_bootstrap.py | wheatdog/guildai | 817cf179d0b6910d3d4fca522045a8139aef6c9e | [
"Apache-2.0"
] | null | null | null | guild/main_bootstrap.py | wheatdog/guildai | 817cf179d0b6910d3d4fca522045a8139aef6c9e | [
"Apache-2.0"
] | null | null | null | guild/main_bootstrap.py | wheatdog/guildai | 817cf179d0b6910d3d4fca522045a8139aef6c9e | [
"Apache-2.0"
] | null | null | null | # Copyright 2017-2020 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bootstraps env for guild.main.
The primary bootstrap task is to configure sys.path with the location
of Guild's external dependencies. This module supports two modes:
distribution and dev.
External dependencies in distribution mode are assumed to be located
in a single `GUILD_PKG_HOME/external` directory where `GUILD_PKG_HOME`
is the `guild` directory within the Guild distribution location.
As the bootstrap process is used for every Guild command, it must
execute as quickly as possible.
"""
from __future__ import absolute_import
from __future__ import division
import os
import sys
def main():
    """Entry point: make Guild's bundled dependencies importable, then
    delegate to ``guild.main`` (imported only after sys.path is set up)."""
    ensure_external_path()
    import guild.main
    guild.main.main()
def ensure_external_path():
    """Prepend Guild's bundled ``external`` directory to ``sys.path``.

    Idempotent: does nothing if the path is already present.
    """
    path = _external_libs_path()
    if path not in sys.path:
        sys.path.insert(0, path)
def _external_libs_path():
    """Return the absolute path of GUILD_PKG_HOME/external.

    If the directory does not exist (e.g. a source checkout that was
    never built), print a diagnostic to stderr and exit with status 1.
    """
    pkg_dir = os.path.dirname(__file__)
    external = os.path.abspath(os.path.join(pkg_dir, "external"))
    if os.path.exists(external):
        return external
    # Imported lazily: only needed on the error path.
    import textwrap
    sys.stderr.write("guild: {} does not exist\n".format(external))
    msg = (
        "If you're a Guild developer, run 'python setup.py build' "
        "in the Guild project directory and try again. Otherwise "
        "please report this as a bug at "
        "https://github.com/guildai/guildai/issues."
    )
    sys.stderr.write(textwrap.fill(msg))
    sys.stderr.write("\n")
    sys.exit(1)
if __name__ == "__main__":
main()
| 29.619718 | 75 | 0.701854 |
from __future__ import absolute_import
from __future__ import division
import os
import sys
def main():
ensure_external_path()
import guild.main
guild.main.main()
def ensure_external_path():
path = _external_libs_path()
if path not in sys.path:
sys.path.insert(0, path)
def _external_libs_path():
guild_pkg_dir = os.path.dirname(__file__)
path = os.path.abspath(os.path.join(guild_pkg_dir, "external"))
if not os.path.exists(path):
import textwrap
sys.stderr.write("guild: {} does not exist\n".format(path))
sys.stderr.write(
textwrap.fill(
"If you're a Guild developer, run 'python setup.py build' "
"in the Guild project directory and try again. Otherwise "
"please report this as a bug at "
"https://github.com/guildai/guildai/issues."
)
)
sys.stderr.write("\n")
sys.exit(1)
return path
if __name__ == "__main__":
main()
| true | true |
1c45b1e58aee7713ff142e357f97c11aaa11ed05 | 1,246 | py | Python | discord/utils.py | rf20008/nextcord | 48ae815f226e9f7f2f4076c68b6589563144d67b | [
"MIT"
] | null | null | null | discord/utils.py | rf20008/nextcord | 48ae815f226e9f7f2f4076c68b6589563144d67b | [
"MIT"
] | null | null | null | discord/utils.py | rf20008/nextcord | 48ae815f226e9f7f2f4076c68b6589563144d67b | [
"MIT"
] | null | null | null | """
The MIT License (MIT)
Copyright (c) 2021-present tag-epic
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
Module to allow for backwards compatibility for existing code and extensions
"""
from nextcord.utils import *
from nextcord.utils import MISSING, DISCORD_EPOCH
| 47.923077 | 76 | 0.804173 |
from nextcord.utils import *
from nextcord.utils import MISSING, DISCORD_EPOCH
| true | true |
1c45b20896b287eedc789388d42830cf74be6fa6 | 25,192 | py | Python | trac/admin/web_ui.py | mikiec84/trac | d51a7119b9fcb9061d7fe135c7d648fa671555dd | [
"BSD-3-Clause"
] | null | null | null | trac/admin/web_ui.py | mikiec84/trac | d51a7119b9fcb9061d7fe135c7d648fa671555dd | [
"BSD-3-Clause"
] | null | null | null | trac/admin/web_ui.py | mikiec84/trac | d51a7119b9fcb9061d7fe135c7d648fa671555dd | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2020 Edgewall Software
# Copyright (C) 2005 Jonas Borgström <jonas@edgewall.com>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at https://trac.edgewall.org/.
#
# Author: Jonas Borgström <jonas@edgewall.com>
import os
import pkg_resources
import re
import shutil
from functools import partial
from trac import log
from trac.admin.api import IAdminPanelProvider
from trac.core import *
from trac.loader import get_plugin_info
from trac.log import LOG_LEVELS, LOG_LEVEL_ALIASES, LOG_LEVEL_ALIASES_MAP
from trac.perm import IPermissionRequestor, PermissionExistsError, \
PermissionSystem
from trac.util.datefmt import all_timezones, pytz
from trac.util.html import tag
from trac.util.text import exception_to_unicode, unicode_from_base64, \
unicode_to_base64
from trac.util.translation import _, Locale, get_available_locales, \
ngettext, tag_
from trac.web.api import HTTPNotFound, IRequestHandler, \
is_valid_default_handler
from trac.web.chrome import Chrome, INavigationContributor, \
ITemplateProvider, add_notice, add_stylesheet, \
add_warning
from trac.wiki.formatter import format_to_html
# Set of acceptable user-supplied log level names: the canonical level
# names plus their recognized aliases.
_valid_log_levels = set(log.LOG_LEVELS) | set(log.LOG_LEVEL_ALIASES)
class AdminModule(Component):
    """Web administration interface provider and panel manager.

    Matches requests under ``/admin`` and dispatches them to the
    `IAdminPanelProvider` that registered the requested category/panel
    pair.
    """

    implements(INavigationContributor, IRequestHandler, ITemplateProvider)

    panel_providers = ExtensionPoint(IAdminPanelProvider)

    # INavigationContributor methods

    def get_active_navigation_item(self, req):
        return 'admin'

    def get_navigation_items(self, req):
        # The 'Admin' navigation item is only visible if at least one
        # admin panel is available
        panels, providers = self._get_panels(req)
        if panels:
            yield 'mainnav', 'admin', tag.a(_("Admin"), href=req.href.admin())

    # IRequestHandler methods

    def match_request(self, req):
        # Accept /admin, /admin/<cat>, /admin/<cat>/<panel> and
        # /admin/<cat>/<panel>/<path_info>; missing groups yield None.
        match = re.match('/admin(?:/([^/]+)(?:/([^/]+)(?:/(.+))?)?)?$',
                         req.path_info)
        if match:
            req.args['cat_id'] = match.group(1)
            req.args['panel_id'] = match.group(2)
            req.args['path_info'] = match.group(3)
            return True

    def process_request(self, req):
        """Dispatch to the provider of the requested admin panel.

        :raises HTTPNotFound: if no panels are available or the
                              requested category/panel is unknown.
        """
        panels, providers = self._get_panels(req)
        if not panels:
            raise HTTPNotFound(_("No administration panels available"))

        def _panel_order(panel):
            # Sort the 'general' category first, with its 'basics'
            # panel first within it; everything else in tuple order.
            items = panel[::2]
            return items[0] != 'general', items != ('general', 'basics'), items
        panels.sort(key=_panel_order)

        cat_id = req.args.get('cat_id') or panels[0][0]
        panel_id = req.args.get('panel_id')
        path_info = req.args.get('path_info')
        if not panel_id:
            # Default to the first panel of the requested category.
            # `next()` over a generator replaces the former
            # `filter(...)[0]`, which breaks on Python 3 where filter()
            # returns a non-subscriptable lazy iterator; the behavior
            # is identical on Python 2.
            try:
                panel_id = next(panel[2] for panel in panels
                                if panel[0] == cat_id)
            except StopIteration:
                raise HTTPNotFound(_("Unknown administration panel"))

        provider = providers.get((cat_id, panel_id))
        if not provider:
            raise HTTPNotFound(_("Unknown administration panel"))

        resp = provider.render_admin_panel(req, cat_id, panel_id, path_info)
        # Providers return at least (template, data); pass through any
        # extra elements unchanged.
        template, data = resp[:2]
        data.update({
            'active_cat': cat_id, 'active_panel': panel_id,
            'panel_href': partial(req.href, 'admin', cat_id, panel_id),
            'panels': [{
                'category': {'id': panel[0], 'label': panel[1]},
                'panel': {'id': panel[2], 'label': panel[3]}
            } for panel in panels]
        })
        add_stylesheet(req, 'common/css/admin.css')
        return resp

    # ITemplateProvider methods

    def get_htdocs_dirs(self):
        return []

    def get_templates_dirs(self):
        return [pkg_resources.resource_filename('trac.admin', 'templates')]

    # Internal methods

    def _get_panels(self, req):
        """Return a list of available admin panels and a mapping from
        (category id, panel id) to the providing component.
        """
        panels = []
        providers = {}
        for provider in self.panel_providers:
            p = list(provider.get_admin_panels(req) or [])
            for panel in p:
                providers[(panel[0], panel[2])] = provider
            panels += p
        return panels, providers
def _save_config(config, req, log, notices=None):
    """Persist `config` and report the outcome on `req`.

    On success, each message in `notices` (default: a generic
    "changes saved" message) is shown as a notice; on failure the error
    is logged and a warning is shown instead.
    """
    success_messages = [_("Your changes have been saved.")] \
                       if notices is None else notices
    try:
        config.save()
        for message in success_messages:
            add_notice(req, message)
    except Exception as e:
        log.error("Error writing to trac.ini: %s", exception_to_unicode(e))
        add_warning(req, _("Error writing to trac.ini, make sure it is "
                           "writable by the web server. Your changes have "
                           "not been saved."))
class BasicsAdminPanel(Component):
    """Admin panel for basic project settings and site-wide defaults."""
    implements(IAdminPanelProvider)
    # All registered request handlers; used to offer valid choices for
    # the [trac] default_handler option.
    request_handlers = ExtensionPoint(IRequestHandler)
    # IAdminPanelProvider methods
    def get_admin_panels(self, req):
        # The panel is only offered to users with TRAC_ADMIN.
        if 'TRAC_ADMIN' in req.perm('admin', 'general/basics'):
            yield ('general', _("General"), 'basics', _("Basic Settings"))
    def render_admin_panel(self, req, cat, page, path_info):
        """Render the settings form (GET) or write the submitted values
        to trac.ini and redirect back to the panel (POST).
        """
        # Only handlers flagged as valid default handlers are offered.
        valid_default_handlers = [handler.__class__.__name__
                                  for handler in self.request_handlers
                                  if is_valid_default_handler(handler)]
        if Locale:
            # Babel is available: offer the installed locales.
            locale_ids = get_available_locales()
            locales = [Locale.parse(locale) for locale in locale_ids]
            # don't use str(locale) to prevent storing expanded locale
            # identifier, see #11258
            languages = sorted((id, locale.display_name)
                               for id, locale in zip(locale_ids, locales))
        else:
            locale_ids, locales, languages = [], [], []
        if req.method == 'POST':
            for option in ('name', 'url', 'descr'):
                self.config.set('project', option, req.args.get(option))
            default_handler = req.args.get('default_handler')
            self.config.set('trac', 'default_handler', default_handler)
            # For the options below, an unrecognized submitted value is
            # replaced by '' (or the documented fallback), i.e. the
            # option's built-in default.
            default_timezone = req.args.get('default_timezone')
            if default_timezone not in all_timezones:
                default_timezone = ''
            self.config.set('trac', 'default_timezone', default_timezone)
            default_language = req.args.get('default_language')
            if default_language not in locale_ids:
                default_language = ''
            self.config.set('trac', 'default_language', default_language)
            default_date_format = req.args.get('default_date_format')
            if default_date_format != 'iso8601':
                default_date_format = ''
            self.config.set('trac', 'default_date_format',
                            default_date_format)
            default_dateinfo_format = req.args.get('default_dateinfo_format')
            if default_dateinfo_format not in ('relative', 'absolute'):
                default_dateinfo_format = 'relative'
            self.config.set('trac', 'default_dateinfo_format',
                            default_dateinfo_format)
            _save_config(self.config, req, self.log)
            req.redirect(req.href.admin(cat, page))
        # GET: read the current values back for display.
        default_handler = self.config.get('trac', 'default_handler')
        default_timezone = self.config.get('trac', 'default_timezone')
        default_language = self.config.get('trac', 'default_language')
        default_date_format = self.config.get('trac', 'default_date_format')
        default_dateinfo_format = self.config.get('trac',
                                                  'default_dateinfo_format')
        data = {
            'default_handler': default_handler,
            'valid_default_handlers': sorted(valid_default_handlers),
            'default_timezone': default_timezone,
            'timezones': all_timezones,
            'has_pytz': pytz is not None,
            # The config value may use '-' while locale ids use '_'.
            'default_language': default_language.replace('-', '_'),
            'languages': languages,
            'default_date_format': default_date_format,
            'default_dateinfo_format': default_dateinfo_format,
            'has_babel': Locale is not None,
        }
        Chrome(self.env).add_textarea_grips(req)
        return 'admin_basics.html', data
class LoggingAdminPanel(Component):
    """Admin panel for the environment's [logging] configuration.

    Lets an administrator choose the log destination (none, console,
    file, syslog or Windows event log), the log level and the log file
    name; valid submissions are written back to trac.ini.
    """

    implements(IAdminPanelProvider)

    # IAdminPanelProvider methods

    def get_admin_panels(self, req):
        if 'TRAC_ADMIN' in req.perm('admin', 'general/logging'):
            yield ('general', _("General"), 'logging', _("Logging"))

    def render_admin_panel(self, req, cat, page, path_info):
        """Render the logging form (GET) or validate and persist the
        submitted logging configuration (POST).

        :raises TracError: on an unknown log type or level, or when the
                           log file name is missing.
        """
        log_type = self.env.log_type
        log_level = self.env.log_level
        log_file = self.env.log_file
        log_dir = self.env.log_dir
        # Destination choices; platform-specific backends are disabled
        # on other platforms.
        log_types = [
            dict(name='none', label=_("None"),
                 selected=log_type == 'none', disabled=False),
            dict(name='stderr', label=_("Console"),
                 selected=log_type == 'stderr', disabled=False),
            dict(name='file', label=_("File"),
                 selected=log_type == 'file', disabled=False),
            dict(name='syslog', label=_("Syslog"),
                 selected=log_type in ('unix', 'syslog'),
                 disabled=os.name != 'posix'),
            dict(name='eventlog', label=_("Windows event log"),
                 selected=log_type in ('winlog', 'eventlog', 'nteventlog'),
                 disabled=os.name != 'nt'),
        ]
        if req.method == 'POST':
            changed = False
            new_type = req.args.get('log_type')
            if new_type not in [t['name'] for t in log_types]:
                raise TracError(
                    _("Unknown log type %(type)s", type=new_type),
                    _("Invalid log type")
                )
            new_file = req.args.get('log_file', log_file)
            if not new_file:
                raise TracError(_("You must specify a log file"),
                                _("Missing field"))
            new_level = req.args.get('log_level', log_level)
            if new_level not in _valid_log_levels:
                raise TracError(
                    _("Unknown log level %(level)s", level=new_level),
                    _("Invalid log level"))
            # Create logger to be sure the configuration is valid.
            new_file_path = new_file
            if not os.path.isabs(new_file_path):
                new_file_path = os.path.join(self.env.log_dir, new_file)
            try:
                logger, handler = \
                    self.env.create_logger(new_type, new_file_path, new_level,
                                           self.env.log_format)
            except Exception as e:
                add_warning(req,
                            tag_("Changes not saved. Logger configuration "
                                 "error: %(error)s. Inspect the log for more "
                                 "information.",
                                 error=tag.code(exception_to_unicode(e))))
                self.log.error("Logger configuration error: %s",
                               exception_to_unicode(e, traceback=True))
            else:
                handler.close()
                if new_type != log_type:
                    self.config.set('logging', 'log_type', new_type)
                    changed = True
                    log_type = new_type
                if new_level != log_level:
                    self.config.set('logging', 'log_level', new_level)
                    changed = True
                    log_level = new_level
                if new_file != log_file:
                    self.config.set('logging', 'log_file', new_file)
                    changed = True
                    log_file = new_file
                if changed:
                    # A stray trailing comma previously turned this call
                    # into a discarded 1-tuple expression; removed.
                    _save_config(self.config, req, self.log)
                req.redirect(req.href.admin(cat, page))
        # Order log levels by priority value, with aliases excluded.
        all_levels = sorted(log.LOG_LEVEL_MAP, key=log.LOG_LEVEL_MAP.get,
                            reverse=True)
        log_levels = [level for level in all_levels if level in log.LOG_LEVELS]
        # Show the canonical name when the configured level is an alias.
        log_level = LOG_LEVEL_ALIASES_MAP.get(log_level, log_level)
        data = {
            'type': log_type, 'types': log_types,
            'level': log_level, 'levels': log_levels,
            'file': log_file, 'dir': log_dir
        }
        return 'admin_logging.html', {'log': data}
class PermissionAdminPanel(Component):
    """Admin panel for granting, copying and revoking permissions."""
    implements(IAdminPanelProvider, IPermissionRequestor)
    # IPermissionRequestor methods
    def get_permission_actions(self):
        # PERMISSION_ADMIN is a meta-permission implying both actions.
        actions = ['PERMISSION_GRANT', 'PERMISSION_REVOKE']
        return actions + [('PERMISSION_ADMIN', actions)]
    # IAdminPanelProvider methods
    def get_admin_panels(self, req):
        # Visible to anyone who can either grant or revoke.
        perm = req.perm('admin', 'general/perm')
        if 'PERMISSION_GRANT' in perm or 'PERMISSION_REVOKE' in perm:
            yield ('general', _("General"), 'perm', _("Permissions"))
    def render_admin_panel(self, req, cat, page, path_info):
        """Handle the grant / add-to-group / copy / revoke POST actions
        and render the permissions table.

        Each successful POST branch redirects back to the panel; the
        grant and add-to-group branches fall through to re-render after
        posting a notice or warning.
        """
        perm = PermissionSystem(self.env)
        all_actions = perm.get_actions()
        if req.method == 'POST':
            subject = req.args.get('subject', '').strip()
            target = req.args.get('target', '').strip()
            action = req.args.get('action')
            group = req.args.get('group', '').strip()
            # Upper-cased names denote permission actions, so they may
            # not be used as subject, group or copy-target names.
            if subject and subject.isupper() or \
                    group and group.isupper() or \
                    target and target.isupper():
                raise TracError(_("All upper-cased tokens are reserved for "
                                  "permission names."))
            # Grant permission to subject
            if 'add' in req.args and subject and action:
                req.perm('admin', 'general/perm').require('PERMISSION_GRANT')
                if action not in all_actions:
                    raise TracError(_("Unknown action"))
                # One may only grant permissions one possesses.
                req.perm.require(action)
                try:
                    perm.grant_permission(subject, action)
                except TracError as e:
                    add_warning(req, e)
                else:
                    add_notice(req, _("The subject %(subject)s has been "
                                      "granted the permission %(action)s.",
                                      subject=subject, action=action))
            # Add subject to group
            elif 'add' in req.args and subject and group:
                req.perm('admin', 'general/perm').require('PERMISSION_GRANT')
                # Refuse if the group carries any permission the current
                # user does not itself possess.
                for action in sorted(
                        perm.get_user_permissions(group, expand_meta=False)):
                    req.perm.require(action,
                        message=tag_(
                            "The subject %(subject)s was not added to the "
                            "group %(group)s. The group has %(perm)s "
                            "permission and you cannot grant permissions you "
                            "don't possess.", subject=tag.strong(subject),
                            group=tag.strong(group), perm=tag.strong(action)))
                try:
                    perm.grant_permission(subject, group)
                except TracError as e:
                    add_warning(req, e)
                else:
                    add_notice(req, _("The subject %(subject)s has been "
                                      "added to the group %(group)s.",
                                      subject=subject, group=group))
            # Copy permissions to subject
            elif 'copy' in req.args and subject and target:
                req.perm('admin', 'general/perm').require('PERMISSION_GRANT')
                subject_permissions = perm.get_users_dict().get(subject, [])
                if not subject_permissions:
                    add_warning(req, _("The subject %(subject)s does not "
                                       "have any permissions.",
                                       subject=subject))
                for action in subject_permissions:
                    if action not in all_actions:  # plugin disabled?
                        self.log.warning("Skipped granting %s to %s: "
                                         "permission unavailable.",
                                         action, target)
                    else:
                        # Same rule as above: can't pass on permissions
                        # the current user lacks.
                        if action not in req.perm:
                            add_warning(req,
                                        _("The permission %(action)s was "
                                          "not granted to %(subject)s "
                                          "because users cannot grant "
                                          "permissions they don't possess.",
                                          action=action, subject=subject))
                            continue
                        try:
                            perm.grant_permission(target, action)
                        except PermissionExistsError:
                            # Already granted; silently skip.
                            pass
                        else:
                            add_notice(req, _("The subject %(subject)s has "
                                              "been granted the permission "
                                              "%(action)s.",
                                              subject=target, action=action))
                req.redirect(req.href.admin(cat, page))
            # Remove permissions action
            elif 'remove' in req.args and 'sel' in req.args:
                req.perm('admin', 'general/perm').require('PERMISSION_REVOKE')
                # Each selection encodes "subject:action" in base64 parts.
                for key in req.args.getlist('sel'):
                    subject, action = key.split(':', 1)
                    subject = unicode_from_base64(subject)
                    action = unicode_from_base64(action)
                    if (subject, action) in perm.get_all_permissions():
                        perm.revoke_permission(subject, action)
                add_notice(req, _("The selected permissions have been "
                                  "revoked."))
                req.redirect(req.href.admin(cat, page))
        return 'admin_perms.html', {
            'actions': all_actions,
            'allowed_actions': [a for a in all_actions if a in req.perm],
            'perms': perm.get_users_dict(),
            'groups': perm.get_groups_dict(),
            'unicode_to_base64': unicode_to_base64
        }
class PluginAdminPanel(Component):
    """Admin panel for installing, uninstalling and toggling plugins."""

    implements(IAdminPanelProvider)

    # IAdminPanelProvider methods

    def get_admin_panels(self, req):
        if 'TRAC_ADMIN' in req.perm('admin', 'general/plugin'):
            yield ('general', _("General"), 'plugin', _("Plugins"))

    def render_admin_panel(self, req, cat, page, path_info):
        """Dispatch POST actions (install/uninstall/update) and render
        the plugin list.
        """
        if req.method == 'POST':
            if 'install' in req.args:
                self._do_install(req)
            elif 'uninstall' in req.args:
                self._do_uninstall(req)
            else:
                self._do_update(req)
            anchor = ''
            if 'plugin' in req.args:
                # Jump back to the plugin entry that was acted upon.
                anchor = '#no%d' % (req.args.getint('plugin') + 1)
            req.redirect(req.href.admin(cat, page) + anchor)
        return self._render_view(req)

    # Internal methods

    def _do_install(self, req):
        """Install a plugin from an uploaded .egg or .py file.

        :raises TracError: if no file was uploaded, the file is not a
                           Python source file or egg, or a plugin with
                           the same name is already installed.
        """
        if 'plugin_file' not in req.args:
            raise TracError(_("No file uploaded"))
        upload = req.args['plugin_file']
        # A plain string (rather than a file upload object) or an empty
        # filename means no actual file part was submitted.
        if isinstance(upload, unicode) or not upload.filename:
            raise TracError(_("No file uploaded"))
        # Strip any client-side directory components (both separators).
        plugin_filename = upload.filename.replace('\\', '/').replace(':', '/')
        plugin_filename = os.path.basename(plugin_filename)
        if not plugin_filename:
            raise TracError(_("No file uploaded"))
        if not plugin_filename.endswith('.egg') and \
                not plugin_filename.endswith('.py'):
            raise TracError(_("Uploaded file is not a Python source file or "
                              "egg"))
        target_path = os.path.join(self.env.plugins_dir, plugin_filename)
        if os.path.isfile(target_path):
            raise TracError(_("Plugin %(name)s already installed",
                              name=plugin_filename))
        self.log.info("Installing plugin %s", plugin_filename)
        # Combine the open(2) flags with bitwise OR, the idiomatic
        # operator for flag bits (`+` happened to produce the same
        # value here because the bits are distinct, but is error-prone).
        flags = os.O_CREAT | os.O_WRONLY | os.O_EXCL
        try:
            flags |= os.O_BINARY
        except AttributeError:
            # os.O_BINARY not available on every platform
            pass
        with os.fdopen(os.open(target_path, flags, 0o666), 'w') as target_file:
            shutil.copyfileobj(upload.file, target_file)
        self.log.info("Plugin %s installed to %s", plugin_filename,
                      target_path)
        # TODO: Validate that the uploaded file is a valid Trac plugin
        # Make the environment reset itself on the next request
        self.env.config.touch()

    def _do_uninstall(self, req):
        """Uninstall a plugin by removing its file from the plugins dir.

        Silently does nothing when no filename was submitted or the
        file does not exist.
        """
        plugin_filename = req.args.get('plugin_filename')
        if not plugin_filename:
            return
        plugin_path = os.path.join(self.env.plugins_dir, plugin_filename)
        if not os.path.isfile(plugin_path):
            return
        self.log.info("Uninstalling plugin %s", plugin_filename)
        os.remove(plugin_path)
        # Make the environment reset itself on the next request
        self.env.config.touch()

    def _do_update(self, req):
        """Update component enable state from the submitted checkboxes."""
        components = req.args.getlist('component')
        enabled = req.args.getlist('enable')
        added, removed = [], []

        # FIXME: this needs to be more intelligent and minimize multiple
        # component names to prefix rules
        for component in components:
            is_enabled = bool(self.env.is_component_enabled(component))
            must_enable = component in enabled
            if is_enabled != must_enable:
                self.config.set('components', component,
                                'disabled' if is_enabled else 'enabled')
                self.log.info("%sabling component %s",
                              "Dis" if is_enabled else "En", component)
                if must_enable:
                    added.append(component)
                else:
                    removed.append(component)

        if added or removed:
            def make_list(items):
                # Render "name (module.*)" rows for the notice message.
                parts = [item.rsplit('.', 1) for item in items]
                return tag.table(tag.tbody(
                    tag.tr(tag.td(c, class_='trac-name'),
                           tag.td('(%s.*)' % m, class_='trac-name'))
                    for m, c in parts), class_='trac-pluglist')

            added.sort()
            removed.sort()
            notices = []
            if removed:
                msg = ngettext("The following component has been disabled:",
                               "The following components have been disabled:",
                               len(removed))
                notices.append(tag(msg, make_list(removed)))
            if added:
                msg = ngettext("The following component has been enabled:",
                               "The following components have been enabled:",
                               len(added))
                notices.append(tag(msg, make_list(added)))

            # set the default value of options for only the enabled components
            for component in added:
                self.config.set_defaults(component=component)
            _save_config(self.config, req, self.log, notices)

    def _render_view(self, req):
        """Build the template data for the plugin list view."""
        plugins = get_plugin_info(self.env, include_core=True)

        def safe_wiki_to_html(context, text):
            # Render component docs as wiki; fall back to preformatted
            # text if the formatter fails, so the page still renders.
            try:
                return format_to_html(self.env, context, text)
            except Exception as e:
                self.log.error("Unable to render component documentation: %s",
                               exception_to_unicode(e, traceback=True))
            return tag.pre(text)

        data = {
            'plugins': plugins, 'show': req.args.get('show'),
            'readonly': not os.access(self.env.plugins_dir,
                                      os.F_OK + os.W_OK),
            'safe_wiki_to_html': safe_wiki_to_html,
        }
        return 'admin_plugins.html', data
| 41.230769 | 79 | 0.551286 |
import os
import pkg_resources
import re
import shutil
from functools import partial
from trac import log
from trac.admin.api import IAdminPanelProvider
from trac.core import *
from trac.loader import get_plugin_info
from trac.log import LOG_LEVELS, LOG_LEVEL_ALIASES, LOG_LEVEL_ALIASES_MAP
from trac.perm import IPermissionRequestor, PermissionExistsError, \
PermissionSystem
from trac.util.datefmt import all_timezones, pytz
from trac.util.html import tag
from trac.util.text import exception_to_unicode, unicode_from_base64, \
unicode_to_base64
from trac.util.translation import _, Locale, get_available_locales, \
ngettext, tag_
from trac.web.api import HTTPNotFound, IRequestHandler, \
is_valid_default_handler
from trac.web.chrome import Chrome, INavigationContributor, \
ITemplateProvider, add_notice, add_stylesheet, \
add_warning
from trac.wiki.formatter import format_to_html
_valid_log_levels = set()
_valid_log_levels.update(log.LOG_LEVELS)
_valid_log_levels.update(log.LOG_LEVEL_ALIASES)
class AdminModule(Component):
implements(INavigationContributor, IRequestHandler, ITemplateProvider)
panel_providers = ExtensionPoint(IAdminPanelProvider)
def get_active_navigation_item(self, req):
return 'admin'
def get_navigation_items(self, req):
panels, providers = self._get_panels(req)
if panels:
yield 'mainnav', 'admin', tag.a(_("Admin"), href=req.href.admin())
def match_request(self, req):
match = re.match('/admin(?:/([^/]+)(?:/([^/]+)(?:/(.+))?)?)?$',
req.path_info)
if match:
req.args['cat_id'] = match.group(1)
req.args['panel_id'] = match.group(2)
req.args['path_info'] = match.group(3)
return True
def process_request(self, req):
panels, providers = self._get_panels(req)
if not panels:
raise HTTPNotFound(_("No administration panels available"))
def _panel_order(panel):
items = panel[::2]
return items[0] != 'general', items != ('general', 'basics'), items
panels.sort(key=_panel_order)
cat_id = req.args.get('cat_id') or panels[0][0]
panel_id = req.args.get('panel_id')
path_info = req.args.get('path_info')
if not panel_id:
try:
panel_id = \
filter(lambda panel: panel[0] == cat_id, panels)[0][2]
except IndexError:
raise HTTPNotFound(_("Unknown administration panel"))
provider = providers.get((cat_id, panel_id))
if not provider:
raise HTTPNotFound(_("Unknown administration panel"))
resp = provider.render_admin_panel(req, cat_id, panel_id, path_info)
template, data = resp[:2]
data.update({
'active_cat': cat_id, 'active_panel': panel_id,
'panel_href': partial(req.href, 'admin', cat_id, panel_id),
'panels': [{
'category': {'id': panel[0], 'label': panel[1]},
'panel': {'id': panel[2], 'label': panel[3]}
} for panel in panels]
})
add_stylesheet(req, 'common/css/admin.css')
return resp
def get_htdocs_dirs(self):
return []
def get_templates_dirs(self):
return [pkg_resources.resource_filename('trac.admin', 'templates')]
def _get_panels(self, req):
panels = []
providers = {}
for provider in self.panel_providers:
p = list(provider.get_admin_panels(req) or [])
for panel in p:
providers[(panel[0], panel[2])] = provider
panels += p
return panels, providers
def _save_config(config, req, log, notices=None):
try:
config.save()
if notices is None:
notices = [_("Your changes have been saved.")]
for notice in notices:
add_notice(req, notice)
except Exception as e:
log.error("Error writing to trac.ini: %s", exception_to_unicode(e))
add_warning(req, _("Error writing to trac.ini, make sure it is "
"writable by the web server. Your changes have "
"not been saved."))
class BasicsAdminPanel(Component):
implements(IAdminPanelProvider)
request_handlers = ExtensionPoint(IRequestHandler)
def get_admin_panels(self, req):
if 'TRAC_ADMIN' in req.perm('admin', 'general/basics'):
yield ('general', _("General"), 'basics', _("Basic Settings"))
def render_admin_panel(self, req, cat, page, path_info):
valid_default_handlers = [handler.__class__.__name__
for handler in self.request_handlers
if is_valid_default_handler(handler)]
if Locale:
locale_ids = get_available_locales()
locales = [Locale.parse(locale) for locale in locale_ids]
# identifier, see #11258
languages = sorted((id, locale.display_name)
for id, locale in zip(locale_ids, locales))
else:
locale_ids, locales, languages = [], [], []
if req.method == 'POST':
for option in ('name', 'url', 'descr'):
self.config.set('project', option, req.args.get(option))
default_handler = req.args.get('default_handler')
self.config.set('trac', 'default_handler', default_handler)
default_timezone = req.args.get('default_timezone')
if default_timezone not in all_timezones:
default_timezone = ''
self.config.set('trac', 'default_timezone', default_timezone)
default_language = req.args.get('default_language')
if default_language not in locale_ids:
default_language = ''
self.config.set('trac', 'default_language', default_language)
default_date_format = req.args.get('default_date_format')
if default_date_format != 'iso8601':
default_date_format = ''
self.config.set('trac', 'default_date_format',
default_date_format)
default_dateinfo_format = req.args.get('default_dateinfo_format')
if default_dateinfo_format not in ('relative', 'absolute'):
default_dateinfo_format = 'relative'
self.config.set('trac', 'default_dateinfo_format',
default_dateinfo_format)
_save_config(self.config, req, self.log)
req.redirect(req.href.admin(cat, page))
default_handler = self.config.get('trac', 'default_handler')
default_timezone = self.config.get('trac', 'default_timezone')
default_language = self.config.get('trac', 'default_language')
default_date_format = self.config.get('trac', 'default_date_format')
default_dateinfo_format = self.config.get('trac',
'default_dateinfo_format')
data = {
'default_handler': default_handler,
'valid_default_handlers': sorted(valid_default_handlers),
'default_timezone': default_timezone,
'timezones': all_timezones,
'has_pytz': pytz is not None,
'default_language': default_language.replace('-', '_'),
'languages': languages,
'default_date_format': default_date_format,
'default_dateinfo_format': default_dateinfo_format,
'has_babel': Locale is not None,
}
Chrome(self.env).add_textarea_grips(req)
return 'admin_basics.html', data
class LoggingAdminPanel(Component):
implements(IAdminPanelProvider)
# IAdminPanelProvider methods
def get_admin_panels(self, req):
if 'TRAC_ADMIN' in req.perm('admin', 'general/logging'):
yield ('general', _("General"), 'logging', _("Logging"))
def render_admin_panel(self, req, cat, page, path_info):
log_type = self.env.log_type
log_level = self.env.log_level
log_file = self.env.log_file
log_dir = self.env.log_dir
log_types = [
dict(name='none', label=_("None"),
selected=log_type == 'none', disabled=False),
dict(name='stderr', label=_("Console"),
selected=log_type == 'stderr', disabled=False),
dict(name='file', label=_("File"),
selected=log_type == 'file', disabled=False),
dict(name='syslog', label=_("Syslog"),
selected=log_type in ('unix', 'syslog'),
disabled=os.name != 'posix'),
dict(name='eventlog', label=_("Windows event log"),
selected=log_type in ('winlog', 'eventlog', 'nteventlog'),
disabled=os.name != 'nt'),
]
if req.method == 'POST':
changed = False
new_type = req.args.get('log_type')
if new_type not in [t['name'] for t in log_types]:
raise TracError(
_("Unknown log type %(type)s", type=new_type),
_("Invalid log type")
)
new_file = req.args.get('log_file', log_file)
if not new_file:
raise TracError(_("You must specify a log file"),
_("Missing field"))
new_level = req.args.get('log_level', log_level)
if new_level not in _valid_log_levels:
raise TracError(
_("Unknown log level %(level)s", level=new_level),
_("Invalid log level"))
# Create logger to be sure the configuration is valid.
new_file_path = new_file
if not os.path.isabs(new_file_path):
new_file_path = os.path.join(self.env.log_dir, new_file)
try:
logger, handler = \
self.env.create_logger(new_type, new_file_path, new_level,
self.env.log_format)
except Exception as e:
add_warning(req,
tag_("Changes not saved. Logger configuration "
"error: %(error)s. Inspect the log for more "
"information.",
error=tag.code(exception_to_unicode(e))))
self.log.error("Logger configuration error: %s",
exception_to_unicode(e, traceback=True))
else:
handler.close()
if new_type != log_type:
self.config.set('logging', 'log_type', new_type)
changed = True
log_type = new_type
if new_level != log_level:
self.config.set('logging', 'log_level', new_level)
changed = True
log_level = new_level
if new_file != log_file:
self.config.set('logging', 'log_file', new_file)
changed = True
log_file = new_file
if changed:
_save_config(self.config, req, self.log),
req.redirect(req.href.admin(cat, page))
# Order log levels by priority value, with aliases excluded.
all_levels = sorted(log.LOG_LEVEL_MAP, key=log.LOG_LEVEL_MAP.get,
reverse=True)
log_levels = [level for level in all_levels if level in log.LOG_LEVELS]
log_level = LOG_LEVEL_ALIASES_MAP.get(log_level, log_level)
data = {
'type': log_type, 'types': log_types,
'level': log_level, 'levels': log_levels,
'file': log_file, 'dir': log_dir
}
return 'admin_logging.html', {'log': data}
class PermissionAdminPanel(Component):
implements(IAdminPanelProvider, IPermissionRequestor)
# IPermissionRequestor methods
def get_permission_actions(self):
actions = ['PERMISSION_GRANT', 'PERMISSION_REVOKE']
return actions + [('PERMISSION_ADMIN', actions)]
# IAdminPanelProvider methods
def get_admin_panels(self, req):
perm = req.perm('admin', 'general/perm')
if 'PERMISSION_GRANT' in perm or 'PERMISSION_REVOKE' in perm:
yield ('general', _("General"), 'perm', _("Permissions"))
def render_admin_panel(self, req, cat, page, path_info):
perm = PermissionSystem(self.env)
all_actions = perm.get_actions()
if req.method == 'POST':
subject = req.args.get('subject', '').strip()
target = req.args.get('target', '').strip()
action = req.args.get('action')
group = req.args.get('group', '').strip()
if subject and subject.isupper() or \
group and group.isupper() or \
target and target.isupper():
raise TracError(_("All upper-cased tokens are reserved for "
"permission names."))
# Grant permission to subject
if 'add' in req.args and subject and action:
req.perm('admin', 'general/perm').require('PERMISSION_GRANT')
if action not in all_actions:
raise TracError(_("Unknown action"))
req.perm.require(action)
try:
perm.grant_permission(subject, action)
except TracError as e:
add_warning(req, e)
else:
add_notice(req, _("The subject %(subject)s has been "
"granted the permission %(action)s.",
subject=subject, action=action))
# Add subject to group
elif 'add' in req.args and subject and group:
req.perm('admin', 'general/perm').require('PERMISSION_GRANT')
for action in sorted(
perm.get_user_permissions(group, expand_meta=False)):
req.perm.require(action,
message=tag_(
"The subject %(subject)s was not added to the "
"group %(group)s. The group has %(perm)s "
"permission and you cannot grant permissions you "
"don't possess.", subject=tag.strong(subject),
group=tag.strong(group), perm=tag.strong(action)))
try:
perm.grant_permission(subject, group)
except TracError as e:
add_warning(req, e)
else:
add_notice(req, _("The subject %(subject)s has been "
"added to the group %(group)s.",
subject=subject, group=group))
elif 'copy' in req.args and subject and target:
req.perm('admin', 'general/perm').require('PERMISSION_GRANT')
subject_permissions = perm.get_users_dict().get(subject, [])
if not subject_permissions:
add_warning(req, _("The subject %(subject)s does not "
"have any permissions.",
subject=subject))
for action in subject_permissions:
if action not in all_actions:
self.log.warning("Skipped granting %s to %s: "
"permission unavailable.",
action, target)
else:
if action not in req.perm:
add_warning(req,
_("The permission %(action)s was "
"not granted to %(subject)s "
"because users cannot grant "
"permissions they don't possess.",
action=action, subject=subject))
continue
try:
perm.grant_permission(target, action)
except PermissionExistsError:
pass
else:
add_notice(req, _("The subject %(subject)s has "
"been granted the permission "
"%(action)s.",
subject=target, action=action))
req.redirect(req.href.admin(cat, page))
# Remove permissions action
elif 'remove' in req.args and 'sel' in req.args:
req.perm('admin', 'general/perm').require('PERMISSION_REVOKE')
for key in req.args.getlist('sel'):
subject, action = key.split(':', 1)
subject = unicode_from_base64(subject)
action = unicode_from_base64(action)
if (subject, action) in perm.get_all_permissions():
perm.revoke_permission(subject, action)
add_notice(req, _("The selected permissions have been "
"revoked."))
req.redirect(req.href.admin(cat, page))
return 'admin_perms.html', {
'actions': all_actions,
'allowed_actions': [a for a in all_actions if a in req.perm],
'perms': perm.get_users_dict(),
'groups': perm.get_groups_dict(),
'unicode_to_base64': unicode_to_base64
}
class PluginAdminPanel(Component):
    """Trac admin web panel ("General -> Plugins") for managing plugins.

    Supports uploading (installing) a plugin file, removing an installed
    plugin, and enabling/disabling individual components.
    """
    implements(IAdminPanelProvider)
    # IAdminPanelProvider methods
    def get_admin_panels(self, req):
        # Only users holding TRAC_ADMIN on this resource see the panel entry.
        if 'TRAC_ADMIN' in req.perm('admin', 'general/plugin'):
            yield ('general', _("General"), 'plugin', _("Plugins"))
    def render_admin_panel(self, req, cat, page, path_info):
        """Dispatch POST actions (install/uninstall/update), then render."""
        if req.method == 'POST':
            if 'install' in req.args:
                self._do_install(req)
            elif 'uninstall' in req.args:
                self._do_uninstall(req)
            else:
                self._do_update(req)
            anchor = ''
            if 'plugin' in req.args:
                # NOTE(review): the next line is truncated in this copy
                # (unterminated string literal) — the original most likely
                # built a '#...' page anchor that was lost when '#'-comments
                # were stripped from this file. Restore from upstream Trac.
                anchor = '
            req.redirect(req.href.admin(cat, page) + anchor)
        return self._render_view(req)
    # Internal methods
    def _do_install(self, req):
        """Install an uploaded plugin (.egg or single .py) into plugins_dir.

        Raises TracError when no file was uploaded, the filename is empty,
        the extension is unsupported, or the plugin already exists.
        """
        if 'plugin_file' not in req.args:
            raise TracError(_("No file uploaded"))
        upload = req.args['plugin_file']
        # NOTE(review): Python 2 code — `unicode` below means a plain string
        # value was submitted instead of a file upload object.
        if isinstance(upload, unicode) or not upload.filename:
            raise TracError(_("No file uploaded"))
        # Normalize Windows-style separators, then keep only the basename so
        # an uploaded path cannot escape plugins_dir.
        plugin_filename = upload.filename.replace('\\', '/').replace(':', '/')
        plugin_filename = os.path.basename(plugin_filename)
        if not plugin_filename:
            raise TracError(_("No file uploaded"))
        if not plugin_filename.endswith('.egg') and \
                not plugin_filename.endswith('.py'):
            raise TracError(_("Uploaded file is not a Python source file or "
                              "egg"))
        target_path = os.path.join(self.env.plugins_dir, plugin_filename)
        if os.path.isfile(target_path):
            raise TracError(_("Plugin %(name)s already installed",
                              name=plugin_filename))
        self.log.info("Installing plugin %s", plugin_filename)
        # O_EXCL makes creation fail rather than clobber a concurrent install.
        flags = os.O_CREAT + os.O_WRONLY + os.O_EXCL
        try:
            flags += os.O_BINARY
        except AttributeError:
            # OS_BINARY not available on every platform
            pass
        with os.fdopen(os.open(target_path, flags, 0o666), 'w') as target_file:
            shutil.copyfileobj(upload.file, target_file)
        self.log.info("Plugin %s installed to %s", plugin_filename,
                      target_path)
        # TODO: Validate that the uploaded file is a valid Trac plugin
        # Make the environment reset itself on the next request
        self.env.config.touch()
    def _do_uninstall(self, req):
        """Delete an installed plugin file; silently no-op when missing."""
        plugin_filename = req.args.get('plugin_filename')
        if not plugin_filename:
            return
        plugin_path = os.path.join(self.env.plugins_dir, plugin_filename)
        if not os.path.isfile(plugin_path):
            return
        self.log.info("Uninstalling plugin %s", plugin_filename)
        os.remove(plugin_path)
        # Make the environment reset itself on the next request
        self.env.config.touch()
    def _do_update(self, req):
        """Enable/disable components to match the submitted checkbox state."""
        components = req.args.getlist('component')
        enabled = req.args.getlist('enable')
        added, removed = [], []
        # FIXME: this needs to be more intelligent and minimize multiple
        # component names to prefix rules
        for component in components:
            is_enabled = bool(self.env.is_component_enabled(component))
            must_enable = component in enabled
            if is_enabled != must_enable:
                self.config.set('components', component,
                                'disabled' if is_enabled else 'enabled')
                self.log.info("%sabling component %s",
                              "Dis" if is_enabled else "En", component)
                if must_enable:
                    added.append(component)
                else:
                    removed.append(component)
        if added or removed:
            def make_list(items):
                # Render a "<class>, (<module>.*)" table for the notice text.
                parts = [item.rsplit('.', 1) for item in items]
                return tag.table(tag.tbody(
                    tag.tr(tag.td(c, class_='trac-name'),
                           tag.td('(%s.*)' % m, class_='trac-name'))
                    for m, c in parts), class_='trac-pluglist')
            added.sort()
            removed.sort()
            notices = []
            if removed:
                msg = ngettext("The following component has been disabled:",
                               "The following components have been disabled:",
                               len(removed))
                notices.append(tag(msg, make_list(removed)))
            if added:
                msg = ngettext("The following component has been enabled:",
                               "The following components have been enabled:",
                               len(added))
                notices.append(tag(msg, make_list(added)))
            # set the default value of options for only the enabled components
            for component in added:
                self.config.set_defaults(component=component)
        # NOTE(review): `notices` is only bound inside the `if added or
        # removed:` branch above, yet it is referenced here unconditionally —
        # compare against upstream Trac; blank-line fidelity of this copy is
        # uncertain.
        _save_config(self.config, req, self.log, notices)
    def _render_view(self, req):
        """Assemble the template data dict for the plugin listing page."""
        plugins = get_plugin_info(self.env, include_core=True)
        def safe_wiki_to_html(context, text):
            # Render wiki markup; fall back to preformatted raw text if the
            # formatter blows up, so one bad docstring can't break the page.
            try:
                return format_to_html(self.env, context, text)
            except Exception as e:
                self.log.error("Unable to render component documentation: %s",
                               exception_to_unicode(e, traceback=True))
                return tag.pre(text)
        data = {
            'plugins': plugins, 'show': req.args.get('show'),
            'readonly': not os.access(self.env.plugins_dir,
                                      os.F_OK + os.W_OK),
            'safe_wiki_to_html': safe_wiki_to_html,
        }
        return 'admin_plugins.html', data
| true | true |
1c45b215becc81148e7aeae262a82262f980a51d | 2,641 | py | Python | render.py | araistrick/camera_pan_renderer | 900c6c064ac7d2b460087a16be49204276679e04 | [
"BSD-3-Clause"
] | 2 | 2021-10-15T22:49:05.000Z | 2022-02-28T20:26:53.000Z | render.py | araistrick/camera_pan_renderer | 900c6c064ac7d2b460087a16be49204276679e04 | [
"BSD-3-Clause"
] | null | null | null | render.py | araistrick/camera_pan_renderer | 900c6c064ac7d2b460087a16be49204276679e04 | [
"BSD-3-Clause"
] | null | null | null | import os
import argparse
from pathlib import Path
import bpy
import numpy as np
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
def use_cuda():
    """Configure Blender's Cycles engine to render on the first CUDA device.

    Mutates global ``bpy`` preference/scene state (side effects only):
    enables CUDA compute, selects device 0, switches the scene to GPU
    rendering, and sets the render tile size to 128x128.
    """
    # Hoist the repeated preference lookup.
    cycles_prefs = bpy.context.preferences.addons["cycles"].preferences
    cycles_prefs.compute_device_type = "CUDA"
    print(cycles_prefs.get_devices())
    cycles_prefs.devices[0].use = True
    bpy.context.scene.cycles.device = "GPU"
    # BUG FIX: the original assigned tile_x twice; the second assignment was
    # clearly intended to size the other tile dimension (tile_y).
    bpy.context.scene.render.tile_x = 128
    bpy.context.scene.render.tile_y = 128
    print('Using GPU device:', cycles_prefs.devices[0])
def select_none():
    """Deselect every object in the current Blender file."""
    for scene_obj in bpy.data.objects:
        scene_obj.select_set(False)
def render_ply(args, ply_path):
    """Import one .ply mesh, apply the template material, render, clean up.

    Parameters
    ----------
    args : argparse.Namespace
        Parsed CLI options; uses override_ply_euler (degrees),
        template_material_name and output_folder.
    ply_path : pathlib.Path
        Path to the .ply file to render. Frames are written under
        ``<output_folder>/<ply_id>/``.
    """
    ply_name = ply_path.parts[-1]
    # assumes filenames look like "<prefix>_<id...>.ply" — TODO confirm
    ply_id = '_'.join(list(ply_name.split('_'))[1:])
    # import the requisite ply
    select_none()
    print(f"Importing {ply_path}")
    bpy.ops.import_mesh.ply(filepath=str(ply_path))
    imported_ply = bpy.context.selected_objects[0]
    # rotate it correctly
    imported_ply.rotation_euler = np.radians(np.array(args.override_ply_euler))
    # make it colored according to vertex colors
    # raises StopIteration if the template material is missing from the .blend
    material = next(m for m in bpy.data.materials if m.name == args.template_material_name)
    if imported_ply.data.materials:
        imported_ply.data.materials[0] = material
    else:
        imported_ply.data.materials.append(material)
    # configure render output location
    outpath = Path(args.output_folder)/ply_id
    outpath.mkdir(exist_ok=True, parents=True)
    bpy.context.scene.render.filepath = str(outpath) + '/'
    bpy.ops.render.render(animation=True, write_still=True)
    # clean up
    select_none()
    imported_ply.select_set(True)
    bpy.ops.object.delete()
def main():
    """CLI entry point: render every .ply in the input folder."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('input_folder', type=str)
    arg_parser.add_argument('output_folder', type=str)
    arg_parser.add_argument('--template_file', type=str, default='template.blend')
    arg_parser.add_argument('--override_ply_euler', type=int, nargs='+', default=[90, 0, 0])
    arg_parser.add_argument('--template_material_name', type=str, default='vertex color')
    arg_parser.add_argument('--cuda', action='store_true')
    args = arg_parser.parse_args()

    # Load the template scene first so its materials/camera are available.
    bpy.ops.wm.open_mainfile(filepath=args.template_file)
    if args.cuda:
        use_cuda()

    input_paths = list(Path(args.input_folder).glob('*.ply'))
    print(f"Starting processing of {len(input_paths)} .plys from {args.input_folder}")
    for ply_path in input_paths:
        render_ply(args, ply_path)
if __name__ == '__main__':
main() | 33.43038 | 95 | 0.710716 | import os
import argparse
from pathlib import Path
import bpy
import numpy as np
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
def use_cuda():
bpy.context.preferences.addons["cycles"].preferences.compute_device_type = "CUDA"
print(bpy.context.preferences.addons["cycles"].preferences.get_devices())
bpy.context.preferences.addons["cycles"].preferences.devices[0].use = True
bpy.context.scene.cycles.device = "GPU"
bpy.context.scene.render.tile_x = 128
bpy.context.scene.render.tile_x = 128
print('Using GPU device:', bpy.context.preferences.addons["cycles"].preferences.devices[0])
def select_none():
for obj in bpy.data.objects:
obj.select_set(False)
def render_ply(args, ply_path):
ply_name = ply_path.parts[-1]
ply_id = '_'.join(list(ply_name.split('_'))[1:])
select_none()
print(f"Importing {ply_path}")
bpy.ops.import_mesh.ply(filepath=str(ply_path))
imported_ply = bpy.context.selected_objects[0]
imported_ply.rotation_euler = np.radians(np.array(args.override_ply_euler))
material = next(m for m in bpy.data.materials if m.name == args.template_material_name)
if imported_ply.data.materials:
imported_ply.data.materials[0] = material
else:
imported_ply.data.materials.append(material)
outpath = Path(args.output_folder)/ply_id
outpath.mkdir(exist_ok=True, parents=True)
bpy.context.scene.render.filepath = str(outpath) + '/'
bpy.ops.render.render(animation=True, write_still=True)
select_none()
imported_ply.select_set(True)
bpy.ops.object.delete()
def main():
parser = argparse.ArgumentParser()
parser.add_argument('input_folder', type=str)
parser.add_argument('output_folder', type=str)
parser.add_argument('--template_file', type=str, default='template.blend')
parser.add_argument('--override_ply_euler', type=int, nargs='+', default=[90, 0, 0])
parser.add_argument('--template_material_name', type=str, default='vertex color')
parser.add_argument('--cuda', action='store_true')
args = parser.parse_args()
bpy.ops.wm.open_mainfile(filepath=args.template_file)
if args.cuda:
use_cuda()
input_paths = list(Path(args.input_folder).glob('*.ply'))
print(f"Starting processing of {len(input_paths)} .plys from {args.input_folder}")
for ply_path in input_paths:
render_ply(args, ply_path)
if __name__ == '__main__':
main() | true | true |
1c45b269ee0360c0a0e853445b9985838bcb82f4 | 1,210 | py | Python | examples/tutorials/pong/steps/step4/main.py | xinmingzhang/kivy | 86b6e19d8a02788fe8850b690bcecdff848f3c4e | [
"MIT"
] | 9 | 2016-09-03T07:20:01.000Z | 2020-05-21T14:44:48.000Z | examples/tutorials/pong/steps/step4/main.py | xinmingzhang/kivy | 86b6e19d8a02788fe8850b690bcecdff848f3c4e | [
"MIT"
] | 1 | 2017-05-30T20:45:15.000Z | 2017-05-30T20:45:15.000Z | examples/tutorials/pong/steps/step4/main.py | xinmingzhang/kivy | 86b6e19d8a02788fe8850b690bcecdff848f3c4e | [
"MIT"
] | 4 | 2016-09-10T15:27:54.000Z | 2020-03-27T22:05:31.000Z | from kivy.app import App
from kivy.uix.widget import Widget
from kivy.properties import NumericProperty, ReferenceListProperty,\
ObjectProperty
from kivy.vector import Vector
from kivy.clock import Clock
from random import randint
class PongBall(Widget):
    """The pong ball: a widget advanced by its own velocity each frame."""
    # Velocity components as Kivy properties so the .kv file can bind to them.
    velocity_x = NumericProperty(0)
    velocity_y = NumericProperty(0)
    # (velocity_x, velocity_y) pair; assignable/readable as a single value.
    velocity = ReferenceListProperty(velocity_x, velocity_y)
    def move(self):
        """Advance the ball one step along its current velocity."""
        self.pos = Vector(*self.velocity) + self.pos
class PongGame(Widget):
    """Root widget: owns the ball and runs the per-frame physics step."""

    # Bound to the ball instance from the .kv layout.
    ball = ObjectProperty(None)

    def serve_ball(self):
        """Place the ball at the centre and launch it in a random direction."""
        self.ball.center = self.center
        launch_angle = randint(0, 360)
        self.ball.velocity = Vector(4, 0).rotate(launch_angle)

    def update(self, dt):
        """Advance one frame: move the ball, then bounce it off the walls."""
        self.ball.move()
        ball = self.ball
        # Reflect vertically when touching the bottom or top edge.
        if ball.y < 0 or ball.top > self.height:
            ball.velocity_y *= -1
        # Reflect horizontally when touching the left or right edge.
        if ball.x < 0 or ball.right > self.width:
            ball.velocity_x *= -1
class PongApp(App):
    """Application entry point: builds the game and starts the 60 FPS clock."""
    def build(self):
        # Create the root widget, serve the first ball, and schedule the
        # physics update at 60 frames per second.
        game = PongGame()
        game.serve_ball()
        Clock.schedule_interval(game.update, 1.0 / 60.0)
        return game
if __name__ == '__main__':
PongApp().run()
| 25.208333 | 68 | 0.65124 | from kivy.app import App
from kivy.uix.widget import Widget
from kivy.properties import NumericProperty, ReferenceListProperty,\
ObjectProperty
from kivy.vector import Vector
from kivy.clock import Clock
from random import randint
class PongBall(Widget):
velocity_x = NumericProperty(0)
velocity_y = NumericProperty(0)
velocity = ReferenceListProperty(velocity_x, velocity_y)
def move(self):
self.pos = Vector(*self.velocity) + self.pos
class PongGame(Widget):
ball = ObjectProperty(None)
def serve_ball(self):
self.ball.center = self.center
self.ball.velocity = Vector(4, 0).rotate(randint(0, 360))
def update(self, dt):
self.ball.move()
if (self.ball.y < 0) or (self.ball.top > self.height):
self.ball.velocity_y *= -1
if (self.ball.x < 0) or (self.ball.right > self.width):
self.ball.velocity_x *= -1
class PongApp(App):
def build(self):
game = PongGame()
game.serve_ball()
Clock.schedule_interval(game.update, 1.0 / 60.0)
return game
if __name__ == '__main__':
PongApp().run()
| true | true |
1c45b33cf72198aec1df6487bbfca595bdaf3951 | 1,552 | py | Python | SimpleCV/examples/detection/MotionTracker.py | nikhilgk/SimpleCV | ee64451c16db1f40b4da221115273020a6a7b01a | [
"BSD-3-Clause"
] | 2 | 2016-04-30T12:23:05.000Z | 2022-03-02T00:01:10.000Z | SimpleCV/examples/detection/MotionTracker.py | nikhilgk/SimpleCV | ee64451c16db1f40b4da221115273020a6a7b01a | [
"BSD-3-Clause"
] | null | null | null | SimpleCV/examples/detection/MotionTracker.py | nikhilgk/SimpleCV | ee64451c16db1f40b4da221115273020a6a7b01a | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/python
# This SimpleCV example uses a technique called frame differencing to determine
# if motion has occured. You take an initial image, then another, subtract
# the difference, what is left over is what has changed between those two images
# this are typically blobs on the images, so we do a blob search to count
# the number of blobs and if they exist then motion has occured
# NOTE(review): Python 2 script (uses the `print` statement below); `sys` and
# `socket` are imported but never used.
import sys, time, socket
from SimpleCV import *
#setup the camera
cam = Camera()
#settings for the project
# NOTE(review): min_size and thresh are computed but never referenced again —
# presumably intended as findBlobs() parameters; confirm before removing.
min_size = 0.1*cam.getProperty("width")*cam.getProperty("height") #make the threshold adapatable for various camera sizes
thresh = 10 # frame diff threshold
show_message_for = 2 # the amount of seconds to show the motion detected message
motion_timestamp = int(time.time())
message_text = "Motion detected"
draw_message = False
# Grab the initial reference frame to diff against.
lastImg = cam.getImage()
lastImg.show()
while(True):
    newImg = cam.getImage()
    trackImg = newImg - lastImg # diff the images
    blobs = trackImg.findBlobs(-1, threshblocksize=99) #use adapative blob detection
    now = int(time.time())
    #If blobs are found then motion has occured
    if blobs:
        motion_timestamp = now
        draw_message = True
    #See if the time has exceeded to display the message
    if (now - motion_timestamp) > show_message_for:
        draw_message = False
    #Draw the message on the screen
    if(draw_message):
        newImg.drawText(message_text, 5,5)
        print message_text
    lastImg = newImg # update the image
    newImg.show()
    time.sleep(0.01)
| 31.673469 | 121 | 0.721005 |
import sys, time, socket
from SimpleCV import *
cam = Camera()
min_size = 0.1*cam.getProperty("width")*cam.getProperty("height")
thresh = 10
show_message_for = 2
motion_timestamp = int(time.time())
message_text = "Motion detected"
draw_message = False
lastImg = cam.getImage()
lastImg.show()
while(True):
newImg = cam.getImage()
trackImg = newImg - lastImg
blobs = trackImg.findBlobs(-1, threshblocksize=99)
now = int(time.time())
if blobs:
motion_timestamp = now
draw_message = True
if (now - motion_timestamp) > show_message_for:
draw_message = False
if(draw_message):
newImg.drawText(message_text, 5,5)
print message_text
lastImg = newImg
newImg.show()
time.sleep(0.01)
| false | true |
1c45b360ed6e478c667bfe1ca7f4f430632593d1 | 10,978 | py | Python | packages/python/plotly/plotly/validators/_candlestick.py | c-chaitanya/plotly.py | 7bda89c77559747e67fb1608bf9309e97505a4f2 | [
"MIT"
] | 7 | 2021-09-29T09:46:36.000Z | 2022-03-24T08:30:41.000Z | packages/python/plotly/plotly/validators/_candlestick.py | c-chaitanya/plotly.py | 7bda89c77559747e67fb1608bf9309e97505a4f2 | [
"MIT"
] | 1 | 2021-09-30T16:56:21.000Z | 2021-10-15T09:14:12.000Z | packages/python/plotly/plotly/validators/_candlestick.py | c-chaitanya/plotly.py | 7bda89c77559747e67fb1608bf9309e97505a4f2 | [
"MIT"
] | 1 | 2021-09-29T22:34:05.000Z | 2021-09-29T22:34:05.000Z | import _plotly_utils.basevalidators
class CandlestickValidator(_plotly_utils.basevalidators.CompoundValidator):
    """Compound validator for the top-level "candlestick" trace type.

    NOTE: auto-generated plotly code. The ``data_docs`` literal below is a
    runtime help string and is reproduced verbatim — do not edit by hand.
    """
    def __init__(self, plotly_name="candlestick", parent_name="", **kwargs):
        # Delegate to CompoundValidator; callers may override data_class_str
        # and data_docs through kwargs.
        super(CandlestickValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            data_class_str=kwargs.pop("data_class_str", "Candlestick"),
            data_docs=kwargs.pop(
                "data_docs",
                """
            close
                Sets the close values.
            closesrc
                Sets the source reference on Chart Studio Cloud
                for close .
            customdata
                Assigns extra data each datum. This may be
                useful when listening to hover, click and
                selection events. Note that, "scatter" traces
                also appends customdata items in the markers
                DOM elements
            customdatasrc
                Sets the source reference on Chart Studio Cloud
                for customdata .
            decreasing
                :class:`plotly.graph_objects.candlestick.Decrea
                sing` instance or dict with compatible
                properties
            high
                Sets the high values.
            highsrc
                Sets the source reference on Chart Studio Cloud
                for high .
            hoverinfo
                Determines which trace information appear on
                hover. If `none` or `skip` are set, no
                information is displayed upon hovering. But, if
                `none` is set, click and hover events are still
                fired.
            hoverinfosrc
                Sets the source reference on Chart Studio Cloud
                for hoverinfo .
            hoverlabel
                :class:`plotly.graph_objects.candlestick.Hoverl
                abel` instance or dict with compatible
                properties
            hovertext
                Same as `text`.
            hovertextsrc
                Sets the source reference on Chart Studio Cloud
                for hovertext .
            ids
                Assigns id labels to each datum. These ids for
                object constancy of data points during
                animation. Should be an array of strings, not
                numbers or any other type.
            idssrc
                Sets the source reference on Chart Studio Cloud
                for ids .
            increasing
                :class:`plotly.graph_objects.candlestick.Increa
                sing` instance or dict with compatible
                properties
            legendgroup
                Sets the legend group for this trace. Traces
                part of the same legend group hide/show at the
                same time when toggling legend items.
            legendrank
                Sets the legend rank for this trace. Items and
                groups with smaller ranks are presented on
                top/left side while with `*reversed*
                `legend.traceorder` they are on bottom/right
                side. The default legendrank is 1000, so that
                you can use ranks less than 1000 to place
                certain items before all unranked items, and
                ranks greater than 1000 to go after all
                unranked items.
            line
                :class:`plotly.graph_objects.candlestick.Line`
                instance or dict with compatible properties
            low
                Sets the low values.
            lowsrc
                Sets the source reference on Chart Studio Cloud
                for low .
            meta
                Assigns extra meta information associated with
                this trace that can be used in various text
                attributes. Attributes such as trace `name`,
                graph, axis and colorbar `title.text`,
                annotation `text` `rangeselector`,
                `updatemenues` and `sliders` `label` text all
                support `meta`. To access the trace `meta`
                values in an attribute in the same trace,
                simply use `%{meta[i]}` where `i` is the index
                or key of the `meta` item in question. To
                access trace `meta` in layout attributes, use
                `%{data[n[.meta[i]}` where `i` is the index or
                key of the `meta` and `n` is the trace index.
            metasrc
                Sets the source reference on Chart Studio Cloud
                for meta .
            name
                Sets the trace name. The trace name appear as
                the legend item and on hover.
            opacity
                Sets the opacity of the trace.
            open
                Sets the open values.
            opensrc
                Sets the source reference on Chart Studio Cloud
                for open .
            selectedpoints
                Array containing integer indices of selected
                points. Has an effect only for traces that
                support selections. Note that an empty array
                means an empty selection where the `unselected`
                are turned on for all points, whereas, any
                other non-array values means no selection all
                where the `selected` and `unselected` styles
                have no effect.
            showlegend
                Determines whether or not an item corresponding
                to this trace is shown in the legend.
            stream
                :class:`plotly.graph_objects.candlestick.Stream
                ` instance or dict with compatible properties
            text
                Sets hover text elements associated with each
                sample point. If a single string, the same
                string appears over all the data points. If an
                array of string, the items are mapped in order
                to this trace's sample points.
            textsrc
                Sets the source reference on Chart Studio Cloud
                for text .
            uid
                Assign an id to this trace, Use this to provide
                object constancy between traces during
                animations and transitions.
            uirevision
                Controls persistence of some user-driven
                changes to the trace: `constraintrange` in
                `parcoords` traces, as well as some `editable:
                true` modifications such as `name` and
                `colorbar.title`. Defaults to
                `layout.uirevision`. Note that other user-
                driven trace attribute changes are controlled
                by `layout` attributes: `trace.visible` is
                controlled by `layout.legend.uirevision`,
                `selectedpoints` is controlled by
                `layout.selectionrevision`, and
                `colorbar.(x|y)` (accessible with `config:
                {editable: true}`) is controlled by
                `layout.editrevision`. Trace changes are
                tracked by `uid`, which only falls back on
                trace index if no `uid` is provided. So if your
                app can add/remove traces before the end of the
                `data` array, such that the same trace has a
                different index, you can still preserve user-
                driven changes if you give each trace a `uid`
                that stays with it as it moves.
            visible
                Determines whether or not this trace is
                visible. If "legendonly", the trace is not
                drawn, but can appear as a legend item
                (provided that the legend itself is visible).
            whiskerwidth
                Sets the width of the whiskers relative to the
                box' width. For example, with 1, the whiskers
                are as wide as the box(es).
            x
                Sets the x coordinates. If absent, linear
                coordinate will be generated.
            xaxis
                Sets a reference between this trace's x
                coordinates and a 2D cartesian x axis. If "x"
                (the default value), the x coordinates refer to
                `layout.xaxis`. If "x2", the x coordinates
                refer to `layout.xaxis2`, and so on.
            xcalendar
                Sets the calendar system to use with `x` date
                data.
            xhoverformat
                Sets the hover text formatting rule for `x`
                using d3 formatting mini-languages which are
                very similar to those in Python. See:
                https://github.com/d3/d3-3.x-api-
                reference/blob/master/Formatting.md#d3_format
                And for dates see:
                https://github.com/d3/d3-time-
                format#locale_format By default the values are
                formatted using `xaxis.hoverformat`.
            xperiod
                Only relevant when the axis `type` is "date".
                Sets the period positioning in milliseconds or
                "M<n>" on the x axis. Special values in the
                form of "M<n>" could be used to declare the
                number of months. In this case `n` must be a
                positive integer.
            xperiod0
                Only relevant when the axis `type` is "date".
                Sets the base for period positioning in
                milliseconds or date string on the x0 axis.
                When `x0period` is round number of weeks, the
                `x0period0` by default would be on a Sunday
                i.e. 2000-01-02, otherwise it would be at
                2000-01-01.
            xperiodalignment
                Only relevant when the axis `type` is "date".
                Sets the alignment of data points on the x
                axis.
            xsrc
                Sets the source reference on Chart Studio Cloud
                for x .
            yaxis
                Sets a reference between this trace's y
                coordinates and a 2D cartesian y axis. If "y"
                (the default value), the y coordinates refer to
                `layout.yaxis`. If "y2", the y coordinates
                refer to `layout.yaxis2`, and so on.
            yhoverformat
                Sets the hover text formatting rule for `y`
                using d3 formatting mini-languages which are
                very similar to those in Python. See:
                https://github.com/d3/d3-3.x-api-
                reference/blob/master/Formatting.md#d3_format
                And for dates see:
                https://github.com/d3/d3-time-
                format#locale_format By default the values are
                formatted using `yaxis.hoverformat`.
""",
            ),
            **kwargs
        )
| 46.12605 | 76 | 0.538167 | import _plotly_utils.basevalidators
class CandlestickValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(self, plotly_name="candlestick", parent_name="", **kwargs):
super(CandlestickValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Candlestick"),
data_docs=kwargs.pop(
"data_docs",
"""
close
Sets the close values.
closesrc
Sets the source reference on Chart Studio Cloud
for close .
customdata
Assigns extra data each datum. This may be
useful when listening to hover, click and
selection events. Note that, "scatter" traces
also appends customdata items in the markers
DOM elements
customdatasrc
Sets the source reference on Chart Studio Cloud
for customdata .
decreasing
:class:`plotly.graph_objects.candlestick.Decrea
sing` instance or dict with compatible
properties
high
Sets the high values.
highsrc
Sets the source reference on Chart Studio Cloud
for high .
hoverinfo
Determines which trace information appear on
hover. If `none` or `skip` are set, no
information is displayed upon hovering. But, if
`none` is set, click and hover events are still
fired.
hoverinfosrc
Sets the source reference on Chart Studio Cloud
for hoverinfo .
hoverlabel
:class:`plotly.graph_objects.candlestick.Hoverl
abel` instance or dict with compatible
properties
hovertext
Same as `text`.
hovertextsrc
Sets the source reference on Chart Studio Cloud
for hovertext .
ids
Assigns id labels to each datum. These ids for
object constancy of data points during
animation. Should be an array of strings, not
numbers or any other type.
idssrc
Sets the source reference on Chart Studio Cloud
for ids .
increasing
:class:`plotly.graph_objects.candlestick.Increa
sing` instance or dict with compatible
properties
legendgroup
Sets the legend group for this trace. Traces
part of the same legend group hide/show at the
same time when toggling legend items.
legendrank
Sets the legend rank for this trace. Items and
groups with smaller ranks are presented on
top/left side while with `*reversed*
`legend.traceorder` they are on bottom/right
side. The default legendrank is 1000, so that
you can use ranks less than 1000 to place
certain items before all unranked items, and
ranks greater than 1000 to go after all
unranked items.
line
:class:`plotly.graph_objects.candlestick.Line`
instance or dict with compatible properties
low
Sets the low values.
lowsrc
Sets the source reference on Chart Studio Cloud
for low .
meta
Assigns extra meta information associated with
this trace that can be used in various text
attributes. Attributes such as trace `name`,
graph, axis and colorbar `title.text`,
annotation `text` `rangeselector`,
`updatemenues` and `sliders` `label` text all
support `meta`. To access the trace `meta`
values in an attribute in the same trace,
simply use `%{meta[i]}` where `i` is the index
or key of the `meta` item in question. To
access trace `meta` in layout attributes, use
`%{data[n[.meta[i]}` where `i` is the index or
key of the `meta` and `n` is the trace index.
metasrc
Sets the source reference on Chart Studio Cloud
for meta .
name
Sets the trace name. The trace name appear as
the legend item and on hover.
opacity
Sets the opacity of the trace.
open
Sets the open values.
opensrc
Sets the source reference on Chart Studio Cloud
for open .
selectedpoints
Array containing integer indices of selected
points. Has an effect only for traces that
support selections. Note that an empty array
means an empty selection where the `unselected`
are turned on for all points, whereas, any
other non-array values means no selection all
where the `selected` and `unselected` styles
have no effect.
showlegend
Determines whether or not an item corresponding
to this trace is shown in the legend.
stream
:class:`plotly.graph_objects.candlestick.Stream
` instance or dict with compatible properties
text
Sets hover text elements associated with each
sample point. If a single string, the same
string appears over all the data points. If an
array of string, the items are mapped in order
to this trace's sample points.
textsrc
Sets the source reference on Chart Studio Cloud
for text .
uid
Assign an id to this trace, Use this to provide
object constancy between traces during
animations and transitions.
uirevision
Controls persistence of some user-driven
changes to the trace: `constraintrange` in
`parcoords` traces, as well as some `editable:
true` modifications such as `name` and
`colorbar.title`. Defaults to
`layout.uirevision`. Note that other user-
driven trace attribute changes are controlled
by `layout` attributes: `trace.visible` is
controlled by `layout.legend.uirevision`,
`selectedpoints` is controlled by
`layout.selectionrevision`, and
`colorbar.(x|y)` (accessible with `config:
{editable: true}`) is controlled by
`layout.editrevision`. Trace changes are
tracked by `uid`, which only falls back on
trace index if no `uid` is provided. So if your
app can add/remove traces before the end of the
`data` array, such that the same trace has a
different index, you can still preserve user-
driven changes if you give each trace a `uid`
that stays with it as it moves.
visible
Determines whether or not this trace is
visible. If "legendonly", the trace is not
drawn, but can appear as a legend item
(provided that the legend itself is visible).
whiskerwidth
Sets the width of the whiskers relative to the
box' width. For example, with 1, the whiskers
are as wide as the box(es).
x
Sets the x coordinates. If absent, linear
coordinate will be generated.
xaxis
Sets a reference between this trace's x
coordinates and a 2D cartesian x axis. If "x"
(the default value), the x coordinates refer to
`layout.xaxis`. If "x2", the x coordinates
refer to `layout.xaxis2`, and so on.
xcalendar
Sets the calendar system to use with `x` date
data.
xhoverformat
Sets the hover text formatting rule for `x`
using d3 formatting mini-languages which are
very similar to those in Python. See:
https://github.com/d3/d3-3.x-api-
reference/blob/master/Formatting.md#d3_format
And for dates see:
https://github.com/d3/d3-time-
format#locale_format By default the values are
formatted using `xaxis.hoverformat`.
xperiod
Only relevant when the axis `type` is "date".
Sets the period positioning in milliseconds or
"M<n>" on the x axis. Special values in the
form of "M<n>" could be used to declare the
number of months. In this case `n` must be a
positive integer.
xperiod0
Only relevant when the axis `type` is "date".
Sets the base for period positioning in
milliseconds or date string on the x0 axis.
When `x0period` is round number of weeks, the
`x0period0` by default would be on a Sunday
i.e. 2000-01-02, otherwise it would be at
2000-01-01.
xperiodalignment
Only relevant when the axis `type` is "date".
Sets the alignment of data points on the x
axis.
xsrc
Sets the source reference on Chart Studio Cloud
for x .
yaxis
Sets a reference between this trace's y
coordinates and a 2D cartesian y axis. If "y"
(the default value), the y coordinates refer to
`layout.yaxis`. If "y2", the y coordinates
refer to `layout.yaxis2`, and so on.
yhoverformat
Sets the hover text formatting rule for `y`
using d3 formatting mini-languages which are
very similar to those in Python. See:
https://github.com/d3/d3-3.x-api-
reference/blob/master/Formatting.md#d3_format
And for dates see:
https://github.com/d3/d3-time-
format#locale_format By default the values are
formatted using `yaxis.hoverformat`.
""",
),
**kwargs
)
| true | true |
1c45b39ba990a7c522df62adb4f9bedffe167392 | 60,426 | py | Python | pandas/core/internals/managers.py | joybhallaa/pandas | 1779155552631a30d4bb176dec70b8cc477defd7 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 2 | 2022-02-02T02:05:28.000Z | 2022-02-02T02:09:37.000Z | pandas/core/internals/managers.py | north-star-saj/pandas | fc9fdba6592bdb5d0d1147ce4d65639acd897565 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | pandas/core/internals/managers.py | north-star-saj/pandas | fc9fdba6592bdb5d0d1147ce4d65639acd897565 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1 | 2020-10-28T03:32:40.000Z | 2020-10-28T03:32:40.000Z | from __future__ import annotations
from collections import defaultdict
import itertools
from typing import (
Any,
Callable,
DefaultDict,
Dict,
Hashable,
List,
Optional,
Sequence,
Tuple,
TypeVar,
Union,
)
import warnings
import numpy as np
from pandas._libs import internals as libinternals, lib
from pandas._typing import ArrayLike, Dtype, DtypeObj, Shape
from pandas.errors import PerformanceWarning
from pandas.util._validators import validate_bool_kwarg
from pandas.core.dtypes.cast import find_common_type, infer_dtype_from_scalar
from pandas.core.dtypes.common import (
DT64NS_DTYPE,
is_dtype_equal,
is_extension_array_dtype,
is_list_like,
)
from pandas.core.dtypes.dtypes import ExtensionDtype
from pandas.core.dtypes.generic import ABCDataFrame, ABCPandasArray, ABCSeries
from pandas.core.dtypes.missing import array_equals, isna
import pandas.core.algorithms as algos
from pandas.core.arrays.sparse import SparseDtype
from pandas.core.construction import extract_array
from pandas.core.indexers import maybe_convert_indices
from pandas.core.indexes.api import Float64Index, Index, ensure_index
from pandas.core.internals.base import DataManager
from pandas.core.internals.blocks import (
Block,
CategoricalBlock,
DatetimeTZBlock,
ExtensionBlock,
ObjectValuesExtensionBlock,
extend_blocks,
get_block_type,
make_block,
safe_reshape,
)
from pandas.core.internals.ops import blockwise_all, operate_blockwise
# TODO: flexible with index=None and/or items=None
T = TypeVar("T", bound="BlockManager")
class BlockManager(DataManager):
"""
Core internal data structure to implement DataFrame, Series, etc.
Manage a bunch of labeled 2D mixed-type ndarrays. Essentially it's a
lightweight blocked set of labeled data to be manipulated by the DataFrame
public API class
Attributes
----------
shape
ndim
axes
values
items
Methods
-------
set_axis(axis, new_labels)
copy(deep=True)
get_dtypes
apply(func, axes, block_filter_fn)
get_bool_data
get_numeric_data
get_slice(slice_like, axis)
get(label)
iget(loc)
take(indexer, axis)
reindex_axis(new_labels, axis)
reindex_indexer(new_labels, indexer, axis)
delete(label)
insert(loc, label, value)
set(label, value)
Parameters
----------
blocks: Sequence of Block
axes: Sequence of Index
do_integrity_check: bool, default True
Notes
-----
This is *not* a public API class
"""
__slots__ = [
"axes",
"blocks",
"_known_consolidated",
"_is_consolidated",
"_blknos",
"_blklocs",
]
_blknos: np.ndarray
_blklocs: np.ndarray
    def __init__(
        self,
        blocks: Sequence[Block],
        axes: Sequence[Index],
        do_integrity_check: bool = True,
    ):
        # Normalize all axis labels to Index objects up front.
        self.axes = [ensure_index(ax) for ax in axes]
        self.blocks: Tuple[Block, ...] = tuple(blocks)
        # Every block must have the same dimensionality as the manager.
        for block in blocks:
            if self.ndim != block.ndim:
                raise AssertionError(
                    f"Number of Block dimensions ({block.ndim}) must equal "
                    f"number of axes ({self.ndim})"
                )
        if do_integrity_check:
            self._verify_integrity()
        # Populate known_consolidate, blknos, and blklocs lazily
        self._known_consolidated = False
        self._blknos = None
        self._blklocs = None
@classmethod
def from_blocks(cls, blocks: List[Block], axes: List[Index]):
"""
Constructor for BlockManager and SingleBlockManager with same signature.
"""
return cls(blocks, axes, do_integrity_check=False)
    @property
    def blknos(self):
        """
        Suppose we want to find the array corresponding to our i'th column.
        blknos[i] identifies the block from self.blocks that contains this column.
        blklocs[i] identifies the column of interest within
        self.blocks[self.blknos[i]]
        """
        if self._blknos is None:
            # Note: these can be altered by other BlockManager methods.
            self._rebuild_blknos_and_blklocs()
        return self._blknos
    @property
    def blklocs(self):
        """
        Column -> intra-block location map, built lazily.

        See blknos.__doc__
        """
        if self._blklocs is None:
            # Note: these can be altered by other BlockManager methods.
            self._rebuild_blknos_and_blklocs()
        return self._blklocs
    def make_empty(self: T, axes=None) -> T:
        """ return an empty BlockManager with the items axis of len 0 """
        if axes is None:
            # Keep the non-items axes; only the items axis becomes empty.
            axes = [Index([])] + self.axes[1:]
        # preserve dtype if possible
        if self.ndim == 1:
            assert isinstance(self, SingleBlockManager)  # for mypy
            blk = self.blocks[0]
            arr = blk.values[:0]
            nb = blk.make_block_same_class(arr, placement=slice(0, 0), ndim=1)
            blocks = [nb]
        else:
            blocks = []
        return type(self).from_blocks(blocks, axes)
    def __nonzero__(self) -> bool:
        # A BlockManager is always truthy, even with zero rows/columns.
        return True
    # Python3 compat
    __bool__ = __nonzero__
@property
def shape(self) -> Shape:
return tuple(len(ax) for ax in self.axes)
    @property
    def ndim(self) -> int:
        # Dimensionality equals the number of axes (1 for Series, 2 for DataFrame).
        return len(self.axes)
def set_axis(self, axis: int, new_labels: Index) -> None:
# Caller is responsible for ensuring we have an Index object.
old_len = len(self.axes[axis])
new_len = len(new_labels)
if new_len != old_len:
raise ValueError(
f"Length mismatch: Expected axis has {old_len} elements, new "
f"values have {new_len} elements"
)
self.axes[axis] = new_labels
    @property
    def is_single_block(self) -> bool:
        """True when all data is held in exactly one Block."""
        # Assumes we are 2D; overridden by SingleBlockManager
        return len(self.blocks) == 1
    def _rebuild_blknos_and_blklocs(self) -> None:
        """
        Update mgr._blknos / mgr._blklocs.

        Walks every block's mgr_locs and inverts the mapping so each column
        knows which block holds it and where within that block it lives.
        """
        new_blknos = np.empty(self.shape[0], dtype=np.intp)
        new_blklocs = np.empty(self.shape[0], dtype=np.intp)
        # -1 marks columns not claimed by any block (an invariant violation).
        new_blknos.fill(-1)
        new_blklocs.fill(-1)
        for blkno, blk in enumerate(self.blocks):
            rl = blk.mgr_locs
            new_blknos[rl.indexer] = blkno
            new_blklocs[rl.indexer] = np.arange(len(rl))
        if (new_blknos == -1).any():
            # TODO: can we avoid this? it isn't cheap
            raise AssertionError("Gaps in blk ref_locs")
        self._blknos = new_blknos
        self._blklocs = new_blklocs
    @property
    def items(self) -> Index:
        """The column labels (axis 0 of the manager)."""
        return self.axes[0]
    def get_dtypes(self):
        """Return an array of per-column dtypes, in items order."""
        dtypes = np.array([blk.dtype for blk in self.blocks])
        # Expand per-block dtypes to per-column via the blknos map.
        return algos.take_nd(dtypes, self.blknos, allow_fill=False)
    def __getstate__(self):
        """Pickle support; the real payload lives under the "0.14.1" key."""
        block_values = [b.values for b in self.blocks]
        block_items = [self.items[b.mgr_locs.indexer] for b in self.blocks]
        axes_array = list(self.axes)
        extra_state = {
            "0.14.1": {
                "axes": axes_array,
                "blocks": [
                    {"values": b.values, "mgr_locs": b.mgr_locs.indexer}
                    for b in self.blocks
                ],
            }
        }
        # First three elements of the state are to maintain forward
        # compatibility with 0.13.1.
        return axes_array, block_values, block_items, extra_state
    def __setstate__(self, state):
        """Unpickle support; only the 0.14.1+ state layout is accepted."""
        def unpickle_block(values, mgr_locs, ndim: int):
            # TODO(EA2D): ndim would be unnecessary with 2D EAs
            return make_block(values, placement=mgr_locs, ndim=ndim)
        if isinstance(state, tuple) and len(state) >= 4 and "0.14.1" in state[3]:
            state = state[3]["0.14.1"]
            self.axes = [ensure_index(ax) for ax in state["axes"]]
            ndim = len(self.axes)
            self.blocks = tuple(
                unpickle_block(b["values"], b["mgr_locs"], ndim=ndim)
                for b in state["blocks"]
            )
        else:
            raise NotImplementedError("pre-0.14.1 pickles are no longer supported")
        self._post_setstate()
    def _post_setstate(self) -> None:
        # Reset consolidation flags and rebuild the column maps after unpickling.
        self._is_consolidated = False
        self._known_consolidated = False
        self._rebuild_blknos_and_blklocs()
    def __len__(self) -> int:
        """Number of items (columns for a DataFrame-backed manager)."""
        return len(self.items)
def __repr__(self) -> str:
output = type(self).__name__
for i, ax in enumerate(self.axes):
if i == 0:
output += f"\nItems: {ax}"
else:
output += f"\nAxis {i}: {ax}"
for block in self.blocks:
output += f"\n{block}"
return output
    def _verify_integrity(self) -> None:
        """Check that block shapes agree with the axes and cover all items."""
        mgr_shape = self.shape
        tot_items = sum(len(x.mgr_locs) for x in self.blocks)
        for block in self.blocks:
            # All non-items dimensions must match the manager's shape.
            if block.shape[1:] != mgr_shape[1:]:
                raise construction_error(tot_items, block.shape[1:], self.axes)
        if len(self.items) != tot_items:
            raise AssertionError(
                "Number of manager items must equal union of "
                f"block items\n# manager items: {len(self.items)}, # "
                f"tot_items: {tot_items}"
            )
    def reduce(
        self: T, func: Callable, ignore_failures: bool = False
    ) -> Tuple[T, np.ndarray]:
        """
        Apply reduction function blockwise, returning a single-row BlockManager.

        Parameters
        ----------
        func : reduction function
        ignore_failures : bool, default False
            Whether to drop blocks where func raises TypeError.

        Returns
        -------
        BlockManager
        np.ndarray
            Indexer of mgr_locs that are retained.
        """
        # If 2D, we assume that we're operating column-wise
        assert self.ndim == 2
        res_blocks: List[Block] = []
        for blk in self.blocks:
            # Block.reduce handles ignore_failures; may yield zero blocks.
            nbs = blk.reduce(func, ignore_failures)
            res_blocks.extend(nbs)
        index = Index([None])  # placeholder
        if ignore_failures:
            if res_blocks:
                indexer = np.concatenate([blk.mgr_locs.as_array for blk in res_blocks])
                new_mgr = self._combine(res_blocks, copy=False, index=index)
            else:
                # every block failed -> empty result manager
                indexer = []
                new_mgr = type(self).from_blocks([], [Index([]), index])
        else:
            # nothing was dropped, so all original columns survive
            indexer = np.arange(self.shape[0])
            new_mgr = type(self).from_blocks(res_blocks, [self.items, index])
        return new_mgr, indexer
    def operate_blockwise(self, other: BlockManager, array_op) -> BlockManager:
        """
        Apply array_op blockwise with another (aligned) BlockManager.
        """
        # Delegates to the module-level helper of the same name.
        return operate_blockwise(self, other, array_op)
    def apply(
        self: T,
        f,
        align_keys: Optional[List[str]] = None,
        ignore_failures: bool = False,
        **kwargs,
    ) -> T:
        """
        Iterate over the blocks, collect and create a new BlockManager.

        Parameters
        ----------
        f : str or callable
            Name of the Block method to apply.
        align_keys: List[str] or None, default None
            Names of kwargs that must be re-aligned to each block's columns.
        ignore_failures: bool, default False
            If True, blocks raising TypeError/NotImplementedError are dropped.
        **kwargs
            Keywords to pass to `f`

        Returns
        -------
        BlockManager
        """
        assert "filter" not in kwargs
        align_keys = align_keys or []
        result_blocks: List[Block] = []
        # fillna: Series/DataFrame is responsible for making sure value is aligned
        aligned_args = {k: kwargs[k] for k in align_keys}
        for b in self.blocks:
            if aligned_args:
                # Slice each aligned argument down to this block's columns.
                for k, obj in aligned_args.items():
                    if isinstance(obj, (ABCSeries, ABCDataFrame)):
                        # The caller is responsible for ensuring that
                        # obj.axes[-1].equals(self.items)
                        if obj.ndim == 1:
                            kwargs[k] = obj.iloc[b.mgr_locs.indexer]._values
                        else:
                            kwargs[k] = obj.iloc[:, b.mgr_locs.indexer]._values
                    else:
                        # otherwise we have an ndarray
                        kwargs[k] = obj[b.mgr_locs.indexer]
            try:
                if callable(f):
                    applied = b.apply(f, **kwargs)
                else:
                    applied = getattr(b, f)(**kwargs)
            except (TypeError, NotImplementedError):
                if not ignore_failures:
                    raise
                continue
            result_blocks = extend_blocks(applied, result_blocks)
        if ignore_failures:
            # Surviving blocks may cover only a subset of columns.
            return self._combine(result_blocks)
        if len(result_blocks) == 0:
            return self.make_empty(self.axes)
        return type(self).from_blocks(result_blocks, self.axes)
    def quantile(
        self,
        *,
        qs: Float64Index,
        axis: int = 0,
        transposed: bool = False,
        interpolation="linear",
    ) -> BlockManager:
        """
        Iterate over blocks applying quantile reduction.

        This routine is intended for reduction type operations and
        will do inference on the generated blocks.

        Parameters
        ----------
        qs : Float64Index
            List of the quantiles to be computed.
        axis : int, default 0
            Reduction axis; in practice only ``axis=1`` is ever passed here.
        transposed : bool, default False
            We are holding transposed data.
        interpolation : str, default 'linear'
            Type of interpolation.

        Returns
        -------
        BlockManager
        """
        # Series dispatches to DataFrame for quantile, which allows us to
        # simplify some of the code here and in the blocks
        assert self.ndim >= 2
        assert is_list_like(qs)  # caller is responsible for this
        assert axis == 1  # only ever called this way
        new_axes = list(self.axes)
        new_axes[1] = Float64Index(qs)
        blocks = [
            blk.quantile(axis=axis, qs=qs, interpolation=interpolation)
            for blk in self.blocks
        ]
        if transposed:
            # Un-transpose both the axes and the block values.
            new_axes = new_axes[::-1]
            blocks = [
                b.make_block(b.values.T, placement=np.arange(b.shape[1]))
                for b in blocks
            ]
        return type(self)(blocks, new_axes)
    def isna(self, func) -> BlockManager:
        """Apply an isna-like array function to every block via Block.apply."""
        return self.apply("apply", func=func)
def where(self, other, cond, align: bool, errors: str, axis: int) -> BlockManager:
if align:
align_keys = ["other", "cond"]
else:
align_keys = ["cond"]
other = extract_array(other, extract_numpy=True)
return self.apply(
"where",
align_keys=align_keys,
other=other,
cond=cond,
errors=errors,
axis=axis,
)
    def setitem(self, indexer, value) -> BlockManager:
        """Blockwise Block.setitem; returns a new manager, does not mutate self."""
        return self.apply("setitem", indexer=indexer, value=value)
def putmask(self, mask, new, align: bool = True):
if align:
align_keys = ["new", "mask"]
else:
align_keys = ["mask"]
new = extract_array(new, extract_numpy=True)
return self.apply(
"putmask",
align_keys=align_keys,
mask=mask,
new=new,
)
    def diff(self, n: int, axis: int) -> BlockManager:
        """Blockwise first-discrete-difference with period ``n`` along ``axis``."""
        return self.apply("diff", n=n, axis=axis)
    def interpolate(self, **kwargs) -> BlockManager:
        """Blockwise interpolation; kwargs forwarded to Block.interpolate."""
        return self.apply("interpolate", **kwargs)
    def shift(self, periods: int, axis: int, fill_value) -> BlockManager:
        """Shift values by ``periods`` along ``axis``, filling with ``fill_value``."""
        if fill_value is lib.no_default:
            fill_value = None
        if axis == 0 and self.ndim == 2 and self.nblocks > 1:
            # GH#35488 we need to watch out for multi-block cases
            # NOTE: fill_value has already been normalized above
            # (lib.no_default -> None), so it may be None here.
            ncols = self.shape[0]
            if periods > 0:
                # shift right: leading positions become fill (-1 indexer)
                indexer = [-1] * periods + list(range(ncols - periods))
            else:
                # shift left: trailing positions become fill
                nper = abs(periods)
                indexer = list(range(nper, ncols)) + [-1] * nper
            result = self.reindex_indexer(
                self.items,
                indexer,
                axis=0,
                fill_value=fill_value,
                allow_dups=True,
                consolidate=False,
            )
            return result
        return self.apply("shift", periods=periods, axis=axis, fill_value=fill_value)
    def fillna(self, value, limit, inplace: bool, downcast) -> BlockManager:
        """Blockwise fillna; kwargs forwarded to Block.fillna."""
        return self.apply(
            "fillna", value=value, limit=limit, inplace=inplace, downcast=downcast
        )
    def downcast(self) -> BlockManager:
        """Blockwise dtype downcasting (e.g. float64 -> int64 where lossless)."""
        return self.apply("downcast")
    def astype(self, dtype, copy: bool = False, errors: str = "raise") -> BlockManager:
        """Blockwise cast to ``dtype``; ``errors`` in {"raise", "ignore"}."""
        return self.apply("astype", dtype=dtype, copy=copy, errors=errors)
    def convert(
        self,
        copy: bool = True,
        datetime: bool = True,
        numeric: bool = True,
        timedelta: bool = True,
    ) -> BlockManager:
        """Blockwise soft-conversion of object dtypes to better dtypes."""
        return self.apply(
            "convert",
            copy=copy,
            datetime=datetime,
            numeric=numeric,
            timedelta=timedelta,
        )
    def replace(self, to_replace, value, inplace: bool, regex: bool) -> BlockManager:
        # scalar replacement only; list-likes go through replace_list
        assert np.ndim(value) == 0, value
        return self.apply(
            "replace", to_replace=to_replace, value=value, inplace=inplace, regex=regex
        )
    def replace_list(
        self: T,
        src_list: List[Any],
        dest_list: List[Any],
        inplace: bool = False,
        regex: bool = False,
    ) -> T:
        """ do a list replace """
        inplace = validate_bool_kwarg(inplace, "inplace")
        bm = self.apply(
            "_replace_list",
            src_list=src_list,
            dest_list=dest_list,
            inplace=inplace,
            regex=regex,
        )
        # replacement can fragment blocks; merge same-dtype blocks back together
        bm._consolidate_inplace()
        return bm
    def to_native_types(self, **kwargs) -> BlockManager:
        """
        Convert values to native types (strings / python objects) that are used
        in formatting (repr / csv).
        """
        return self.apply("to_native_types", **kwargs)
    def is_consolidated(self) -> bool:
        """
        Return True if the blocks are consolidated, i.e. no two
        consolidatable blocks share the same dtype.
        """
        if not self._known_consolidated:
            self._consolidate_check()
        return self._is_consolidated
def _consolidate_check(self) -> None:
dtypes = [blk.dtype for blk in self.blocks if blk._can_consolidate]
self._is_consolidated = len(dtypes) == len(set(dtypes))
self._known_consolidated = True
    @property
    def is_numeric_mixed_type(self) -> bool:
        """True when every block holds a numeric dtype."""
        return all(block.is_numeric for block in self.blocks)
    @property
    def any_extension_types(self) -> bool:
        """Whether any of the blocks in this manager are extension blocks"""
        return any(block.is_extension for block in self.blocks)
    @property
    def is_view(self) -> bool:
        """ return a boolean if we are a single block and are a view """
        if len(self.blocks) == 1:
            return self.blocks[0].is_view
        # It is technically possible to figure out which blocks are views
        # e.g. [ b.values.base is not None for b in self.blocks ]
        # but then we have the case of possibly some blocks being a view
        # and some blocks not. setting in theory is possible on the non-view
        # blocks w/o causing a SettingWithCopy raise/warn. But this is a bit
        # complicated
        return False
    def get_bool_data(self, copy: bool = False) -> BlockManager:
        """
        Select blocks that are bool-dtype and columns from object-dtype blocks
        that are all-bool.

        Parameters
        ----------
        copy : bool, default False
            Whether to copy the blocks
        """
        new_blocks = []
        for blk in self.blocks:
            if blk.dtype == bool:
                new_blocks.append(blk)
            elif blk.is_object:
                # split object block into single-column blocks and keep the
                # ones that turn out to be all-bool
                nbs = blk._split()
                for nb in nbs:
                    if nb.is_bool:
                        new_blocks.append(nb)
        return self._combine(new_blocks, copy)
    def get_numeric_data(self, copy: bool = False) -> BlockManager:
        """
        Select the blocks holding numeric dtypes.

        Parameters
        ----------
        copy : bool, default False
            Whether to copy the blocks
        """
        return self._combine([b for b in self.blocks if b.is_numeric], copy)
    def _combine(
        self: T, blocks: List[Block], copy: bool = True, index: Optional[Index] = None
    ) -> T:
        """ return a new manager with the blocks """
        if len(blocks) == 0:
            return self.make_empty()
        # FIXME: optimization potential
        # Renumber mgr_locs so they are dense in the new (smaller) items axis.
        indexer = np.sort(np.concatenate([b.mgr_locs.as_array for b in blocks]))
        inv_indexer = lib.get_reverse_indexer(indexer, self.shape[0])
        new_blocks: List[Block] = []
        for b in blocks:
            b = b.copy(deep=copy)
            b.mgr_locs = inv_indexer[b.mgr_locs.indexer]
            new_blocks.append(b)
        axes = list(self.axes)
        if index is not None:
            axes[-1] = index
        axes[0] = self.items.take(indexer)
        return type(self).from_blocks(new_blocks, axes)
    def get_slice(self, slobj: slice, axis: int = 0) -> BlockManager:
        """Return a new manager viewing slice ``slobj`` along ``axis``."""
        if axis == 0:
            new_blocks = self._slice_take_blocks_ax0(slobj)
        elif axis == 1:
            slicer = (slice(None), slobj)
            new_blocks = [blk.getitem_block(slicer) for blk in self.blocks]
        else:
            raise IndexError("Requested axis not found in manager")
        new_axes = list(self.axes)
        new_axes[axis] = new_axes[axis][slobj]
        # integrity is preserved by construction, so skip the check
        bm = type(self)(new_blocks, new_axes, do_integrity_check=False)
        return bm
    @property
    def nblocks(self) -> int:
        """Number of blocks currently held."""
        return len(self.blocks)
    def copy(self: T, deep=True) -> T:
        """
        Make deep or shallow copy of BlockManager

        Parameters
        ----------
        deep : bool or string, default True
            If False, return shallow copy (do not copy data)
            If 'all', copy data and a deep copy of the index

        Returns
        -------
        BlockManager
        """
        # this preserves the notion of view copying of axes
        if deep:
            # hit in e.g. tests.io.json.test_pandas
            def copy_func(ax):
                # axes are only deep-copied for deep='all'; otherwise viewed
                return ax.copy(deep=True) if deep == "all" else ax.view()
            new_axes = [copy_func(ax) for ax in self.axes]
        else:
            new_axes = list(self.axes)
        res = self.apply("copy", deep=deep)
        res.axes = new_axes
        return res
    def as_array(
        self,
        transpose: bool = False,
        dtype: Optional[Dtype] = None,
        copy: bool = False,
        na_value=lib.no_default,
    ) -> np.ndarray:
        """
        Convert the blockmanager data into an numpy array.

        Parameters
        ----------
        transpose : bool, default False
            If True, transpose the return array.
        dtype : object, default None
            Data type of the return array.
        copy : bool, default False
            If True then guarantee that a copy is returned. A value of
            False does not guarantee that the underlying data is not
            copied.
        na_value : object, default lib.no_default
            Value to be used as the missing value sentinel.

        Returns
        -------
        arr : ndarray
        """
        if len(self.blocks) == 0:
            arr = np.empty(self.shape, dtype=float)
            return arr.transpose() if transpose else arr
        # We want to copy when na_value is provided to avoid
        # mutating the original object
        copy = copy or na_value is not lib.no_default
        if self.is_single_block:
            blk = self.blocks[0]
            if blk.is_extension:
                # Avoid implicit conversion of extension blocks to object
                arr = blk.values.to_numpy(dtype=dtype, na_value=na_value).reshape(
                    blk.shape
                )
            else:
                arr = np.asarray(blk.get_values())
                if dtype:
                    arr = arr.astype(dtype, copy=False)
        else:
            # multi-block: interleave into one contiguous array
            arr = self._interleave(dtype=dtype, na_value=na_value)
            # The underlying data was copied within _interleave
            copy = False
        if copy:
            arr = arr.copy()
        if na_value is not lib.no_default:
            arr[isna(arr)] = na_value
        return arr.transpose() if transpose else arr
    def _interleave(
        self, dtype: Optional[Dtype] = None, na_value=lib.no_default
    ) -> np.ndarray:
        """
        Return ndarray from blocks with specified item order
        Items must be contained in the blocks
        """
        if not dtype:
            # infer a common dtype across all blocks
            dtype = _interleaved_dtype(self.blocks)
        # TODO: https://github.com/pandas-dev/pandas/issues/22791
        # Give EAs some input on what happens here. Sparse needs this.
        if isinstance(dtype, SparseDtype):
            dtype = dtype.subtype
        elif is_extension_array_dtype(dtype):
            dtype = "object"
        elif is_dtype_equal(dtype, str):
            dtype = "object"
        result = np.empty(self.shape, dtype=dtype)
        # tracks which item rows have been filled, to catch gaps below
        itemmask = np.zeros(self.shape[0])
        for blk in self.blocks:
            rl = blk.mgr_locs
            if blk.is_extension:
                # Avoid implicit conversion of extension blocks to object
                arr = blk.values.to_numpy(dtype=dtype, na_value=na_value)
            else:
                arr = blk.get_values(dtype)
            result[rl.indexer] = arr
            itemmask[rl.indexer] = 1
        if not itemmask.all():
            raise AssertionError("Some items were not contained in blocks")
        return result
    def to_dict(self, copy: bool = True):
        """
        Return a dict of str(dtype) -> BlockManager

        Parameters
        ----------
        copy : bool, default True

        Returns
        -------
        values : a dict of dtype -> BlockManager
        """
        # group blocks by dtype string, then build a manager per group
        bd: Dict[str, List[Block]] = {}
        for b in self.blocks:
            bd.setdefault(str(b.dtype), []).append(b)
        # TODO(EA2D): the combine will be unnecessary with 2D EAs
        return {dtype: self._combine(blocks, copy=copy) for dtype, blocks in bd.items()}
    def fast_xs(self, loc: int) -> ArrayLike:
        """
        Return the array corresponding to `frame.iloc[loc]`.

        Parameters
        ----------
        loc : int

        Returns
        -------
        np.ndarray or ExtensionArray
        """
        if len(self.blocks) == 1:
            return self.blocks[0].iget((slice(None), loc))
        dtype = _interleaved_dtype(self.blocks)
        n = len(self)
        if is_extension_array_dtype(dtype):
            # we'll eventually construct an ExtensionArray.
            result = np.empty(n, dtype=object)
        else:
            result = np.empty(n, dtype=dtype)
        for blk in self.blocks:
            # Such assignment may incorrectly coerce NaT to None
            # result[blk.mgr_locs] = blk._slice((slice(None), loc))
            for i, rl in enumerate(blk.mgr_locs):
                result[rl] = blk.iget((i, loc))
        if isinstance(dtype, ExtensionDtype):
            result = dtype.construct_array_type()._from_sequence(result, dtype=dtype)
        return result
    def consolidate(self) -> BlockManager:
        """
        Join together blocks having same dtype

        Returns
        -------
        y : BlockManager
        """
        if self.is_consolidated():
            return self
        # build a new manager and consolidate it, leaving self untouched
        bm = type(self)(self.blocks, self.axes)
        bm._is_consolidated = False
        bm._consolidate_inplace()
        return bm
    def _consolidate_inplace(self) -> None:
        """Merge same-dtype blocks in place and refresh the column maps."""
        if not self.is_consolidated():
            self.blocks = tuple(_consolidate(self.blocks))
            self._is_consolidated = True
            self._known_consolidated = True
            self._rebuild_blknos_and_blklocs()
    def iget(self, i: int) -> SingleBlockManager:
        """
        Return the data as a SingleBlockManager.
        """
        block = self.blocks[self.blknos[i]]
        values = block.iget(self.blklocs[i])
        # shortcut for select a single-dim from a 2-dim BM
        return SingleBlockManager(
            block.make_block_same_class(
                values, placement=slice(0, len(values)), ndim=1
            ),
            self.axes[1],
        )
    def iget_values(self, i: int) -> ArrayLike:
        """
        Return the data for column i as the values (ndarray or ExtensionArray).
        """
        block = self.blocks[self.blknos[i]]
        values = block.iget(self.blklocs[i])
        return values
    def idelete(self, indexer):
        """
        Delete selected locations in-place (new block and array, same BlockManager)
        """
        is_deleted = np.zeros(self.shape[0], dtype=np.bool_)
        is_deleted[indexer] = True
        # after deletion every surviving column shifts left by the number of
        # deleted columns before it
        ref_loc_offset = -is_deleted.cumsum()
        is_blk_deleted = [False] * len(self.blocks)
        if isinstance(indexer, int):
            affected_start = indexer
        else:
            affected_start = is_deleted.nonzero()[0][0]
        for blkno, _ in _fast_count_smallints(self.blknos[affected_start:]):
            blk = self.blocks[blkno]
            bml = blk.mgr_locs
            blk_del = is_deleted[bml.indexer].nonzero()[0]
            if len(blk_del) == len(bml):
                # every column of this block is deleted -> drop the block
                is_blk_deleted[blkno] = True
                continue
            elif len(blk_del) != 0:
                blk.delete(blk_del)
                bml = blk.mgr_locs
            blk.mgr_locs = bml.add(ref_loc_offset[bml.indexer])
        # FIXME: use Index.delete as soon as it uses fastpath=True
        self.axes[0] = self.items[~is_deleted]
        self.blocks = tuple(
            b for blkno, b in enumerate(self.blocks) if not is_blk_deleted[blkno]
        )
        self._rebuild_blknos_and_blklocs()
    def iset(self, loc: Union[int, slice, np.ndarray], value):
        """
        Set new item in-place. Does not consolidate. Adds new Block if not
        contained in the current set of items
        """
        value = extract_array(value, extract_numpy=True)
        # FIXME: refactor, clearly separate broadcasting & zip-like assignment
        #        can prob also fix the various if tests for sparse/categorical
        if self._blklocs is None and self.ndim > 1:
            self._rebuild_blknos_and_blklocs()
        value_is_extension_type = is_extension_array_dtype(value)
        # categorical/sparse/datetimetz
        if value_is_extension_type:
            # EA values are 1D and broadcast to each target column as-is
            def value_getitem(placement):
                return value
        else:
            if value.ndim == 2:
                value = value.T
            if value.ndim == self.ndim - 1:
                value = safe_reshape(value, (1,) + value.shape)
                def value_getitem(placement):
                    return value
            else:
                def value_getitem(placement):
                    return value[placement.indexer]
            if value.shape[1:] != self.shape[1:]:
                raise AssertionError(
                    "Shape of new values must be compatible with manager shape"
                )
        if lib.is_integer(loc):
            # We have 6 tests where loc is _not_ an int.
            # In this case, get_blkno_placements will yield only one tuple,
            # containing (self._blknos[loc], BlockPlacement(slice(0, 1, 1)))
            loc = [loc]
        # Accessing public blknos ensures the public versions are initialized
        blknos = self.blknos[loc]
        blklocs = self.blklocs[loc].copy()
        # "unfit" = target columns whose existing block cannot store the new
        # values in place; they are collected and re-blocked below.
        unfit_mgr_locs = []
        unfit_val_locs = []
        removed_blknos = []
        for blkno, val_locs in libinternals.get_blkno_placements(blknos, group=True):
            blk = self.blocks[blkno]
            blk_locs = blklocs[val_locs.indexer]
            if blk.should_store(value):
                blk.set_inplace(blk_locs, value_getitem(val_locs))
            else:
                unfit_mgr_locs.append(blk.mgr_locs.as_array[blk_locs])
                unfit_val_locs.append(val_locs)
                # If all block items are unfit, schedule the block for removal.
                if len(val_locs) == len(blk.mgr_locs):
                    removed_blknos.append(blkno)
                else:
                    blk.delete(blk_locs)
                    self._blklocs[blk.mgr_locs.indexer] = np.arange(len(blk))
        if len(removed_blknos):
            # Remove blocks & update blknos accordingly
            is_deleted = np.zeros(self.nblocks, dtype=np.bool_)
            is_deleted[removed_blknos] = True
            new_blknos = np.empty(self.nblocks, dtype=np.int64)
            new_blknos.fill(-1)
            new_blknos[~is_deleted] = np.arange(self.nblocks - len(removed_blknos))
            self._blknos = new_blknos[self._blknos]
            self.blocks = tuple(
                blk for i, blk in enumerate(self.blocks) if i not in set(removed_blknos)
            )
        if unfit_val_locs:
            unfit_mgr_locs = np.concatenate(unfit_mgr_locs)
            unfit_count = len(unfit_mgr_locs)
            new_blocks: List[Block] = []
            if value_is_extension_type:
                # This code (ab-)uses the fact that EA blocks contain only
                # one item.
                # TODO(EA2D): special casing unnecessary with 2D EAs
                new_blocks.extend(
                    make_block(
                        values=value,
                        ndim=self.ndim,
                        placement=slice(mgr_loc, mgr_loc + 1),
                    )
                    for mgr_loc in unfit_mgr_locs
                )
                self._blknos[unfit_mgr_locs] = np.arange(unfit_count) + len(self.blocks)
                self._blklocs[unfit_mgr_locs] = 0
            else:
                # unfit_val_locs contains BlockPlacement objects
                unfit_val_items = unfit_val_locs[0].append(unfit_val_locs[1:])
                new_blocks.append(
                    make_block(
                        values=value_getitem(unfit_val_items),
                        ndim=self.ndim,
                        placement=unfit_mgr_locs,
                    )
                )
                self._blknos[unfit_mgr_locs] = len(self.blocks)
                self._blklocs[unfit_mgr_locs] = np.arange(unfit_count)
            self.blocks += tuple(new_blocks)
            # Newly created block's dtype may already be present.
            self._known_consolidated = False
    def insert(self, loc: int, item: Hashable, value, allow_duplicates: bool = False):
        """
        Insert item at selected position.

        Parameters
        ----------
        loc : int
        item : hashable
        value : array_like
        allow_duplicates: bool
            If False, trying to insert non-unique item will raise
        """
        if not allow_duplicates and item in self.items:
            # Should this be a different kind of error??
            raise ValueError(f"cannot insert {item}, already exists")
        if not isinstance(loc, int):
            raise TypeError("loc must be int")
        # insert to the axis; this could possibly raise a TypeError
        new_axis = self.items.insert(loc, item)
        if value.ndim == 2:
            value = value.T
        if value.ndim == self.ndim - 1 and not is_extension_array_dtype(value.dtype):
            # TODO(EA2D): special case not needed with 2D EAs
            value = safe_reshape(value, (1,) + value.shape)
        block = make_block(values=value, ndim=self.ndim, placement=slice(loc, loc + 1))
        # bump mgr_locs of every column at-or-after the insertion point
        for blkno, count in _fast_count_smallints(self.blknos[loc:]):
            blk = self.blocks[blkno]
            if count == len(blk.mgr_locs):
                blk.mgr_locs = blk.mgr_locs.add(1)
            else:
                new_mgr_locs = blk.mgr_locs.as_array.copy()
                new_mgr_locs[new_mgr_locs >= loc] += 1
                blk.mgr_locs = new_mgr_locs
        # Accessing public blklocs ensures the public versions are initialized
        if loc == self.blklocs.shape[0]:
            # np.append is a lot faster, let's use it if we can.
            self._blklocs = np.append(self._blklocs, 0)
            self._blknos = np.append(self._blknos, len(self.blocks))
        else:
            self._blklocs = np.insert(self._blklocs, loc, 0)
            self._blknos = np.insert(self._blknos, loc, len(self.blocks))
        self.axes[0] = new_axis
        self.blocks += (block,)
        self._known_consolidated = False
        if len(self.blocks) > 100:
            warnings.warn(
                "DataFrame is highly fragmented. This is usually the result "
                "of calling `frame.insert` many times, which has poor performance. "
                "Consider using pd.concat instead. To get a de-fragmented frame, "
                "use `newframe = frame.copy()`",
                PerformanceWarning,
                stacklevel=5,
            )
    def reindex_indexer(
        self: T,
        new_axis,
        indexer,
        axis: int,
        fill_value=None,
        allow_dups: bool = False,
        copy: bool = True,
        consolidate: bool = True,
        only_slice: bool = False,
    ) -> T:
        """
        Reindex one axis using a precomputed indexer.

        Parameters
        ----------
        new_axis : Index
        indexer : ndarray of int64 or None
        axis : int
        fill_value : object, default None
        allow_dups : bool, default False
        copy : bool, default True
        consolidate: bool, default True
            Whether to consolidate inplace before reindexing.
        only_slice : bool, default False
            Whether to take views, not copies, along columns.

        pandas-indexer with -1's only.
        """
        if indexer is None:
            # pure relabel: same data, new axis labels
            if new_axis is self.axes[axis] and not copy:
                return self
            result = self.copy(deep=copy)
            result.axes = list(self.axes)
            result.axes[axis] = new_axis
            return result
        if consolidate:
            self._consolidate_inplace()
        # some axes don't allow reindexing with dups
        if not allow_dups:
            self.axes[axis]._can_reindex(indexer)
        if axis >= self.ndim:
            raise IndexError("Requested axis not found in manager")
        if axis == 0:
            new_blocks = self._slice_take_blocks_ax0(
                indexer, fill_value=fill_value, only_slice=only_slice
            )
        else:
            new_blocks = [
                blk.take_nd(
                    indexer,
                    axis=axis,
                    fill_value=(
                        fill_value if fill_value is not None else blk.fill_value
                    ),
                )
                for blk in self.blocks
            ]
        new_axes = list(self.axes)
        new_axes[axis] = new_axis
        return type(self).from_blocks(new_blocks, new_axes)
    def _slice_take_blocks_ax0(
        self, slice_or_indexer, fill_value=lib.no_default, only_slice: bool = False
    ):
        """
        Slice/take blocks along axis=0.

        Overloaded for SingleBlock

        Parameters
        ----------
        slice_or_indexer : slice, ndarray[bool], or list-like of ints
        fill_value : scalar, default lib.no_default
        only_slice : bool, default False
            If True, we always return views on existing arrays, never copies.
            This is used when called from ops.blockwise.operate_blockwise.

        Returns
        -------
        new_blocks : list of Block
        """
        allow_fill = fill_value is not lib.no_default
        sl_type, slobj, sllen = _preprocess_slice_or_indexer(
            slice_or_indexer, self.shape[0], allow_fill=allow_fill
        )
        if self.is_single_block:
            blk = self.blocks[0]
            if sl_type in ("slice", "mask"):
                # GH#32959 EABlock would fail since we can't make 0-width
                # TODO(EA2D): special casing unnecessary with 2D EAs
                if sllen == 0:
                    return []
                return [blk.getitem_block(slobj, new_mgr_locs=slice(0, sllen))]
            elif not allow_fill or self.ndim == 1:
                if allow_fill and fill_value is None:
                    fill_value = blk.fill_value
                if not allow_fill and only_slice:
                    # GH#33597 slice instead of take, so we get
                    #  views instead of copies
                    blocks = [
                        blk.getitem_block([ml], new_mgr_locs=i)
                        for i, ml in enumerate(slobj)
                    ]
                    return blocks
                else:
                    return [
                        blk.take_nd(
                            slobj,
                            axis=0,
                            new_mgr_locs=slice(0, sllen),
                            fill_value=fill_value,
                        )
                    ]
        if sl_type in ("slice", "mask"):
            blknos = self.blknos[slobj]
            blklocs = self.blklocs[slobj]
        else:
            # fancy indexer: -1 marks positions to be filled with fill_value
            blknos = algos.take_nd(
                self.blknos, slobj, fill_value=-1, allow_fill=allow_fill
            )
            blklocs = algos.take_nd(
                self.blklocs, slobj, fill_value=-1, allow_fill=allow_fill
            )
        # When filling blknos, make sure blknos is updated before appending to
        # blocks list, that way new blkno is exactly len(blocks).
        blocks = []
        group = not only_slice
        for blkno, mgr_locs in libinternals.get_blkno_placements(blknos, group=group):
            if blkno == -1:
                # If we've got here, fill_value was not lib.no_default
                blocks.append(
                    self._make_na_block(placement=mgr_locs, fill_value=fill_value)
                )
            else:
                blk = self.blocks[blkno]
                # Otherwise, slicing along items axis is necessary.
                if not blk._can_consolidate:
                    # A non-consolidatable block, it's easy, because there's
                    # only one item and each mgr loc is a copy of that single
                    # item.
                    for mgr_loc in mgr_locs:
                        newblk = blk.copy(deep=False)
                        newblk.mgr_locs = slice(mgr_loc, mgr_loc + 1)
                        blocks.append(newblk)
                else:
                    # GH#32779 to avoid the performance penalty of copying,
                    #  we may try to only slice
                    taker = blklocs[mgr_locs.indexer]
                    max_len = max(len(mgr_locs), taker.max() + 1)
                    if only_slice:
                        taker = lib.maybe_indices_to_slice(taker, max_len)
                    if isinstance(taker, slice):
                        nb = blk.getitem_block(taker, new_mgr_locs=mgr_locs)
                        blocks.append(nb)
                    elif only_slice:
                        # GH#33597 slice instead of take, so we get
                        #  views instead of copies
                        for i, ml in zip(taker, mgr_locs):
                            nb = blk.getitem_block([i], new_mgr_locs=ml)
                            blocks.append(nb)
                    else:
                        nb = blk.take_nd(taker, axis=0, new_mgr_locs=mgr_locs)
                        blocks.append(nb)
        return blocks
    def _make_na_block(self, placement, fill_value=None):
        """Build a block of ``fill_value`` (default NaN) for ``placement``."""
        if fill_value is None:
            fill_value = np.nan
        block_shape = list(self.shape)
        block_shape[0] = len(placement)
        # dtype is inferred from the fill value (e.g. NaN -> float64)
        dtype, fill_value = infer_dtype_from_scalar(fill_value)
        block_values = np.empty(block_shape, dtype=dtype)
        block_values.fill(fill_value)
        return make_block(block_values, placement=placement, ndim=block_values.ndim)
def take(self, indexer, axis: int = 1, verify: bool = True, convert: bool = True):
"""
Take items along any axis.
"""
indexer = (
np.arange(indexer.start, indexer.stop, indexer.step, dtype="int64")
if isinstance(indexer, slice)
else np.asanyarray(indexer, dtype="int64")
)
n = self.shape[axis]
if convert:
indexer = maybe_convert_indices(indexer, n)
if verify:
if ((indexer == -1) | (indexer >= n)).any():
raise Exception("Indices must be nonzero and less than the axis length")
new_labels = self.axes[axis].take(indexer)
return self.reindex_indexer(
new_axis=new_labels,
indexer=indexer,
axis=axis,
allow_dups=True,
consolidate=False,
)
    def _equal_values(self: T, other: T) -> bool:
        """
        Used in .equals defined in base class. Only check the column values
        assuming shape and indexes have already been checked.
        """
        if self.ndim == 1:
            # For SingleBlockManager (i.e.Series)
            if other.ndim != 1:
                return False
            left = self.blocks[0].values
            right = other.blocks[0].values
            return array_equals(left, right)
        # 2D: compare block-by-block without interleaving
        return blockwise_all(self, other, array_equals)
def unstack(self, unstacker, fill_value) -> BlockManager:
"""
Return a BlockManager with all blocks unstacked..
Parameters
----------
unstacker : reshape._Unstacker
fill_value : Any
fill_value for newly introduced missing values.
Returns
-------
unstacked : BlockManager
"""
new_columns = unstacker.get_new_columns(self.items)
new_index = unstacker.new_index
new_blocks: List[Block] = []
columns_mask: List[np.ndarray] = []
for blk in self.blocks:
blk_cols = self.items[blk.mgr_locs.indexer]
new_items = unstacker.get_new_columns(blk_cols)
new_placement = new_columns.get_indexer(new_items)
blocks, mask = blk._unstack(
unstacker, fill_value, new_placement=new_placement
)
new_blocks.extend(blocks)
columns_mask.extend(mask)
new_columns = new_columns[columns_mask]
bm = BlockManager(new_blocks, [new_columns, new_index])
return bm
class SingleBlockManager(BlockManager):
    """Manage a single block with a single axis (the 1-D manager backing
    a Series)."""
    ndim = 1
    # A single block is always trivially consolidated.
    _is_consolidated = True
    _known_consolidated = True
    __slots__ = ()
    is_single_block = True
    def __init__(
        self,
        block: Block,
        axis: Index,
        do_integrity_check: bool = False,
        fastpath=lib.no_default,
    ):
        """
        Parameters
        ----------
        block : Block
        axis : Index
        do_integrity_check : bool, default False
            Accepted for signature compatibility with BlockManager; unused here.
        fastpath : deprecated; passing it only triggers a FutureWarning.
        """
        assert isinstance(block, Block), type(block)
        assert isinstance(axis, Index), type(axis)
        if fastpath is not lib.no_default:
            warnings.warn(
                "The `fastpath` keyword is deprecated and will be removed "
                "in a future version.",
                FutureWarning,
                stacklevel=2,
            )
        self.axes = [axis]
        self.blocks = (block,)
    @classmethod
    def from_blocks(cls, blocks: List[Block], axes: List[Index]) -> SingleBlockManager:
        """
        Constructor for BlockManager and SingleBlockManager with same signature.
        """
        assert len(blocks) == 1
        assert len(axes) == 1
        return cls(blocks[0], axes[0], do_integrity_check=False)
    @classmethod
    def from_array(cls, array: ArrayLike, index: Index) -> SingleBlockManager:
        """
        Constructor for if we have an array that is not yet a Block.
        """
        block = make_block(array, placement=slice(0, len(index)), ndim=1)
        return cls(block, index)
    def _post_setstate(self):
        pass
    @property
    def _block(self) -> Block:
        # The one and only block.
        return self.blocks[0]
    @property
    def _blknos(self):
        """ compat with BlockManager """
        return None
    @property
    def _blklocs(self):
        """ compat with BlockManager """
        return None
    def get_slice(self, slobj: slice, axis: int = 0) -> SingleBlockManager:
        """Return a new manager over the sliced values and index."""
        if axis >= self.ndim:
            raise IndexError("Requested axis not found in manager")
        blk = self._block
        array = blk._slice(slobj)
        block = blk.make_block_same_class(array, placement=slice(0, len(array)))
        return type(self)(block, self.index[slobj])
    @property
    def index(self) -> Index:
        return self.axes[0]
    @property
    def dtype(self) -> DtypeObj:
        return self._block.dtype
    def get_dtypes(self) -> np.ndarray:
        return np.array([self._block.dtype])
    def external_values(self):
        """The array that Series.values returns"""
        return self._block.external_values()
    def internal_values(self):
        """The array that Series._values returns"""
        return self._block.internal_values()
    @property
    def _can_hold_na(self) -> bool:
        return self._block._can_hold_na
    def is_consolidated(self) -> bool:
        return True
    def _consolidate_check(self):
        pass
    def _consolidate_inplace(self):
        pass
    def idelete(self, indexer):
        """
        Delete single location from SingleBlockManager.

        Ensures that self.blocks doesn't become empty.
        """
        self._block.delete(indexer)
        self.axes[0] = self.axes[0].delete(indexer)
    def fast_xs(self, loc):
        """
        fast path for getting a cross-section
        return a view of the data
        """
        raise NotImplementedError("Use series._values[loc] instead")
# --------------------------------------------------------------------
# Constructor Helpers
def create_block_manager_from_blocks(blocks, axes: List[Index]) -> BlockManager:
    """Construct and consolidate a BlockManager from pre-built blocks.

    `blocks` may also hold a single raw array-like, which is then wrapped
    in one block spanning all items.
    """
    try:
        if len(blocks) == 1 and not isinstance(blocks[0], Block):
            # if blocks[0] is of length 0, return empty blocks
            if not len(blocks[0]):
                blocks = []
            else:
                # It's OK if a single block is passed as values, its placement
                # is basically "all items", but if there're many, don't bother
                # converting, it's an error anyway.
                blocks = [
                    make_block(
                        values=blocks[0], placement=slice(0, len(axes[0])), ndim=2
                    )
                ]
        mgr = BlockManager(blocks, axes)
        mgr._consolidate_inplace()
        return mgr
    except ValueError as e:
        # Re-raise as a shape-mismatch error that names both shapes.
        blocks = [getattr(b, "values", b) for b in blocks]
        tot_items = sum(b.shape[0] for b in blocks)
        raise construction_error(tot_items, blocks[0].shape[1:], axes, e)
def create_block_manager_from_arrays(
    arrays, names: Index, axes: List[Index]
) -> BlockManager:
    """Construct a consolidated BlockManager from per-column arrays."""
    assert isinstance(names, Index)
    assert isinstance(axes, list)
    assert all(isinstance(x, Index) for x in axes)
    # ensure we dont have any PandasArrays when we call get_block_type
    # Note: just calling extract_array breaks tests that patch PandasArray._typ.
    arrays = [x if not isinstance(x, ABCPandasArray) else x.to_numpy() for x in arrays]
    try:
        blocks = _form_blocks(arrays, names, axes)
        mgr = BlockManager(blocks, axes)
        mgr._consolidate_inplace()
        return mgr
    except ValueError as e:
        raise construction_error(len(arrays), arrays[0].shape, axes, e)
def construction_error(tot_items, block_shape, axes, e=None):
    """Build (not raise) an informative exception for a failed construction.

    The exception object is returned so the caller can raise it; returning
    instead of raising here plays better with mypy. If the shapes actually
    agree and an original exception `e` was supplied, `e` itself is returned.
    """
    passed = tuple(int(x) for x in [tot_items, *block_shape])
    implied = tuple(len(ax) for ax in axes)
    # User-facing messages present 2D shapes in (rows, columns) order.
    if len(passed) <= 2:
        passed = passed[::-1]
    if len(implied) <= 2:
        implied = implied[::-1]
    if e is not None and passed == implied:
        return e
    if block_shape[0] == 0:
        return ValueError("Empty data passed with indices specified.")
    return ValueError(f"Shape of passed values is {passed}, indices imply {implied}")
# -----------------------------------------------------------------------
def _form_blocks(arrays, names: Index, axes: List[Index]) -> List[Block]:
    """Group the per-column `arrays` by block type and build Blocks for each
    group; axis labels with no matching name get an all-NaN object block."""
    # put "leftover" items in float bucket, where else?
    # generalize?
    items_dict: DefaultDict[str, List] = defaultdict(list)
    extra_locs = []
    names_idx = names
    if names_idx.equals(axes[0]):
        names_indexer = np.arange(len(names_idx))
    else:
        # Map each axis label back to its position in `arrays`.
        assert names_idx.intersection(axes[0]).is_unique
        names_indexer = names_idx.get_indexer_for(axes[0])
    for i, name_idx in enumerate(names_indexer):
        if name_idx == -1:
            # Label present in axes[0] but missing from `names`.
            extra_locs.append(i)
            continue
        v = arrays[name_idx]
        block_type = get_block_type(v)
        items_dict[block_type.__name__].append((i, v))
    blocks: List[Block] = []
    if len(items_dict["FloatBlock"]):
        float_blocks = _multi_blockify(items_dict["FloatBlock"])
        blocks.extend(float_blocks)
    if len(items_dict["NumericBlock"]):
        complex_blocks = _multi_blockify(items_dict["NumericBlock"])
        blocks.extend(complex_blocks)
    if len(items_dict["TimeDeltaBlock"]):
        timedelta_blocks = _multi_blockify(items_dict["TimeDeltaBlock"])
        blocks.extend(timedelta_blocks)
    if len(items_dict["DatetimeBlock"]):
        datetime_blocks = _simple_blockify(items_dict["DatetimeBlock"], DT64NS_DTYPE)
        blocks.extend(datetime_blocks)
    if len(items_dict["DatetimeTZBlock"]):
        # tz-aware blocks hold one column each.
        dttz_blocks = [
            make_block(array, klass=DatetimeTZBlock, placement=i, ndim=2)
            for i, array in items_dict["DatetimeTZBlock"]
        ]
        blocks.extend(dttz_blocks)
    if len(items_dict["ObjectBlock"]) > 0:
        object_blocks = _simple_blockify(items_dict["ObjectBlock"], np.object_)
        blocks.extend(object_blocks)
    if len(items_dict["CategoricalBlock"]) > 0:
        cat_blocks = [
            make_block(array, klass=CategoricalBlock, placement=i, ndim=2)
            for i, array in items_dict["CategoricalBlock"]
        ]
        blocks.extend(cat_blocks)
    if len(items_dict["ExtensionBlock"]):
        external_blocks = [
            make_block(array, klass=ExtensionBlock, placement=i, ndim=2)
            for i, array in items_dict["ExtensionBlock"]
        ]
        blocks.extend(external_blocks)
    if len(items_dict["ObjectValuesExtensionBlock"]):
        external_blocks = [
            make_block(array, klass=ObjectValuesExtensionBlock, placement=i, ndim=2)
            for i, array in items_dict["ObjectValuesExtensionBlock"]
        ]
        blocks.extend(external_blocks)
    if len(extra_locs):
        shape = (len(extra_locs),) + tuple(len(x) for x in axes[1:])
        # empty items -> dtype object
        block_values = np.empty(shape, dtype=object)
        block_values.fill(np.nan)
        na_block = make_block(block_values, placement=extra_locs, ndim=2)
        blocks.append(na_block)
    return blocks
def _simple_blockify(tuples, dtype) -> List[Block]:
    """
    return a single array of a block that has a single dtype; if dtype is
    not None, coerce to this dtype
    """
    values, placement = _stack_arrays(tuples, dtype)
    # TODO: CHECK DTYPE?
    if dtype is not None and values.dtype != dtype:  # pragma: no cover
        values = values.astype(dtype)
    block = make_block(values, placement=placement, ndim=2)
    return [block]
def _multi_blockify(tuples, dtype: Optional[Dtype] = None):
    """ return an array of blocks that potentially have different dtypes """
    # group by dtype; note itertools.groupby only merges *consecutive*
    # equal dtypes, so non-adjacent same-dtype runs yield separate blocks
    grouper = itertools.groupby(tuples, lambda x: x[1].dtype)
    new_blocks = []
    for dtype, tup_block in grouper:
        values, placement = _stack_arrays(list(tup_block), dtype)
        block = make_block(values, placement=placement, ndim=2)
        new_blocks.append(block)
    return new_blocks
def _stack_arrays(tuples, dtype: np.dtype):
# fml
def _asarray_compat(x):
if isinstance(x, ABCSeries):
return x._values
else:
return np.asarray(x)
placement, arrays = zip(*tuples)
first = arrays[0]
shape = (len(arrays),) + first.shape
stacked = np.empty(shape, dtype=dtype)
for i, arr in enumerate(arrays):
stacked[i] = _asarray_compat(arr)
return stacked, placement
def _interleaved_dtype(blocks: Sequence[Block]) -> Optional[DtypeObj]:
    """
    Find the common dtype for `blocks`.

    Parameters
    ----------
    blocks : List[Block]

    Returns
    -------
    dtype : np.dtype, ExtensionDtype, or None
        None is returned when `blocks` is empty.
    """
    if not len(blocks):
        return None
    return find_common_type([b.dtype for b in blocks])
def _consolidate(blocks):
    """
    Merge blocks having same dtype, exclude non-consolidating blocks
    """
    # sort by _can_consolidate, dtype
    gkey = lambda x: x._consolidate_key
    grouper = itertools.groupby(sorted(blocks, key=gkey), gkey)
    new_blocks: List[Block] = []
    for (_can_consolidate, dtype), group_blocks in grouper:
        merged_blocks = _merge_blocks(
            list(group_blocks), dtype=dtype, can_consolidate=_can_consolidate
        )
        new_blocks = extend_blocks(merged_blocks, new_blocks)
    return new_blocks
def _merge_blocks(
    blocks: List[Block], dtype: DtypeObj, can_consolidate: bool
) -> List[Block]:
    """Merge same-dtype blocks into one block; return unchanged when merging
    is not possible or not needed."""
    if len(blocks) == 1:
        return blocks
    if can_consolidate:
        if dtype is None:
            if len({b.dtype for b in blocks}) != 1:
                raise AssertionError("_merge_blocks are invalid!")
        # TODO: optimization potential in case all mgrs contain slices and
        # combination of those slices is a slice, too.
        new_mgr_locs = np.concatenate([b.mgr_locs.as_array for b in blocks])
        new_values = np.vstack([b.values for b in blocks])
        # Keep rows ordered by manager location.
        argsort = np.argsort(new_mgr_locs)
        new_values = new_values[argsort]
        new_mgr_locs = new_mgr_locs[argsort]
        return [make_block(new_values, placement=new_mgr_locs, ndim=2)]
    # can't consolidate --> no merge
    return blocks
def _fast_count_smallints(arr: np.ndarray) -> np.ndarray:
"""Faster version of set(arr) for sequences of small numbers."""
counts = np.bincount(arr.astype(np.int_))
nz = counts.nonzero()[0]
return np.c_[nz, counts[nz]]
def _preprocess_slice_or_indexer(slice_or_indexer, length: int, allow_fill: bool):
    """Normalize an indexer into a ``(kind, indexer, length)`` triple, where
    kind is one of "slice", "mask" or "fancy"."""
    if isinstance(slice_or_indexer, slice):
        return (
            "slice",
            slice_or_indexer,
            libinternals.slice_len(slice_or_indexer, length),
        )
    elif (
        isinstance(slice_or_indexer, np.ndarray) and slice_or_indexer.dtype == np.bool_
    ):
        return "mask", slice_or_indexer, slice_or_indexer.sum()
    else:
        indexer = np.asanyarray(slice_or_indexer, dtype=np.int64)
        if not allow_fill:
            # Normalize negative indices; with allow_fill, -1 means "missing".
            indexer = maybe_convert_indices(indexer, length)
        return "fancy", indexer, len(indexer)
| 32.175719 | 88 | 0.574389 | from __future__ import annotations
from collections import defaultdict
import itertools
from typing import (
Any,
Callable,
DefaultDict,
Dict,
Hashable,
List,
Optional,
Sequence,
Tuple,
TypeVar,
Union,
)
import warnings
import numpy as np
from pandas._libs import internals as libinternals, lib
from pandas._typing import ArrayLike, Dtype, DtypeObj, Shape
from pandas.errors import PerformanceWarning
from pandas.util._validators import validate_bool_kwarg
from pandas.core.dtypes.cast import find_common_type, infer_dtype_from_scalar
from pandas.core.dtypes.common import (
DT64NS_DTYPE,
is_dtype_equal,
is_extension_array_dtype,
is_list_like,
)
from pandas.core.dtypes.dtypes import ExtensionDtype
from pandas.core.dtypes.generic import ABCDataFrame, ABCPandasArray, ABCSeries
from pandas.core.dtypes.missing import array_equals, isna
import pandas.core.algorithms as algos
from pandas.core.arrays.sparse import SparseDtype
from pandas.core.construction import extract_array
from pandas.core.indexers import maybe_convert_indices
from pandas.core.indexes.api import Float64Index, Index, ensure_index
from pandas.core.internals.base import DataManager
from pandas.core.internals.blocks import (
Block,
CategoricalBlock,
DatetimeTZBlock,
ExtensionBlock,
ObjectValuesExtensionBlock,
extend_blocks,
get_block_type,
make_block,
safe_reshape,
)
from pandas.core.internals.ops import blockwise_all, operate_blockwise
T = TypeVar("T", bound="BlockManager")
class BlockManager(DataManager):
    """
    Core internal data structure: a collection of Blocks plus the axes
    (Index objects) labelling them. axes[0] are the "items"; remaining
    axes index within each block.
    """
    __slots__ = [
        "axes",
        "blocks",
        "_known_consolidated",
        "_is_consolidated",
        "_blknos",
        "_blklocs",
    ]
    # Lazily-built reverse maps: item i lives in blocks[_blknos[i]] at
    # position _blklocs[i] within that block.
    _blknos: np.ndarray
    _blklocs: np.ndarray
    def __init__(
        self,
        blocks: Sequence[Block],
        axes: Sequence[Index],
        do_integrity_check: bool = True,
    ):
        self.axes = [ensure_index(ax) for ax in axes]
        self.blocks: Tuple[Block, ...] = tuple(blocks)
        for block in blocks:
            if self.ndim != block.ndim:
                raise AssertionError(
                    f"Number of Block dimensions ({block.ndim}) must equal "
                    f"number of axes ({self.ndim})"
                )
        if do_integrity_check:
            self._verify_integrity()
        self._known_consolidated = False
        # Built on first access through the blknos/blklocs properties.
        self._blknos = None
        self._blklocs = None
    @classmethod
    def from_blocks(cls, blocks: List[Block], axes: List[Index]):
        """Fastpath constructor; skips the integrity check."""
        return cls(blocks, axes, do_integrity_check=False)
    @property
    def blknos(self):
        # item position -> index of the block holding that item
        if self._blknos is None:
            self._rebuild_blknos_and_blklocs()
        return self._blknos
    @property
    def blklocs(self):
        # item position -> location of the item within its block
        if self._blklocs is None:
            self._rebuild_blknos_and_blklocs()
        return self._blklocs
    def make_empty(self: T, axes=None) -> T:
        """ return an empty BlockManager with the items axis of len 0 """
        if axes is None:
            axes = [Index([])] + self.axes[1:]
        if self.ndim == 1:
            assert isinstance(self, SingleBlockManager)
            blk = self.blocks[0]
            arr = blk.values[:0]
            nb = blk.make_block_same_class(arr, placement=slice(0, 0), ndim=1)
            blocks = [nb]
        else:
            blocks = []
        return type(self).from_blocks(blocks, axes)
    def __nonzero__(self) -> bool:
        # A manager is always truthy, even when empty.
        return True
    __bool__ = __nonzero__
    @property
    def shape(self) -> Shape:
        return tuple(len(ax) for ax in self.axes)
    @property
    def ndim(self) -> int:
        return len(self.axes)
    def set_axis(self, axis: int, new_labels: Index) -> None:
        """Replace one axis with new labels of identical length."""
        old_len = len(self.axes[axis])
        new_len = len(new_labels)
        if new_len != old_len:
            raise ValueError(
                f"Length mismatch: Expected axis has {old_len} elements, new "
                f"values have {new_len} elements"
            )
        self.axes[axis] = new_labels
    @property
    def is_single_block(self) -> bool:
        return len(self.blocks) == 1
    def _rebuild_blknos_and_blklocs(self) -> None:
        """
        Update mgr._blknos / mgr._blklocs.
        """
        new_blknos = np.empty(self.shape[0], dtype=np.intp)
        new_blklocs = np.empty(self.shape[0], dtype=np.intp)
        # -1 marks "not covered by any block"; all entries must be overwritten.
        new_blknos.fill(-1)
        new_blklocs.fill(-1)
        for blkno, blk in enumerate(self.blocks):
            rl = blk.mgr_locs
            new_blknos[rl.indexer] = blkno
            new_blklocs[rl.indexer] = np.arange(len(rl))
        if (new_blknos == -1).any():
            raise AssertionError("Gaps in blk ref_locs")
        self._blknos = new_blknos
        self._blklocs = new_blklocs
    @property
    def items(self) -> Index:
        return self.axes[0]
    def get_dtypes(self):
        """Return an array of per-item dtypes, in item order."""
        dtypes = np.array([blk.dtype for blk in self.blocks])
        return algos.take_nd(dtypes, self.blknos, allow_fill=False)
    def __getstate__(self):
        """Pickle support; emits the "0.14.1" extended state format."""
        block_values = [b.values for b in self.blocks]
        block_items = [self.items[b.mgr_locs.indexer] for b in self.blocks]
        axes_array = list(self.axes)
        extra_state = {
            "0.14.1": {
                "axes": axes_array,
                "blocks": [
                    {"values": b.values, "mgr_locs": b.mgr_locs.indexer}
                    for b in self.blocks
                ],
            }
        }
        # First three elements of the state are to maintain forward
        # compatibility with 0.13.1.
        return axes_array, block_values, block_items, extra_state
    def __setstate__(self, state):
        def unpickle_block(values, mgr_locs, ndim: int):
            # TODO(EA2D): ndim would be unnecessary with 2D EAs
            return make_block(values, placement=mgr_locs, ndim=ndim)
        if isinstance(state, tuple) and len(state) >= 4 and "0.14.1" in state[3]:
            state = state[3]["0.14.1"]
            self.axes = [ensure_index(ax) for ax in state["axes"]]
            ndim = len(self.axes)
            self.blocks = tuple(
                unpickle_block(b["values"], b["mgr_locs"], ndim=ndim)
                for b in state["blocks"]
            )
        else:
            raise NotImplementedError("pre-0.14.1 pickles are no longer supported")
        self._post_setstate()
    def _post_setstate(self) -> None:
        # Force re-derivation of consolidation state and locator maps.
        self._is_consolidated = False
        self._known_consolidated = False
        self._rebuild_blknos_and_blklocs()
    def __len__(self) -> int:
        return len(self.items)
    def __repr__(self) -> str:
        output = type(self).__name__
        for i, ax in enumerate(self.axes):
            if i == 0:
                output += f"\nItems: {ax}"
            else:
                output += f"\nAxis {i}: {ax}"
        for block in self.blocks:
            output += f"\n{block}"
        return output
    def _verify_integrity(self) -> None:
        """Check that the blocks exactly cover the manager's shape."""
        mgr_shape = self.shape
        tot_items = sum(len(x.mgr_locs) for x in self.blocks)
        for block in self.blocks:
            if block.shape[1:] != mgr_shape[1:]:
                raise construction_error(tot_items, block.shape[1:], self.axes)
        if len(self.items) != tot_items:
            raise AssertionError(
                "Number of manager items must equal union of "
                f"block items\n# manager items: {len(self.items)}, # "
                f"tot_items: {tot_items}"
            )
    def reduce(
        self: T, func: Callable, ignore_failures: bool = False
    ) -> Tuple[T, np.ndarray]:
        """
        Apply reduction function blockwise, returning a single-row manager.

        Parameters
        ----------
        func : reduction function
        ignore_failures : bool, default False
            Whether to drop blocks where func raises TypeError.

        Returns
        -------
        BlockManager
        np.ndarray
            Indexer of item positions retained (a subset only when
            ignore_failures dropped blocks).
        """
        # If 2D, we assume that we're operating column-wise
        assert self.ndim == 2
        res_blocks: List[Block] = []
        for blk in self.blocks:
            nbs = blk.reduce(func, ignore_failures)
            res_blocks.extend(nbs)
        index = Index([None])
        if ignore_failures:
            if res_blocks:
                indexer = np.concatenate([blk.mgr_locs.as_array for blk in res_blocks])
                new_mgr = self._combine(res_blocks, copy=False, index=index)
            else:
                indexer = []
                new_mgr = type(self).from_blocks([], [Index([]), index])
        else:
            indexer = np.arange(self.shape[0])
            new_mgr = type(self).from_blocks(res_blocks, [self.items, index])
        return new_mgr, indexer
    def operate_blockwise(self, other: BlockManager, array_op) -> BlockManager:
        """
        Apply array_op blockwise with another (aligned) BlockManager.
        """
        return operate_blockwise(self, other, array_op)
    def apply(
        self: T,
        f,
        align_keys: Optional[List[str]] = None,
        ignore_failures: bool = False,
        **kwargs,
    ) -> T:
        """
        Iterate over the blocks, collect and create a new BlockManager.

        Parameters
        ----------
        f : str or callable
            Name of a Block method, or a callable applied to each block.
        align_keys : List[str] or None, default None
            Names of kwargs to be realigned to each block's items.
        ignore_failures : bool, default False
            Drop blocks where `f` raises TypeError/NotImplementedError.
        **kwargs
            Keywords to pass to `f`.
        """
        assert "filter" not in kwargs
        align_keys = align_keys or []
        result_blocks: List[Block] = []
        aligned_args = {k: kwargs[k] for k in align_keys}
        for b in self.blocks:
            if aligned_args:
                for k, obj in aligned_args.items():
                    if isinstance(obj, (ABCSeries, ABCDataFrame)):
                        # Take only the rows/columns this block covers.
                        if obj.ndim == 1:
                            kwargs[k] = obj.iloc[b.mgr_locs.indexer]._values
                        else:
                            kwargs[k] = obj.iloc[:, b.mgr_locs.indexer]._values
                    else:
                        # otherwise assumed positional array-like
                        kwargs[k] = obj[b.mgr_locs.indexer]
            try:
                if callable(f):
                    applied = b.apply(f, **kwargs)
                else:
                    applied = getattr(b, f)(**kwargs)
            except (TypeError, NotImplementedError):
                if not ignore_failures:
                    raise
                continue
            result_blocks = extend_blocks(applied, result_blocks)
        if ignore_failures:
            return self._combine(result_blocks)
        if len(result_blocks) == 0:
            return self.make_empty(self.axes)
        return type(self).from_blocks(result_blocks, self.axes)
    def quantile(
        self,
        *,
        qs: Float64Index,
        axis: int = 0,
        transposed: bool = False,
        interpolation="linear",
    ) -> BlockManager:
        """
        Iterate over blocks applying quantile reduction.

        Parameters
        ----------
        qs : Float64Index
            Quantiles to compute.
        axis : int, default 0
            Only axis == 1 is accepted here (see assertion below).
        transposed : bool, default False
            If True, flip axes/values in the result.
        interpolation : str, default 'linear'
        """
        assert self.ndim >= 2
        assert is_list_like(qs)
        assert axis == 1
        new_axes = list(self.axes)
        new_axes[1] = Float64Index(qs)
        blocks = [
            blk.quantile(axis=axis, qs=qs, interpolation=interpolation)
            for blk in self.blocks
        ]
        if transposed:
            new_axes = new_axes[::-1]
            blocks = [
                b.make_block(b.values.T, placement=np.arange(b.shape[1]))
                for b in blocks
            ]
        return type(self)(blocks, new_axes)
    def isna(self, func) -> BlockManager:
        """Apply an isna-like `func` elementwise, blockwise."""
        return self.apply("apply", func=func)
    def where(self, other, cond, align: bool, errors: str, axis: int) -> BlockManager:
        """Blockwise Block.where; `other`/`cond` realigned per block if align."""
        if align:
            align_keys = ["other", "cond"]
        else:
            align_keys = ["cond"]
        other = extract_array(other, extract_numpy=True)
        return self.apply(
            "where",
            align_keys=align_keys,
            other=other,
            cond=cond,
            errors=errors,
            axis=axis,
        )
    def setitem(self, indexer, value) -> BlockManager:
        return self.apply("setitem", indexer=indexer, value=value)
    def putmask(self, mask, new, align: bool = True):
        """Blockwise Block.putmask; `new`/`mask` realigned per block if align."""
        if align:
            align_keys = ["new", "mask"]
        else:
            align_keys = ["mask"]
        new = extract_array(new, extract_numpy=True)
        return self.apply(
            "putmask",
            align_keys=align_keys,
            mask=mask,
            new=new,
        )
    def diff(self, n: int, axis: int) -> BlockManager:
        return self.apply("diff", n=n, axis=axis)
    def interpolate(self, **kwargs) -> BlockManager:
        return self.apply("interpolate", **kwargs)
def shift(self, periods: int, axis: int, fill_value) -> BlockManager:
if fill_value is lib.no_default:
fill_value = None
if axis == 0 and self.ndim == 2 and self.nblocks > 1:
if periods > 0:
indexer = [-1] * periods + list(range(ncols - periods))
else:
nper = abs(periods)
indexer = list(range(nper, ncols)) + [-1] * nper
result = self.reindex_indexer(
self.items,
indexer,
axis=0,
fill_value=fill_value,
allow_dups=True,
consolidate=False,
)
return result
return self.apply("shift", periods=periods, axis=axis, fill_value=fill_value)
    def fillna(self, value, limit, inplace: bool, downcast) -> BlockManager:
        return self.apply(
            "fillna", value=value, limit=limit, inplace=inplace, downcast=downcast
        )
    def downcast(self) -> BlockManager:
        return self.apply("downcast")
    def astype(self, dtype, copy: bool = False, errors: str = "raise") -> BlockManager:
        return self.apply("astype", dtype=dtype, copy=copy, errors=errors)
    def convert(
        self,
        copy: bool = True,
        datetime: bool = True,
        numeric: bool = True,
        timedelta: bool = True,
    ) -> BlockManager:
        """Blockwise soft-conversion of object-dtyped data."""
        return self.apply(
            "convert",
            copy=copy,
            datetime=datetime,
            numeric=numeric,
            timedelta=timedelta,
        )
    def replace(self, to_replace, value, inplace: bool, regex: bool) -> BlockManager:
        # Scalar replacement only; list-likes go through replace_list.
        assert np.ndim(value) == 0, value
        return self.apply(
            "replace", to_replace=to_replace, value=value, inplace=inplace, regex=regex
        )
    def replace_list(
        self: T,
        src_list: List[Any],
        dest_list: List[Any],
        inplace: bool = False,
        regex: bool = False,
    ) -> T:
        """Replace each element of src_list with the matching dest_list value."""
        inplace = validate_bool_kwarg(inplace, "inplace")
        bm = self.apply(
            "_replace_list",
            src_list=src_list,
            dest_list=dest_list,
            inplace=inplace,
            regex=regex,
        )
        bm._consolidate_inplace()
        return bm
    def to_native_types(self, **kwargs) -> BlockManager:
        return self.apply("to_native_types", **kwargs)
    def is_consolidated(self) -> bool:
        """True if no two consolidatable blocks share a dtype (checked lazily)."""
        if not self._known_consolidated:
            self._consolidate_check()
        return self._is_consolidated
    def _consolidate_check(self) -> None:
        # Consolidated means at most one consolidatable block per dtype.
        dtypes = [blk.dtype for blk in self.blocks if blk._can_consolidate]
        self._is_consolidated = len(dtypes) == len(set(dtypes))
        self._known_consolidated = True
    @property
    def is_numeric_mixed_type(self) -> bool:
        return all(block.is_numeric for block in self.blocks)
    @property
    def any_extension_types(self) -> bool:
        """Whether any of the blocks in this manager are extension blocks."""
        return any(block.is_extension for block in self.blocks)
    @property
    def is_view(self) -> bool:
        """ return a boolean if we are a single block and are a view """
        if len(self.blocks) == 1:
            return self.blocks[0].is_view
        # Multi-block managers conservatively report False.
        return False
    def get_bool_data(self, copy: bool = False) -> BlockManager:
        """
        Select blocks that are bool dtype; object blocks are split and only
        their all-bool sub-blocks kept.

        Parameters
        ----------
        copy : bool, default False
            Whether to copy the blocks
        """
        new_blocks = []
        for blk in self.blocks:
            if blk.dtype == bool:
                new_blocks.append(blk)
            elif blk.is_object:
                nbs = blk._split()
                for nb in nbs:
                    if nb.is_bool:
                        new_blocks.append(nb)
        return self._combine(new_blocks, copy)
    def get_numeric_data(self, copy: bool = False) -> BlockManager:
        """
        Select only the numeric blocks.

        Parameters
        ----------
        copy : bool, default False
            Whether to copy the blocks
        """
        return self._combine([b for b in self.blocks if b.is_numeric], copy)
    def _combine(
        self: T, blocks: List[Block], copy: bool = True, index: Optional[Index] = None
    ) -> T:
        """ return a new manager with the blocks """
        if len(blocks) == 0:
            return self.make_empty()
        indexer = np.sort(np.concatenate([b.mgr_locs.as_array for b in blocks]))
        inv_indexer = lib.get_reverse_indexer(indexer, self.shape[0])
        new_blocks: List[Block] = []
        for b in blocks:
            b = b.copy(deep=copy)
            # Remap block locations into the reduced item space.
            b.mgr_locs = inv_indexer[b.mgr_locs.indexer]
            new_blocks.append(b)
        axes = list(self.axes)
        if index is not None:
            axes[-1] = index
        axes[0] = self.items.take(indexer)
        return type(self).from_blocks(new_blocks, axes)
    def get_slice(self, slobj: slice, axis: int = 0) -> BlockManager:
        """Slice along one axis; the result skips the integrity check."""
        if axis == 0:
            new_blocks = self._slice_take_blocks_ax0(slobj)
        elif axis == 1:
            slicer = (slice(None), slobj)
            new_blocks = [blk.getitem_block(slicer) for blk in self.blocks]
        else:
            raise IndexError("Requested axis not found in manager")
        new_axes = list(self.axes)
        new_axes[axis] = new_axes[axis][slobj]
        bm = type(self)(new_blocks, new_axes, do_integrity_check=False)
        return bm
    @property
    def nblocks(self) -> int:
        return len(self.blocks)
    def copy(self: T, deep=True) -> T:
        """
        Make deep or shallow copy of BlockManager

        Parameters
        ----------
        deep : bool or string, default True
            If False, return shallow copy (do not copy data)
            If 'all', copy data and a deep copy of the index

        Returns
        -------
        BlockManager
        """
        if deep:
            def copy_func(ax):
                # deep="all" deep-copies the index; plain True only views it.
                return ax.copy(deep=True) if deep == "all" else ax.view()
            new_axes = [copy_func(ax) for ax in self.axes]
        else:
            new_axes = list(self.axes)
        res = self.apply("copy", deep=deep)
        res.axes = new_axes
        return res
    def as_array(
        self,
        transpose: bool = False,
        dtype: Optional[Dtype] = None,
        copy: bool = False,
        na_value=lib.no_default,
    ) -> np.ndarray:
        """
        Convert the blockmanager data into an numpy array.

        Parameters
        ----------
        transpose : bool, default False
            If True, transpose the return array.
        dtype : object, default None
            Data type of the return array.
        copy : bool, default False
            If True then guarantee that a copy is returned. A value of
            False does not guarantee that the underlying data is not copied.
        na_value : object, default lib.no_default
            Value used as the missing-value sentinel.

        Returns
        -------
        arr : ndarray
        """
        if len(self.blocks) == 0:
            arr = np.empty(self.shape, dtype=float)
            return arr.transpose() if transpose else arr
        # Always copy when a na_value is given so we never mutate the source.
        copy = copy or na_value is not lib.no_default
        if self.is_single_block:
            blk = self.blocks[0]
            if blk.is_extension:
                arr = blk.values.to_numpy(dtype=dtype, na_value=na_value).reshape(
                    blk.shape
                )
            else:
                arr = np.asarray(blk.get_values())
                if dtype:
                    arr = arr.astype(dtype, copy=False)
        else:
            arr = self._interleave(dtype=dtype, na_value=na_value)
            # _interleave already produced fresh data; no further copy needed.
            copy = False
        if copy:
            arr = arr.copy()
        if na_value is not lib.no_default:
            arr[isna(arr)] = na_value
        return arr.transpose() if transpose else arr
    def _interleave(
        self, dtype: Optional[Dtype] = None, na_value=lib.no_default
    ) -> np.ndarray:
        """
        Return ndarray from blocks with specified item order.
        Items must be contained in the blocks.
        """
        if not dtype:
            dtype = _interleaved_dtype(self.blocks)
        # Extension dtypes cannot back a plain ndarray directly.
        if isinstance(dtype, SparseDtype):
            dtype = dtype.subtype
        elif is_extension_array_dtype(dtype):
            dtype = "object"
        elif is_dtype_equal(dtype, str):
            dtype = "object"
        result = np.empty(self.shape, dtype=dtype)
        itemmask = np.zeros(self.shape[0])
        for blk in self.blocks:
            rl = blk.mgr_locs
            if blk.is_extension:
                arr = blk.values.to_numpy(dtype=dtype, na_value=na_value)
            else:
                arr = blk.get_values(dtype)
            result[rl.indexer] = arr
            itemmask[rl.indexer] = 1
        if not itemmask.all():
            raise AssertionError("Some items were not contained in blocks")
        return result
    def to_dict(self, copy: bool = True):
        """
        Return a dict of str(dtype) -> BlockManager.

        Parameters
        ----------
        copy : bool, default True
        """
        bd: Dict[str, List[Block]] = {}
        for b in self.blocks:
            bd.setdefault(str(b.dtype), []).append(b)
        return {dtype: self._combine(blocks, copy=copy) for dtype, blocks in bd.items()}
    def fast_xs(self, loc: int) -> ArrayLike:
        """
        Return the array for one cross-section (a single `loc` along axis 1).

        Parameters
        ----------
        loc : int

        Returns
        -------
        np.ndarray or ExtensionArray
        """
        if len(self.blocks) == 1:
            return self.blocks[0].iget((slice(None), loc))
        dtype = _interleaved_dtype(self.blocks)
        n = len(self)
        if is_extension_array_dtype(dtype):
            # we'll eventually construct an ExtensionArray.
            result = np.empty(n, dtype=object)
        else:
            result = np.empty(n, dtype=dtype)
        for blk in self.blocks:
            # Such assignment may incorrectly coerce NaT to None
            # result[blk.mgr_locs] = blk._slice((slice(None), loc))
            for i, rl in enumerate(blk.mgr_locs):
                result[rl] = blk.iget((i, loc))
        if isinstance(dtype, ExtensionDtype):
            result = dtype.construct_array_type()._from_sequence(result, dtype=dtype)
        return result
    def consolidate(self) -> BlockManager:
        """
        Join together blocks having same dtype.

        Returns
        -------
        BlockManager
        """
        if self.is_consolidated():
            return self
        bm = type(self)(self.blocks, self.axes)
        bm._is_consolidated = False
        bm._consolidate_inplace()
        return bm
    def _consolidate_inplace(self) -> None:
        if not self.is_consolidated():
            self.blocks = tuple(_consolidate(self.blocks))
            self._is_consolidated = True
            self._known_consolidated = True
            self._rebuild_blknos_and_blklocs()
    def iget(self, i: int) -> SingleBlockManager:
        """
        Return the data for item i as a SingleBlockManager.
        """
        block = self.blocks[self.blknos[i]]
        values = block.iget(self.blklocs[i])
        # shortcut for select a single-dim from a 2-dim BM
        return SingleBlockManager(
            block.make_block_same_class(
                values, placement=slice(0, len(values)), ndim=1
            ),
            self.axes[1],
        )
    def iget_values(self, i: int) -> ArrayLike:
        """
        Return the raw values for item i (ndarray or ExtensionArray).
        """
        block = self.blocks[self.blknos[i]]
        values = block.iget(self.blklocs[i])
        return values
    def idelete(self, indexer):
        """
        Delete selected item locations in-place.
        """
        is_deleted = np.zeros(self.shape[0], dtype=np.bool_)
        is_deleted[indexer] = True
        # Running offset that shifts surviving locations left.
        ref_loc_offset = -is_deleted.cumsum()
        is_blk_deleted = [False] * len(self.blocks)
        if isinstance(indexer, int):
            affected_start = indexer
        else:
            affected_start = is_deleted.nonzero()[0][0]
        for blkno, _ in _fast_count_smallints(self.blknos[affected_start:]):
            blk = self.blocks[blkno]
            bml = blk.mgr_locs
            blk_del = is_deleted[bml.indexer].nonzero()[0]
            if len(blk_del) == len(bml):
                # Every item of this block is deleted -> drop the block.
                is_blk_deleted[blkno] = True
                continue
            elif len(blk_del) != 0:
                blk.delete(blk_del)
                bml = blk.mgr_locs
            blk.mgr_locs = bml.add(ref_loc_offset[bml.indexer])
        # FIXME: use Index.delete as soon as it uses fastpath=True
        self.axes[0] = self.items[~is_deleted]
        self.blocks = tuple(
            b for blkno, b in enumerate(self.blocks) if not is_blk_deleted[blkno]
        )
        self._rebuild_blknos_and_blklocs()
    def iset(self, loc: Union[int, slice, np.ndarray], value):
        """
        Set new item(s) in-place. Does not consolidate; existing blocks that
        cannot store the value are trimmed and new blocks appended.
        """
        value = extract_array(value, extract_numpy=True)
        # FIXME: refactor, clearly separate broadcasting & zip-like assignment
        # can prob also fix the various if tests for sparse/categorical
        if self._blklocs is None and self.ndim > 1:
            self._rebuild_blknos_and_blklocs()
        value_is_extension_type = is_extension_array_dtype(value)
        # categorical/sparse/datetimetz
        if value_is_extension_type:
            def value_getitem(placement):
                return value
        else:
            if value.ndim == 2:
                value = value.T
            if value.ndim == self.ndim - 1:
                value = safe_reshape(value, (1,) + value.shape)
                def value_getitem(placement):
                    return value
            else:
                def value_getitem(placement):
                    return value[placement.indexer]
            if value.shape[1:] != self.shape[1:]:
                raise AssertionError(
                    "Shape of new values must be compatible with manager shape"
                )
        if lib.is_integer(loc):
            # We have 6 tests where loc is _not_ an int.
            # In this case, get_blkno_placements will yield only one tuple,
            # containing (self._blknos[loc], BlockPlacement(slice(0, 1, 1)))
            loc = [loc]
        # Accessing public blknos ensures the public versions are initialized
        blknos = self.blknos[loc]
        blklocs = self.blklocs[loc].copy()
        unfit_mgr_locs = []
        unfit_val_locs = []
        removed_blknos = []
        for blkno, val_locs in libinternals.get_blkno_placements(blknos, group=True):
            blk = self.blocks[blkno]
            blk_locs = blklocs[val_locs.indexer]
            if blk.should_store(value):
                # value fits -> write into the existing block in place
                blk.set_inplace(blk_locs, value_getitem(val_locs))
            else:
                unfit_mgr_locs.append(blk.mgr_locs.as_array[blk_locs])
                unfit_val_locs.append(val_locs)
                # If all block items are unfit, schedule the block for removal.
                if len(val_locs) == len(blk.mgr_locs):
                    removed_blknos.append(blkno)
                else:
                    blk.delete(blk_locs)
                    self._blklocs[blk.mgr_locs.indexer] = np.arange(len(blk))
        if len(removed_blknos):
            # Remove blocks & update blknos accordingly
            is_deleted = np.zeros(self.nblocks, dtype=np.bool_)
            is_deleted[removed_blknos] = True
            new_blknos = np.empty(self.nblocks, dtype=np.int64)
            new_blknos.fill(-1)
            new_blknos[~is_deleted] = np.arange(self.nblocks - len(removed_blknos))
            self._blknos = new_blknos[self._blknos]
            self.blocks = tuple(
                blk for i, blk in enumerate(self.blocks) if i not in set(removed_blknos)
            )
        if unfit_val_locs:
            unfit_mgr_locs = np.concatenate(unfit_mgr_locs)
            unfit_count = len(unfit_mgr_locs)
            new_blocks: List[Block] = []
            if value_is_extension_type:
                # This code (ab-)uses the fact that EA blocks contain only
                # one item.
                # TODO(EA2D): special casing unnecessary with 2D EAs
                new_blocks.extend(
                    make_block(
                        values=value,
                        ndim=self.ndim,
                        placement=slice(mgr_loc, mgr_loc + 1),
                    )
                    for mgr_loc in unfit_mgr_locs
                )
                self._blknos[unfit_mgr_locs] = np.arange(unfit_count) + len(self.blocks)
                self._blklocs[unfit_mgr_locs] = 0
            else:
                # unfit_val_locs contains BlockPlacement objects
                unfit_val_items = unfit_val_locs[0].append(unfit_val_locs[1:])
                new_blocks.append(
                    make_block(
                        values=value_getitem(unfit_val_items),
                        ndim=self.ndim,
                        placement=unfit_mgr_locs,
                    )
                )
                self._blknos[unfit_mgr_locs] = len(self.blocks)
                self._blklocs[unfit_mgr_locs] = np.arange(unfit_count)
            self.blocks += tuple(new_blocks)
            # Newly created block's dtype may already be present.
            self._known_consolidated = False
    def insert(self, loc: int, item: Hashable, value, allow_duplicates: bool = False):
        """
        Insert item at selected position.

        Parameters
        ----------
        loc : int
            Position in axis 0 at which to insert.
        item : hashable
            New label for ``self.items``.
        value : array-like
            Column values; transposed / reshaped below into block layout.
        allow_duplicates : bool, default False
            When False, raise if *item* already exists in ``self.items``.
        """
        if not allow_duplicates and item in self.items:
            # Should this be a different kind of error??
            raise ValueError(f"cannot insert {item}, already exists")
        if not isinstance(loc, int):
            raise TypeError("loc must be int")
        new_axis = self.items.insert(loc, item)
        if value.ndim == 2:
            # Blocks store items along the first axis, so transpose 2D input.
            value = value.T
        if value.ndim == self.ndim - 1 and not is_extension_array_dtype(value.dtype):
            # Promote a plain 1D ndarray to a single-row 2D block payload.
            value = safe_reshape(value, (1,) + value.shape)
        block = make_block(values=value, ndim=self.ndim, placement=slice(loc, loc + 1))
        # Shift the manager locations of every block that owns items at or
        # beyond ``loc`` to make room for the new item.
        for blkno, count in _fast_count_smallints(self.blknos[loc:]):
            blk = self.blocks[blkno]
            if count == len(blk.mgr_locs):
                # All of this block's items sit at/after ``loc``: bulk shift.
                blk.mgr_locs = blk.mgr_locs.add(1)
            else:
                new_mgr_locs = blk.mgr_locs.as_array.copy()
                new_mgr_locs[new_mgr_locs >= loc] += 1
                blk.mgr_locs = new_mgr_locs
        if loc == self.blklocs.shape[0]:
            # np.append is a lot faster, let's use it if we can.
            self._blklocs = np.append(self._blklocs, 0)
            self._blknos = np.append(self._blknos, len(self.blocks))
        else:
            self._blklocs = np.insert(self._blklocs, loc, 0)
            self._blknos = np.insert(self._blknos, loc, len(self.blocks))
        self.axes[0] = new_axis
        self.blocks += (block,)
        self._known_consolidated = False
        if len(self.blocks) > 100:
            # Every insert appends one single-item block, so repeated inserts
            # fragment the manager badly; nudge users toward pd.concat.
            warnings.warn(
                "DataFrame is highly fragmented. This is usually the result "
                "of calling `frame.insert` many times, which has poor performance. "
                "Consider using pd.concat instead. To get a de-fragmented frame, "
                "use `newframe = frame.copy()`",
                PerformanceWarning,
                stacklevel=5,
            )
    def reindex_indexer(
        self: T,
        new_axis,
        indexer,
        axis: int,
        fill_value=None,
        allow_dups: bool = False,
        copy: bool = True,
        consolidate: bool = True,
        only_slice: bool = False,
    ) -> T:
        """
        Return a new manager reindexed along *axis* with *indexer*.

        Parameters
        ----------
        new_axis : Index
            Replacement labels for the reindexed axis.
        indexer : ndarray of int64 or None
            Positions to take; None means only the labels change.
        axis : int
        fill_value : object, default None
            Used for -1 entries in *indexer*; None falls back to each
            block's own fill value.
        allow_dups : bool, default False
            When False, the existing axis must allow reindexing
            (``_can_reindex`` raises otherwise).
        copy : bool, default True
        consolidate : bool, default True
            Whether to consolidate in-place before taking.
        only_slice : bool, default False
            Whether to take views (slices) instead of copies along axis 0.
        """
        if indexer is None:
            # Pure relabel: no data movement; optionally avoid the copy.
            if new_axis is self.axes[axis] and not copy:
                return self
            result = self.copy(deep=copy)
            result.axes = list(self.axes)
            result.axes[axis] = new_axis
            return result
        if consolidate:
            self._consolidate_inplace()
        # some axes don't allow reindexing with dups
        if not allow_dups:
            self.axes[axis]._can_reindex(indexer)
        if axis >= self.ndim:
            raise IndexError("Requested axis not found in manager")
        if axis == 0:
            # Axis 0 (items) has a dedicated per-block fast path.
            new_blocks = self._slice_take_blocks_ax0(
                indexer, fill_value=fill_value, only_slice=only_slice
            )
        else:
            new_blocks = [
                blk.take_nd(
                    indexer,
                    axis=axis,
                    fill_value=(
                        fill_value if fill_value is not None else blk.fill_value
                    ),
                )
                for blk in self.blocks
            ]
        new_axes = list(self.axes)
        new_axes[axis] = new_axis
        return type(self).from_blocks(new_blocks, new_axes)
def _slice_take_blocks_ax0(
self, slice_or_indexer, fill_value=lib.no_default, only_slice: bool = False
):
allow_fill = fill_value is not lib.no_default
sl_type, slobj, sllen = _preprocess_slice_or_indexer(
slice_or_indexer, self.shape[0], allow_fill=allow_fill
)
if self.is_single_block:
blk = self.blocks[0]
if sl_type in ("slice", "mask"):
sary with 2D EAs
if sllen == 0:
return []
return [blk.getitem_block(slobj, new_mgr_locs=slice(0, sllen))]
elif not allow_fill or self.ndim == 1:
if allow_fill and fill_value is None:
fill_value = blk.fill_value
if not allow_fill and only_slice:
# GH#33597 slice instead of take, so we get
# views instead of copies
blocks = [
blk.getitem_block([ml], new_mgr_locs=i)
for i, ml in enumerate(slobj)
]
return blocks
else:
return [
blk.take_nd(
slobj,
axis=0,
new_mgr_locs=slice(0, sllen),
fill_value=fill_value,
)
]
if sl_type in ("slice", "mask"):
blknos = self.blknos[slobj]
blklocs = self.blklocs[slobj]
else:
blknos = algos.take_nd(
self.blknos, slobj, fill_value=-1, allow_fill=allow_fill
)
blklocs = algos.take_nd(
self.blklocs, slobj, fill_value=-1, allow_fill=allow_fill
)
# When filling blknos, make sure blknos is updated before appending to
# blocks list, that way new blkno is exactly len(blocks).
blocks = []
group = not only_slice
for blkno, mgr_locs in libinternals.get_blkno_placements(blknos, group=group):
if blkno == -1:
# If we've got here, fill_value was not lib.no_default
blocks.append(
self._make_na_block(placement=mgr_locs, fill_value=fill_value)
)
else:
blk = self.blocks[blkno]
if not blk._can_consolidate:
for mgr_loc in mgr_locs:
newblk = blk.copy(deep=False)
newblk.mgr_locs = slice(mgr_loc, mgr_loc + 1)
blocks.append(newblk)
else:
lklocs[mgr_locs.indexer]
max_len = max(len(mgr_locs), taker.max() + 1)
if only_slice:
taker = lib.maybe_indices_to_slice(taker, max_len)
if isinstance(taker, slice):
nb = blk.getitem_block(taker, new_mgr_locs=mgr_locs)
blocks.append(nb)
elif only_slice:
for i, ml in zip(taker, mgr_locs):
nb = blk.getitem_block([i], new_mgr_locs=ml)
blocks.append(nb)
else:
nb = blk.take_nd(taker, axis=0, new_mgr_locs=mgr_locs)
blocks.append(nb)
return blocks
def _make_na_block(self, placement, fill_value=None):
if fill_value is None:
fill_value = np.nan
block_shape = list(self.shape)
block_shape[0] = len(placement)
dtype, fill_value = infer_dtype_from_scalar(fill_value)
block_values = np.empty(block_shape, dtype=dtype)
block_values.fill(fill_value)
return make_block(block_values, placement=placement, ndim=block_values.ndim)
    def take(self, indexer, axis: int = 1, verify: bool = True, convert: bool = True):
        """
        Take items along any axis.

        Parameters
        ----------
        indexer : slice or array-like of int
        axis : int, default 1
        verify : bool, default True
            Raise when any index is -1 or out of bounds after conversion.
        convert : bool, default True
            Normalize negative indices via ``maybe_convert_indices``.
        """
        # Normalize to an int64 ndarray (materialize slices).
        indexer = (
            np.arange(indexer.start, indexer.stop, indexer.step, dtype="int64")
            if isinstance(indexer, slice)
            else np.asanyarray(indexer, dtype="int64")
        )
        n = self.shape[axis]
        if convert:
            indexer = maybe_convert_indices(indexer, n)
        if verify:
            if ((indexer == -1) | (indexer >= n)).any():
                raise Exception("Indices must be nonzero and less than the axis length")
        new_labels = self.axes[axis].take(indexer)
        # allow_dups=True: positional takes may legitimately repeat labels.
        return self.reindex_indexer(
            new_axis=new_labels,
            indexer=indexer,
            axis=axis,
            allow_dups=True,
            consolidate=False,
        )
def _equal_values(self: T, other: T) -> bool:
if self.ndim == 1:
if other.ndim != 1:
return False
left = self.blocks[0].values
right = other.blocks[0].values
return array_equals(left, right)
return blockwise_all(self, other, array_equals)
    def unstack(self, unstacker, fill_value) -> BlockManager:
        """
        Return a BlockManager with all blocks unstacked.

        Parameters
        ----------
        unstacker : reshape helper exposing ``get_new_columns`` / ``new_index``
        fill_value : object
            Value used to fill holes created by unstacking.
        """
        new_columns = unstacker.get_new_columns(self.items)
        new_index = unstacker.new_index
        new_blocks: List[Block] = []
        columns_mask: List[np.ndarray] = []
        for blk in self.blocks:
            # Columns owned by this block, then their positions within the
            # unstacked column Index.
            blk_cols = self.items[blk.mgr_locs.indexer]
            new_items = unstacker.get_new_columns(blk_cols)
            new_placement = new_columns.get_indexer(new_items)
            blocks, mask = blk._unstack(
                unstacker, fill_value, new_placement=new_placement
            )
            new_blocks.extend(blocks)
            columns_mask.extend(mask)
        # Drop columns that every block masked out.
        new_columns = new_columns[columns_mask]
        bm = BlockManager(new_blocks, [new_columns, new_index])
        return bm
class SingleBlockManager(BlockManager):
    """Manage a single block of data."""
    ndim = 1
    _is_consolidated = True
    _known_consolidated = True
    __slots__ = ()
    is_single_block = True
    def __init__(
        self,
        block: Block,
        axis: Index,
        do_integrity_check: bool = False,
        fastpath=lib.no_default,
    ):
        assert isinstance(block, Block), type(block)
        assert isinstance(axis, Index), type(axis)
        if fastpath is not lib.no_default:
            # The keyword is accepted for backwards compatibility but ignored.
            warnings.warn(
                "The `fastpath` keyword is deprecated and will be removed "
                "in a future version.",
                FutureWarning,
                stacklevel=2,
            )
        self.axes = [axis]
        self.blocks = (block,)
    @classmethod
    def from_blocks(cls, blocks: List[Block], axes: List[Index]) -> SingleBlockManager:
        """Constructor matching BlockManager.from_blocks; requires exactly one block/axis."""
        assert len(blocks) == 1
        assert len(axes) == 1
        return cls(blocks[0], axes[0], do_integrity_check=False)
    @classmethod
    def from_array(cls, array: ArrayLike, index: Index) -> SingleBlockManager:
        """Constructor for if we have an array that is not yet a Block."""
        block = make_block(array, placement=slice(0, len(index)), ndim=1)
        return cls(block, index)
    def _post_setstate(self):
        # Nothing to rebuild after unpickling in the single-block case.
        pass
    @property
    def _block(self) -> Block:
        # The one (and only) block held by this manager.
        return self.blocks[0]
    @property
    def _blknos(self):
        """compat with BlockManager"""
        return None
    @property
    def _blklocs(self):
        """compat with BlockManager"""
        return None
    def get_slice(self, slobj: slice, axis: int = 0) -> SingleBlockManager:
        """Return a new manager viewing ``slobj`` of the block and its index."""
        if axis >= self.ndim:
            raise IndexError("Requested axis not found in manager")
        blk = self._block
        array = blk._slice(slobj)
        block = blk.make_block_same_class(array, placement=slice(0, len(array)))
        return type(self)(block, self.index[slobj])
    @property
    def index(self) -> Index:
        return self.axes[0]
    @property
    def dtype(self) -> DtypeObj:
        return self._block.dtype
    def get_dtypes(self) -> np.ndarray:
        return np.array([self._block.dtype])
    def external_values(self):
        """The array that Series.values returns"""
        return self._block.external_values()
    def internal_values(self):
        """The array that Series._values returns"""
        return self._block.internal_values()
    @property
    def _can_hold_na(self) -> bool:
        return self._block._can_hold_na
    def is_consolidated(self) -> bool:
        # A single block is consolidated by definition.
        return True
    def _consolidate_check(self):
        pass
    def _consolidate_inplace(self):
        pass
    def idelete(self, indexer):
        """
        Delete single location from SingleBlockManager.

        Ensures that self.blocks doesn't become empty.
        """
        self._block.delete(indexer)
        self.axes[0] = self.axes[0].delete(indexer)
    def fast_xs(self, loc):
        """
        fast path for getting a cross-section; not supported for 1D managers.
        """
        raise NotImplementedError("Use series._values[loc] instead")
def create_block_manager_from_blocks(blocks, axes: List[Index]) -> BlockManager:
    """Build a consolidated BlockManager from pre-made blocks (or one 2D array)."""
    try:
        if len(blocks) == 1 and not isinstance(blocks[0], Block):
            # A single raw (non-Block) array covering all items.
            if not len(blocks[0]):
                blocks = []
            else:
                # is basically "all items", but if there're many, don't bother
                # converting, it's an error anyway.
                blocks = [
                    make_block(
                        values=blocks[0], placement=slice(0, len(axes[0])), ndim=2
                    )
                ]
        mgr = BlockManager(blocks, axes)
        mgr._consolidate_inplace()
        return mgr
    except ValueError as e:
        # Re-raise with a shape-mismatch message built from the raw values.
        blocks = [getattr(b, "values", b) for b in blocks]
        tot_items = sum(b.shape[0] for b in blocks)
        raise construction_error(tot_items, blocks[0].shape[1:], axes, e)
def create_block_manager_from_arrays(
    arrays, names: Index, axes: List[Index]
) -> BlockManager:
    """Build a consolidated BlockManager from per-item arrays keyed by *names*."""
    assert isinstance(names, Index)
    assert isinstance(axes, list)
    assert all(isinstance(x, Index) for x in axes)
    # Unwrap PandasArray to its backing ndarray so block formation sees
    # plain numpy dtypes.
    arrays = [x if not isinstance(x, ABCPandasArray) else x.to_numpy() for x in arrays]
    try:
        blocks = _form_blocks(arrays, names, axes)
        mgr = BlockManager(blocks, axes)
        mgr._consolidate_inplace()
        return mgr
    except ValueError as e:
        raise construction_error(len(arrays), arrays[0].shape, axes, e)
def construction_error(tot_items, block_shape, axes, e=None):
    """Create the exception to raise when block construction fails.

    Returns the original exception *e* when the passed and implied shapes
    actually agree (the failure came from elsewhere), otherwise a ValueError
    describing the shape mismatch.
    """
    # Shape as supplied by the caller: (n_items, *block_shape), coerced to int.
    reported = tuple(int(dim) for dim in [tot_items, *block_shape])
    # Correct the user-facing axis order for the 1D/2D cases.
    if len(reported) <= 2:
        reported = reported[::-1]
    expected = tuple(len(ax) for ax in axes)
    if len(expected) <= 2:
        expected = expected[::-1]
    # We return the exception object instead of raising it so that the
    # caller can ``raise construction_error(...)`` with proper chaining.
    if reported == expected and e is not None:
        return e
    if block_shape[0] == 0:
        return ValueError("Empty data passed with indices specified.")
    return ValueError(f"Shape of passed values is {reported}, indices imply {expected}")
def _form_blocks(arrays, names: Index, axes: List[Index]) -> List[Block]:
    """Bucket *arrays* by block type and assemble the corresponding blocks.

    Items in ``axes[0]`` with no matching entry in *names* are collected into
    a single all-NaN object block at the end.
    """
    items_dict: DefaultDict[str, List] = defaultdict(list)
    extra_locs = []
    names_idx = names
    if names_idx.equals(axes[0]):
        # Fast path: names already line up with the items axis.
        names_indexer = np.arange(len(names_idx))
    else:
        assert names_idx.intersection(axes[0]).is_unique
        names_indexer = names_idx.get_indexer_for(axes[0])
    for i, name_idx in enumerate(names_indexer):
        if name_idx == -1:
            # Item present in axes[0] but not in names: fill with NA later.
            extra_locs.append(i)
            continue
        v = arrays[name_idx]
        block_type = get_block_type(v)
        items_dict[block_type.__name__].append((i, v))
    blocks: List[Block] = []
    # Homogeneous numeric buckets: one block per contiguous dtype run.
    if len(items_dict["FloatBlock"]):
        float_blocks = _multi_blockify(items_dict["FloatBlock"])
        blocks.extend(float_blocks)
    if len(items_dict["NumericBlock"]):
        complex_blocks = _multi_blockify(items_dict["NumericBlock"])
        blocks.extend(complex_blocks)
    if len(items_dict["TimeDeltaBlock"]):
        timedelta_blocks = _multi_blockify(items_dict["TimeDeltaBlock"])
        blocks.extend(timedelta_blocks)
    if len(items_dict["DatetimeBlock"]):
        datetime_blocks = _simple_blockify(items_dict["DatetimeBlock"], DT64NS_DTYPE)
        blocks.extend(datetime_blocks)
    # Extension-backed buckets: one single-item block per array.
    if len(items_dict["DatetimeTZBlock"]):
        dttz_blocks = [
            make_block(array, klass=DatetimeTZBlock, placement=i, ndim=2)
            for i, array in items_dict["DatetimeTZBlock"]
        ]
        blocks.extend(dttz_blocks)
    if len(items_dict["ObjectBlock"]) > 0:
        object_blocks = _simple_blockify(items_dict["ObjectBlock"], np.object_)
        blocks.extend(object_blocks)
    if len(items_dict["CategoricalBlock"]) > 0:
        cat_blocks = [
            make_block(array, klass=CategoricalBlock, placement=i, ndim=2)
            for i, array in items_dict["CategoricalBlock"]
        ]
        blocks.extend(cat_blocks)
    if len(items_dict["ExtensionBlock"]):
        external_blocks = [
            make_block(array, klass=ExtensionBlock, placement=i, ndim=2)
            for i, array in items_dict["ExtensionBlock"]
        ]
        blocks.extend(external_blocks)
    if len(items_dict["ObjectValuesExtensionBlock"]):
        external_blocks = [
            make_block(array, klass=ObjectValuesExtensionBlock, placement=i, ndim=2)
            for i, array in items_dict["ObjectValuesExtensionBlock"]
        ]
        blocks.extend(external_blocks)
    if len(extra_locs):
        shape = (len(extra_locs),) + tuple(len(x) for x in axes[1:])
        # All-NaN object block for the unmatched items.
        block_values = np.empty(shape, dtype=object)
        block_values.fill(np.nan)
        na_block = make_block(block_values, placement=extra_locs, ndim=2)
        blocks.append(na_block)
    return blocks
def _simple_blockify(tuples, dtype) -> List[Block]:
    """Stack same-dtype (placement, array) pairs into a single 2D block."""
    stacked, placement = _stack_arrays(tuples, dtype)
    # TODO: CHECK DTYPE? — coerce if stacking produced a different dtype.
    if dtype is not None and stacked.dtype != dtype:
        stacked = stacked.astype(dtype)
    return [make_block(stacked, placement=placement, ndim=2)]
def _multi_blockify(tuples, dtype: Optional[Dtype] = None):
    """Stack (placement, array) pairs into one block per contiguous dtype run."""
    grouped = itertools.groupby(tuples, lambda pair: pair[1].dtype)
    out = []
    for run_dtype, run in grouped:
        stacked, placement = _stack_arrays(list(run), run_dtype)
        out.append(make_block(stacked, placement=placement, ndim=2))
    return out
def _stack_arrays(tuples, dtype: np.dtype):
    """Stack arrays from (placement, array) pairs into one 2D ndarray.

    Returns the stacked array and the tuple of placements.
    """
    def _to_ndarray(obj):
        # A Series exposes its backing array via ._values; everything else
        # goes through np.asarray.
        return obj._values if isinstance(obj, ABCSeries) else np.asarray(obj)
    placement, arrays = zip(*tuples)
    out = np.empty((len(arrays),) + arrays[0].shape, dtype=dtype)
    for row, arr in enumerate(arrays):
        out[row] = _to_ndarray(arr)
    return out, placement
def _interleaved_dtype(blocks: Sequence[Block]) -> Optional[DtypeObj]:
    """Common dtype for *blocks*, or None when there are no blocks."""
    dtypes = [blk.dtype for blk in blocks]
    return find_common_type(dtypes) if dtypes else None
def _consolidate(blocks):
    """
    Merge blocks having same dtype, exclude non-consolidating blocks
    """
    # Group by (can_consolidate, dtype); sorting first so groupby sees
    # contiguous runs.
    gkey = lambda x: x._consolidate_key
    grouper = itertools.groupby(sorted(blocks, key=gkey), gkey)
    new_blocks: List[Block] = []
    for (_can_consolidate, dtype), group_blocks in grouper:
        merged_blocks = _merge_blocks(
            list(group_blocks), dtype=dtype, can_consolidate=_can_consolidate
        )
        new_blocks = extend_blocks(merged_blocks, new_blocks)
    return new_blocks
def _merge_blocks(
    blocks: List[Block], dtype: DtypeObj, can_consolidate: bool
) -> List[Block]:
    """Merge same-dtype blocks into one; return unchanged when not possible."""
    if len(blocks) == 1:
        return blocks
    if can_consolidate:
        if dtype is None:
            # Sanity check: merging requires a single shared dtype.
            if len({b.dtype for b in blocks}) != 1:
                raise AssertionError("_merge_blocks are invalid!")
        new_mgr_locs = np.concatenate([b.mgr_locs.as_array for b in blocks])
        new_values = np.vstack([b.values for b in blocks])
        # Re-sort rows so values stay aligned with their manager locations.
        argsort = np.argsort(new_mgr_locs)
        new_values = new_values[argsort]
        new_mgr_locs = new_mgr_locs[argsort]
        return [make_block(new_values, placement=new_mgr_locs, ndim=2)]
    # can't consolidate --> return blocks as-is
    return blocks
def _fast_count_smallints(arr: np.ndarray) -> np.ndarray:
counts = np.bincount(arr.astype(np.int_))
nz = counts.nonzero()[0]
return np.c_[nz, counts[nz]]
def _preprocess_slice_or_indexer(slice_or_indexer, length: int, allow_fill: bool):
    """
    Normalize an axis-0 indexer into a ``(kind, indexer, length)`` triple.

    ``kind`` is one of "slice", "mask" or "fancy"; the final element is the
    number of selected positions.
    """
    if isinstance(slice_or_indexer, slice):
        return (
            "slice",
            slice_or_indexer,
            libinternals.slice_len(slice_or_indexer, length),
        )
    elif (
        isinstance(slice_or_indexer, np.ndarray) and slice_or_indexer.dtype == np.bool_
    ):
        # Boolean mask: its population count is the selection length.
        return "mask", slice_or_indexer, slice_or_indexer.sum()
    else:
        indexer = np.asanyarray(slice_or_indexer, dtype=np.int64)
        if not allow_fill:
            # Normalize negative positions only when -1 is not a fill marker.
            indexer = maybe_convert_indices(indexer, length)
        return "fancy", indexer, len(indexer)
| true | true |
1c45b3d5de333d6534be0122ea89da552988ca0c | 601 | py | Python | tests/changes/api/test_build_mark_seen.py | bowlofstew/changes | ebd393520e0fdb07c240a8d4e8747281b6186e28 | [
"Apache-2.0"
] | 1 | 2015-11-08T13:00:44.000Z | 2015-11-08T13:00:44.000Z | tests/changes/api/test_build_mark_seen.py | alex/changes | 69a17b4c639e7082a75d037384ccb68ead3a0b4b | [
"Apache-2.0"
] | null | null | null | tests/changes/api/test_build_mark_seen.py | alex/changes | 69a17b4c639e7082a75d037384ccb68ead3a0b4b | [
"Apache-2.0"
] | null | null | null | from changes.models import BuildSeen
from changes.testutils import APITestCase
class BuildMarkSeenTest(APITestCase):
    """Exercises the /builds/<id>/mark_seen/ endpoint."""
    def test_simple(self):
        # POSTing mark_seen should succeed and persist a BuildSeen row
        # linking the logged-in (default) user to this build.
        project = self.create_project()
        build = self.create_build(project=project)
        self.login_default()
        path = '/api/0/builds/{0}/mark_seen/'.format(build.id.hex)
        resp = self.client.post(path)
        assert resp.status_code == 200
        buildseen = BuildSeen.query.filter(
            BuildSeen.user_id == self.default_user.id,
            BuildSeen.build_id == build.id,
        ).first()
        assert buildseen
| 26.130435 | 66 | 0.648918 | from changes.models import BuildSeen
from changes.testutils import APITestCase
class BuildMarkSeenTest(APITestCase):
    """Exercises the /builds/<id>/mark_seen/ endpoint."""
    def test_simple(self):
        # Arrange: a build owned by a project, with the default user logged in.
        proj = self.create_project()
        build = self.create_build(project=proj)
        self.login_default()
        # Act: mark the build as seen.
        url = '/api/0/builds/{0}/mark_seen/'.format(build.id.hex)
        response = self.client.post(url)
        # Assert: success, and a BuildSeen row now links user and build.
        assert response.status_code == 200
        seen_row = BuildSeen.query.filter(
            BuildSeen.build_id == build.id,
            BuildSeen.user_id == self.default_user.id,
        ).first()
        assert seen_row
| true | true |
1c45b4011172fbf7f667e12379db8e0b37a73ae8 | 644 | py | Python | WebFilm/urls.py | marekbaranowski98/WebFilm | 5d78bb9518070c195feffc2181735b93be019ca0 | [
"MIT"
] | null | null | null | WebFilm/urls.py | marekbaranowski98/WebFilm | 5d78bb9518070c195feffc2181735b93be019ca0 | [
"MIT"
] | null | null | null | WebFilm/urls.py | marekbaranowski98/WebFilm | 5d78bb9518070c195feffc2181735b93be019ca0 | [
"MIT"
] | null | null | null | """WebFilm URL Configuration
path docs/ loads urls from the docs app
path / loads urls from the frontend app
path api/users/ loads urls from the users app
path api/photos/ loads urls from the photos app
path api/movies/ loads urls from the movies app
path api/evaluations/ loads urls from the evaluations app
"""
from django.contrib import admin
from django.urls import path, include
# Top-level URL routing: each prefix delegates to the matching app's urls
# module via include(). The empty prefix matches every remaining path, so
# it is kept last to avoid shadowing the more specific routes.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('docs/', include('docs.urls')),
    path('api/users/', include('users.urls')),
    path('api/photos/', include('photos.urls')),
    path('api/movies/', include('movies.urls')),
    path('api/evaluations/', include('evaluations.urls')),
    path('', include('frontend.urls')),
]
| 30.666667 | 58 | 0.692547 | from django.contrib import admin
from django.urls import path, include
# Top-level URL routing: each prefix delegates to the matching app's urls
# module via include(). The empty prefix matches every remaining path, so
# it is kept last to avoid shadowing the more specific routes.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('docs/', include('docs.urls')),
    path('api/users/', include('users.urls')),
    path('api/photos/', include('photos.urls')),
    path('api/movies/', include('movies.urls')),
    path('api/evaluations/', include('evaluations.urls')),
    path('', include('frontend.urls')),
]
| true | true |
1c45b4b0875ea7d446dba15109b8e98b5d4bdaab | 3,439 | py | Python | libqtile/widget/windowname.py | Bauthe/qtile | 569c4d9aaad1dbd912435648f5f814e084de8365 | [
"MIT"
] | null | null | null | libqtile/widget/windowname.py | Bauthe/qtile | 569c4d9aaad1dbd912435648f5f814e084de8365 | [
"MIT"
] | null | null | null | libqtile/widget/windowname.py | Bauthe/qtile | 569c4d9aaad1dbd912435648f5f814e084de8365 | [
"MIT"
] | null | null | null | # Copyright (c) 2008, 2010 Aldo Cortesi
# Copyright (c) 2010 matt
# Copyright (c) 2011 Mounier Florian
# Copyright (c) 2012 Tim Neumann
# Copyright (c) 2013 Craig Barnes
# Copyright (c) 2014 Sean Vig
# Copyright (c) 2014 Tycho Andersen
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from libqtile import bar, hook, pangocffi
from libqtile.widget import base
class WindowName(base._TextBox):
    """Displays the name of the window that currently has focus"""
    orientations = base.ORIENTATION_HORIZONTAL
    defaults = [
        ('for_current_screen', False, 'instead of this bars screen use currently active screen'),
        ('empty_group_string', ' ', 'string to display when no windows are focused on current group'),
        ('max_chars', 0, 'max chars before truncating with ellipsis'),
        ('format', '{state}{name}', 'format of the text'),
    ]
    def __init__(self, width=bar.STRETCH, **config):
        base._TextBox.__init__(self, width=width, **config)
        self.add_defaults(WindowName.defaults)
    def _configure(self, qtile, bar):
        base._TextBox._configure(self, qtile, bar)
        # Redraw whenever the focused window's name, focus or float state changes.
        hook.subscribe.client_name_updated(self.update)
        hook.subscribe.focus_change(self.update)
        hook.subscribe.float_change(self.update)
        @hook.subscribe.current_screen_change
        def on_screen_changed():
            # Only relevant when tracking the active screen instead of the
            # screen this bar lives on.
            if self.for_current_screen:
                self.update()
    def truncate(self, text):
        # max_chars == 0 disables truncation entirely.
        if self.max_chars == 0:
            return text
        return (text[:self.max_chars - 3].rstrip() + "...") if len(text) > self.max_chars else text
    def update(self, *args):
        if self.for_current_screen:
            w = self.qtile.current_screen.group.current_window
        else:
            w = self.bar.screen.group.current_window
        state = ''
        if w:
            # Short marker for the window's maximized/minimized/floating state.
            if w.maximized:
                state = '[] '
            elif w.minimized:
                state = '_ '
            elif w.floating:
                state = 'V '
            var = {}
            var["state"] = state
            var["name"] = w.name
            var["class"] = w.window.get_wm_class()[0] if len(w.window.get_wm_class()) > 0 else ""
            text = self.format.format(**var)
            unescaped = self.truncate(text)
        else:
            unescaped = self.empty_group_string
        # Escape for Pango markup before handing the text to the bar.
        self.text = pangocffi.markup_escape_text(unescaped)
        self.bar.draw()
| 40.458824 | 102 | 0.662402 |
from libqtile import bar, hook, pangocffi
from libqtile.widget import base
class WindowName(base._TextBox):
    """Displays the name of the window that currently has focus"""
    orientations = base.ORIENTATION_HORIZONTAL
    defaults = [
        ('for_current_screen', False, 'instead of this bars screen use currently active screen'),
        ('empty_group_string', ' ', 'string to display when no windows are focused on current group'),
        ('max_chars', 0, 'max chars before truncating with ellipsis'),
        ('format', '{state}{name}', 'format of the text'),
    ]
    def __init__(self, width=bar.STRETCH, **config):
        base._TextBox.__init__(self, width=width, **config)
        self.add_defaults(WindowName.defaults)
    def _configure(self, qtile, bar):
        base._TextBox._configure(self, qtile, bar)
        # Redraw whenever the focused window's name, focus or float state changes.
        hook.subscribe.client_name_updated(self.update)
        hook.subscribe.focus_change(self.update)
        hook.subscribe.float_change(self.update)
        @hook.subscribe.current_screen_change
        def on_screen_changed():
            # Only relevant when tracking the active screen instead of the
            # screen this bar lives on.
            if self.for_current_screen:
                self.update()
    def truncate(self, text):
        # max_chars == 0 disables truncation entirely.
        if self.max_chars == 0:
            return text
        return (text[:self.max_chars - 3].rstrip() + "...") if len(text) > self.max_chars else text
    def update(self, *args):
        if self.for_current_screen:
            w = self.qtile.current_screen.group.current_window
        else:
            w = self.bar.screen.group.current_window
        state = ''
        if w:
            # Short marker for the window's maximized/minimized/floating state.
            if w.maximized:
                state = '[] '
            elif w.minimized:
                state = '_ '
            elif w.floating:
                state = 'V '
            var = {}
            var["state"] = state
            var["name"] = w.name
            var["class"] = w.window.get_wm_class()[0] if len(w.window.get_wm_class()) > 0 else ""
            text = self.format.format(**var)
            unescaped = self.truncate(text)
        else:
            unescaped = self.empty_group_string
        # Escape for Pango markup before handing the text to the bar.
        self.text = pangocffi.markup_escape_text(unescaped)
        self.bar.draw()
| true | true |
1c45b8317ee2fbfb8197eed5bc2187f391f7f3ad | 3,634 | py | Python | root/settings.py | henrid3v/pocket-man | d0e7f44674db877b3e658ee7fc8b0fddf79bfcc8 | [
"MIT"
] | null | null | null | root/settings.py | henrid3v/pocket-man | d0e7f44674db877b3e658ee7fc8b0fddf79bfcc8 | [
"MIT"
] | 1 | 2020-11-28T21:27:01.000Z | 2020-11-28T21:29:32.000Z | root/settings.py | shadowcompiler/pocket-man | d0e7f44674db877b3e658ee7fc8b0fddf79bfcc8 | [
"MIT"
] | null | null | null | """
Django settings for root project.
Generated by 'django-admin startproject' using Django 3.0.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
import environ
# django-environ: load configuration from the environment / a .env file.
env = environ.Env()
environ.Env.read_env()
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ.get('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Project apps
    'pocket',
    'pocket.users',
    'pocket.manager',
    # Third-party apps
    'crispy_forms',
    'widget_tweaks',
    'rest_framework',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    # WhiteNoise serves collected static files straight from Django.
    'whitenoise.middleware.WhiteNoiseMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'root.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'root.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static/')]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedStaticFilesStorage'
CRISPY_TEMPLATE_PACK = 'bootstrap4'
# Auth flow: where @login_required redirects, and the landing pages
# after login/logout.
LOGIN_URL = 'accounts/login'
LOGIN_REDIRECT_URL = 'home'
LOGOUT_REDIRECT_URL = 'home'
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| 26.720588 | 91 | 0.705559 |
import os
import environ
env = environ.Env()
environ.Env.read_env()
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = os.environ.get('SECRET_KEY')
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'pocket',
'pocket.users',
'pocket.manager',
'crispy_forms',
'widget_tweaks',
'rest_framework',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'root.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'root.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static/')]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedStaticFilesStorage'
CRISPY_TEMPLATE_PACK = 'bootstrap4'
LOGIN_URL = 'accounts/login'
LOGIN_REDIRECT_URL = 'home'
LOGOUT_REDIRECT_URL = 'home'
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| true | true |
1c45b92429dcb84d7d15f647c4e3472f81ee716b | 4,819 | py | Python | pychron/lasers/tasks/panes/uv.py | ASUPychron/pychron | dfe551bdeb4ff8b8ba5cdea0edab336025e8cc76 | [
"Apache-2.0"
] | 31 | 2016-03-07T02:38:17.000Z | 2022-02-14T18:23:43.000Z | pychron/lasers/tasks/panes/uv.py | ASUPychron/pychron | dfe551bdeb4ff8b8ba5cdea0edab336025e8cc76 | [
"Apache-2.0"
] | 1,626 | 2015-01-07T04:52:35.000Z | 2022-03-25T19:15:59.000Z | pychron/lasers/tasks/panes/uv.py | UIllinoisHALPychron/pychron | f21b79f4592a9fb9dc9a4cb2e4e943a3885ededc | [
"Apache-2.0"
] | 26 | 2015-05-23T00:10:06.000Z | 2022-03-07T16:51:57.000Z | # ===============================================================================
# Copyright 2013 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from __future__ import absolute_import
from pyface.tasks.traits_dock_pane import TraitsDockPane
from traitsui.api import (
View,
Item,
VGroup,
HGroup,
spring,
UItem,
ButtonEditor,
Group,
EnumEditor,
)
from pychron.core.ui.led_editor import LEDEditor
from pychron.core.ui.qt.reference_mark_editor import ReferenceMarkEditor
from pychron.envisage.icon_button_editor import icon_button_editor
from pychron.lasers.tasks.laser_panes import (
BaseLaserPane,
ClientPane,
StageControlPane,
AxesPane,
SupplementalPane,
)
# ============= standard library imports ========================
# ============= local library imports ==========================
class FusionsUVClientPane(ClientPane):
    # Client-mode pane for the Fusions UV laser; all behavior inherited.
    pass
class FusionsUVPane(BaseLaserPane):
    # Main pane for the Fusions UV laser; all behavior inherited.
    pass
class FusionsUVStagePane(StageControlPane):
    """Stage-control pane for the Fusions UV laser, with a reference-marks tab."""
    id = "pychron.fusions.uv.stage"
    def _get_tabs(self):
        # Extend the base stage tabs with a "Ref. Marks" tab: select/add a
        # mark, show the mark display, and reset/space/save the marks canvas.
        tabs = super(FusionsUVStagePane, self)._get_tabs()
        refmark_grp = VGroup(
            HGroup(
                UItem(
                    "object.reference_marks.mark",
                    editor=EnumEditor(name="object.reference_marks.mark_ids"),
                ),
                icon_button_editor("add_reference_mark_button", "add"),
            ),
            Item("object.reference_marks.mark_display", editor=ReferenceMarkEditor()),
            UItem("reset_reference_marks_button"),
            Item("object.reference_marks.spacing"),
            Item("save_reference_marks_canvas_button"),
            label="Ref. Marks",
        )
        tabs.content.append(refmark_grp)
        return tabs
class FusionsUVAxesPane(AxesPane):
id = "pychron.fusions.uv.axes"
class FusionsUVSupplementalPane(SupplementalPane):
id = "pychron.fusions.uv.supplemental"
name = "UV"
def traits_view(self):
v = View(
Group(
VGroup(
Item("fiber_light", style="custom", show_label=False),
label="FiberLight",
),
layout="tabbed",
)
)
return v
def button_editor(name, label, **kw):
return UItem(name, editor=ButtonEditor(label_value=label))
class FusionsUVControlPane(TraitsDockPane):
id = "pychron.fusions.uv.control"
def traits_view(self):
grp = VGroup(
HGroup(
Item(
"enabled",
show_label=False,
style="custom",
editor=LEDEditor(colors=["red", "green"]),
),
button_editor("enable", "enable_label"),
spring,
),
HGroup(
Item("action_readback", width=100, style="readonly", label="Action"),
Item("status_readback", style="readonly", label="Status"),
),
HGroup(
button_editor("fire_button", "fire_label"),
Item("fire_mode", show_label=False),
enabled_when='object.enabled and object.status_readback=="Laser On"',
),
HGroup(
Item("burst_shot", label="N Burst", enabled_when='fire_mode=="Burst"'),
Item("reprate", label="Rep. Rate"),
),
HGroup(
Item("burst_readback", label="Burst Rem.", width=50, style="readonly"),
Item(
"energy_readback",
label="Energy (mJ)",
style="readonly",
format_str="%0.2f",
),
Item(
"pressure_readback",
label="Pressure (mbar)",
style="readonly",
width=100,
format_str="%0.1f",
),
spring,
enabled_when="object.enabled",
),
)
v = View(grp)
return v
# ============= EOF =============================================
| 31.292208 | 87 | 0.529155 |
from __future__ import absolute_import
from pyface.tasks.traits_dock_pane import TraitsDockPane
from traitsui.api import (
View,
Item,
VGroup,
HGroup,
spring,
UItem,
ButtonEditor,
Group,
EnumEditor,
)
from pychron.core.ui.led_editor import LEDEditor
from pychron.core.ui.qt.reference_mark_editor import ReferenceMarkEditor
from pychron.envisage.icon_button_editor import icon_button_editor
from pychron.lasers.tasks.laser_panes import (
BaseLaserPane,
ClientPane,
StageControlPane,
AxesPane,
SupplementalPane,
)
class FusionsUVClientPane(ClientPane):
pass
class FusionsUVPane(BaseLaserPane):
pass
class FusionsUVStagePane(StageControlPane):
id = "pychron.fusions.uv.stage"
def _get_tabs(self):
tabs = super(FusionsUVStagePane, self)._get_tabs()
refmark_grp = VGroup(
HGroup(
UItem(
"object.reference_marks.mark",
editor=EnumEditor(name="object.reference_marks.mark_ids"),
),
icon_button_editor("add_reference_mark_button", "add"),
),
Item("object.reference_marks.mark_display", editor=ReferenceMarkEditor()),
UItem("reset_reference_marks_button"),
Item("object.reference_marks.spacing"),
Item("save_reference_marks_canvas_button"),
label="Ref. Marks",
)
tabs.content.append(refmark_grp)
return tabs
class FusionsUVAxesPane(AxesPane):
id = "pychron.fusions.uv.axes"
class FusionsUVSupplementalPane(SupplementalPane):
id = "pychron.fusions.uv.supplemental"
name = "UV"
def traits_view(self):
v = View(
Group(
VGroup(
Item("fiber_light", style="custom", show_label=False),
label="FiberLight",
),
layout="tabbed",
)
)
return v
def button_editor(name, label, **kw):
return UItem(name, editor=ButtonEditor(label_value=label))
class FusionsUVControlPane(TraitsDockPane):
id = "pychron.fusions.uv.control"
def traits_view(self):
grp = VGroup(
HGroup(
Item(
"enabled",
show_label=False,
style="custom",
editor=LEDEditor(colors=["red", "green"]),
),
button_editor("enable", "enable_label"),
spring,
),
HGroup(
Item("action_readback", width=100, style="readonly", label="Action"),
Item("status_readback", style="readonly", label="Status"),
),
HGroup(
button_editor("fire_button", "fire_label"),
Item("fire_mode", show_label=False),
enabled_when='object.enabled and object.status_readback=="Laser On"',
),
HGroup(
Item("burst_shot", label="N Burst", enabled_when='fire_mode=="Burst"'),
Item("reprate", label="Rep. Rate"),
),
HGroup(
Item("burst_readback", label="Burst Rem.", width=50, style="readonly"),
Item(
"energy_readback",
label="Energy (mJ)",
style="readonly",
format_str="%0.2f",
),
Item(
"pressure_readback",
label="Pressure (mbar)",
style="readonly",
width=100,
format_str="%0.1f",
),
spring,
enabled_when="object.enabled",
),
)
v = View(grp)
return v
| true | true |
1c45b960408ef5e1ab38b4817737225fd34b5a9f | 575 | py | Python | test/test_ncbi.py | Daniel-Davies/pytaxize | 446990c0f64c8360f1ee65fa7beaeb2410f6213d | [
"MIT"
] | 21 | 2015-02-23T19:41:09.000Z | 2020-11-04T15:11:20.000Z | test/test_ncbi.py | Daniel-Davies/pytaxize | 446990c0f64c8360f1ee65fa7beaeb2410f6213d | [
"MIT"
] | 56 | 2015-01-12T09:05:10.000Z | 2020-09-24T01:48:10.000Z | test/test_ncbi.py | Daniel-Davies/pytaxize | 446990c0f64c8360f1ee65fa7beaeb2410f6213d | [
"MIT"
] | 21 | 2015-01-12T08:45:02.000Z | 2020-09-10T01:01:43.000Z | import os
from nose.tools import *
import unittest
import vcr
from pytaxize import ncbi
class NcbiTest(unittest.TestCase):
@vcr.use_cassette("test/vcr_cassettes/ncbi_search.yml", filter_query_parameters=['api_key'])
def test_ncbi_search(self):
"ncbi.search"
x = ncbi.search(sci_com = "Apis")
assert type(x) == dict
assert list(x.keys()) == ["Apis"]
assert type(x['Apis']) == list
assert type(x['Apis'][0]) == dict
assert x['Apis'][0]['ScientificName'] == "Apis"
assert x['Apis'][0]['TaxId'] == "7459"
| 30.263158 | 96 | 0.61913 | import os
from nose.tools import *
import unittest
import vcr
from pytaxize import ncbi
class NcbiTest(unittest.TestCase):
@vcr.use_cassette("test/vcr_cassettes/ncbi_search.yml", filter_query_parameters=['api_key'])
def test_ncbi_search(self):
x = ncbi.search(sci_com = "Apis")
assert type(x) == dict
assert list(x.keys()) == ["Apis"]
assert type(x['Apis']) == list
assert type(x['Apis'][0]) == dict
assert x['Apis'][0]['ScientificName'] == "Apis"
assert x['Apis'][0]['TaxId'] == "7459"
| true | true |
1c45ba8f50be8960f823fac0995df7dfaa1215e0 | 218 | py | Python | models/__init__.py | netotz/p-dispersion-problem | 123a6110dbf64d19a221da545c0590f7efc500dc | [
"MIT"
] | 1 | 2021-09-23T06:31:47.000Z | 2021-09-23T06:31:47.000Z | models/__init__.py | binary-hideout/p-dispersion-problem | 123a6110dbf64d19a221da545c0590f7efc500dc | [
"MIT"
] | 1 | 2021-08-31T15:15:08.000Z | 2021-08-31T15:15:08.000Z | models/__init__.py | netotz/p-dispersion-problem | 123a6110dbf64d19a221da545c0590f7efc500dc | [
"MIT"
] | 1 | 2020-05-19T04:46:47.000Z | 2020-05-19T04:46:47.000Z | '''
Package that contains the models of the project.
These models are the classes of Point and PDPInstance.
'''
# package level imports
from .point import Point
from .pdp_instance import PDPInstance, Matrix, Solution
| 24.222222 | 55 | 0.784404 |
from .point import Point
from .pdp_instance import PDPInstance, Matrix, Solution
| true | true |
1c45bafe765f80375e19d84146bad5379603a450 | 356 | py | Python | Interviews/HUAWEI/19/1.py | cnsteven/online-judge | 60ee841a97e2bc0dc9c7b23fe5daa186898ab8b7 | [
"MIT"
] | 1 | 2019-05-04T10:28:32.000Z | 2019-05-04T10:28:32.000Z | Interviews/HUAWEI/19/1.py | cnsteven/online-judge | 60ee841a97e2bc0dc9c7b23fe5daa186898ab8b7 | [
"MIT"
] | null | null | null | Interviews/HUAWEI/19/1.py | cnsteven/online-judge | 60ee841a97e2bc0dc9c7b23fe5daa186898ab8b7 | [
"MIT"
] | 3 | 2020-12-31T04:36:38.000Z | 2021-07-25T07:39:31.000Z | import math
n = list(map(int, input().split()))
length = len(n)
dp = [math.inf] * length
for i in range(1, int(length / 2)):
step = 1
idx = i
while idx < length:
dp[idx] = min(dp[idx], step)
idx = idx + n[idx]
step += 1
if dp[length - 1] == math.inf:
print(-1)
else:
print(dp[length - 1])
| 17.8 | 37 | 0.491573 | import math
n = list(map(int, input().split()))
length = len(n)
dp = [math.inf] * length
for i in range(1, int(length / 2)):
step = 1
idx = i
while idx < length:
dp[idx] = min(dp[idx], step)
idx = idx + n[idx]
step += 1
if dp[length - 1] == math.inf:
print(-1)
else:
print(dp[length - 1])
| true | true |
1c45bb098fd540b0ca4ce20913c1c1b808e0ae7b | 1,204 | py | Python | tutorial/proxy.py | maksimKorzh/fresh-proxy-list | e9ed2821a8445430aa30252c01b618892093f5ed | [
"MIT"
] | 7 | 2019-05-24T15:08:25.000Z | 2020-06-08T07:51:33.000Z | tutorial/proxy.py | maksimKorzh/fresh-proxy-list | e9ed2821a8445430aa30252c01b618892093f5ed | [
"MIT"
] | null | null | null | tutorial/proxy.py | maksimKorzh/fresh-proxy-list | e9ed2821a8445430aa30252c01b618892093f5ed | [
"MIT"
] | 5 | 2019-11-19T23:00:57.000Z | 2021-12-22T04:01:31.000Z | import requests
from bs4 import BeautifulSoup
proxyList = []
response = requests.get('https://free-proxy-list.net/')
bs = BeautifulSoup(response.text, 'lxml')
table = bs.find('table')
rows = table.find_all('tr')
count = 0
for row in rows:
ip = row.contents[0].text
port = row.contents[1].text
anonym = row.contents[4].text
secconn = row.contents[6].text
if(secconn == 'yes' and (anonym == 'anonymous' or anonym == 'elite proxy')):
line = 'http://' + ip + ':' + port
proxies = { 'http': line, 'https': line }
try:
testIP = requests.get('https://httpbin.org/ip', proxies = proxies, timeout = 3)
print(testIP.text)
resIP = testIP.json()['origin']
origin = resIP.split(',')
if origin[0] == ip:
print(' Proxy ok! Appending proxy to proxyList...')
proxyList.append(line)
count += 1
if count == 5:
break
except:
print('Bad proxy')
with open('proxies.txt', 'w') as f:
for proxy in proxyList:
f.write("%s\n" % proxy)
| 27.363636 | 91 | 0.508306 | import requests
from bs4 import BeautifulSoup
proxyList = []
response = requests.get('https://free-proxy-list.net/')
bs = BeautifulSoup(response.text, 'lxml')
table = bs.find('table')
rows = table.find_all('tr')
count = 0
for row in rows:
ip = row.contents[0].text
port = row.contents[1].text
anonym = row.contents[4].text
secconn = row.contents[6].text
if(secconn == 'yes' and (anonym == 'anonymous' or anonym == 'elite proxy')):
line = 'http://' + ip + ':' + port
proxies = { 'http': line, 'https': line }
try:
testIP = requests.get('https://httpbin.org/ip', proxies = proxies, timeout = 3)
print(testIP.text)
resIP = testIP.json()['origin']
origin = resIP.split(',')
if origin[0] == ip:
print(' Proxy ok! Appending proxy to proxyList...')
proxyList.append(line)
count += 1
if count == 5:
break
except:
print('Bad proxy')
with open('proxies.txt', 'w') as f:
for proxy in proxyList:
f.write("%s\n" % proxy)
| true | true |
1c45bb97d6036108335eeb9c5089a59bb600968e | 8,237 | py | Python | bluzelle/codec/crud/KeyValue_pb2.py | hhio618/bluezelle-py | c38a07458a36305457680196e8c47372008db5ab | [
"MIT"
] | 3 | 2021-08-19T10:09:29.000Z | 2022-01-05T14:19:59.000Z | bluzelle/codec/crud/KeyValue_pb2.py | hhio618/bluzelle-py | c38a07458a36305457680196e8c47372008db5ab | [
"MIT"
] | null | null | null | bluzelle/codec/crud/KeyValue_pb2.py | hhio618/bluzelle-py | c38a07458a36305457680196e8c47372008db5ab | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: crud/KeyValue.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from bluzelle.codec.crud import lease_pb2 as crud_dot_lease__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name="crud/KeyValue.proto",
package="bluzelle.curium.crud",
syntax="proto3",
serialized_options=b"Z'github.com/bluzelle/curium/x/crud/types",
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x13\x63rud/KeyValue.proto\x12\x14\x62luzelle.curium.crud\x1a\x10\x63rud/lease.proto"&\n\x08KeyValue\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c"W\n\rKeyValueLease\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\x12*\n\x05lease\x18\x03 \x01(\x0b\x32\x1b.bluzelle.curium.crud.Lease"(\n\x08KeyLease\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0f\n\x07seconds\x18\x02 \x01(\rB)Z\'github.com/bluzelle/curium/x/crud/typesb\x06proto3',
dependencies=[
crud_dot_lease__pb2.DESCRIPTOR,
],
)
_KEYVALUE = _descriptor.Descriptor(
name="KeyValue",
full_name="bluzelle.curium.crud.KeyValue",
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="key",
full_name="bluzelle.curium.crud.KeyValue.key",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="value",
full_name="bluzelle.curium.crud.KeyValue.value",
index=1,
number=2,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"",
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=63,
serialized_end=101,
)
_KEYVALUELEASE = _descriptor.Descriptor(
name="KeyValueLease",
full_name="bluzelle.curium.crud.KeyValueLease",
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="key",
full_name="bluzelle.curium.crud.KeyValueLease.key",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="value",
full_name="bluzelle.curium.crud.KeyValueLease.value",
index=1,
number=2,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"",
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="lease",
full_name="bluzelle.curium.crud.KeyValueLease.lease",
index=2,
number=3,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=103,
serialized_end=190,
)
_KEYLEASE = _descriptor.Descriptor(
name="KeyLease",
full_name="bluzelle.curium.crud.KeyLease",
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="key",
full_name="bluzelle.curium.crud.KeyLease.key",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="seconds",
full_name="bluzelle.curium.crud.KeyLease.seconds",
index=1,
number=2,
type=13,
cpp_type=3,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=192,
serialized_end=232,
)
_KEYVALUELEASE.fields_by_name["lease"].message_type = crud_dot_lease__pb2._LEASE
DESCRIPTOR.message_types_by_name["KeyValue"] = _KEYVALUE
DESCRIPTOR.message_types_by_name["KeyValueLease"] = _KEYVALUELEASE
DESCRIPTOR.message_types_by_name["KeyLease"] = _KEYLEASE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
KeyValue = _reflection.GeneratedProtocolMessageType(
"KeyValue",
(_message.Message,),
{
"DESCRIPTOR": _KEYVALUE,
"__module__": "crud.KeyValue_pb2"
# @@protoc_insertion_point(class_scope:bluzelle.curium.crud.KeyValue)
},
)
_sym_db.RegisterMessage(KeyValue)
KeyValueLease = _reflection.GeneratedProtocolMessageType(
"KeyValueLease",
(_message.Message,),
{
"DESCRIPTOR": _KEYVALUELEASE,
"__module__": "crud.KeyValue_pb2"
# @@protoc_insertion_point(class_scope:bluzelle.curium.crud.KeyValueLease)
},
)
_sym_db.RegisterMessage(KeyValueLease)
KeyLease = _reflection.GeneratedProtocolMessageType(
"KeyLease",
(_message.Message,),
{
"DESCRIPTOR": _KEYLEASE,
"__module__": "crud.KeyValue_pb2"
# @@protoc_insertion_point(class_scope:bluzelle.curium.crud.KeyLease)
},
)
_sym_db.RegisterMessage(KeyLease)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| 30.507407 | 489 | 0.618308 |
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
_sym_db = _symbol_database.Default()
from bluzelle.codec.crud import lease_pb2 as crud_dot_lease__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name="crud/KeyValue.proto",
package="bluzelle.curium.crud",
syntax="proto3",
serialized_options=b"Z'github.com/bluzelle/curium/x/crud/types",
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x13\x63rud/KeyValue.proto\x12\x14\x62luzelle.curium.crud\x1a\x10\x63rud/lease.proto"&\n\x08KeyValue\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c"W\n\rKeyValueLease\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\x12*\n\x05lease\x18\x03 \x01(\x0b\x32\x1b.bluzelle.curium.crud.Lease"(\n\x08KeyLease\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0f\n\x07seconds\x18\x02 \x01(\rB)Z\'github.com/bluzelle/curium/x/crud/typesb\x06proto3',
dependencies=[
crud_dot_lease__pb2.DESCRIPTOR,
],
)
_KEYVALUE = _descriptor.Descriptor(
name="KeyValue",
full_name="bluzelle.curium.crud.KeyValue",
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="key",
full_name="bluzelle.curium.crud.KeyValue.key",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="value",
full_name="bluzelle.curium.crud.KeyValue.value",
index=1,
number=2,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"",
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=63,
serialized_end=101,
)
_KEYVALUELEASE = _descriptor.Descriptor(
name="KeyValueLease",
full_name="bluzelle.curium.crud.KeyValueLease",
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="key",
full_name="bluzelle.curium.crud.KeyValueLease.key",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="value",
full_name="bluzelle.curium.crud.KeyValueLease.value",
index=1,
number=2,
type=12,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"",
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="lease",
full_name="bluzelle.curium.crud.KeyValueLease.lease",
index=2,
number=3,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=103,
serialized_end=190,
)
_KEYLEASE = _descriptor.Descriptor(
name="KeyLease",
full_name="bluzelle.curium.crud.KeyLease",
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="key",
full_name="bluzelle.curium.crud.KeyLease.key",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="seconds",
full_name="bluzelle.curium.crud.KeyLease.seconds",
index=1,
number=2,
type=13,
cpp_type=3,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=192,
serialized_end=232,
)
_KEYVALUELEASE.fields_by_name["lease"].message_type = crud_dot_lease__pb2._LEASE
DESCRIPTOR.message_types_by_name["KeyValue"] = _KEYVALUE
DESCRIPTOR.message_types_by_name["KeyValueLease"] = _KEYVALUELEASE
DESCRIPTOR.message_types_by_name["KeyLease"] = _KEYLEASE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
KeyValue = _reflection.GeneratedProtocolMessageType(
"KeyValue",
(_message.Message,),
{
"DESCRIPTOR": _KEYVALUE,
"__module__": "crud.KeyValue_pb2"
# @@protoc_insertion_point(class_scope:bluzelle.curium.crud.KeyValue)
},
)
_sym_db.RegisterMessage(KeyValue)
KeyValueLease = _reflection.GeneratedProtocolMessageType(
"KeyValueLease",
(_message.Message,),
{
"DESCRIPTOR": _KEYVALUELEASE,
"__module__": "crud.KeyValue_pb2"
# @@protoc_insertion_point(class_scope:bluzelle.curium.crud.KeyValueLease)
},
)
_sym_db.RegisterMessage(KeyValueLease)
KeyLease = _reflection.GeneratedProtocolMessageType(
"KeyLease",
(_message.Message,),
{
"DESCRIPTOR": _KEYLEASE,
"__module__": "crud.KeyValue_pb2"
# @@protoc_insertion_point(class_scope:bluzelle.curium.crud.KeyLease)
},
)
_sym_db.RegisterMessage(KeyLease)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| true | true |
1c45bbaba79b9d8c2de84555e39251039007bf90 | 2,727 | py | Python | crypto/hard1/service/server.py | AnyKeyShik/CTF_Code | 32ff5dce6452dbea09eff0a4db7ad603efe4027d | [
"Apache-2.0"
] | null | null | null | crypto/hard1/service/server.py | AnyKeyShik/CTF_Code | 32ff5dce6452dbea09eff0a4db7ad603efe4027d | [
"Apache-2.0"
] | null | null | null | crypto/hard1/service/server.py | AnyKeyShik/CTF_Code | 32ff5dce6452dbea09eff0a4db7ad603efe4027d | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from random import randint
from math import pow
def gcd(a, b):
if a < b:
return gcd(b, a)
elif a % b == 0:
return b;
else:
return gcd(b, a % b)
def gen_key(modulo):
key = randint(pow(10, 20), modulo)
while gcd(modulo, key) != 1:
key = randint(pow(10, 20), modulo)
return key
def power(num, exp, mod):
x = 1
y = num
while exp > 0:
if exp % 2 != 0:
x = (x * y) % mod
y = (y * y) % mod
exp = exp // 2
return x % mod
def encrypt(flag, modulo, generator, pub):
sender_key = gen_key(modulo)
secret = power(pub, sender_key, modulo)
c1 = power(generator, sender_key, modulo)
c2 = secret * flag
return c1, c2
def decrypt(c1, c2, priv, modulo):
c1_x = power(c1, priv, modulo)
msg = (c2 // c1_x) % modulo
msg = hex(msg)[2:]
msg = ''.join([chr(int(''.join(ch), 16)) for ch in zip(msg[0::2], msg[1::2])])
return msg
def read_flag():
try:
with open("flag", 'r') as flagfile:
flag = flagfile.read()
except IOError:
print('Some files is missing, tell admin')
exit(-1)
hexflag = "".join("{:02x}".format(ord(ch)) for ch in flag)
numflag = int(hexflag, 16)
return numflag
def prepare_elgamal():
modulo = randint(pow(10, 20), pow(10, 50))
generator = randint(2, modulo)
private = gen_key(modulo)
public = power(generator, private, modulo)
return (modulo, generator, public), private
def main():
# Challenge text
CHALL_TEXT = "Hi. This is your friendly 'Decryption Oracle'\nWe have implemented a well-known public-key cryptosystem. Guess which ;)\n\nModulo: {modulo}\nGenerator: {generator}\nPublic key: {public}\nCiphertext: {cipher}\n\nInsert your Ciphertext-Tuple for me to decrypt - comma seperated (e.g. 5,6)"
SAME_MSG = "Duh! This would be too easy, right?"
INVITE = ">>> "
INCORRECT_INPUT = "Incorrect input!"
flag = read_flag()
public, private = prepare_elgamal()
cipher = encrypt(flag, *public)
print(CHALL_TEXT.format(modulo=public[0], generator=public[1], public=public[2], cipher=cipher))
while True:
print(INVITE, end='')
user_input = input()
try:
enc_msg = tuple(map(int, user_input.replace(' ', '').split(',')))
if len(enc_msg) != 2:
raise ValueException
except Exception:
print(INCORRECT_INPUT)
continue
if enc_msg == cipher:
msg = SAME_MSG
else:
msg = decrypt(*enc_msg, private, public[0])
print(msg)
if __name__ == '__main__':
main()
| 23.110169 | 305 | 0.574624 |
from random import randint
from math import pow
def gcd(a, b):
if a < b:
return gcd(b, a)
elif a % b == 0:
return b;
else:
return gcd(b, a % b)
def gen_key(modulo):
key = randint(pow(10, 20), modulo)
while gcd(modulo, key) != 1:
key = randint(pow(10, 20), modulo)
return key
def power(num, exp, mod):
x = 1
y = num
while exp > 0:
if exp % 2 != 0:
x = (x * y) % mod
y = (y * y) % mod
exp = exp // 2
return x % mod
def encrypt(flag, modulo, generator, pub):
sender_key = gen_key(modulo)
secret = power(pub, sender_key, modulo)
c1 = power(generator, sender_key, modulo)
c2 = secret * flag
return c1, c2
def decrypt(c1, c2, priv, modulo):
c1_x = power(c1, priv, modulo)
msg = (c2 // c1_x) % modulo
msg = hex(msg)[2:]
msg = ''.join([chr(int(''.join(ch), 16)) for ch in zip(msg[0::2], msg[1::2])])
return msg
def read_flag():
try:
with open("flag", 'r') as flagfile:
flag = flagfile.read()
except IOError:
print('Some files is missing, tell admin')
exit(-1)
hexflag = "".join("{:02x}".format(ord(ch)) for ch in flag)
numflag = int(hexflag, 16)
return numflag
def prepare_elgamal():
modulo = randint(pow(10, 20), pow(10, 50))
generator = randint(2, modulo)
private = gen_key(modulo)
public = power(generator, private, modulo)
return (modulo, generator, public), private
def main():
CHALL_TEXT = "Hi. This is your friendly 'Decryption Oracle'\nWe have implemented a well-known public-key cryptosystem. Guess which ;)\n\nModulo: {modulo}\nGenerator: {generator}\nPublic key: {public}\nCiphertext: {cipher}\n\nInsert your Ciphertext-Tuple for me to decrypt - comma seperated (e.g. 5,6)"
SAME_MSG = "Duh! This would be too easy, right?"
INVITE = ">>> "
INCORRECT_INPUT = "Incorrect input!"
flag = read_flag()
public, private = prepare_elgamal()
cipher = encrypt(flag, *public)
print(CHALL_TEXT.format(modulo=public[0], generator=public[1], public=public[2], cipher=cipher))
while True:
print(INVITE, end='')
user_input = input()
try:
enc_msg = tuple(map(int, user_input.replace(' ', '').split(',')))
if len(enc_msg) != 2:
raise ValueException
except Exception:
print(INCORRECT_INPUT)
continue
if enc_msg == cipher:
msg = SAME_MSG
else:
msg = decrypt(*enc_msg, private, public[0])
print(msg)
if __name__ == '__main__':
main()
| true | true |
1c45bd5ae57fb300ba5e328a5611c8d8c5854181 | 1,330 | py | Python | tests/test_data/test_sciense.py | el/elizabeth | dc82cd9d2bb230acdb2f1a49bc16b1c3d12077ff | [
"MIT"
] | null | null | null | tests/test_data/test_sciense.py | el/elizabeth | dc82cd9d2bb230acdb2f1a49bc16b1c3d12077ff | [
"MIT"
] | null | null | null | tests/test_data/test_sciense.py | el/elizabeth | dc82cd9d2bb230acdb2f1a49bc16b1c3d12077ff | [
"MIT"
] | 1 | 2019-12-27T19:34:17.000Z | 2019-12-27T19:34:17.000Z | # -*- coding: utf-8 -*-
import re
from unittest import TestCase
from elizabeth import Science
import elizabeth.core.interdata as common
from tests.test_data import DummyCase
from ._patterns import STR_REGEX
class ScienceBaseTest(TestCase):
def setUp(self):
self.science = Science()
def tearDown(self):
del self.science
def test_str(self):
self.assertTrue(re.match(STR_REGEX, self.science.__str__()))
def test_math_formula(self):
result = self.science.math_formula()
self.assertIn(result, common.MATH_FORMULAS)
class ScienceTestCase(DummyCase):
def test_scientific_article(self):
result = self.generic.science.scientific_article()
self.assertIn(result, self.generic.science._data['article'])
def test_scientist(self):
result = self.generic.science.scientist()
self.assertIn(result, self.generic.science._data['scientist'])
def test_chemical_element(self):
# Because: https://travis-ci.org/lk-geimfari/elizabeth/jobs/196565835
if self.generic.locale != 'fa':
result = self.generic.science.chemical_element(name_only=True)
self.assertTrue(len(result) >= 1)
result = self.generic.science.chemical_element(name_only=False)
self.assertIsInstance(result, dict)
| 30.227273 | 77 | 0.695489 |
import re
from unittest import TestCase
from elizabeth import Science
import elizabeth.core.interdata as common
from tests.test_data import DummyCase
from ._patterns import STR_REGEX
class ScienceBaseTest(TestCase):
def setUp(self):
self.science = Science()
def tearDown(self):
del self.science
def test_str(self):
self.assertTrue(re.match(STR_REGEX, self.science.__str__()))
def test_math_formula(self):
result = self.science.math_formula()
self.assertIn(result, common.MATH_FORMULAS)
class ScienceTestCase(DummyCase):
def test_scientific_article(self):
result = self.generic.science.scientific_article()
self.assertIn(result, self.generic.science._data['article'])
def test_scientist(self):
result = self.generic.science.scientist()
self.assertIn(result, self.generic.science._data['scientist'])
def test_chemical_element(self):
if self.generic.locale != 'fa':
result = self.generic.science.chemical_element(name_only=True)
self.assertTrue(len(result) >= 1)
result = self.generic.science.chemical_element(name_only=False)
self.assertIsInstance(result, dict)
| true | true |
1c45be92d15874a5e90e8e60efad7107c63df898 | 43,600 | py | Python | arelle/ValidateVersReport.py | theredpea/Arelle | e53097f142a69b2fefc18298a72f1f1b219b973d | [
"Apache-2.0"
] | 1 | 2018-01-04T01:39:04.000Z | 2018-01-04T01:39:04.000Z | arelle/ValidateVersReport.py | GuoHuiChen/Arelle | 76b3c720e55348fd91b7be091040d2207f85400c | [
"Apache-2.0"
] | null | null | null | arelle/ValidateVersReport.py | GuoHuiChen/Arelle | 76b3c720e55348fd91b7be091040d2207f85400c | [
"Apache-2.0"
] | null | null | null | '''
Created on Nov 9, 2010
@author: Mark V Systems Limited
(c) Copyright 2010 Mark V Systems Limited, All rights reserved.
'''
from arelle import ModelVersObject, XbrlConst, ValidateXbrl, ModelDocument
from arelle.ModelValue import qname
conceptAttributeEventAttributes = {
"conceptAttributeDelete": ("fromCustomAttribute",),
"conceptAttributeAdd": ("toCustomAttribute",),
"conceptAttributeChange": ("fromCustomAttribute","toCustomAttribute"),
"conceptAttributeChange": ("fromCustomAttribute","toCustomAttribute"),
"attributeDefinitionChange": ("fromCustomAttribute","toCustomAttribute"),
}
schemaAttributeEventAttributes = {
"conceptIDChange": "id",
"conceptTypeChange": "type",
"conceptSubstitutionGroupChange": "substitutionGroup",
"conceptNillableChange": "nillable",
"conceptAbstractChange": "abstract",
"conceptBlockChange": "block",
"conceptDefaultChange": "default",
"conceptFixedChange": "fixed",
"conceptFinalChange": "final"
}
class ValidateVersReport():
def __init__(self, testModelXbrl):
self.testModelXbrl = testModelXbrl # testcase or controlling validation object
def close(self):
self.__dict__.clear() # dereference everything
def validate(self, modelVersReport):
self.modelVersReport = modelVersReport
versReport = modelVersReport.modelDocument
if not hasattr(versReport, "xmlDocument"): # not parsed
return
for DTSname in ("fromDTS", "toDTS"):
DTSmodelXbrl = getattr(versReport, DTSname)
if DTSmodelXbrl is None or DTSmodelXbrl.modelDocument is None:
self.modelVersReport.error("vere:invalidDTSIdentifier",
_("%(dts)s is missing or not loaded"),
modelObject=self, dts=DTSname)
else:
# validate DTS
ValidateXbrl.ValidateXbrl(DTSmodelXbrl).validate(DTSmodelXbrl)
if len(DTSmodelXbrl.errors) > 0:
self.modelVersReport.error("vere:invalidDTSIdentifier",
_("%(dts) has errors: %(error)s"),
modelObject=DTSmodelXbrl.modelDocument, dts=DTSname, error=DTSmodelXbrl.errors)
# validate linkbases
ValidateXbrl.ValidateXbrl(self.modelVersReport).validate(modelVersReport)
versReportElt = versReport.xmlRootElement
# check actions
for assignmentRef in versReportElt.iterdescendants(tag="{http://xbrl.org/2010/versioning-base}assignmentRef"):
ref = assignmentRef.get("ref")
if ref not in versReport.idObjects or \
not isinstance(versReport.idObjects[ref], ModelVersObject.ModelAssignment):
self.modelVersReport.error("vere:invalidAssignmentRef",
_("AssignmentRef %(assignmentRef)s does not reference an assignment"),
modelObject=assignmentRef, assignmentRef=ref)
# check namespace renames
for NSrename in versReport.namespaceRenameFrom.values():
if NSrename.fromURI not in versReport.fromDTS.namespaceDocs:
self.modelVersReport.error("vere:invalidNamespaceMapping",
_("NamespaceRename fromURI %(uri)s does not reference a schema in fromDTS"),
modelObject=self, uri=NSrename.fromURI)
if NSrename.toURI not in versReport.toDTS.namespaceDocs:
self.modelVersReport.error("vere:invalidNamespaceMapping",
_("NamespaceRename toURI %(uri)s does not reference a schema in toDTS"),
modelObject=self, uri=NSrename.toURI)
# check role changes
for roleChange in versReport.roleChanges.values():
if roleChange.fromURI not in versReport.fromDTS.roleTypes:
self.modelVersReport.error("vere:invalidRoleChange",
_("RoleChange fromURI %(uri)s does not reference a roleType in fromDTS"),
modelObject=self, uri=roleChange.fromURI)
if roleChange.toURI not in versReport.toDTS.roleTypes:
self.modelVersReport.error("vere:invalidRoleChange",
_("RoleChange toURI %(uri)s does not reference a roleType in toDTS"),
modelObject=self, uri=roleChange.toURI)
# check reportRefs
# check actions
for reportRef in versReportElt.iterdescendants(tag="{http://xbrl.org/2010/versioning-base}reportRef"):
# if existing it must be valid
href = reportRef.get("{http://www.w3.org/1999/xlink}href")
# TBD
if versReport.fromDTS and versReport.toDTS:
# check concept changes of concept basic
for conceptChange in versReport.conceptUseChanges:
fromConceptQn = conceptChange.fromConceptQname
toConceptQn = conceptChange.toConceptQname
if (conceptChange.name != "conceptAdd" and
(fromConceptQn is None or fromConceptQn not in versReport.fromDTS.qnameConcepts)):
self.modelVersReport.error("vercue:invalidConceptReference",
_("%(event)s fromConcept %(concept)s does not reference a concept in fromDTS"),
modelObject=conceptChange, event=conceptChange.name, concept=conceptChange.fromConceptQname)
if (conceptChange.name != "conceptDelete" and
(toConceptQn is None or toConceptQn not in versReport.toDTS.qnameConcepts)):
self.modelVersReport.error("vercue:invalidConceptReference",
_("%(event)s toConcept %(concept)s does not reference a concept in toDTS"),
modelObject=conceptChange, event=conceptChange.name, concept=conceptChange.toConceptQname)
if (conceptChange.name == "conceptAdd" and toConceptQn is not None and
conceptChange.isPhysical ^
(qname(versReport.namespaceRenameTo.get(toConceptQn.namespaceURI, toConceptQn.namespaceURI),
toConceptQn.localName) not in versReport.fromDTS.qnameConcepts)):
self.modelVersReport.error("vercue:inconsistentPhysicalAttribute",
_("%(event)s toConcept %(concept)s physical attribute conflicts with presence in fromDTS"),
modelObject=conceptChange, event=conceptChange.name, concept=conceptChange.toConceptQname)
if (conceptChange.name == "conceptDelete" and toConceptQn is not None and
conceptChange.isPhysical ^
(qname(versReport.namespaceRenameFrom.get(fromConceptQn.namespaceURI, fromConceptQn.namespaceURI),
fromConceptQn.localName) in versReport.toDTS.qnameConcepts)):
self.modelVersReport.error("vercue:inconsistentPhysicalAttribute",
_("%(event)s toConcept %(concept)s physical attribute conflicts with presence in toDTS"),
modelObject=conceptChange, event=conceptChange.name, concept=conceptChange.toConceptQname)
# check concept changes of concept extended
equivalentAttributes = {}
for conceptChange in versReport.conceptDetailsChanges:
fromConcept = conceptChange.fromConcept
toConcept = conceptChange.toConcept
fromResource = conceptChange.fromResource
toResource = conceptChange.toResource
# fromConcept checks
if not conceptChange.name.endswith("Add"):
if not fromConcept is not None:
self.modelVersReport.error("vercue:invalidConceptReference",
_("%(action)s %(event)s fromConcept %(concept)s does not reference a concept in fromDTS"),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, concept=conceptChange.fromConceptQname)
# tuple check
elif _("Child") in conceptChange.name and \
not versReport.fromDTS.qnameConcepts[fromConcept.qname] \
.isTuple:
self.modelVersReport.error("vercue:invalidConceptReference",
_("%(action)s %(event)s fromConcept %(concept)s must be defined as a tuple"),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, concept=conceptChange.fromConceptQname)
# resource check
elif "Label" in conceptChange.name:
if fromResource is None:
self.modelVersReport.error("vercde:invalidResourceIdentifier",
_("%(action)s %(event)s fromResource %(resource)s does not reference a resource in fromDTS"),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, resource=conceptChange.fromResourceValue)
else:
relationship = fromConcept.relationshipToResource(fromResource, XbrlConst.conceptLabel)
if relationship is not None:
if (relationship.qname != XbrlConst.qnLinkLabelArc or
relationship.parentQname != XbrlConst.qnLinkLabelLink or
fromResource.qname != XbrlConst.qnLinkLabel):
self.modelVersReport.error("vercde:invalidConceptLabelIdentifier",
_("%(action)s %(event)s fromResource %(resource)s for %(concept)s in fromDTS does not have expected link, arc, or label elements"),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, resource=conceptChange.fromResourceValue, concept=conceptChange.fromConceptQname)
else:
relationship = fromConcept.relationshipToResource(fromResource, XbrlConst.elementLabel)
if relationship is not None:
if relationship.qname != XbrlConst.qnGenArc or \
fromResource.qname != XbrlConst.qnGenLabel:
self.modelVersReport.error("vercde:invalidConceptLabelIdentifier",
_("%(action)s %(event)s fromResource %(resource)s for %(concept)s in fromDTS does not have expected link, arc, or label elements"),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, resource=conceptChange.fromResourceValue, concept=conceptChange.fromConceptQname)
else:
self.modelVersReport.error("vercde:invalidResourceIdentifier",
_("%(action)s %(event)s fromResource %(resource)s does not have a label relationship to {3} in fromDTS"),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, resource=conceptChange.fromResourceValue)
elif "Reference" in conceptChange.name:
if fromResource is None:
self.modelVersReport.error("vercde:invalidResourceIdentifier",
_("%(action)s %(event)s fromResource %(resource)s does not reference a resource in fromDTS"),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, resource=conceptChange.fromResourceValue)
else:
relationship = fromConcept.relationshipToResource(fromResource, XbrlConst.conceptReference)
if relationship is not None:
if relationship.qname != XbrlConst.qnLinkReferenceArc or \
relationship.parentQname != XbrlConst.qnLinkReferenceLink or \
fromResource.qname != XbrlConst.qnLinkReference:
self.modelVersReport.error("vercde:invalidConceptReferenceIdentifier",
_("%(action)s %(event)s fromResource %(resource)s for %(concept)s in fromDTS does not have expected link, arc, or label elements"),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, resource=conceptChange.fromResourceValue, concept=conceptChange.fromConceptQname)
else:
relationship = fromConcept.relationshipToResource(fromResource, XbrlConst.elementReference)
if relationship is not None:
if relationship.qname != XbrlConst.qnGenArc or \
fromResource.qname != XbrlConst.qnGenReference:
self.modelVersReport.error("vercde:invalidConceptReferenceIdentifier",
_("%(action)s %(event)s fromResource %(resource)s for %(concept)s in fromDTS does not have expected link, arc, or label elements"),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, resource=conceptChange.fromResourceValue, concept=conceptChange.fromConceptQname)
else:
self.modelVersReport.error("vercde:invalidResourceIdentifier",
_("%(action)s %(event)s fromResource %(resource)s does not have a reference relationship to %(concept)s in fromDTS"),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, resource=conceptChange.fromResourceValue, concept=conceptChange.fromConceptQname)
# toConcept checks
if not conceptChange.name.endswith("Delete"):
if not toConcept is not None:
self.modelVersReport.error("vercue:invalidConceptReference",
_("%(action)s %(event)s toConcept %(concept)s does not reference a concept in toDTS"),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, concept=conceptChange.toConceptQname)
# tuple check
elif "Child" in conceptChange.name and \
not versReport.toDTS.qnameConcepts[toConcept.qname] \
.isTuple:
self.modelVersReport.error("vercue:invalidConceptReference",
_("%(action)s %(event)s toConcept %(concept)s must be defined as a tuple"),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, concept=conceptChange.toConceptQname)
# resource check
elif "Label" in conceptChange.name:
if toResource is None:
self.modelVersReport.error("vercde:invalidResourceIdentifier",
_("%(action)s %(event)s toResource %(resource)s for %(concept)s does not reference a resource in toDTS"),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
elif toResource.qname not in (XbrlConst.qnLinkLabel, XbrlConst.qnGenLabel):
self.modelVersReport.error("vercde:invalidConceptLabelIdentifier",
_("%(action)s %(event)s toResource %(resource)s is not a label in toDTS"),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
else:
relationship = toConcept.relationshipToResource(toResource, XbrlConst.conceptLabel)
if relationship is not None:
if relationship.qname != XbrlConst.qnLinkLabelArc or \
relationship.parentQname != XbrlConst.qnLinkLabelLink or \
toResource.qname != XbrlConst.qnLinkLabel:
self.modelVersReport.error("vercde:invalidConceptLabelIdentifier",
_("%(action)s %(event)s toResource %(resource)s for %(concept)s in toDTS does not have expected link, arc, or label elements"),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
else:
relationship = toConcept.relationshipToResource(toResource, XbrlConst.elementLabel)
if relationship is not None:
if relationship.qname != XbrlConst.qnGenArc or \
toResource.qname != XbrlConst.qnGenLabel:
self.modelVersReport.error("vercde:invalidConceptLabelIdentifier",
_("%(action)s %(event)s toResource %(resource)s for %(concept)s in toDTS does not have expected link, arc, or label elements"),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
else:
self.modelVersReport.error("vercde:invalidConceptResourceIdentifier",
_("%(action)s %(event)s toResource %(resource)s does not have a label relationship to %(concept)s in toDTS"),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
elif "Reference" in conceptChange.name:
if toResource is None:
self.modelVersReport.error("vercde:invalidResourceIdentifier",
_("%(action)s %(event)s toResource %(resource)s does not reference a resource in toDTS"),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, resource=conceptChange.toResourceValue)
elif toResource.qname not in (XbrlConst.qnLinkReference, XbrlConst.qnGenReference):
self.modelVersReport.error("vercde:invalidConceptReferenceIdentifier",
_("%(action)s %(event)s toResource %(resource)s is not a reference in toDTS"),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
else:
relationship = toConcept.relationshipToResource(toResource, XbrlConst.conceptReference)
if relationship is not None:
if relationship.qname != XbrlConst.qnLinkReferenceArc or \
relationship.parentQname != XbrlConst.qnLinkReferenceLink or \
toResource.qname != XbrlConst.qnLinkReference:
self.modelVersReport.error("vercde:invalidConceptReferenceIdentifier",
_("%(action)s %(event)s toResource %(resource)s for %(concept)s in toDTS does not have expected link, arc, or label elements"),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
else:
relationship = toConcept.relationshipToResource(toResource, XbrlConst.elementReference)
if relationship is not None:
if relationship.qname != XbrlConst.qnGenArc or \
toResource.qname != XbrlConst.qnGenReference:
self.modelVersReport.error("vercde:invalidConceptReferenceIdentifier",
_("%(action)s %(event)s toResource %(resource)s for %(concept)s in toDTS does not have expected link, arc, or label elements"),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
else:
self.modelVersReport.error("vercde:invalidConceptResourceIdentifier",
_("%(action)s %(event)s toResource %(resource)s does not have a reference relationship to %(concept)s in toDTS"),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
# check concept correspondence
if fromConcept is not None and toConcept is not None:
if (versReport.toDTSqname(fromConcept.qname) != toConcept.qname and
versReport.equivalentConcepts.get(fromConcept.qname) != toConcept.qname and
toConcept.qname not in versReport.relatedConcepts.get(fromConcept.qname,[])):
self.modelVersReport.error("vercde:invalidConceptCorrespondence",
_("%(action)s %(event)s fromConcept %(conceptFrom)s and toConcept %(conceptTo)s must be equivalent or related"),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, conceptFrom=conceptChange.fromConceptQname, conceptTo=conceptChange.toConceptQname)
# custom attribute events
if conceptChange.name.startswith("conceptAttribute") or conceptChange.name == "attributeDefinitionChange":
try:
for attr in conceptAttributeEventAttributes[conceptChange.name]:
customAttributeQname = conceptChange.customAttributeQname(attr)
if not customAttributeQname:
self.modelVersReport.info("arelle:invalidAttributeChange",
_("%(action)s %(event)s %(attr)s $(attrName)s does not have a name"),
modelObject=conceptChange, action=conceptChange.actionId,
attr=attr, attrName=customAttributeQname)
elif customAttributeQname.namespaceURI in (None, XbrlConst.xbrli, XbrlConst.xsd):
self.modelVersReport.error("vercde:illegalCustomAttributeEvent",
_("%(action)s %(event)s %(attr)s $(attrName)s has an invalid namespace"),
modelObject=conceptChange, action=conceptChange.actionId, event=conceptChange.name,
attr=attr, attrName=customAttributeQname)
except KeyError:
self.modelVersReport.info("arelle:eventNotRecognized",
_("%(action)s %(event)s event is not recognized"),
modelObject=conceptChange, action=conceptChange.actionId, event=conceptChange.name)
if conceptChange.name == "attributeDefinitionChange":
fromAttr = conceptChange.customAttributeQname("fromCustomAttribute")
toAttr = conceptChange.customAttributeQname("toCustomAttribute")
equivalentAttributes[fromAttr] = toAttr
equivalentAttributes[toAttr] = fromAttr
# check item concept identifiers
if conceptChange.name in ("conceptPeriodTypeChange", "conceptPeriodTypeChange"):
for concept in (fromConcept, toConcept):
if concept is not None and not concept.isItem:
self.modelVersReport.error("vercde:invalidItemConceptIdentifier",
_("%(action)s %(event)s concept %(concept)s does not reference an item concept."),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, concept=concept.qname)
# check tuple concept identifiers
if conceptChange.name in ("tupleContentModelChange", ):
for concept in (fromConcept, toConcept):
if concept is not None and not concept.isItem:
self.modelVersReport.error("vercde:invalidTupleConceptIdentifier",
_("%(action)s %(event)s concept %(concept)s does not reference a tuple concept."),
modelObject=conceptChange, action=conceptChange.actionId,
event=conceptChange.name, concept=concept.qname)
if conceptChange.name in schemaAttributeEventAttributes:
attr = schemaAttributeEventAttributes[conceptChange.name]
if (fromConcept is not None and not fromConcept.get(attr) and
toConcept is not None and not toConcept.get(attr)):
self.modelVersReport.error("vercde:illegalSchemaAttributeChangeEvent",
_("%(action)s %(event)s neither concepts have a %(attribute)s attribute: %(fromConcept)s, %(toConcept)s."),
modelObject=conceptChange, action=conceptChange.actionId, attribute=attr,
event=conceptChange.name, fromConcept=fromConcept.qname, toConcept=toConcept.qname)
# check concept changes for equivalent attributes
for conceptChange in versReport.conceptDetailsChanges:
if conceptChange.name == "conceptAttributeChange":
fromAttr = conceptChange.customAttributeQname("fromCustomAttribute")
toAttr = conceptChange.customAttributeQname("toCustomAttribute")
if (equivalentAttributes.get(fromAttr) != toAttr and
(fromAttr.localName != toAttr.localName or
(fromAttr.namespaceURI != toAttr.namespaceURI and
versReport.namespaceRenameFrom.get(fromAttr.namespaceURI, fromAttr.namespaceURI) != toAttr.namespaceURI))):
self.modelVersReport.error("vercde:invalidAttributeCorrespondence",
_("%(action)s %(event)s has non-equivalent attributes %(fromQname)s and %(toQname)s"),
modelObject=conceptChange, action=conceptChange.actionId, event=conceptChange.name,
fromQname=fromAttr, toQname=toAttr)
del equivalentAttributes # dereference
# check relationship set changes
for relSetChange in versReport.relationshipSetChanges:
for relationshipSet, name in ((relSetChange.fromRelationshipSet, "fromRelationshipSet"),
(relSetChange.toRelationshipSet, "toRelationshipSet")):
if relationshipSet is not None:
dts = relationshipSet.dts
relationshipSetValid = True
if relationshipSet.link:
if (relationshipSet.link not in dts.qnameConcepts or
(dts.qnameConcepts[relationshipSet.link].type is not None and
not dts.qnameConcepts[relationshipSet.link].type.isDerivedFrom(XbrlConst.qnXlExtendedType))):
self.modelVersReport.error("verrelse:invalidLinkElementReferenceEvent",
_("%(event)s %(relSet)s link %(link)s does not reference an element in its DTS"),
modelObject=relSetChange, event=relSetChange.name, relSet=name,
link=relationshipSet.link)
relationshipSetValid = False
if relationshipSet.arc:
if (relationshipSet.arc not in dts.qnameConcepts or
(dts.qnameConcepts[relationshipSet.arc].type is not None and
not dts.qnameConcepts[relationshipSet.arc].type.isDerivedFrom(XbrlConst.qnXlArcType))):
self.modelVersReport.error("verrelse:invalidArcElementReferenceEvent",
_("%(event)s %(relSet)s arc %(arc) does not reference an element in its DTS"),
modelObject=relSetChange, event=relSetChange.name, relSet=name,
arc=relationshipSet.arc)
relationshipSetValid = False
if relationshipSet.linkrole:
if not (XbrlConst.isStandardRole(relationshipSet.linkrole) or
relationshipSet.linkrole in relationshipSet.dts.roleTypes):
self.modelVersReport.error("verrelse:invalidLinkrole",
_("%(event)s %(relSet)s linkrole %(linkrole)s does not reference an linkrole in its DTS"),
modelObject=relSetChange, event=relSetChange.name, relSet=name,
linkrole=relationshipSet.linkrole)
relationshipSetValid = False
elif not any(linkrole == relationshipSet.linkrole
for arcrole, linkrole, linkqname, arcqname in dts.baseSets.keys()):
self.modelVersReport.error("verrelse:invalidLinkrole",
_("%(event)s %(relSet)s linkrole %(linkrole)s is not used in its DTS"),
modelObject=relSetChange, event=relSetChange.name, relSet=name,
linkrole=relationshipSet.linkrole)
relationshipSetValid = False
if relationshipSet.arcrole:
if not (XbrlConst.isStandardArcrole(relationshipSet.arcrole) or
relationshipSet.arcrole in relationshipSet.dts.arcroleTypes):
self.modelVersReport.error("verrelse:invalidArcrole",
_("%(event)s %(relSet)s arcrole %(arcrole)s does not reference an arcrole in its DTS"),
modelObject=relSetChange, event=relSetChange.name, relSet=name,
arcrole=relationshipSet.arcrole)
relationshipSetValid = False
elif not any(arcrole == relationshipSet.arcrole
for arcrole, linkrole, linkqname, arcqname in dts.baseSets.keys()):
self.modelVersReport.error("verrelse:invalidArcrole",
_("%(event)s %(relSet)s arcrole %(arcrole)s is not used in its DTS"),
modelObject=relSetChange, event=relSetChange.name, relSet=name,
arcrole=relationshipSet.arcrole)
relationshipSetValid = False
for relationship in relationshipSet.relationships:
# fromConcept checks
if relationship.fromConcept is None:
self.modelVersReport.error("vercue:invalidConceptReference",
_("%(event)s %(relSet)s relationship fromConcept %(conceptFrom)s does not reference a concept in its DTS"),
modelObject=relSetChange, event=relSetChange.name, relSet=name,
conceptFrom=relationship.fromName)
relationshipSetValid = False
if relationship.toName and relationship.toConcept is None:
self.modelVersReport.error("vercue:invalidConceptReference",
_("%(event)s %(relSet)s relationship toConcept %(conceptTo)s does not reference a concept in its DTS"),
modelObject=relSetChange, event=relSetChange.name, relSet=name,
conceptTo=relationship.toName)
relationshipSetValid = False
if relationshipSetValid: # test that relations exist
if relationship.fromRelationship is None:
if relationship.toName:
self.modelVersReport.error("verrelse:invalidRelationshipReference",
_("%(event)s %(relSet)s no relationship found from fromConcept %(conceptFrom)s to toConcept %(conceptTo)s in its DTS"),
modelObject=relSetChange, event=relSetChange.name, relSet=name,
conceptFrom=relationship.fromName, conceptTo=relationship.toName)
else:
self.modelVersReport.error("verrelse:invalidRelationshipReference",
_("%(event)s %(relSet)s no relationship found fromConcept %(conceptFrom)s in its DTS"),
modelObject=relSetChange, event=relSetChange.name, relSet=name,
conceptFrom=relationship.fromName)
# check instance aspect changes
for iaChange in versReport.instanceAspectChanges:
for instAspects in (iaChange.fromAspects, iaChange.toAspects):
if instAspects is not None and instAspects.aspects:
dimAspectElts = {}
for aspect in instAspects.aspects:
dts = aspect.modelAspects.dts
if (aspect.localName in ("explicitDimension", "typedDimension") and aspect.concept is None):
self.modelVersReport.error("vercue:invalidConceptReference",
_("%(event)s dimension %(dimension)s is not a concept in its DTS"),
modelObject=aspect, event=iaChange.name, dimension=aspect.conceptName)
elif aspect.localName == "explicitDimension":
dimConcept = aspect.concept
if not dimConcept.isExplicitDimension:
self.modelVersReport.error("verdime:invalidExplicitDimensionIdentifier",
_("%(event)s dimension %(dimension)s is not an explicit dimension in its DTS"),
modelObject=aspect, event=iaChange.name, dimension=aspect.conceptName)
if dimConcept in dimAspectElts:
self.modelVersReport.error("verdime:duplicateExplicitDimensionAspect",
_("%(event)s dimension %(dimension)s is duplicated in a single explicitDimension element"),
modelObject=(aspect, dimAspectElts[dimConcept]), event=iaChange.name, dimension=aspect.conceptName)
else:
dimAspectElts[dimConcept] = aspect
elif aspect.localName == "typedDimension":
dimConcept = aspect.concept
if not dimConcept.isTypedDimension:
self.modelVersReport.error("verdime:invalidTypedDimensionIdentifier",
_("%(event)s dimension %(dimension)s is not a typed dimension in its DTS"),
modelObject=aspect, event=iaChange.name, dimension=aspect.conceptName)
if dimConcept in dimAspectElts:
self.modelVersReport.error("verdime:duplicateTypedDimensionAspect",
_("%(event)s dimension %(dimension)s is duplicated in a single explicitDimension element"),
modelObject=(aspect, dimAspectElts[dimConcept]), event=iaChange.name, dimension=aspect.conceptName)
else:
dimAspectElts[dimConcept] = aspect
if aspect.localName in ("explicitDimension", "concepts"):
for relatedConcept in aspect.relatedConcepts:
conceptMdlObj = relatedConcept.concept
if conceptMdlObj is None or not conceptMdlObj.isItem:
self.modelVersReport.error("vercue:invalidConceptReference",
_("%(event)s concept %(concept)s is not an item in its DTS"),
modelObject=aspect, event=iaChange.name, concept=relatedConcept.conceptName)
if relatedConcept.arcrole is not None:
if (not XbrlConst.isStandardArcrole(relatedConcept.arcrole) and
relatedConcept.arcrole not in dts.arcroleTypes):
self.modelVersReport.error("verdime:invalidURI",
_("%(event)s arcrole %(arcrole)s is not defined in its DTS"),
modelObject=aspect, event=iaChange.name, arcrole=relatedConcept.arcrole)
elif not any(arcrole == relatedConcept.arcrole
for arcrole, linkrole, linkqname, arcqname in dts.baseSets.keys()):
self.modelVersReport.error("verdime:invalidURI",
_("%(event)s arcrole %(arcrole)s is not used in its DTS"),
modelObject=aspect, event=iaChange.name, linkrole=relatedConcept.arcrole)
if relatedConcept.linkrole is not None:
if (relatedConcept.linkrole != "http://www.xbrl.org/2003/role/link" and
relatedConcept.linkrole not in dts.roleTypes):
self.modelVersReport.error("verdime:invalidURI",
_("%(event)s linkrole %(linkrole)s is not defined in its DTS"),
modelObject=aspect, event=iaChange.name, linkrole=relatedConcept.linkrole)
elif not any(linkrole == relatedConcept.linkrole
for arcrole, linkrole, linkqname, arcqname in dts.baseSets.keys()):
self.modelVersReport.error("verdime:invalidURI",
_("%(event)s linkrole %(linkrole)s is not used in its DTS"),
modelObject=aspect, event=iaChange.name, linkrole=relatedConcept.linkrole)
if (relatedConcept.arc is not None and
(relatedConcept.arc not in dts.qnameConcepts or
(dts.qnameConcepts[relatedConcept.arc].type is not None and
not dts.qnameConcepts[relatedConcept.arc].type.isDerivedFrom(XbrlConst.qnXlArcType)))):
self.modelVersReport.error("verdime:invalidArcElement",
_("%(event)s arc %(arc)s is not defined as an arc in its DTS"),
modelObject=aspect, event=iaChange.name, arc=relatedConcept.arc)
if (relatedConcept.link is not None and
(relatedConcept.link not in dts.qnameConcepts or
(dts.qnameConcepts[relatedConcept.link].type is not None and
not dts.qnameConcepts[relatedConcept.link].type.isDerivedFrom(XbrlConst.qnXlExtendedType)))):
self.modelVersReport.error("verdime:invalidLinkElement",
_("%(event)s link %(link)s is not defined in its DTS"),
modelObject=aspect, event=iaChange.name, link=relatedConcept.link)
self.close() | 80.740741 | 176 | 0.547546 | from arelle import ModelVersObject, XbrlConst, ValidateXbrl, ModelDocument
from arelle.ModelValue import qname
conceptAttributeEventAttributes = {
"conceptAttributeDelete": ("fromCustomAttribute",),
"conceptAttributeAdd": ("toCustomAttribute",),
"conceptAttributeChange": ("fromCustomAttribute","toCustomAttribute"),
"conceptAttributeChange": ("fromCustomAttribute","toCustomAttribute"),
"attributeDefinitionChange": ("fromCustomAttribute","toCustomAttribute"),
}
schemaAttributeEventAttributes = {
"conceptIDChange": "id",
"conceptTypeChange": "type",
"conceptSubstitutionGroupChange": "substitutionGroup",
"conceptNillableChange": "nillable",
"conceptAbstractChange": "abstract",
"conceptBlockChange": "block",
"conceptDefaultChange": "default",
"conceptFixedChange": "fixed",
"conceptFinalChange": "final"
}
class ValidateVersReport():
    """Validates an XBRL versioning report against its from- and to-DTSes.

    Checks that every event in the report references concepts, resources,
    relationships, link/arc elements, linkroles and arcroles that actually
    exist (and, where required, are used) in the corresponding DTS.
    Findings are reported as ver* error codes on the report's model object.
    """

    def __init__(self, testModelXbrl):
        # testModelXbrl: the test-case model XBRL under which validation runs
        self.testModelXbrl = testModelXbrl

    def close(self):
        """Release every reference held by this validator."""
        self.__dict__.clear()

    def validate(self, modelVersReport):
        """Validate a loaded versioning report.

        modelVersReport: model XBRL whose modelDocument is the versioning
        report.  Errors are logged via modelVersReport.error(...).  Calls
        self.close() when finished.
        """
        self.modelVersReport = modelVersReport
        versReport = modelVersReport.modelDocument
        if not hasattr(versReport, "xmlDocument"):
            # report failed to load; nothing to validate
            return
        # both referenced DTSes must be loadable and XBRL-valid
        for DTSname in ("fromDTS", "toDTS"):
            DTSmodelXbrl = getattr(versReport, DTSname)
            if DTSmodelXbrl is None or DTSmodelXbrl.modelDocument is None:
                self.modelVersReport.error("vere:invalidDTSIdentifier",
                    _("%(dts)s is missing or not loaded"),
                    modelObject=self, dts=DTSname)
            else:
                ValidateXbrl.ValidateXbrl(DTSmodelXbrl).validate(DTSmodelXbrl)
                if len(DTSmodelXbrl.errors) > 0:
                    # fixed: placeholder was "%(dts)" (missing "s" conversion -> ValueError at log time)
                    self.modelVersReport.error("vere:invalidDTSIdentifier",
                        _("%(dts)s has errors: %(error)s"),
                        modelObject=DTSmodelXbrl.modelDocument, dts=DTSname, error=DTSmodelXbrl.errors)
        # validate the versioning report instance itself
        ValidateXbrl.ValidateXbrl(self.modelVersReport).validate(modelVersReport)
        versReportElt = versReport.xmlRootElement
        # assignmentRef/@ref must point at an assignment in the report
        for assignmentRef in versReportElt.iterdescendants(tag="{http://xbrl.org/2010/versioning-base}assignmentRef"):
            ref = assignmentRef.get("ref")
            if ref not in versReport.idObjects or \
               not isinstance(versReport.idObjects[ref], ModelVersObject.ModelAssignment):
                self.modelVersReport.error("vere:invalidAssignmentRef",
                    _("AssignmentRef %(assignmentRef)s does not reference an assignment"),
                    modelObject=assignmentRef, assignmentRef=ref)
        # namespace renames must map schemas present in each DTS
        for NSrename in versReport.namespaceRenameFrom.values():
            if NSrename.fromURI not in versReport.fromDTS.namespaceDocs:
                self.modelVersReport.error("vere:invalidNamespaceMapping",
                    _("NamespaceRename fromURI %(uri)s does not reference a schema in fromDTS"),
                    modelObject=self, uri=NSrename.fromURI)
            if NSrename.toURI not in versReport.toDTS.namespaceDocs:
                self.modelVersReport.error("vere:invalidNamespaceMapping",
                    _("NamespaceRename toURI %(uri)s does not reference a schema in toDTS"),
                    modelObject=self, uri=NSrename.toURI)
        # role changes must map roleTypes present in each DTS
        for roleChange in versReport.roleChanges.values():
            if roleChange.fromURI not in versReport.fromDTS.roleTypes:
                self.modelVersReport.error("vere:invalidRoleChange",
                    _("RoleChange fromURI %(uri)s does not reference a roleType in fromDTS"),
                    modelObject=self, uri=roleChange.fromURI)
            if roleChange.toURI not in versReport.toDTS.roleTypes:
                self.modelVersReport.error("vere:invalidRoleChange",
                    _("RoleChange toURI %(uri)s does not reference a roleType in toDTS"),
                    modelObject=self, uri=roleChange.toURI)
        for reportRef in versReportElt.iterdescendants(tag="{http://xbrl.org/2010/versioning-base}reportRef"):
            # NOTE(review): href is read but never checked; presumably a
            # resolvability check was intended here -- confirm against spec
            href = reportRef.get("{http://www.w3.org/1999/xlink}href")
        if versReport.fromDTS and versReport.toDTS:
            # concept-use events: each side must resolve in its DTS and the
            # physical attribute must agree with presence in the other DTS
            for conceptChange in versReport.conceptUseChanges:
                fromConceptQn = conceptChange.fromConceptQname
                toConceptQn = conceptChange.toConceptQname
                if (conceptChange.name != "conceptAdd" and
                    (fromConceptQn is None or fromConceptQn not in versReport.fromDTS.qnameConcepts)):
                    self.modelVersReport.error("vercue:invalidConceptReference",
                        _("%(event)s fromConcept %(concept)s does not reference a concept in fromDTS"),
                        modelObject=conceptChange, event=conceptChange.name, concept=conceptChange.fromConceptQname)
                if (conceptChange.name != "conceptDelete" and
                    (toConceptQn is None or toConceptQn not in versReport.toDTS.qnameConcepts)):
                    self.modelVersReport.error("vercue:invalidConceptReference",
                        _("%(event)s toConcept %(concept)s does not reference a concept in toDTS"),
                        modelObject=conceptChange, event=conceptChange.name, concept=conceptChange.toConceptQname)
                if (conceptChange.name == "conceptAdd" and toConceptQn is not None and
                    conceptChange.isPhysical ^
                    (qname(versReport.namespaceRenameTo.get(toConceptQn.namespaceURI, toConceptQn.namespaceURI),
                           toConceptQn.localName) not in versReport.fromDTS.qnameConcepts)):
                    self.modelVersReport.error("vercue:inconsistentPhysicalAttribute",
                        _("%(event)s toConcept %(concept)s physical attribute conflicts with presence in fromDTS"),
                        modelObject=conceptChange, event=conceptChange.name, concept=conceptChange.toConceptQname)
                # fixed: guard tested toConceptQn but the body dereferences
                # fromConceptQn (conceptDelete has a from-side concept);
                # original guard could raise AttributeError on None
                if (conceptChange.name == "conceptDelete" and fromConceptQn is not None and
                    conceptChange.isPhysical ^
                    (qname(versReport.namespaceRenameFrom.get(fromConceptQn.namespaceURI, fromConceptQn.namespaceURI),
                           fromConceptQn.localName) in versReport.toDTS.qnameConcepts)):
                    # NOTE(review): message/kwarg still name the to-side concept -- confirm intended wording
                    self.modelVersReport.error("vercue:inconsistentPhysicalAttribute",
                        _("%(event)s toConcept %(concept)s physical attribute conflicts with presence in toDTS"),
                        modelObject=conceptChange, event=conceptChange.name, concept=conceptChange.toConceptQname)
            # attributeDefinitionChange events declare attribute equivalences
            # that later conceptAttributeChange events may rely on
            equivalentAttributes = {}
            # concept-details events: resolve concepts and any label/reference
            # resources on each side, and check relationship wiring
            for conceptChange in versReport.conceptDetailsChanges:
                fromConcept = conceptChange.fromConcept
                toConcept = conceptChange.toConcept
                fromResource = conceptChange.fromResource
                toResource = conceptChange.toResource
                if not conceptChange.name.endswith("Add"):
                    # events other than *Add must identify a valid from-side
                    if fromConcept is None:  # fixed: was "not fromConcept is not None" (double negative)
                        self.modelVersReport.error("vercue:invalidConceptReference",
                            _("%(action)s %(event)s fromConcept %(concept)s does not reference a concept in fromDTS"),
                            modelObject=conceptChange, action=conceptChange.actionId,
                            event=conceptChange.name, concept=conceptChange.fromConceptQname)
                    # fixed: "Child" was wrapped in _() so the membership test
                    # depended on the active translation; compare literal text
                    # as the to-side branch below already does
                    elif "Child" in conceptChange.name and \
                        not versReport.fromDTS.qnameConcepts[fromConcept.qname] \
                        .isTuple:
                        self.modelVersReport.error("vercue:invalidConceptReference",
                            _("%(action)s %(event)s fromConcept %(concept)s must be defined as a tuple"),
                            modelObject=conceptChange, action=conceptChange.actionId,
                            event=conceptChange.name, concept=conceptChange.fromConceptQname)
                    elif "Label" in conceptChange.name:
                        if fromResource is None:
                            self.modelVersReport.error("vercde:invalidResourceIdentifier",
                                _("%(action)s %(event)s fromResource %(resource)s does not reference a resource in fromDTS"),
                                modelObject=conceptChange, action=conceptChange.actionId,
                                event=conceptChange.name, resource=conceptChange.fromResourceValue)
                        else:
                            # accept either standard (link:) or generic label wiring
                            relationship = fromConcept.relationshipToResource(fromResource, XbrlConst.conceptLabel)
                            if relationship is not None:
                                if (relationship.qname != XbrlConst.qnLinkLabelArc or
                                    relationship.parentQname != XbrlConst.qnLinkLabelLink or
                                    fromResource.qname != XbrlConst.qnLinkLabel):
                                    self.modelVersReport.error("vercde:invalidConceptLabelIdentifier",
                                        _("%(action)s %(event)s fromResource %(resource)s for %(concept)s in fromDTS does not have expected link, arc, or label elements"),
                                        modelObject=conceptChange, action=conceptChange.actionId,
                                        event=conceptChange.name, resource=conceptChange.fromResourceValue, concept=conceptChange.fromConceptQname)
                            else:
                                relationship = fromConcept.relationshipToResource(fromResource, XbrlConst.elementLabel)
                                if relationship is not None:
                                    if relationship.qname != XbrlConst.qnGenArc or \
                                       fromResource.qname != XbrlConst.qnGenLabel:
                                        self.modelVersReport.error("vercde:invalidConceptLabelIdentifier",
                                            _("%(action)s %(event)s fromResource %(resource)s for %(concept)s in fromDTS does not have expected link, arc, or label elements"),
                                            modelObject=conceptChange, action=conceptChange.actionId,
                                            event=conceptChange.name, resource=conceptChange.fromResourceValue, concept=conceptChange.fromConceptQname)
                                else:
                                    # fixed: message contained a leftover positional "{3}"
                                    # placeholder; use the named concept parameter as the
                                    # parallel to-side branch does
                                    self.modelVersReport.error("vercde:invalidResourceIdentifier",
                                        _("%(action)s %(event)s fromResource %(resource)s does not have a label relationship to %(concept)s in fromDTS"),
                                        modelObject=conceptChange, action=conceptChange.actionId,
                                        event=conceptChange.name, resource=conceptChange.fromResourceValue, concept=conceptChange.fromConceptQname)
                    elif "Reference" in conceptChange.name:
                        if fromResource is None:
                            self.modelVersReport.error("vercde:invalidResourceIdentifier",
                                _("%(action)s %(event)s fromResource %(resource)s does not reference a resource in fromDTS"),
                                modelObject=conceptChange, action=conceptChange.actionId,
                                event=conceptChange.name, resource=conceptChange.fromResourceValue)
                        else:
                            relationship = fromConcept.relationshipToResource(fromResource, XbrlConst.conceptReference)
                            if relationship is not None:
                                if relationship.qname != XbrlConst.qnLinkReferenceArc or \
                                   relationship.parentQname != XbrlConst.qnLinkReferenceLink or \
                                   fromResource.qname != XbrlConst.qnLinkReference:
                                    self.modelVersReport.error("vercde:invalidConceptReferenceIdentifier",
                                        _("%(action)s %(event)s fromResource %(resource)s for %(concept)s in fromDTS does not have expected link, arc, or label elements"),
                                        modelObject=conceptChange, action=conceptChange.actionId,
                                        event=conceptChange.name, resource=conceptChange.fromResourceValue, concept=conceptChange.fromConceptQname)
                            else:
                                relationship = fromConcept.relationshipToResource(fromResource, XbrlConst.elementReference)
                                if relationship is not None:
                                    if relationship.qname != XbrlConst.qnGenArc or \
                                       fromResource.qname != XbrlConst.qnGenReference:
                                        self.modelVersReport.error("vercde:invalidConceptReferenceIdentifier",
                                            _("%(action)s %(event)s fromResource %(resource)s for %(concept)s in fromDTS does not have expected link, arc, or label elements"),
                                            modelObject=conceptChange, action=conceptChange.actionId,
                                            event=conceptChange.name, resource=conceptChange.fromResourceValue, concept=conceptChange.fromConceptQname)
                                else:
                                    self.modelVersReport.error("vercde:invalidResourceIdentifier",
                                        _("%(action)s %(event)s fromResource %(resource)s does not have a reference relationship to %(concept)s in fromDTS"),
                                        modelObject=conceptChange, action=conceptChange.actionId,
                                        event=conceptChange.name, resource=conceptChange.fromResourceValue, concept=conceptChange.fromConceptQname)
                if not conceptChange.name.endswith("Delete"):
                    # events other than *Delete must identify a valid to-side
                    if toConcept is None:  # fixed: was "not toConcept is not None" (double negative)
                        self.modelVersReport.error("vercue:invalidConceptReference",
                            _("%(action)s %(event)s toConcept %(concept)s does not reference a concept in toDTS"),
                            modelObject=conceptChange, action=conceptChange.actionId,
                            event=conceptChange.name, concept=conceptChange.toConceptQname)
                    elif "Child" in conceptChange.name and \
                        not versReport.toDTS.qnameConcepts[toConcept.qname] \
                        .isTuple:
                        self.modelVersReport.error("vercue:invalidConceptReference",
                            _("%(action)s %(event)s toConcept %(concept)s must be defined as a tuple"),
                            modelObject=conceptChange, action=conceptChange.actionId,
                            event=conceptChange.name, concept=conceptChange.toConceptQname)
                    elif "Label" in conceptChange.name:
                        if toResource is None:
                            self.modelVersReport.error("vercde:invalidResourceIdentifier",
                                _("%(action)s %(event)s toResource %(resource)s for %(concept)s does not reference a resource in toDTS"),
                                modelObject=conceptChange, action=conceptChange.actionId,
                                event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
                        elif toResource.qname not in (XbrlConst.qnLinkLabel, XbrlConst.qnGenLabel):
                            self.modelVersReport.error("vercde:invalidConceptLabelIdentifier",
                                _("%(action)s %(event)s toResource %(resource)s is not a label in toDTS"),
                                modelObject=conceptChange, action=conceptChange.actionId,
                                event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
                        else:
                            relationship = toConcept.relationshipToResource(toResource, XbrlConst.conceptLabel)
                            if relationship is not None:
                                if relationship.qname != XbrlConst.qnLinkLabelArc or \
                                   relationship.parentQname != XbrlConst.qnLinkLabelLink or \
                                   toResource.qname != XbrlConst.qnLinkLabel:
                                    self.modelVersReport.error("vercde:invalidConceptLabelIdentifier",
                                        _("%(action)s %(event)s toResource %(resource)s for %(concept)s in toDTS does not have expected link, arc, or label elements"),
                                        modelObject=conceptChange, action=conceptChange.actionId,
                                        event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
                            else:
                                relationship = toConcept.relationshipToResource(toResource, XbrlConst.elementLabel)
                                if relationship is not None:
                                    if relationship.qname != XbrlConst.qnGenArc or \
                                       toResource.qname != XbrlConst.qnGenLabel:
                                        self.modelVersReport.error("vercde:invalidConceptLabelIdentifier",
                                            _("%(action)s %(event)s toResource %(resource)s for %(concept)s in toDTS does not have expected link, arc, or label elements"),
                                            modelObject=conceptChange, action=conceptChange.actionId,
                                            event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
                                else:
                                    self.modelVersReport.error("vercde:invalidConceptResourceIdentifier",
                                        _("%(action)s %(event)s toResource %(resource)s does not have a label relationship to %(concept)s in toDTS"),
                                        modelObject=conceptChange, action=conceptChange.actionId,
                                        event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
                    elif "Reference" in conceptChange.name:
                        if toResource is None:
                            self.modelVersReport.error("vercde:invalidResourceIdentifier",
                                _("%(action)s %(event)s toResource %(resource)s does not reference a resource in toDTS"),
                                modelObject=conceptChange, action=conceptChange.actionId,
                                event=conceptChange.name, resource=conceptChange.toResourceValue)
                        elif toResource.qname not in (XbrlConst.qnLinkReference, XbrlConst.qnGenReference):
                            self.modelVersReport.error("vercde:invalidConceptReferenceIdentifier",
                                _("%(action)s %(event)s toResource %(resource)s is not a reference in toDTS"),
                                modelObject=conceptChange, action=conceptChange.actionId,
                                event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
                        else:
                            relationship = toConcept.relationshipToResource(toResource, XbrlConst.conceptReference)
                            if relationship is not None:
                                if relationship.qname != XbrlConst.qnLinkReferenceArc or \
                                   relationship.parentQname != XbrlConst.qnLinkReferenceLink or \
                                   toResource.qname != XbrlConst.qnLinkReference:
                                    self.modelVersReport.error("vercde:invalidConceptReferenceIdentifier",
                                        _("%(action)s %(event)s toResource %(resource)s for %(concept)s in toDTS does not have expected link, arc, or label elements"),
                                        modelObject=conceptChange, action=conceptChange.actionId,
                                        event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
                            else:
                                relationship = toConcept.relationshipToResource(toResource, XbrlConst.elementReference)
                                if relationship is not None:
                                    if relationship.qname != XbrlConst.qnGenArc or \
                                       toResource.qname != XbrlConst.qnGenReference:
                                        self.modelVersReport.error("vercde:invalidConceptReferenceIdentifier",
                                            _("%(action)s %(event)s toResource %(resource)s for %(concept)s in toDTS does not have expected link, arc, or label elements"),
                                            modelObject=conceptChange, action=conceptChange.actionId,
                                            event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
                                else:
                                    self.modelVersReport.error("vercde:invalidConceptResourceIdentifier",
                                        _("%(action)s %(event)s toResource %(resource)s does not have a reference relationship to %(concept)s in toDTS"),
                                        modelObject=conceptChange, action=conceptChange.actionId,
                                        event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
                if fromConcept is not None and toConcept is not None:
                    # both sides present: they must be the same (after namespace
                    # rename), declared equivalent, or declared related
                    if (versReport.toDTSqname(fromConcept.qname) != toConcept.qname and
                        versReport.equivalentConcepts.get(fromConcept.qname) != toConcept.qname and
                        toConcept.qname not in versReport.relatedConcepts.get(fromConcept.qname,[])):
                        self.modelVersReport.error("vercde:invalidConceptCorrespondence",
                            _("%(action)s %(event)s fromConcept %(conceptFrom)s and toConcept %(conceptTo)s must be equivalent or related"),
                            modelObject=conceptChange, action=conceptChange.actionId,
                            event=conceptChange.name, conceptFrom=conceptChange.fromConceptQname, conceptTo=conceptChange.toConceptQname)
                if conceptChange.name.startswith("conceptAttribute") or conceptChange.name == "attributeDefinitionChange":
                    try:
                        for attr in conceptAttributeEventAttributes[conceptChange.name]:
                            customAttributeQname = conceptChange.customAttributeQname(attr)
                            if not customAttributeQname:
                                # fixed: both messages below used "$(attrName)s"
                                # which %-formatting leaves unexpanded
                                self.modelVersReport.info("arelle:invalidAttributeChange",
                                    _("%(action)s %(event)s %(attr)s %(attrName)s does not have a name"),
                                    modelObject=conceptChange, action=conceptChange.actionId,
                                    attr=attr, attrName=customAttributeQname)
                            elif customAttributeQname.namespaceURI in (None, XbrlConst.xbrli, XbrlConst.xsd):
                                self.modelVersReport.error("vercde:illegalCustomAttributeEvent",
                                    _("%(action)s %(event)s %(attr)s %(attrName)s has an invalid namespace"),
                                    modelObject=conceptChange, action=conceptChange.actionId, event=conceptChange.name,
                                    attr=attr, attrName=customAttributeQname)
                    except KeyError:
                        self.modelVersReport.info("arelle:eventNotRecognized",
                            _("%(action)s %(event)s event is not recognized"),
                            modelObject=conceptChange, action=conceptChange.actionId, event=conceptChange.name)
                if conceptChange.name == "attributeDefinitionChange":
                    # record the declared equivalence in both directions for
                    # the second pass below
                    fromAttr = conceptChange.customAttributeQname("fromCustomAttribute")
                    toAttr = conceptChange.customAttributeQname("toCustomAttribute")
                    equivalentAttributes[fromAttr] = toAttr
                    equivalentAttributes[toAttr] = fromAttr
                # fixed: tuple had "conceptPeriodTypeChange" duplicated; the
                # balance-change event (also item-only) was evidently intended
                if conceptChange.name in ("conceptPeriodTypeChange", "conceptBalanceChange"):
                    for concept in (fromConcept, toConcept):
                        if concept is not None and not concept.isItem:
                            self.modelVersReport.error("vercde:invalidItemConceptIdentifier",
                                _("%(action)s %(event)s concept %(concept)s does not reference an item concept."),
                                modelObject=conceptChange, action=conceptChange.actionId,
                                event=conceptChange.name, concept=concept.qname)
                if conceptChange.name in ("tupleContentModelChange", ):
                    for concept in (fromConcept, toConcept):
                        # fixed: checked isItem, but the event and message
                        # require a tuple concept
                        if concept is not None and not concept.isTuple:
                            self.modelVersReport.error("vercde:invalidTupleConceptIdentifier",
                                _("%(action)s %(event)s concept %(concept)s does not reference a tuple concept."),
                                modelObject=conceptChange, action=conceptChange.actionId,
                                event=conceptChange.name, concept=concept.qname)
                if conceptChange.name in schemaAttributeEventAttributes:
                    # at least one side must actually carry the changed attribute
                    attr = schemaAttributeEventAttributes[conceptChange.name]
                    if (fromConcept is not None and not fromConcept.get(attr) and
                        toConcept is not None and not toConcept.get(attr)):
                        self.modelVersReport.error("vercde:illegalSchemaAttributeChangeEvent",
                            _("%(action)s %(event)s neither concepts have a %(attribute)s attribute: %(fromConcept)s, %(toConcept)s."),
                            modelObject=conceptChange, action=conceptChange.actionId, attribute=attr,
                            event=conceptChange.name, fromConcept=fromConcept.qname, toConcept=toConcept.qname)
            # second pass: conceptAttributeChange pairs must name equivalent attributes
            for conceptChange in versReport.conceptDetailsChanges:
                if conceptChange.name == "conceptAttributeChange":
                    fromAttr = conceptChange.customAttributeQname("fromCustomAttribute")
                    toAttr = conceptChange.customAttributeQname("toCustomAttribute")
                    if (equivalentAttributes.get(fromAttr) != toAttr and
                        (fromAttr.localName != toAttr.localName or
                         (fromAttr.namespaceURI != toAttr.namespaceURI and
                          versReport.namespaceRenameFrom.get(fromAttr.namespaceURI, fromAttr.namespaceURI) != toAttr.namespaceURI))):
                        self.modelVersReport.error("vercde:invalidAttributeCorrespondence",
                            _("%(action)s %(event)s has non-equivalent attributes %(fromQname)s and %(toQname)s"),
                            modelObject=conceptChange, action=conceptChange.actionId, event=conceptChange.name,
                            fromQname=fromAttr, toQname=toAttr)
            del equivalentAttributes
        # relationship-set events: link/arc elements, roles, and the named
        # relationships themselves must exist and be used in the DTS
        for relSetChange in versReport.relationshipSetChanges:
            for relationshipSet, name in ((relSetChange.fromRelationshipSet, "fromRelationshipSet"),
                                          (relSetChange.toRelationshipSet, "toRelationshipSet")):
                if relationshipSet is not None:
                    dts = relationshipSet.dts
                    relationshipSetValid = True
                    if relationshipSet.link:
                        if (relationshipSet.link not in dts.qnameConcepts or
                            (dts.qnameConcepts[relationshipSet.link].type is not None and
                             not dts.qnameConcepts[relationshipSet.link].type.isDerivedFrom(XbrlConst.qnXlExtendedType))):
                            self.modelVersReport.error("verrelse:invalidLinkElementReferenceEvent",
                                _("%(event)s %(relSet)s link %(link)s does not reference an element in its DTS"),
                                modelObject=relSetChange, event=relSetChange.name, relSet=name,
                                link=relationshipSet.link)
                            relationshipSetValid = False
                    if relationshipSet.arc:
                        if (relationshipSet.arc not in dts.qnameConcepts or
                            (dts.qnameConcepts[relationshipSet.arc].type is not None and
                             not dts.qnameConcepts[relationshipSet.arc].type.isDerivedFrom(XbrlConst.qnXlArcType))):
                            # fixed: placeholder was "%(arc)" (missing "s" conversion)
                            self.modelVersReport.error("verrelse:invalidArcElementReferenceEvent",
                                _("%(event)s %(relSet)s arc %(arc)s does not reference an element in its DTS"),
                                modelObject=relSetChange, event=relSetChange.name, relSet=name,
                                arc=relationshipSet.arc)
                            relationshipSetValid = False
                    if relationshipSet.linkrole:
                        if not (XbrlConst.isStandardRole(relationshipSet.linkrole) or
                                relationshipSet.linkrole in relationshipSet.dts.roleTypes):
                            self.modelVersReport.error("verrelse:invalidLinkrole",
                                _("%(event)s %(relSet)s linkrole %(linkrole)s does not reference an linkrole in its DTS"),
                                modelObject=relSetChange, event=relSetChange.name, relSet=name,
                                linkrole=relationshipSet.linkrole)
                            relationshipSetValid = False
                        elif not any(linkrole == relationshipSet.linkrole
                                     for arcrole, linkrole, linkqname, arcqname in dts.baseSets.keys()):
                            self.modelVersReport.error("verrelse:invalidLinkrole",
                                _("%(event)s %(relSet)s linkrole %(linkrole)s is not used in its DTS"),
                                modelObject=relSetChange, event=relSetChange.name, relSet=name,
                                linkrole=relationshipSet.linkrole)
                            relationshipSetValid = False
                    if relationshipSet.arcrole:
                        if not (XbrlConst.isStandardArcrole(relationshipSet.arcrole) or
                                relationshipSet.arcrole in relationshipSet.dts.arcroleTypes):
                            self.modelVersReport.error("verrelse:invalidArcrole",
                                _("%(event)s %(relSet)s arcrole %(arcrole)s does not reference an arcrole in its DTS"),
                                modelObject=relSetChange, event=relSetChange.name, relSet=name,
                                arcrole=relationshipSet.arcrole)
                            relationshipSetValid = False
                        elif not any(arcrole == relationshipSet.arcrole
                                     for arcrole, linkrole, linkqname, arcqname in dts.baseSets.keys()):
                            self.modelVersReport.error("verrelse:invalidArcrole",
                                _("%(event)s %(relSet)s arcrole %(arcrole)s is not used in its DTS"),
                                modelObject=relSetChange, event=relSetChange.name, relSet=name,
                                arcrole=relationshipSet.arcrole)
                            relationshipSetValid = False
                    for relationship in relationshipSet.relationships:
                        if relationship.fromConcept is None:
                            self.modelVersReport.error("vercue:invalidConceptReference",
                                _("%(event)s %(relSet)s relationship fromConcept %(conceptFrom)s does not reference a concept in its DTS"),
                                modelObject=relSetChange, event=relSetChange.name, relSet=name,
                                conceptFrom=relationship.fromName)
                            relationshipSetValid = False
                        if relationship.toName and relationship.toConcept is None:
                            self.modelVersReport.error("vercue:invalidConceptReference",
                                _("%(event)s %(relSet)s relationship toConcept %(conceptTo)s does not reference a concept in its DTS"),
                                modelObject=relSetChange, event=relSetChange.name, relSet=name,
                                conceptTo=relationship.toName)
                            relationshipSetValid = False
                        if relationshipSetValid:
                            # only look up the actual relationship when the
                            # set's elements/roles were all valid
                            if relationship.fromRelationship is None:
                                if relationship.toName:
                                    self.modelVersReport.error("verrelse:invalidRelationshipReference",
                                        _("%(event)s %(relSet)s no relationship found from fromConcept %(conceptFrom)s to toConcept %(conceptTo)s in its DTS"),
                                        modelObject=relSetChange, event=relSetChange.name, relSet=name,
                                        conceptFrom=relationship.fromName, conceptTo=relationship.toName)
                                else:
                                    self.modelVersReport.error("verrelse:invalidRelationshipReference",
                                        _("%(event)s %(relSet)s no relationship found fromConcept %(conceptFrom)s in its DTS"),
                                        modelObject=relSetChange, event=relSetChange.name, relSet=name,
                                        conceptFrom=relationship.fromName)
        # instance-aspect events: dimensions, member concepts, and any named
        # link/arc/role constraints must resolve in the aspect's DTS
        for iaChange in versReport.instanceAspectChanges:
            for instAspects in (iaChange.fromAspects, iaChange.toAspects):
                if instAspects is not None and instAspects.aspects:
                    dimAspectElts = {}
                    for aspect in instAspects.aspects:
                        dts = aspect.modelAspects.dts
                        if (aspect.localName in ("explicitDimension", "typedDimension") and aspect.concept is None):
                            self.modelVersReport.error("vercue:invalidConceptReference",
                                _("%(event)s dimension %(dimension)s is not a concept in its DTS"),
                                modelObject=aspect, event=iaChange.name, dimension=aspect.conceptName)
                        elif aspect.localName == "explicitDimension":
                            dimConcept = aspect.concept
                            if not dimConcept.isExplicitDimension:
                                self.modelVersReport.error("verdime:invalidExplicitDimensionIdentifier",
                                    _("%(event)s dimension %(dimension)s is not an explicit dimension in its DTS"),
                                    modelObject=aspect, event=iaChange.name, dimension=aspect.conceptName)
                            if dimConcept in dimAspectElts:
                                self.modelVersReport.error("verdime:duplicateExplicitDimensionAspect",
                                    _("%(event)s dimension %(dimension)s is duplicated in a single explicitDimension element"),
                                    modelObject=(aspect, dimAspectElts[dimConcept]), event=iaChange.name, dimension=aspect.conceptName)
                            else:
                                dimAspectElts[dimConcept] = aspect
                        elif aspect.localName == "typedDimension":
                            dimConcept = aspect.concept
                            if not dimConcept.isTypedDimension:
                                self.modelVersReport.error("verdime:invalidTypedDimensionIdentifier",
                                    _("%(event)s dimension %(dimension)s is not a typed dimension in its DTS"),
                                    modelObject=aspect, event=iaChange.name, dimension=aspect.conceptName)
                            if dimConcept in dimAspectElts:
                                self.modelVersReport.error("verdime:duplicateTypedDimensionAspect",
                                    _("%(event)s dimension %(dimension)s is duplicated in a single explicitDimension element"),
                                    modelObject=(aspect, dimAspectElts[dimConcept]), event=iaChange.name, dimension=aspect.conceptName)
                            else:
                                dimAspectElts[dimConcept] = aspect
                        if aspect.localName in ("explicitDimension", "concepts"):
                            for relatedConcept in aspect.relatedConcepts:
                                conceptMdlObj = relatedConcept.concept
                                if conceptMdlObj is None or not conceptMdlObj.isItem:
                                    self.modelVersReport.error("vercue:invalidConceptReference",
                                        _("%(event)s concept %(concept)s is not an item in its DTS"),
                                        modelObject=aspect, event=iaChange.name, concept=relatedConcept.conceptName)
                                if relatedConcept.arcrole is not None:
                                    if (not XbrlConst.isStandardArcrole(relatedConcept.arcrole) and
                                        relatedConcept.arcrole not in dts.arcroleTypes):
                                        self.modelVersReport.error("verdime:invalidURI",
                                            _("%(event)s arcrole %(arcrole)s is not defined in its DTS"),
                                            modelObject=aspect, event=iaChange.name, arcrole=relatedConcept.arcrole)
                                    elif not any(arcrole == relatedConcept.arcrole
                                                 for arcrole, linkrole, linkqname, arcqname in dts.baseSets.keys()):
                                        # fixed: keyword was "linkrole=" but the message
                                        # formats "%(arcrole)s" (KeyError at log time)
                                        self.modelVersReport.error("verdime:invalidURI",
                                            _("%(event)s arcrole %(arcrole)s is not used in its DTS"),
                                            modelObject=aspect, event=iaChange.name, arcrole=relatedConcept.arcrole)
                                if relatedConcept.linkrole is not None:
                                    if (relatedConcept.linkrole != "http://www.xbrl.org/2003/role/link" and
                                        relatedConcept.linkrole not in dts.roleTypes):
                                        self.modelVersReport.error("verdime:invalidURI",
                                            _("%(event)s linkrole %(linkrole)s is not defined in its DTS"),
                                            modelObject=aspect, event=iaChange.name, linkrole=relatedConcept.linkrole)
                                    elif not any(linkrole == relatedConcept.linkrole
                                                 for arcrole, linkrole, linkqname, arcqname in dts.baseSets.keys()):
                                        self.modelVersReport.error("verdime:invalidURI",
                                            _("%(event)s linkrole %(linkrole)s is not used in its DTS"),
                                            modelObject=aspect, event=iaChange.name, linkrole=relatedConcept.linkrole)
                                if (relatedConcept.arc is not None and
                                    (relatedConcept.arc not in dts.qnameConcepts or
                                     (dts.qnameConcepts[relatedConcept.arc].type is not None and
                                      not dts.qnameConcepts[relatedConcept.arc].type.isDerivedFrom(XbrlConst.qnXlArcType)))):
                                    self.modelVersReport.error("verdime:invalidArcElement",
                                        _("%(event)s arc %(arc)s is not defined as an arc in its DTS"),
                                        modelObject=aspect, event=iaChange.name, arc=relatedConcept.arc)
                                if (relatedConcept.link is not None and
                                    (relatedConcept.link not in dts.qnameConcepts or
                                     (dts.qnameConcepts[relatedConcept.link].type is not None and
                                      not dts.qnameConcepts[relatedConcept.link].type.isDerivedFrom(XbrlConst.qnXlExtendedType)))):
                                    self.modelVersReport.error("verdime:invalidLinkElement",
                                        _("%(event)s link %(link)s is not defined in its DTS"),
                                        modelObject=aspect, event=iaChange.name, link=relatedConcept.link)
        self.close()
1c45bee0b72f7290f98a152d2fd4047f74e16502 | 8,482 | py | Python | inbm/dispatcher-agent/dispatcher/fota/fota.py | intel/intel-inb-manageability | cdb17765120857fd41cacb838d6ee6e34e1f5047 | [
"Apache-2.0"
] | 5 | 2021-12-13T21:19:31.000Z | 2022-01-18T18:29:43.000Z | inbm/dispatcher-agent/dispatcher/fota/fota.py | intel/intel-inb-manageability | cdb17765120857fd41cacb838d6ee6e34e1f5047 | [
"Apache-2.0"
] | 45 | 2021-12-30T17:21:09.000Z | 2022-03-29T22:47:32.000Z | inbm/dispatcher-agent/dispatcher/fota/fota.py | intel/intel-inb-manageability | cdb17765120857fd41cacb838d6ee6e34e1f5047 | [
"Apache-2.0"
] | 4 | 2022-01-26T17:42:54.000Z | 2022-03-30T04:48:04.000Z | """
FOTA update tool which is called from the dispatcher during installation
Copyright (C) 2017-2022 Intel Corporation
SPDX-License-Identifier: Apache-2.0
"""
import logging
import os
import platform
from threading import Timer
from typing import Any, Optional, Mapping
from future.moves.urllib.parse import urlparse
from inbm_common_lib.exceptions import UrlSecurityException
from inbm_common_lib.utility import canonicalize_uri
from inbm_common_lib.constants import REMOTE_SOURCE
from .constants import *
from .fota_error import FotaError
from .manifest import parse_tool_options, parse_guid, parse_hold_reboot_flag
from .os_factory import OsFactory, OsType
from ..common import dispatcher_state
from ..common.result_constants import *
from ..constants import UMASK_OTA
from ..dispatcher_callbacks import DispatcherCallbacks
from ..dispatcher_exception import DispatcherException
from ..downloader import download
from ..packagemanager.local_repo import DirectoryRepo
logger = logging.getLogger(__name__)
class FOTA:
"""AKA FOTA Tool
An instance of this class will be called from the
dispatcher if the requested type of update is FOTA
"""
def __init__(self,
parsed_manifest: Mapping[str, Optional[Any]],
repo_type: str,
dispatcher_callbacks: DispatcherCallbacks) -> None:
"""Base class constructor for variable assignment, to send telemetry info and create a new
directory if no repo is present
@param parsed_manifest: Parsed parameters from manifest
@param repo_type: OTA source location -> local or remote
@param dispatcher_callbacks: DispatcherCallbacks instance
"""
logger.debug(f"parsed_manifest: {parsed_manifest}")
self._ota_element = parsed_manifest.get('resource')
logger.debug(f"ota_element: {self._ota_element}")
self._dispatcher_callbacks = dispatcher_callbacks
self._uri: Optional[str] = parsed_manifest['uri']
self._repo_type = repo_type
repo_path: Optional[str]
"""If repo_type=local, then use path and not URI"""
if self._repo_type == REMOTE_SOURCE:
if not self._uri:
raise FotaError("missing URI.")
else:
self._pkg_filename = os.path.basename(urlparse(self._uri).path)
repo_path = None
else:
if self._ota_element is None or 'path' not in self._ota_element:
raise FotaError('attempting to use local repo for FOTA but no path specified')
self._pkg_filename = os.path.basename(self._ota_element['path'])
path = self._ota_element.get('path', None)
logger.debug(f"path: {path}")
if path is None:
repo_path = None
else:
repo_path = os.path.dirname(path)
logger.debug(f"repo_path: {repo_path}")
self.__signature = parsed_manifest['signature']
self._hash_algorithm = parsed_manifest['hash_algorithm']
self._username = parsed_manifest['username']
self._password = parsed_manifest['password']
if self._dispatcher_callbacks is None:
raise FotaError("dispatcher_callbacks not specified in FOTA constructor")
self._dispatcher_callbacks.broker_core.telemetry("Firmware Update Tool launched")
if repo_path:
logger.debug("Using manifest specified repo path")
self._repo = DirectoryRepo(repo_path)
else:
logger.debug("Using default repo path")
self._repo = DirectoryRepo(CACHE)
def install(self) -> Result:
    """Check platform compatibility, then download and install the firmware.

    On failure the downloaded package is removed from the repo and an
    INSTALL_FAILURE result is returned.
    @return: (Result) containing status code and message
    """
    logger.debug("")
    result: Result = Result()
    hold_reboot = False
    try:
        factory = OsFactory.get_factory(
            self._verify_os_supported(), self._ota_element, self._dispatcher_callbacks)
        bios_vendor, platform_product = factory.create_upgrade_checker().check()

        if self._repo_type.lower() == REMOTE_SOURCE:
            # Some FOTA commands carry a local path instead of a URI (see the
            # constructor), so the URI presence must be re-checked here.
            if self._uri is None:
                raise FotaError(
                    "internal error: _uri uninitialized in Fota.install with download requested in manifest")
            download(dispatcher_callbacks=self._dispatcher_callbacks,
                     uri=canonicalize_uri(self._uri),
                     repo=self._repo,
                     umask=UMASK_OTA,
                     username=self._username,
                     password=self._password)
        else:
            logger.debug("Skipping FOTA upgradable check for local repo")

        if self._ota_element is None:
            raise FotaError("missing ota_element")
        tool_options = parse_tool_options(self._ota_element)
        logger.debug(f"tool_options: {tool_options}")
        guid = parse_guid(self._ota_element)
        logger.debug(f"guid: {guid}")
        hold_reboot = parse_hold_reboot_flag(self._ota_element)
        logger.debug(f"holdReboot: {hold_reboot}; pkg_filename: {self._pkg_filename}")

        installer = factory.create_installer(self._repo, FOTA_CONF_PATH, FOTA_CONF_SCHEMA_LOC)
        installer.install(guid=guid,
                          tool_options=tool_options,
                          pkg_filename=self._pkg_filename,
                          signature=self.__signature,
                          hash_algorithm=self._hash_algorithm,
                          bios_vendor=bios_vendor,
                          platform_product=platform_product)

        def trigger_reboot() -> None:
            """Reboot the platform via the OS-specific rebooter."""
            factory.create_rebooter().reboot()

        if not hold_reboot:
            logger.debug("")
            dispatcher_state.write_dispatcher_state_to_state_file({'restart_reason': "fota"})
            # Slight delay so the success response can be sent before reboot.
            Timer(0.1, trigger_reboot).start()
            result = COMMAND_SUCCESS
        else:
            status = 'Reboot on hold after Firmware update...'
            dispatcher_state.write_dispatcher_state_to_state_file({'restart_reason': "pota"})
            logger.debug(status)
            self._dispatcher_callbacks.broker_core.telemetry(status)
    except (DispatcherException, FotaError, UrlSecurityException, ValueError, FileNotFoundError) as e:
        error = 'Firmware Update Aborted: ' + str(e)
        logger.error(error)
        self._dispatcher_callbacks.broker_core.telemetry(error)
        result = INSTALL_FAILURE
        self._repo.delete(self._pkg_filename)
        # In POTA, mender file needs to be deleted also.
        if hold_reboot:
            self._repo.delete_all()
    finally:
        # NOTE(review): nesting below is reconstructed from whitespace-mangled
        # source; clear_dispatcher_state() is assumed to be failure-path only.
        if result == COMMAND_SUCCESS:
            status = 'Firmware update in process...'
        else:
            status = 'Firmware Update Aborted'
            dispatcher_state.clear_dispatcher_state()
        logger.debug('Firmware update status: ' + status)
        self._dispatcher_callbacks.broker_core.telemetry(status)
    return result
@staticmethod
def _verify_os_supported():
    """Return the current OS type if it is supported.

    @return: platform.system() string when it is a member of OsType
    @raise ValueError: when the detected OS is not supported
    """
    logger.debug("")
    detected = platform.system()
    logger.debug(f"os_type: {detected}")
    if detected not in OsType.__members__:
        logger.error("Unsupported OS type.")
        raise ValueError('Unsupported OS type.')
    return detected
def check(self) -> None:
    """Validate the manifest prior to performing a FOTA."""
    logger.debug("")
    os_name = self._verify_os_supported()
    factory = OsFactory.get_factory(os_name, self._ota_element, self._dispatcher_callbacks)
    factory.create_upgrade_checker().check()
| 42.838384 | 113 | 0.630512 |
import logging
import os
import platform
from threading import Timer
from typing import Any, Optional, Mapping
from future.moves.urllib.parse import urlparse
from inbm_common_lib.exceptions import UrlSecurityException
from inbm_common_lib.utility import canonicalize_uri
from inbm_common_lib.constants import REMOTE_SOURCE
from .constants import *
from .fota_error import FotaError
from .manifest import parse_tool_options, parse_guid, parse_hold_reboot_flag
from .os_factory import OsFactory, OsType
from ..common import dispatcher_state
from ..common.result_constants import *
from ..constants import UMASK_OTA
from ..dispatcher_callbacks import DispatcherCallbacks
from ..dispatcher_exception import DispatcherException
from ..downloader import download
from ..packagemanager.local_repo import DirectoryRepo
logger = logging.getLogger(__name__)
class FOTA:
    """Performs a Firmware Over The Air (FOTA) update."""

    def __init__(self,
                 parsed_manifest: Mapping[str, Optional[Any]],
                 repo_type: str,
                 dispatcher_callbacks: DispatcherCallbacks) -> None:
        """Set up the update from a parsed manifest.

        @param parsed_manifest: manifest values (uri, signature, credentials, ...)
        @param repo_type: remote vs. local package source
        @param dispatcher_callbacks: callbacks used for telemetry
        @raise FotaError: on missing URI, missing local path, or missing callbacks
        """
        logger.debug(f"parsed_manifest: {parsed_manifest}")
        self._ota_element = parsed_manifest.get('resource')
        logger.debug(f"ota_element: {self._ota_element}")
        self._dispatcher_callbacks = dispatcher_callbacks
        self._uri: Optional[str] = parsed_manifest['uri']
        self._repo_type = repo_type

        repo_path: Optional[str]
        if self._repo_type == REMOTE_SOURCE:
            if not self._uri:
                raise FotaError("missing URI.")
            self._pkg_filename = os.path.basename(urlparse(self._uri).path)
            repo_path = None
        else:
            if self._ota_element is None or 'path' not in self._ota_element:
                raise FotaError('attempting to use local repo for FOTA but no path specified')
            self._pkg_filename = os.path.basename(self._ota_element['path'])
            path = self._ota_element.get('path', None)
            logger.debug(f"path: {path}")
            repo_path = None if path is None else os.path.dirname(path)
        logger.debug(f"repo_path: {repo_path}")

        self.__signature = parsed_manifest['signature']
        self._hash_algorithm = parsed_manifest['hash_algorithm']
        self._username = parsed_manifest['username']
        self._password = parsed_manifest['password']

        if self._dispatcher_callbacks is None:
            raise FotaError("dispatcher_callbacks not specified in FOTA constructor")
        self._dispatcher_callbacks.broker_core.telemetry("Firmware Update Tool launched")

        if repo_path:
            logger.debug("Using manifest specified repo path")
            self._repo = DirectoryRepo(repo_path)
        else:
            logger.debug("Using default repo path")
            self._repo = DirectoryRepo(CACHE)

    def install(self) -> Result:
        """Check platform versions, then download and install the firmware.

        Cleans up the downloaded package on failure.
        @return: (Result) containing status code and message
        """
        logger.debug("")
        outcome: Result = Result()
        hold_reboot = False
        try:
            factory = OsFactory.get_factory(
                self._verify_os_supported(), self._ota_element, self._dispatcher_callbacks)
            bios_vendor, platform_product = factory.create_upgrade_checker().check()

            if self._repo_type.lower() == REMOTE_SOURCE:
                # Some FOTA manifests carry a path rather than a URI, so the
                # URI must be validated again before downloading.
                if self._uri is None:
                    raise FotaError(
                        "internal error: _uri uninitialized in Fota.install with download requested in manifest")
                download(dispatcher_callbacks=self._dispatcher_callbacks,
                         uri=canonicalize_uri(self._uri),
                         repo=self._repo,
                         umask=UMASK_OTA,
                         username=self._username,
                         password=self._password)
            else:
                logger.debug("Skipping FOTA upgradable check for local repo")

            if self._ota_element is None:
                raise FotaError("missing ota_element")
            tool_options = parse_tool_options(self._ota_element)
            logger.debug(f"tool_options: {tool_options}")
            guid = parse_guid(self._ota_element)
            logger.debug(f"guid: {guid}")
            hold_reboot = parse_hold_reboot_flag(self._ota_element)
            logger.debug(f"holdReboot: {hold_reboot}; pkg_filename: {self._pkg_filename}")

            factory.create_installer(self._repo, FOTA_CONF_PATH, FOTA_CONF_SCHEMA_LOC).\
                install(guid=guid,
                        tool_options=tool_options,
                        pkg_filename=self._pkg_filename,
                        signature=self.__signature,
                        hash_algorithm=self._hash_algorithm,
                        bios_vendor=bios_vendor,
                        platform_product=platform_product)

            def trigger_reboot() -> None:
                """Reboot via the OS-specific rebooter."""
                factory.create_rebooter().reboot()

            if not hold_reboot:
                logger.debug("")
                dispatcher_state.write_dispatcher_state_to_state_file(
                    {'restart_reason': "fota"})
                Timer(0.1, trigger_reboot).start()
                outcome = COMMAND_SUCCESS
            else:
                status = 'Reboot on hold after Firmware update...'
                dispatcher_state.write_dispatcher_state_to_state_file(
                    {'restart_reason': "pota"})
                logger.debug(status)
                self._dispatcher_callbacks.broker_core.telemetry(status)
        except (DispatcherException, FotaError, UrlSecurityException, ValueError, FileNotFoundError) as e:
            error = 'Firmware Update Aborted: ' + str(e)
            logger.error(error)
            self._dispatcher_callbacks.broker_core.telemetry(error)
            outcome = INSTALL_FAILURE
            self._repo.delete(self._pkg_filename)
            # In a POTA flow the mender file must be deleted as well.
            if hold_reboot:
                self._repo.delete_all()
        finally:
            # NOTE(review): nesting reconstructed from whitespace-mangled
            # source; clear_dispatcher_state() assumed failure-path only.
            if outcome == COMMAND_SUCCESS:
                status = 'Firmware update in process...'
            else:
                status = 'Firmware Update Aborted'
                dispatcher_state.clear_dispatcher_state()
            logger.debug('Firmware update status: ' + status)
            self._dispatcher_callbacks.broker_core.telemetry(status)
        return outcome

    @staticmethod
    def _verify_os_supported():
        """Return the current OS type if supported, else raise ValueError."""
        logger.debug("")
        detected = platform.system()
        logger.debug(f"os_type: {detected}")
        if detected in OsType.__members__:
            return detected
        logger.error("Unsupported OS type.")
        raise ValueError('Unsupported OS type.')

    def check(self) -> None:
        """Validate the manifest before FOTA."""
        logger.debug("")
        OsFactory.get_factory(
            self._verify_os_supported(), self._ota_element,
            self._dispatcher_callbacks).create_upgrade_checker().check()
| true | true |
1c45bf70eca6a992410fb3243e168ae272e4fd35 | 1,699 | py | Python | coding_interviews/elements_of_programming_interview/delete_duplicates_from_a_sorted_array.py | LeandroTk/Algorithms | 569ed68eba3eeff902f8078992099c28ce4d7cd6 | [
"MIT"
] | 205 | 2018-12-01T17:49:49.000Z | 2021-12-22T07:02:27.000Z | coding_interviews/elements_of_programming_interview/delete_duplicates_from_a_sorted_array.py | LeandroTk/Algorithms | 569ed68eba3eeff902f8078992099c28ce4d7cd6 | [
"MIT"
] | 2 | 2020-01-01T16:34:29.000Z | 2020-04-26T19:11:13.000Z | coding_interviews/elements_of_programming_interview/delete_duplicates_from_a_sorted_array.py | LeandroTk/Algorithms | 569ed68eba3eeff902f8078992099c28ce4d7cd6 | [
"MIT"
] | 50 | 2018-11-28T20:51:36.000Z | 2021-11-29T04:08:25.000Z | # input: [2,3,5,5,7,11,11,11,13]
# output: [2,3,5,7,11,13,0,0,0]
# input: [-2,-2,1]
# output: [-2,1,0]
# input: [0,0,1,1]
# output: [0,1,0,0]
'''
result = []
counter = {}
loop input
is not in the counter
counter[number] << True
result << number
n = len(input) - len(result)
loop n
result << 0
return result
Space: O(2N) = O(N)
Runtime: O(N)
'''
def delete_duplicates(numbers):
    """Return [deduped, count] for a sorted list.

    ``deduped`` keeps the first occurrence of each value and is padded with
    zeros so its length equals ``len(numbers)``; ``count`` is the number of
    distinct values.  O(n) time, O(n) extra space.

    Improvement: uses a ``set`` for membership instead of a dict abused as a
    set (``counter_mapper[number] = True``), and derives the count from the
    result length instead of a manual counter.
    """
    if not numbers:
        return [[], 0]
    seen = set()        # values already emitted
    result = []
    for number in numbers:
        if number not in seen:
            seen.add(number)
            result.append(number)
    unique_count = len(result)
    # Pad with zeros so the output length matches the input length.
    result.extend([0] * (len(numbers) - unique_count))
    return [result, unique_count]
def test(input, expect):
    """Print True when delete_duplicates(input) equals expect."""
    print(delete_duplicates(input) == expect)


# Exercise the copying variant over the sample cases.
for _given, _expected in [
    ([2, 3, 5, 5, 7, 11, 11, 11, 13], [[2, 3, 5, 7, 11, 13, 0, 0, 0], 6]),
    ([-2, -2, 1], [[-2, 1, 0], 2]),
    ([0, 0, 1, 1], [[0, 1, 0, 0], 2]),
    ([], [[], 0]),
]:
    test(_given, _expected)
def delete_duplicates_2(numbers):
    """In-place variant: compact unique values of a sorted list to the front.

    Remaining slots are zero-filled.  Returns [numbers, count] where count is
    the number of distinct values.  O(n) time, O(1) extra space.
    """
    if not numbers:
        return [[], 0]
    write = 1
    total = len(numbers)
    for read in range(1, total):
        if numbers[read] != numbers[read - 1]:
            numbers[write] = numbers[read]
            write += 1
    # Zero out everything past the compacted prefix.
    for tail in range(write, total):
        numbers[tail] = 0
    return [numbers, write]
def test_2(input, expect):
    """Print True when delete_duplicates_2(input) equals expect.

    Bug fix: this previously called delete_duplicates, so the in-place
    variant was never actually exercised.
    """
    print(delete_duplicates_2(input) == expect)


test_2([2,3,5,5,7,11,11,11,13], [[2,3,5,7,11,13,0,0,0],6])
test_2([-2,-2,1], [[-2,1,0],2])
test_2([0,0,1,1], [[0,1,0,0],2])
test_2([], [[],0])
| 20.22619 | 58 | 0.566215 |
def delete_duplicates(numbers):
    """Return [deduped, count] for a sorted list (zeros pad the tail)."""
    if not numbers:
        return [[], 0]
    kept = []
    seen_values = {}
    for value in numbers:
        if value in seen_values:
            continue
        seen_values[value] = True
        kept.append(value)
    unique_total = len(kept)
    kept += [0] * (len(numbers) - unique_total)
    return [kept, unique_total]
def test(input, expect):
    """Print whether delete_duplicates(input) matches expect."""
    print(delete_duplicates(input) == expect)


_cases = [
    ([2, 3, 5, 5, 7, 11, 11, 11, 13], [[2, 3, 5, 7, 11, 13, 0, 0, 0], 6]),
    ([-2, -2, 1], [[-2, 1, 0], 2]),
    ([0, 0, 1, 1], [[0, 1, 0, 0], 2]),
    ([], [[], 0]),
]
for _inp, _exp in _cases:
    test(_inp, _exp)
def delete_duplicates_2(numbers):
    """In-place dedupe of a sorted list; zero-fills the tail.

    Returns [numbers, count] with count = number of distinct values.
    """
    if not numbers:
        return [[], 0]
    count = 1
    n = len(numbers)
    for i in range(1, n):
        if numbers[i - 1] != numbers[i]:
            numbers[count] = numbers[i]
            count += 1
    for j in range(count, n):
        numbers[j] = 0
    return [numbers, count]
def test_2(input, expect):
    """Print whether delete_duplicates_2(input) matches expect.

    Bug fix: previously invoked delete_duplicates, leaving the in-place
    implementation untested.
    """
    print(delete_duplicates_2(input) == expect)


test_2([2,3,5,5,7,11,11,11,13], [[2,3,5,7,11,13,0,0,0],6])
test_2([-2,-2,1], [[-2,1,0],2])
test_2([0,0,1,1], [[0,1,0,0],2])
test_2([], [[],0])
| true | true |
1c45bfbfe06e66c030a706f0763fdf1865d626d3 | 1,904 | py | Python | map/views.py | alzseven/djeju | 5aade103dd97999dd7b5f97c461aeccbfb0ea23e | [
"MIT"
] | null | null | null | map/views.py | alzseven/djeju | 5aade103dd97999dd7b5f97c461aeccbfb0ea23e | [
"MIT"
] | 2 | 2021-06-04T23:32:09.000Z | 2021-06-10T19:39:20.000Z | map/views.py | alzseven/djeju | 5aade103dd97999dd7b5f97c461aeccbfb0ea23e | [
"MIT"
] | null | null | null | from django.shortcuts import render
from django.template import Context
import json
import requests
from map.models import Hospitals
from django.contrib.gis.geos import fromstr
from django.contrib.gis.db.models.functions import Distance
# Create your views here.
def maskmap(request):
    """Render the mask-store map for the client's location and zoom level.

    Queries the public corona19-masks API for stores within a radius derived
    from the map zoom level (capped at 5 km).
    """
    lat = request.GET.get('lat')
    lng = request.GET.get('lng')
    level = int(request.GET.get("level"))

    # Map zoom level -> search radius in meters.
    if 0 < level < 5:
        radius = 125 * 2 ** (level + 1)
    elif level >= 5:
        radius = 5000
    else:
        radius = 0  # invalid level

    query = "lat=" + str(lat) + "&lng=" + str(lng) + "&m=" + str(radius)
    url = "https://8oi9s0nnth.apigw.ntruss.com/corona19-masks/v1/storesByGeo/json?" + query
    stores_json = requests.get(url).text
    payload = Context({
        "lat": float(lat),
        "lng": float(lng),
        "lvl": int(level),
        "strdata": str(stores_json),
    })
    return render(request, 'map/maskstore.html', {'strdata': payload})
def hospmap(request):
    """Render the hospital map for the client's location and zoom level.

    Uses a GeoDjango distance lookup to select hospitals within a radius
    derived from the map zoom level (capped at 10 km).
    """
    lat = request.GET.get('lat')
    lng = request.GET.get('lng')
    level = int(request.GET.get("level"))

    # Map zoom level -> search radius in meters.
    if 0 < level < 5:
        radius = 250 * 2 ** (level + 1)
    elif level >= 5:
        radius = 10000  # TODO: set new max
    else:
        radius = 0  # invalid level

    here = fromstr(f'POINT({float(lng)} {float(lat)})', srid=4326)
    nearby = Hospitals.objects.filter(location__distance_lte=(here, radius)) \
        .values('latitude', 'longtitude', 'yadmNm', 'hospTyTpCd', 'telno',
                'adtFrDd', 'isReliefhos', 'isInspect', 'isTriage')
    payload = Context({
        "lat": float(lat),
        "lng": float(lng),
        "lvl": int(level),
        "hosdata": json.dumps(list(nearby), ensure_ascii=False, default=str),
    })
    # TODO: filtering at view?
    return render(request, 'map/hospital.html', {'data': payload})
| 27.594203 | 117 | 0.605567 | from django.shortcuts import render
from django.template import Context
import json
import requests
from map.models import Hospitals
from django.contrib.gis.geos import fromstr
from django.contrib.gis.db.models.functions import Distance
def maskmap(request):
    """Render the mask-store map for the client's location and zoom level.

    Bug fix: the request string was assigned to ``Reqtxt`` but read back as
    ``apiReqtxt``, raising NameError on every call; the variable is now
    assigned and used under one name.
    """
    cur_lat = request.GET.get('lat')
    cur_lng = request.GET.get('lng')
    lvl = int(request.GET.get("level"))
    # Map zoom level -> search radius in meters (capped at 5 km).
    dis = 0
    if(lvl>0 and lvl<5):
        dis = 125 * 2**(lvl+1)
    elif(lvl>=5):
        dis = 5000
    apiReqtxt = "lat="+ str(cur_lat) + "&lng=" + str(cur_lng) + "&m=" + str(dis)
    url = "https://8oi9s0nnth.apigw.ntruss.com/corona19-masks/v1/storesByGeo/json?" + apiReqtxt
    result = requests.get(url).text
    data = Context(
        {"lat":float(cur_lat),
         "lng":float(cur_lng),
         "lvl":int(lvl),
         "strdata":str(result)
         })
    return render(request, 'map/maskstore.html', {'strdata':data})
def hospmap(request):
    """Render the hospital map for the client's location and zoom level.

    Bug fix: the point was assigned to ``r_location`` but the queryset used
    ``user_location``, raising NameError on every call; the variable is now
    assigned and used under one name.
    """
    cur_lat = request.GET.get('lat')
    cur_lng = request.GET.get('lng')
    lvl = int(request.GET.get("level"))
    # Map zoom level -> search radius in meters (capped at 10 km).
    dis = 0
    if(lvl>0 and lvl<5):
        dis = 250 * 2**(lvl+1)
    elif(lvl>=5):
        dis = 10000
    user_location = fromstr(f'POINT({float(cur_lng)} {float(cur_lat)})', srid=4326)
    qs = Hospitals.objects.filter(location__distance_lte=(user_location, dis))\
        .values('latitude','longtitude','yadmNm','hospTyTpCd','telno','adtFrDd','isReliefhos','isInspect','isTriage')
    data = Context(
        {"lat":float(cur_lat),
         "lng":float(cur_lng),
         "lvl":int(lvl),
         "hosdata":json.dumps(list(qs), ensure_ascii=False, default=str)
         })
    return render(request, 'map/hospital.html', {'data':data})
| true | true |
1c45c0b5f0f1b4ac58ff0d930371bca1e8a86c2c | 31,428 | py | Python | boto/gs/key.py | dreamhost/boto | 57eaacfc66acd7083641ef504857786a12e330ff | [
"MIT"
] | null | null | null | boto/gs/key.py | dreamhost/boto | 57eaacfc66acd7083641ef504857786a12e330ff | [
"MIT"
] | null | null | null | boto/gs/key.py | dreamhost/boto | 57eaacfc66acd7083641ef504857786a12e330ff | [
"MIT"
] | null | null | null | # Copyright 2010 Google Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import base64
import binascii
import os
import re
import StringIO
from boto.exception import BotoClientError
from boto.s3.key import Key as S3Key
from boto.s3.keyfile import KeyFile
class Key(S3Key):
"""
Represents a key (object) in a GS bucket.
:ivar bucket: The parent :class:`boto.gs.bucket.Bucket`.
:ivar name: The name of this Key object.
:ivar metadata: A dictionary containing user metadata that you
wish to store with the object or that has been retrieved from
an existing object.
:ivar cache_control: The value of the `Cache-Control` HTTP header.
:ivar content_type: The value of the `Content-Type` HTTP header.
:ivar content_encoding: The value of the `Content-Encoding` HTTP header.
:ivar content_disposition: The value of the `Content-Disposition` HTTP
header.
:ivar content_language: The value of the `Content-Language` HTTP header.
:ivar etag: The `etag` associated with this object.
:ivar last_modified: The string timestamp representing the last
time this object was modified in GS.
:ivar owner: The ID of the owner of this object.
:ivar storage_class: The storage class of the object. Currently, one of:
STANDARD | DURABLE_REDUCED_AVAILABILITY.
:ivar md5: The MD5 hash of the contents of the object.
:ivar size: The size, in bytes, of the object.
:ivar generation: The generation number of the object.
:ivar meta_generation: The generation number of the object metadata.
:ivar encrypted: Whether the object is encrypted while at rest on
the server.
"""
generation = None
meta_generation = None
def endElement(self, name, value, connection):
if name == 'Key':
self.name = value
elif name == 'ETag':
self.etag = value
elif name == 'IsLatest':
if value == 'true':
self.is_latest = True
else:
self.is_latest = False
elif name == 'LastModified':
self.last_modified = value
elif name == 'Size':
self.size = int(value)
elif name == 'StorageClass':
self.storage_class = value
elif name == 'Owner':
pass
elif name == 'VersionId':
self.version_id = value
elif name == 'Generation':
self.generation = value
elif name == 'MetaGeneration':
self.meta_generation = value
else:
setattr(self, name, value)
def handle_version_headers(self, resp, force=False):
self.meta_generation = resp.getheader('x-goog-metageneration', None)
self.generation = resp.getheader('x-goog-generation', None)
def get_file(self, fp, headers=None, cb=None, num_cb=10,
torrent=False, version_id=None, override_num_retries=None,
response_headers=None):
query_args = None
if self.generation:
query_args = ['generation=%s' % self.generation]
self._get_file_internal(fp, headers=headers, cb=cb, num_cb=num_cb,
override_num_retries=override_num_retries,
response_headers=response_headers,
query_args=query_args)
def delete(self):
return self.bucket.delete_key(self.name, version_id=self.version_id,
generation=self.generation)
def add_email_grant(self, permission, email_address):
"""
Convenience method that provides a quick way to add an email grant to a
key. This method retrieves the current ACL, creates a new grant based on
the parameters passed in, adds that grant to the ACL and then PUT's the
new ACL back to GS.
:type permission: string
:param permission: The permission being granted. Should be one of:
READ|FULL_CONTROL
See http://code.google.com/apis/storage/docs/developer-guide.html#authorization
for more details on permissions.
:type email_address: string
:param email_address: The email address associated with the Google
account to which you are granting the permission.
"""
acl = self.get_acl()
acl.add_email_grant(permission, email_address)
self.set_acl(acl)
def add_user_grant(self, permission, user_id):
"""
Convenience method that provides a quick way to add a canonical user
grant to a key. This method retrieves the current ACL, creates a new
grant based on the parameters passed in, adds that grant to the ACL and
then PUT's the new ACL back to GS.
:type permission: string
:param permission: The permission being granted. Should be one of:
READ|FULL_CONTROL
See http://code.google.com/apis/storage/docs/developer-guide.html#authorization
for more details on permissions.
:type user_id: string
:param user_id: The canonical user id associated with the GS account to
which you are granting the permission.
"""
acl = self.get_acl()
acl.add_user_grant(permission, user_id)
self.set_acl(acl)
def add_group_email_grant(self, permission, email_address, headers=None):
"""
Convenience method that provides a quick way to add an email group
grant to a key. This method retrieves the current ACL, creates a new
grant based on the parameters passed in, adds that grant to the ACL and
then PUT's the new ACL back to GS.
:type permission: string
:param permission: The permission being granted. Should be one of:
READ|FULL_CONTROL
See http://code.google.com/apis/storage/docs/developer-guide.html#authorization
for more details on permissions.
:type email_address: string
:param email_address: The email address associated with the Google
Group to which you are granting the permission.
"""
acl = self.get_acl(headers=headers)
acl.add_group_email_grant(permission, email_address)
self.set_acl(acl, headers=headers)
def add_group_grant(self, permission, group_id):
"""
Convenience method that provides a quick way to add a canonical group
grant to a key. This method retrieves the current ACL, creates a new
grant based on the parameters passed in, adds that grant to the ACL and
then PUT's the new ACL back to GS.
:type permission: string
:param permission: The permission being granted. Should be one of:
READ|FULL_CONTROL
See http://code.google.com/apis/storage/docs/developer-guide.html#authorization
for more details on permissions.
:type group_id: string
:param group_id: The canonical group id associated with the Google
Groups account you are granting the permission to.
"""
acl = self.get_acl()
acl.add_group_grant(permission, group_id)
self.set_acl(acl)
def set_contents_from_file(self, fp, headers=None, replace=True,
cb=None, num_cb=10, policy=None, md5=None,
res_upload_handler=None, size=None, rewind=False,
if_generation=None):
"""
Store an object in GS using the name of the Key object as the
key in GS and the contents of the file pointed to by 'fp' as the
contents.
:type fp: file
:param fp: the file whose contents are to be uploaded
:type headers: dict
:param headers: additional HTTP headers to be sent with the PUT request.
:type replace: bool
:param replace: If this parameter is False, the method will first check
to see if an object exists in the bucket with the same key. If it
does, it won't overwrite it. The default value is True which will
overwrite the object.
:type cb: function
:param cb: a callback function that will be called to report
progress on the upload. The callback should accept two integer
parameters, the first representing the number of bytes that have
been successfully transmitted to GS and the second representing the
total number of bytes that need to be transmitted.
:type num_cb: int
:param num_cb: (optional) If a callback is specified with the cb
parameter, this parameter determines the granularity of the callback
by defining the maximum number of times the callback will be called
during the file transfer.
:type policy: :class:`boto.gs.acl.CannedACLStrings`
:param policy: A canned ACL policy that will be applied to the new key
in GS.
:type md5: A tuple containing the hexdigest version of the MD5 checksum
of the file as the first element and the Base64-encoded version of
the plain checksum as the second element. This is the same format
returned by the compute_md5 method.
:param md5: If you need to compute the MD5 for any reason prior to
upload, it's silly to have to do it twice so this param, if present,
will be used as the MD5 values of the file. Otherwise, the checksum
will be computed.
:type res_upload_handler: ResumableUploadHandler
:param res_upload_handler: If provided, this handler will perform the
upload.
:type size: int
:param size: (optional) The Maximum number of bytes to read from
the file pointer (fp). This is useful when uploading
a file in multiple parts where you are splitting the
file up into different ranges to be uploaded. If not
specified, the default behaviour is to read all bytes
from the file pointer. Less bytes may be available.
Notes:
1. The "size" parameter currently cannot be used when
a resumable upload handler is given but is still
useful for uploading part of a file as implemented
by the parent class.
2. At present Google Cloud Storage does not support
multipart uploads.
:type rewind: bool
:param rewind: (optional) If True, the file pointer (fp) will be
rewound to the start before any bytes are read from
it. The default behaviour is False which reads from
the current position of the file pointer (fp).
:type if_generation: int
:param if_generation: (optional) If set to a generation number, the
object will only be written to if its current generation number is
this value. If set to the value 0, the object will only be written
if it doesn't already exist.
:rtype: int
:return: The number of bytes written to the key.
TODO: At some point we should refactor the Bucket and Key classes,
to move functionality common to all providers into a parent class,
and provider-specific functionality into subclasses (rather than
just overriding/sharing code the way it currently works).
"""
provider = self.bucket.connection.provider
if res_upload_handler and size:
# could use size instead of file_length if provided but...
raise BotoClientError('"size" param not supported for resumable uploads.')
headers = headers or {}
if policy:
headers[provider.acl_header] = policy
if rewind:
# caller requests reading from beginning of fp.
fp.seek(0, os.SEEK_SET)
else:
# The following seek/tell/seek logic is intended
# to detect applications using the older interface to
# set_contents_from_file(), which automatically rewound the
# file each time the Key was reused. This changed with commit
# 14ee2d03f4665fe20d19a85286f78d39d924237e, to support uploads
# split into multiple parts and uploaded in parallel, and at
# the time of that commit this check was added because otherwise
# older programs would get a success status and upload an empty
# object. Unfortuantely, it's very inefficient for fp's implemented
# by KeyFile (used, for example, by gsutil when copying between
# providers). So, we skip the check for the KeyFile case.
# TODO: At some point consider removing this seek/tell/seek
# logic, after enough time has passed that it's unlikely any
# programs remain that assume the older auto-rewind interface.
if not isinstance(fp, KeyFile):
spos = fp.tell()
fp.seek(0, os.SEEK_END)
if fp.tell() == spos:
fp.seek(0, os.SEEK_SET)
if fp.tell() != spos:
# Raise an exception as this is likely a programming
# error whereby there is data before the fp but nothing
# after it.
fp.seek(spos)
raise AttributeError('fp is at EOF. Use rewind option '
'or seek() to data start.')
# seek back to the correct position.
fp.seek(spos)
if hasattr(fp, 'name'):
self.path = fp.name
if self.bucket != None:
if isinstance(fp, KeyFile):
# Avoid EOF seek for KeyFile case as it's very inefficient.
key = fp.getkey()
size = key.size - fp.tell()
self.size = size
# At present both GCS and S3 use MD5 for the etag for
# non-multipart-uploaded objects. If the etag is 32 hex
# chars use it as an MD5, to avoid having to read the file
# twice while transferring.
if (re.match('^"[a-fA-F0-9]{32}"$', key.etag)):
etag = key.etag.strip('"')
md5 = (etag, base64.b64encode(binascii.unhexlify(etag)))
if size:
self.size = size
else:
# If md5 is provided, still need to size so
# calculate based on bytes to end of content
spos = fp.tell()
fp.seek(0, os.SEEK_END)
self.size = fp.tell() - spos
fp.seek(spos)
size = self.size
if md5 == None:
md5 = self.compute_md5(fp, size)
self.md5 = md5[0]
self.base64md5 = md5[1]
if self.name == None:
self.name = self.md5
if not replace:
if self.bucket.lookup(self.name):
return
if if_generation is not None:
headers['x-goog-if-generation-match'] = str(if_generation)
if res_upload_handler:
res_upload_handler.send_file(self, fp, headers, cb, num_cb)
else:
# Not a resumable transfer so use basic send_file mechanism.
self.send_file(fp, headers, cb, num_cb, size=size)
def set_contents_from_filename(self, filename, headers=None, replace=True,
cb=None, num_cb=10, policy=None, md5=None,
reduced_redundancy=None,
res_upload_handler=None,
if_generation=None):
"""
Store an object in GS using the name of the Key object as the
key in GS and the contents of the file named by 'filename'.
See set_contents_from_file method for details about the
parameters.
:type filename: string
:param filename: The name of the file that you want to put onto GS
:type headers: dict
:param headers: Additional headers to pass along with the request to GS.
:type replace: bool
:param replace: If True, replaces the contents of the file if it
already exists.
:type cb: function
:param cb: (optional) a callback function that will be called to report
progress on the download. The callback should accept two integer
parameters, the first representing the number of bytes that have
been successfully transmitted from GS and the second representing
the total number of bytes that need to be transmitted.
:type cb: int
:param num_cb: (optional) If a callback is specified with the cb
parameter this parameter determines the granularity of the callback
by defining the maximum number of times the callback will be called
during the file transfer.
:type policy: :class:`boto.gs.acl.CannedACLStrings`
:param policy: A canned ACL policy that will be applied to the new key
in GS.
:type md5: A tuple containing the hexdigest version of the MD5 checksum
of the file as the first element and the Base64-encoded version of
the plain checksum as the second element. This is the same format
returned by the compute_md5 method.
:param md5: If you need to compute the MD5 for any reason prior to
upload, it's silly to have to do it twice so this param, if present,
will be used as the MD5 values of the file. Otherwise, the checksum
will be computed.
:type res_upload_handler: ResumableUploadHandler
:param res_upload_handler: If provided, this handler will perform the
upload.
:type if_generation: int
:param if_generation: (optional) If set to a generation number, the
object will only be written to if its current generation number is
this value. If set to the value 0, the object will only be written
if it doesn't already exist.
"""
# Clear out any previously computed md5 hashes, since we are setting the content.
self.md5 = None
self.base64md5 = None
fp = open(filename, 'rb')
self.set_contents_from_file(fp, headers, replace, cb, num_cb,
policy, md5, res_upload_handler,
if_generation=if_generation)
fp.close()
def set_contents_from_string(self, s, headers=None, replace=True,
cb=None, num_cb=10, policy=None, md5=None,
if_generation=None):
"""
Store an object in S3 using the name of the Key object as the
key in S3 and the string 's' as the contents.
See set_contents_from_file method for details about the
parameters.
:type headers: dict
:param headers: Additional headers to pass along with the
request to AWS.
:type replace: bool
:param replace: If True, replaces the contents of the file if
it already exists.
:type cb: function
:param cb: a callback function that will be called to report
progress on the upload. The callback should accept
two integer parameters, the first representing the
number of bytes that have been successfully
transmitted to S3 and the second representing the
size of the to be transmitted object.
:type cb: int
:param num_cb: (optional) If a callback is specified with
the cb parameter this parameter determines the
granularity of the callback by defining
the maximum number of times the callback will
be called during the file transfer.
:type policy: :class:`boto.s3.acl.CannedACLStrings`
:param policy: A canned ACL policy that will be applied to the
new key in S3.
:type md5: A tuple containing the hexdigest version of the MD5
checksum of the file as the first element and the
Base64-encoded version of the plain checksum as the
second element. This is the same format returned by
the compute_md5 method.
:param md5: If you need to compute the MD5 for any reason prior
to upload, it's silly to have to do it twice so this
param, if present, will be used as the MD5 values
of the file. Otherwise, the checksum will be computed.
:type if_generation: int
:param if_generation: (optional) If set to a generation number, the
object will only be written to if its current generation number is
this value. If set to the value 0, the object will only be written
if it doesn't already exist.
"""
# Clear out any previously computed md5 hashes, since we are setting the content.
self.md5 = None
self.base64md5 = None
if isinstance(s, unicode):
s = s.encode("utf-8")
fp = StringIO.StringIO(s)
r = self.set_contents_from_file(fp, headers, replace, cb, num_cb,
policy, md5,
if_generation=if_generation)
fp.close()
return r
def set_contents_from_stream(self, *args, **kwargs):
"""
Store an object using the name of the Key object as the key in
cloud and the contents of the data stream pointed to by 'fp' as
the contents.
The stream object is not seekable and total size is not known.
This has the implication that we can't specify the
Content-Size and Content-MD5 in the header. So for huge
uploads, the delay in calculating MD5 is avoided but with a
penalty of inability to verify the integrity of the uploaded
data.
:type fp: file
:param fp: the file whose contents are to be uploaded
:type headers: dict
:param headers: additional HTTP headers to be sent with the
PUT request.
:type replace: bool
:param replace: If this parameter is False, the method will first check
to see if an object exists in the bucket with the same key. If it
does, it won't overwrite it. The default value is True which will
overwrite the object.
:type cb: function
:param cb: a callback function that will be called to report
progress on the upload. The callback should accept two integer
parameters, the first representing the number of bytes that have
been successfully transmitted to GS and the second representing the
total number of bytes that need to be transmitted.
:type num_cb: int
:param num_cb: (optional) If a callback is specified with the
cb parameter, this parameter determines the granularity of
the callback by defining the maximum number of times the
callback will be called during the file transfer.
:type policy: :class:`boto.gs.acl.CannedACLStrings`
:param policy: A canned ACL policy that will be applied to the new key
in GS.
:type reduced_redundancy: bool
:param reduced_redundancy: If True, this will set the storage
class of the new Key to be REDUCED_REDUNDANCY. The Reduced
Redundancy Storage (RRS) feature of S3, provides lower
redundancy at lower storage cost.
:type size: int
:param size: (optional) The Maximum number of bytes to read from
the file pointer (fp). This is useful when uploading a
file in multiple parts where you are splitting the file up
into different ranges to be uploaded. If not specified,
the default behaviour is to read all bytes from the file
pointer. Less bytes may be available.
:type if_generation: int
:param if_generation: (optional) If set to a generation number, the
object will only be written to if its current generation number is
this value. If set to the value 0, the object will only be written
if it doesn't already exist.
"""
if_generation = kwargs.pop('if_generation', None)
if if_generation is not None:
headers = kwargs.get('headers', {})
headers['x-goog-if-generation-match'] = str(if_generation)
kwargs['headers'] = headers
return super(Key, self).set_contents_from_stream(*args, **kwargs)
def set_acl(self, acl_or_str, headers=None, generation=None,
if_generation=None, if_metageneration=None):
"""Sets the ACL for this object.
:type acl_or_str: string or :class:`boto.gs.acl.ACL`
:param acl_or_str: A canned ACL string (see
:data:`~.gs.acl.CannedACLStrings`) or an ACL object.
:type headers: dict
:param headers: Additional headers to set during the request.
:type generation: int
:param generation: If specified, sets the ACL for a specific generation
of a versioned object. If not specified, the current version is
modified.
:type if_generation: int
:param if_generation: (optional) If set to a generation number, the acl
will only be updated if its current generation number is this value.
:type if_metageneration: int
:param if_metageneration: (optional) If set to a metageneration number,
the acl will only be updated if its current metageneration number is
this value.
"""
if self.bucket != None:
self.bucket.set_acl(acl_or_str, self.name, headers=headers,
generation=generation,
if_generation=if_generation,
if_metageneration=if_metageneration)
def get_acl(self, headers=None, generation=None):
"""Returns the ACL of this object.
:param dict headers: Additional headers to set during the request.
:param int generation: If specified, gets the ACL for a specific
generation of a versioned object. If not specified, the current
version is returned.
:rtype: :class:`.gs.acl.ACL`
"""
if self.bucket != None:
return self.bucket.get_acl(self.name, headers=headers,
generation=generation)
def get_xml_acl(self, headers=None, generation=None):
"""Returns the ACL string of this object.
:param dict headers: Additional headers to set during the request.
:param int generation: If specified, gets the ACL for a specific
generation of a versioned object. If not specified, the current
version is returned.
:rtype: str
"""
if self.bucket != None:
return self.bucket.get_xml_acl(self.name, headers=headers,
generation=generation)
def set_xml_acl(self, acl_str, headers=None, generation=None,
if_generation=None, if_metageneration=None):
"""Sets this objects's ACL to an XML string.
:type acl_str: string
:param acl_str: A string containing the ACL XML.
:type headers: dict
:param headers: Additional headers to set during the request.
:type generation: int
:param generation: If specified, sets the ACL for a specific generation
of a versioned object. If not specified, the current version is
modified.
:type if_generation: int
:param if_generation: (optional) If set to a generation number, the acl
will only be updated if its current generation number is this value.
:type if_metageneration: int
:param if_metageneration: (optional) If set to a metageneration number,
the acl will only be updated if its current metageneration number is
this value.
"""
if self.bucket != None:
return self.bucket.set_xml_acl(acl_str, self.name, headers=headers,
generation=generation,
if_generation=if_generation,
if_metageneration=if_metageneration)
def set_canned_acl(self, acl_str, headers=None, generation=None,
if_generation=None, if_metageneration=None):
"""Sets this objects's ACL using a predefined (canned) value.
:type acl_str: string
:param acl_str: A canned ACL string. See
:data:`~.gs.acl.CannedACLStrings`.
:type headers: dict
:param headers: Additional headers to set during the request.
:type generation: int
:param generation: If specified, sets the ACL for a specific generation
of a versioned object. If not specified, the current version is
modified.
:type if_generation: int
:param if_generation: (optional) If set to a generation number, the acl
will only be updated if its current generation number is this value.
:type if_metageneration: int
:param if_metageneration: (optional) If set to a metageneration number,
the acl will only be updated if its current metageneration number is
this value.
"""
if self.bucket != None:
return self.bucket.set_canned_acl(
acl_str,
self.name,
headers=headers,
generation=generation,
if_generation=if_generation,
if_metageneration=if_metageneration
)
| 45.220144 | 91 | 0.616329 |
import base64
import binascii
import os
import re
import StringIO
from boto.exception import BotoClientError
from boto.s3.key import Key as S3Key
from boto.s3.keyfile import KeyFile
class Key(S3Key):
    """A Google Cloud Storage key (object).

    Extends the S3 ``Key`` with GS-specific behavior: generation /
    metageneration tracking, ``x-goog-if-generation-match`` write
    preconditions, resumable-upload handlers, and generation-aware ACL
    operations.

    NOTE(review): this is Python 2 code (``unicode``, ``StringIO``).
    """

    # Current generation / metageneration of the object, populated from
    # bucket-listing XML (endElement) or response headers
    # (handle_version_headers); None until known.
    generation = None
    meta_generation = None

    def endElement(self, name, value, connection):
        """SAX parsing callback: map a listing XML element onto this key."""
        if name == 'Key':
            self.name = value
        elif name == 'ETag':
            self.etag = value
        elif name == 'IsLatest':
            if value == 'true':
                self.is_latest = True
            else:
                self.is_latest = False
        elif name == 'LastModified':
            self.last_modified = value
        elif name == 'Size':
            self.size = int(value)
        elif name == 'StorageClass':
            self.storage_class = value
        elif name == 'Owner':
            pass
        elif name == 'VersionId':
            self.version_id = value
        elif name == 'Generation':
            self.generation = value
        elif name == 'MetaGeneration':
            self.meta_generation = value
        else:
            # Unknown element names become plain attributes.
            setattr(self, name, value)

    def handle_version_headers(self, resp, force=False):
        """Record the object's generation/metageneration from response headers."""
        self.meta_generation = resp.getheader('x-goog-metageneration', None)
        self.generation = resp.getheader('x-goog-generation', None)

    def get_file(self, fp, headers=None, cb=None, num_cb=10,
                 torrent=False, version_id=None, override_num_retries=None,
                 response_headers=None):
        """Download this key's contents into the file-like object ``fp``.

        If a generation is known for this key, it is pinned via the
        ``generation=`` query argument so the exact version is fetched.
        """
        query_args = None
        if self.generation:
            query_args = ['generation=%s' % self.generation]
        self._get_file_internal(fp, headers=headers, cb=cb, num_cb=num_cb,
                                override_num_retries=override_num_retries,
                                response_headers=response_headers,
                                query_args=query_args)

    def delete(self):
        """Delete this key (generation-aware) from its bucket."""
        return self.bucket.delete_key(self.name, version_id=self.version_id,
                                      generation=self.generation)

    def add_email_grant(self, permission, email_address):
        """Grant ``permission`` to the user identified by ``email_address``.

        Read-modify-write on the full ACL; not safe against concurrent
        ACL updates.
        """
        acl = self.get_acl()
        acl.add_email_grant(permission, email_address)
        self.set_acl(acl)

    def add_user_grant(self, permission, user_id):
        """Grant ``permission`` to the user identified by ``user_id``."""
        acl = self.get_acl()
        acl.add_user_grant(permission, user_id)
        self.set_acl(acl)

    def add_group_email_grant(self, permission, email_address, headers=None):
        """Grant ``permission`` to the group identified by ``email_address``."""
        acl = self.get_acl(headers=headers)
        acl.add_group_email_grant(permission, email_address)
        self.set_acl(acl, headers=headers)

    def add_group_grant(self, permission, group_id):
        """Grant ``permission`` to the group identified by ``group_id``."""
        acl = self.get_acl()
        acl.add_group_grant(permission, group_id)
        self.set_acl(acl)

    def set_contents_from_file(self, fp, headers=None, replace=True,
                               cb=None, num_cb=10, policy=None, md5=None,
                               res_upload_handler=None, size=None, rewind=False,
                               if_generation=None):
        """Upload the contents of the file-like object ``fp`` to this key.

        Handles optional rewinding, EOF detection for callers that relied on
        the older auto-rewind interface, MD5 computation, canned ACL
        policies, generation-match preconditions and resumable uploads.
        """
        provider = self.bucket.connection.provider
        if res_upload_handler and size:
            raise BotoClientError('"size" param not supported for resumable uploads.')
        headers = headers or {}
        if policy:
            headers[provider.acl_header] = policy
        if rewind:
            # Caller asked us to start from the beginning of the stream.
            fp.seek(0, os.SEEK_SET)
        else:
            # EOF-detection for programs that remain that assume the older
            # auto-rewind interface: if fp is positioned at EOF but there is
            # data before it, that is almost certainly a programming error.
            if not isinstance(fp, KeyFile):
                spos = fp.tell()
                fp.seek(0, os.SEEK_END)
                if fp.tell() == spos:
                    fp.seek(0, os.SEEK_SET)
                    if fp.tell() != spos:
                        # Raise an exception as this is likely a programming
                        # error whereby there is data before the fp but nothing
                        # after it.
                        fp.seek(spos)
                        raise AttributeError('fp is at EOF. Use rewind option '
                                             'or seek() to data start.')
                # seek back to the correct position.
                fp.seek(spos)
        if hasattr(fp, 'name'):
            self.path = fp.name
        if self.bucket != None:
            if isinstance(fp, KeyFile):
                # Avoid EOF seek for KeyFile case as it's very inefficient.
                key = fp.getkey()
                size = key.size - fp.tell()
                self.size = size
                # Reuse the source key's etag as the MD5 when it looks like a
                # plain (non-multipart) MD5 hex digest.
                if (re.match('^"[a-fA-F0-9]{32}"$', key.etag)):
                    etag = key.etag.strip('"')
                    md5 = (etag, base64.b64encode(binascii.unhexlify(etag)))
            if size:
                self.size = size
            else:
                # If md5 is provided, still need the size, so calculate it
                # based on bytes to end of content.
                spos = fp.tell()
                fp.seek(0, os.SEEK_END)
                self.size = fp.tell() - spos
                fp.seek(spos)
                size = self.size
            if md5 == None:
                md5 = self.compute_md5(fp, size)
            self.md5 = md5[0]
            self.base64md5 = md5[1]
            if self.name == None:
                # Content-addressed fallback: name the key after its MD5.
                self.name = self.md5
            if not replace:
                if self.bucket.lookup(self.name):
                    return
            if if_generation is not None:
                headers['x-goog-if-generation-match'] = str(if_generation)
            if res_upload_handler:
                res_upload_handler.send_file(self, fp, headers, cb, num_cb)
            else:
                # Not a resumable transfer so use basic send_file mechanism.
                self.send_file(fp, headers, cb, num_cb, size=size)

    def set_contents_from_filename(self, filename, headers=None, replace=True,
                                   cb=None, num_cb=10, policy=None, md5=None,
                                   reduced_redundancy=None,
                                   res_upload_handler=None,
                                   if_generation=None):
        """Upload the contents of the named file to this key."""
        # Clear out any previously computed md5 hashes, since we are setting the content.
        self.md5 = None
        self.base64md5 = None
        fp = open(filename, 'rb')
        self.set_contents_from_file(fp, headers, replace, cb, num_cb,
                                    policy, md5, res_upload_handler,
                                    if_generation=if_generation)
        fp.close()

    def set_contents_from_string(self, s, headers=None, replace=True,
                                 cb=None, num_cb=10, policy=None, md5=None,
                                 if_generation=None):
        """Upload the string ``s`` (utf-8 encoded if unicode) to this key."""
        # Clear out any previously computed md5 hashes, since we are setting the content.
        self.md5 = None
        self.base64md5 = None
        if isinstance(s, unicode):
            s = s.encode("utf-8")
        fp = StringIO.StringIO(s)
        r = self.set_contents_from_file(fp, headers, replace, cb, num_cb,
                                        policy, md5,
                                        if_generation=if_generation)
        fp.close()
        return r

    def set_contents_from_stream(self, *args, **kwargs):
        """Upload from a non-seekable stream; supports ``if_generation``."""
        if_generation = kwargs.pop('if_generation', None)
        if if_generation is not None:
            headers = kwargs.get('headers', {})
            headers['x-goog-if-generation-match'] = str(if_generation)
            kwargs['headers'] = headers
        return super(Key, self).set_contents_from_stream(*args, **kwargs)

    def set_acl(self, acl_or_str, headers=None, generation=None,
                if_generation=None, if_metageneration=None):
        """Set the ACL for this object (generation-aware)."""
        if self.bucket != None:
            self.bucket.set_acl(acl_or_str, self.name, headers=headers,
                                generation=generation,
                                if_generation=if_generation,
                                if_metageneration=if_metageneration)

    def get_acl(self, headers=None, generation=None):
        """Return the ACL of this object (None if no bucket is attached)."""
        if self.bucket != None:
            return self.bucket.get_acl(self.name, headers=headers,
                                       generation=generation)

    def get_xml_acl(self, headers=None, generation=None):
        """Return the ACL of this object as an XML string."""
        if self.bucket != None:
            return self.bucket.get_xml_acl(self.name, headers=headers,
                                           generation=generation)

    def set_xml_acl(self, acl_str, headers=None, generation=None,
                    if_generation=None, if_metageneration=None):
        """Set this object's ACL from an XML string."""
        if self.bucket != None:
            return self.bucket.set_xml_acl(acl_str, self.name, headers=headers,
                                           generation=generation,
                                           if_generation=if_generation,
                                           if_metageneration=if_metageneration)

    def set_canned_acl(self, acl_str, headers=None, generation=None,
                       if_generation=None, if_metageneration=None):
        """Set this object's ACL using a predefined (canned) ACL string."""
        if self.bucket != None:
            return self.bucket.set_canned_acl(
                acl_str,
                self.name,
                headers=headers,
                generation=generation,
                if_generation=if_generation,
                if_metageneration=if_metageneration
            )
| true | true |
1c45c0eac73d31615a106f4522042ae688360bab | 2,494 | py | Python | forum/models.py | boxed/forum | abb3699d310bf3a404f031a3cb0e4bdbf403da5a | [
"BSD-3-Clause"
] | 2 | 2019-06-28T16:30:44.000Z | 2020-12-28T01:46:52.000Z | forum/models.py | boxed/forum | abb3699d310bf3a404f031a3cb0e4bdbf403da5a | [
"BSD-3-Clause"
] | 14 | 2019-02-26T17:25:54.000Z | 2019-04-03T18:11:24.000Z | forum/models.py | boxed/forum | abb3699d310bf3a404f031a3cb0e4bdbf403da5a | [
"BSD-3-Clause"
] | 1 | 2019-06-14T14:21:47.000Z | 2019-06-14T14:21:47.000Z | from hashlib import md5
from django.contrib.auth.models import User
from django.core import validators
from django.db import models
from iommi import register_factory
from unread.models import UnreadModel
class Model(models.Model):
    """Abstract project base model: adds a debug repr and an unread-tracking id."""

    def __repr__(self):
        return f'{type(self)} {self.pk}:{self}'

    class Meta:
        abstract = True

    def get_unread_identifier(self):
        # NOTE(review): the prefix says 'wiki/context/' even though this is the
        # forum app — looks copy-pasted; confirm what consumers of these
        # identifiers actually expect before changing it.
        return f'wiki/context/{self._meta.verbose_name}:{self.pk}'
class Room(Model):
    """A discussion room that messages are posted into."""

    name = models.CharField(max_length=255)
    description = models.TextField(blank=True)
    # Free-form, indexed slot for external integrations — TODO confirm intended use.
    custom_data = models.CharField(max_length=1024, db_index=True, null=True, blank=True)

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return f'/rooms/{self.pk}/'

    def get_unread_id(self):
        # Identifier string for read-state tracking.
        # NOTE(review): coexists with Model.get_unread_identifier (different
        # prefix scheme) — verify which one the unread app uses.
        return f'forum/room:{self.pk}'
class BinaryField(models.Field):
    """A Django field stored as MySQL ``VARBINARY(max_length)``."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Validator allows up to max_length * 2 — presumably to accommodate a
        # hex-encoded representation of the value; TODO confirm.
        self.validators.append(validators.MaxLengthValidator(self.max_length * 2))

    def db_type(self, connection):
        # VARBINARY is MySQL-specific, so fail loudly on any other backend.
        assert connection.settings_dict['ENGINE'] == 'django.db.backends.mysql', 'VARBINARY is mysql only'
        return f'varbinary({str(self.max_length)})'
# Register BinaryField with iommi with factory=None, i.e. no automatic
# form/table factory is generated for this field type.
register_factory(BinaryField, factory=None)
class Message(UnreadModel):
    """A single forum post, threaded via a materialized `path` column."""

    room = models.ForeignKey(Room, on_delete=models.PROTECT, related_name='messages')
    text = models.TextField(blank=True, null=True)
    # Threading: `parent` is the direct ancestor; `path` is a materialized
    # path of fixed-width 8-byte segments (see bytes_from_int) that the
    # default ordering sorts by to produce depth-first thread order.
    parent = models.ForeignKey('self', on_delete=models.PROTECT, null=True, blank=True, related_name='replies')
    path = BinaryField(max_length=1000, db_index=True, null=True)
    visible = models.BooleanField(default=True)
    user = models.ForeignKey(User, on_delete=models.PROTECT, related_name='messages')
    has_replies = models.BooleanField(default=False)
    custom_data = models.CharField(max_length=1024, db_index=True, null=True, blank=True)

    def __repr__(self):
        return f'<Message: {self.pk}>'

    def get_absolute_url(self):
        return f'/rooms/{self.room.pk}/message/{self.pk}/'

    class Meta:
        ordering = ('path',)

    @property
    def indent(self):
        # Each ancestor contributes one 8-byte path segment; root depth is 0.
        return (len(self.path) // 8) - 1

    @property
    def indent_rem(self):
        # Presumably a CSS indent in rem units — verify against templates.
        return self.indent * 2 + 3

    @property
    def gravatar_url(self):
        # Bug fix: Gravatar requires the email to be trimmed and lowercased
        # before hashing; without this, mixed-case or padded addresses
        # resolved to the wrong (usually missing) avatar.
        email = self.user.email.strip().lower()
        return f'https://www.gravatar.com/avatar/{md5(email.encode()).hexdigest()}?d=identicon'
def bytes_from_int(i):
    """Encode *i* as an 8-byte (64-bit) big-endian bytestring.

    Raises OverflowError if *i* is negative or does not fit in 64 bits.
    """
    return i.to_bytes(8, byteorder='big')
| 29 | 111 | 0.690056 | from hashlib import md5
from django.contrib.auth.models import User
from django.core import validators
from django.db import models
from iommi import register_factory
from unread.models import UnreadModel
class Model(models.Model):
    """Abstract project base model: adds a debug repr and an unread-tracking id."""

    def __repr__(self):
        return f'{type(self)} {self.pk}:{self}'

    class Meta:
        abstract = True

    def get_unread_identifier(self):
        # NOTE(review): 'wiki/context/' prefix in the forum app looks
        # copy-pasted; confirm what consumers expect.
        return f'wiki/context/{self._meta.verbose_name}:{self.pk}'
class Room(Model):
    """A discussion room that messages are posted into."""

    name = models.CharField(max_length=255)
    description = models.TextField(blank=True)
    # Free-form, indexed slot for external integrations — TODO confirm intended use.
    custom_data = models.CharField(max_length=1024, db_index=True, null=True, blank=True)

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return f'/rooms/{self.pk}/'

    def get_unread_id(self):
        # Identifier string for read-state tracking; note the different
        # prefix scheme from Model.get_unread_identifier above.
        return f'forum/room:{self.pk}'
class BinaryField(models.Field):
    """A Django field stored as MySQL ``VARBINARY(max_length)``."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # max_length * 2 — presumably allows a hex-encoded value; TODO confirm.
        self.validators.append(validators.MaxLengthValidator(self.max_length * 2))

    def db_type(self, connection):
        # VARBINARY is MySQL-specific, so fail loudly on any other backend.
        assert connection.settings_dict['ENGINE'] == 'django.db.backends.mysql', 'VARBINARY is mysql only'
        return f'varbinary({str(self.max_length)})'
# Register BinaryField with iommi with factory=None, i.e. no automatic
# form/table factory is generated for this field type.
register_factory(BinaryField, factory=None)
class Message(UnreadModel):
    """A single forum post, threaded via a materialized `path` column."""

    room = models.ForeignKey(Room, on_delete=models.PROTECT, related_name='messages')
    text = models.TextField(blank=True, null=True)
    # Threading: `parent` is the direct ancestor; `path` is a materialized
    # path of fixed-width 8-byte segments (see bytes_from_int) that the
    # default ordering sorts by to produce depth-first thread order.
    parent = models.ForeignKey('self', on_delete=models.PROTECT, null=True, blank=True, related_name='replies')
    path = BinaryField(max_length=1000, db_index=True, null=True)
    visible = models.BooleanField(default=True)
    user = models.ForeignKey(User, on_delete=models.PROTECT, related_name='messages')
    has_replies = models.BooleanField(default=False)
    custom_data = models.CharField(max_length=1024, db_index=True, null=True, blank=True)

    def __repr__(self):
        return f'<Message: {self.pk}>'

    def get_absolute_url(self):
        return f'/rooms/{self.room.pk}/message/{self.pk}/'

    class Meta:
        ordering = ('path',)

    @property
    def indent(self):
        # Each ancestor contributes one 8-byte path segment; root depth is 0.
        return (len(self.path) // 8) - 1

    @property
    def indent_rem(self):
        # Presumably a CSS indent in rem units — verify against templates.
        return self.indent * 2 + 3

    @property
    def gravatar_url(self):
        # Bug fix: Gravatar requires the email to be trimmed and lowercased
        # before hashing; without this, mixed-case or padded addresses
        # resolved to the wrong (usually missing) avatar.
        email = self.user.email.strip().lower()
        return f'https://www.gravatar.com/avatar/{md5(email.encode()).hexdigest()}?d=identicon'
def bytes_from_int(i):
    """Encode *i* as an 8-byte (64-bit) big-endian bytestring.

    Raises OverflowError if *i* is negative or does not fit in 64 bits.
    """
    return i.to_bytes(8, byteorder='big')
| true | true |
1c45c15f9e69201656c5a6fd742639e0189553ed | 15,419 | py | Python | sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/aio/operations/_keys_operations.py | mohamedshabanofficial/azure-sdk-for-python | 81c585f310cd2ec23d2ad145173958914a075a58 | [
"MIT"
] | 2 | 2019-08-23T21:14:00.000Z | 2021-09-07T18:32:34.000Z | sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/aio/operations/_keys_operations.py | mohamedshabanofficial/azure-sdk-for-python | 81c585f310cd2ec23d2ad145173958914a075a58 | [
"MIT"
] | 2 | 2021-11-03T06:10:36.000Z | 2021-12-01T06:29:39.000Z | sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/aio/operations/_keys_operations.py | mohamedshabanofficial/azure-sdk-for-python | 81c585f310cd2ec23d2ad145173958914a075a58 | [
"MIT"
] | 1 | 2021-05-19T02:55:10.000Z | 2021-05-19T02:55:10.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
# Signature of the optional `cls` response-transformer that callers may pass
# via kwargs: (pipeline_response, deserialized, response_headers) -> Any.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class KeysOperations:
    """KeysOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    Code generated by AutoRest — prefer regenerating over hand-editing.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.synapse.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list_by_workspace(
        self,
        resource_group_name: str,
        workspace_name: str,
        **kwargs
    ) -> AsyncIterable["_models.KeyInfoListResult"]:
        """Returns a list of keys in a workspace.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either KeyInfoListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.synapse.models.KeyInfoListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.KeyInfoListResult"]
        # Map HTTP status codes to exception types; callers may override via
        # the `error_map` kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # First page: build the URL from the operation's metadata template.
                # Construct URL
                url = self.list_by_workspace.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Subsequent pages: the service supplies a complete next_link URL.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            deserialized = self._deserialize('KeyInfoListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                # Paged operations deserialize the error body before mapping
                # the status code — this is the AutoRest-generated pattern.
                error = self._deserialize(_models.ErrorContract, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/keys'}  # type: ignore

    async def get(
        self,
        resource_group_name: str,
        workspace_name: str,
        key_name: str,
        **kwargs
    ) -> "_models.Key":
        """Gets a workspace key.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param key_name: The name of the workspace key.
        :type key_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Key, or the result of cls(response)
        :rtype: ~azure.mgmt.synapse.models.Key
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Key"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'keyName': self._serialize.url("key_name", key_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(_models.ErrorContract, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('Key', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/keys/{keyName}'}  # type: ignore

    async def create_or_update(
        self,
        resource_group_name: str,
        workspace_name: str,
        key_name: str,
        key_properties: "_models.Key",
        **kwargs
    ) -> "_models.Key":
        """Creates or updates a workspace key.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param key_name: The name of the workspace key.
        :type key_name: str
        :param key_properties: Key put request properties.
        :type key_properties: ~azure.mgmt.synapse.models.Key
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Key, or the result of cls(response)
        :rtype: ~azure.mgmt.synapse.models.Key
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Key"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.create_or_update.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'keyName': self._serialize.url("key_name", key_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(key_properties, 'Key')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(_models.ErrorContract, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('Key', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/keys/{keyName}'}  # type: ignore

    async def delete(
        self,
        resource_group_name: str,
        workspace_name: str,
        key_name: str,
        **kwargs
    ) -> Optional["_models.Key"]:
        """Deletes a workspace key.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace.
        :type workspace_name: str
        :param key_name: The name of the workspace key.
        :type key_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Key, or the result of cls(response)
        :rtype: ~azure.mgmt.synapse.models.Key or None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.Key"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'keyName': self._serialize.url("key_name", key_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(_models.ErrorContract, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # 204 No Content has no body; only a 200 carries the deleted Key.
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('Key', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/keys/{keyName}'}  # type: ignore
| 48.640379 | 195 | 0.660938 |
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class KeysOperations:
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list_by_workspace(
self,
resource_group_name: str,
workspace_name: str,
**kwargs
) -> AsyncIterable["_models.KeyInfoListResult"]:
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01-preview"
accept = "application/json"
def prepare_request(next_link=None):
header_parameters = {}
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
url = self.list_by_workspace.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('KeyInfoListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(_models.ErrorContract, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/keys'}
async def get(
self,
resource_group_name: str,
workspace_name: str,
key_name: str,
**kwargs
) -> "_models.Key":
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01-preview"
accept = "application/json"
url = self.get.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
'keyName': self._serialize.url("key_name", key_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
header_parameters = {}
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorContract, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('Key', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/keys/{keyName}'}
async def create_or_update(
self,
resource_group_name: str,
workspace_name: str,
key_name: str,
key_properties: "_models.Key",
**kwargs
) -> "_models.Key":
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01-preview"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
url = self.create_or_update.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
'keyName': self._serialize.url("key_name", key_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
header_parameters = {}
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {}
body_content = self._serialize.body(key_properties, 'Key')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorContract, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('Key', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/keys/{keyName}'}
async def delete(
self,
resource_group_name: str,
workspace_name: str,
key_name: str,
**kwargs
) -> Optional["_models.Key"]:
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01-preview"
accept = "application/json"
url = self.delete.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
'keyName': self._serialize.url("key_name", key_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
header_parameters = {}
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorContract, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Key', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/keys/{keyName}'}
| true | true |
1c45c21bce32039850b2b214ced69db7934f7418 | 5,134 | py | Python | samples/demo.py | siyuan-song/Container | 42313132af32f2edf710643b9ceb8ca84693ba5c | [
"MIT"
] | 2 | 2020-07-17T02:24:00.000Z | 2020-07-17T21:14:45.000Z | samples/demo.py | siyuan-song/Container | 42313132af32f2edf710643b9ceb8ca84693ba5c | [
"MIT"
] | null | null | null | samples/demo.py | siyuan-song/Container | 42313132af32f2edf710643b9ceb8ca84693ba5c | [
"MIT"
] | null | null | null |
# coding: utf-8
# # Mask R-CNN Demo
#
# A quick intro to using the pre-trained model to detect and segment objects.
# In[1]:
import os
import sys
import random
import math
import numpy as np
import skimage.io
import matplotlib
import matplotlib.pyplot as plt
# Root directory of the project
ROOT_DIR = os.path.abspath("../")
# Import Mask RCNN
sys.path.append(ROOT_DIR) # To find local version of the library
from mrcnn import utils
import mrcnn.model as modellib
from mrcnn import visualize
# Import COCO config
sys.path.append(os.path.join(ROOT_DIR, "samples/container/")) # To find local version
import container
#get_ipython().run_line_magic('matplotlib', 'inline')
# Directory to save logs and trained model
MODEL_DIR = os.path.join(ROOT_DIR, "logs")
# Local path to trained weights file
COCO_MODEL_PATH = os.path.join(ROOT_DIR, "logs/container20200717T1153/mask_rcnn_container_0030.h5")
# Download COCO trained weights from Releases if needed
if not os.path.exists(COCO_MODEL_PATH):
utils.download_trained_weights(COCO_MODEL_PATH)
# Directory of images to run detection on
IMAGE_DIR = os.path.join(ROOT_DIR, "samples/container/dataset/val")
# ## Configurations
#
# We'll be using a model trained on the MS-COCO dataset. The configurations of this model are in the ```CocoConfig``` class in ```coco.py```.
#
# For inferencing, modify the configurations a bit to fit the task. To do so, sub-class the ```CocoConfig``` class and override the attributes you need to change.
# In[2]:
class InferenceConfig(container.ContainerConfig):
    """Inference-time configuration: process a single image per step."""
    # Set batch size to 1 since we'll be running inference on
    # one image at a time. Batch size = GPU_COUNT * IMAGES_PER_GPU
    GPU_COUNT = 1
    IMAGES_PER_GPU = 1
config = InferenceConfig()
config.display()
# ## Create Model and Load Trained Weights
# In[3]:
# Create model object in inference mode.
model = modellib.MaskRCNN(mode="inference", model_dir=MODEL_DIR, config=config)
# Load weights trained on MS-COCO
model.load_weights(COCO_MODEL_PATH, by_name=True)
# ## Class Names
#
# The model classifies objects and returns class IDs, which are integer value that identify each class. Some datasets assign integer values to their classes and some don't. For example, in the MS-COCO dataset, the 'person' class is 1 and 'teddy bear' is 88. The IDs are often sequential, but not always. The COCO dataset, for example, has classes associated with class IDs 70 and 72, but not 71.
#
# To improve consistency, and to support training on data from multiple sources at the same time, our ```Dataset``` class assigns it's own sequential integer IDs to each class. For example, if you load the COCO dataset using our ```Dataset``` class, the 'person' class would get class ID = 1 (just like COCO) and the 'teddy bear' class is 78 (different from COCO). Keep that in mind when mapping class IDs to class names.
#
# To get the list of class names, you'd load the dataset and then use the ```class_names``` property like this.
# ```
# # Load COCO dataset
# dataset = coco.CocoDataset()
# dataset.load_coco(COCO_DIR, "train")
# dataset.prepare()
#
# # Print class names
# print(dataset.class_names)
# ```
#
# We don't want to require you to download the COCO dataset just to run this demo, so we're including the list of class names below. The index of the class name in the list represent its ID (first class is 0, second is 1, third is 2, ...etc.)
# In[4]:
# COCO Class names
# Index of the class in the list is its ID. For example, to get ID of
# the teddy bear class, use: class_names.index('teddy bear')
class_names = ['BG','Cola Bottle','Fanta Bottle','Cherry Coke Bottle','Coke Zero Bottle','Mtn Dew Bottle','Cola Can','Fanta Can']
# ## Run Object Detection
# In[5]:
# Load a random image from the images folder
##file_names = next(os.walk(IMAGE_DIR))[2]
##image = skimage.io.imread(os.path.join(IMAGE_DIR, random.choice(file_names)))
test_image = skimage.io.imread(os.path.join(IMAGE_DIR,'Image0170.png'))
# Bug fix: the original sliced `image[:,:,:3]`, but `image` is undefined at
# this point (NameError). Slice the freshly loaded array to drop the alpha
# channel and keep only the RGB planes.
test_image = test_image[:, :, :3]
# Run detection
results = model.detect([test_image], verbose=1)
# Visualize results
r = results[0]
visualize.display_instances(test_image, r['rois'], r['masks'], r['class_ids'],
class_names, r['scores'])
# Evaluation
# Compute VOC-Style mAP @ IoU=0.5
# Running on 40 images. Increase for better accuracy.
from container import ContainerDataset
dataset_val = ContainerDataset()
dataset_val.load_container(os.path.join(ROOT_DIR, "samples/container/dataset"), "val")
dataset_val.prepare()
image_ids = np.random.choice(dataset_val.image_ids, 40)
APs = []
for image_id in image_ids:
# Load image and ground truth data
image, image_meta, gt_class_id, gt_bbox, gt_mask = modellib.load_image_gt(dataset_val, config, image_id, use_mini_mask=False)
image = image[:,:,:3]
# Run object detection
results = model.detect([image], verbose=0)
r = results[0]
# Compute AP
AP, precisions, recalls, overlaps = utils.compute_ap(gt_bbox, gt_class_id, gt_mask, r["rois"], r["class_ids"], r["scores"], r['masks'])
APs.append(AP)
print("mAP: ", np.mean(APs))
| 34.456376 | 421 | 0.730619 |
import sys
import random
import math
import numpy as np
import skimage.io
import matplotlib
import matplotlib.pyplot as plt
ROOT_DIR = os.path.abspath("../")
sys.path.append(ROOT_DIR)
from mrcnn import utils
import mrcnn.model as modellib
from mrcnn import visualize
sys.path.append(os.path.join(ROOT_DIR, "samples/container/"))
import container
MODEL_DIR = os.path.join(ROOT_DIR, "logs")
COCO_MODEL_PATH = os.path.join(ROOT_DIR, "logs/container20200717T1153/mask_rcnn_container_0030.h5")
if not os.path.exists(COCO_MODEL_PATH):
utils.download_trained_weights(COCO_MODEL_PATH)
IMAGE_DIR = os.path.join(ROOT_DIR, "samples/container/dataset/val")
he configurations a bit to fit the task. To do so, sub-class the ```CocoConfig``` class and override the attributes you need to change.
# In[2]:
class InferenceConfig(container.ContainerConfig):
# Set batch size to 1 since we'll be running inference on
GPU_COUNT = 1
IMAGES_PER_GPU = 1
config = InferenceConfig()
config.display()
onfig)
model.load_weights(COCO_MODEL_PATH, by_name=True)
ncy, and to support training on data from multiple sources at the same time, our ```Dataset``` class assigns it's own sequential integer IDs to each class. For example, if you load the COCO dataset using our ```Dataset``` class, the 'person' class would get class ID = 1 (just like COCO) and the 'teddy bear' class is 78 (different from COCO). Keep that in mind when mapping class IDs to class names.
# ```
# # Load COCO dataset
# dataset = coco.CocoDataset()
# dataset.load_coco(COCO_DIR, "train")
# dataset.prepare()
#
# # Print class names
# print(dataset.class_names)
# ```
#
# We don't want to require you to download the COCO dataset just to run this demo, so we're including the list of class names below. The index of the class name in the list represent its ID (first class is 0, second is 1, third is 2, ...etc.)
# In[4]:
# COCO Class names
# Index of the class in the list is its ID. For example, to get ID of
# the teddy bear class, use: class_names.index('teddy bear')
class_names = ['BG','Cola Bottle','Fanta Bottle','Cherry Coke Bottle','Coke Zero Bottle','Mtn Dew Bottle','Cola Can','Fanta Can']
# ## Run Object Detection
# In[5]:
# Load a random image from the images folder
##file_names = next(os.walk(IMAGE_DIR))[2]
##image = skimage.io.imread(os.path.join(IMAGE_DIR, random.choice(file_names)))
test_image = skimage.io.imread(os.path.join(IMAGE_DIR,'Image0170.png'))
# Bug fix: `image` was undefined here (NameError); slice the loaded array
# itself to strip the alpha channel.
test_image = test_image[:, :, :3]
# Run detection
results = model.detect([test_image], verbose=1)
# Visualize results
r = results[0]
visualize.display_instances(test_image, r['rois'], r['masks'], r['class_ids'],
class_names, r['scores'])
# Evaluation
# Compute VOC-Style mAP @ IoU=0.5
# Running on 40 images. Increase for better accuracy.
from container import ContainerDataset
dataset_val = ContainerDataset()
dataset_val.load_container(os.path.join(ROOT_DIR, "samples/container/dataset"), "val")
dataset_val.prepare()
image_ids = np.random.choice(dataset_val.image_ids, 40)
APs = []
for image_id in image_ids:
# Load image and ground truth data
image, image_meta, gt_class_id, gt_bbox, gt_mask = modellib.load_image_gt(dataset_val, config, image_id, use_mini_mask=False)
image = image[:,:,:3]
# Run object detection
results = model.detect([image], verbose=0)
r = results[0]
# Compute AP
AP, precisions, recalls, overlaps = utils.compute_ap(gt_bbox, gt_class_id, gt_mask, r["rois"], r["class_ids"], r["scores"], r['masks'])
APs.append(AP)
print("mAP: ", np.mean(APs))
| true | true |
1c45c26bdf9dbb63739a39e2d750920b7e4c23b2 | 1,886 | py | Python | terrascript/heroku/r.py | mjuenema/python-terrascript | 6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d | [
"BSD-2-Clause"
] | 507 | 2017-07-26T02:58:38.000Z | 2022-01-21T12:35:13.000Z | terrascript/heroku/r.py | mjuenema/python-terrascript | 6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d | [
"BSD-2-Clause"
] | 135 | 2017-07-20T12:01:59.000Z | 2021-10-04T22:25:40.000Z | terrascript/heroku/r.py | mjuenema/python-terrascript | 6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d | [
"BSD-2-Clause"
] | 81 | 2018-02-20T17:55:28.000Z | 2022-01-31T07:08:40.000Z | # terrascript/heroku/r.py
# Automatically generated by tools/makecode.py ()
import warnings
warnings.warn(
"using the 'legacy layout' is deprecated", DeprecationWarning, stacklevel=2
)
import terrascript
class heroku_account_feature(terrascript.Resource):
pass
class heroku_addon(terrascript.Resource):
pass
class heroku_addon_attachment(terrascript.Resource):
pass
class heroku_app(terrascript.Resource):
pass
class heroku_app_config_association(terrascript.Resource):
pass
class heroku_app_feature(terrascript.Resource):
pass
class heroku_app_release(terrascript.Resource):
pass
class heroku_app_webhook(terrascript.Resource):
pass
class heroku_build(terrascript.Resource):
pass
class heroku_cert(terrascript.Resource):
pass
class heroku_collaborator(terrascript.Resource):
pass
class heroku_config(terrascript.Resource):
pass
class heroku_domain(terrascript.Resource):
pass
class heroku_drain(terrascript.Resource):
pass
class heroku_formation(terrascript.Resource):
pass
class heroku_pipeline(terrascript.Resource):
pass
class heroku_pipeline_config_var(terrascript.Resource):
pass
class heroku_pipeline_coupling(terrascript.Resource):
pass
class heroku_review_app_config(terrascript.Resource):
pass
class heroku_slug(terrascript.Resource):
pass
class heroku_space(terrascript.Resource):
pass
class heroku_space_app_access(terrascript.Resource):
pass
class heroku_space_inbound_ruleset(terrascript.Resource):
pass
class heroku_space_peering_connection_accepter(terrascript.Resource):
pass
class heroku_space_vpn_connection(terrascript.Resource):
pass
class heroku_ssl(terrascript.Resource):
pass
class heroku_team_collaborator(terrascript.Resource):
pass
class heroku_team_member(terrascript.Resource):
pass
| 15.459016 | 79 | 0.782078 |
import warnings
warnings.warn(
"using the 'legacy layout' is deprecated", DeprecationWarning, stacklevel=2
)
import terrascript
class heroku_account_feature(terrascript.Resource):
pass
class heroku_addon(terrascript.Resource):
pass
class heroku_addon_attachment(terrascript.Resource):
pass
class heroku_app(terrascript.Resource):
pass
class heroku_app_config_association(terrascript.Resource):
pass
class heroku_app_feature(terrascript.Resource):
pass
class heroku_app_release(terrascript.Resource):
pass
class heroku_app_webhook(terrascript.Resource):
pass
class heroku_build(terrascript.Resource):
pass
class heroku_cert(terrascript.Resource):
pass
class heroku_collaborator(terrascript.Resource):
pass
class heroku_config(terrascript.Resource):
pass
class heroku_domain(terrascript.Resource):
pass
class heroku_drain(terrascript.Resource):
pass
class heroku_formation(terrascript.Resource):
pass
class heroku_pipeline(terrascript.Resource):
pass
class heroku_pipeline_config_var(terrascript.Resource):
pass
class heroku_pipeline_coupling(terrascript.Resource):
pass
class heroku_review_app_config(terrascript.Resource):
pass
class heroku_slug(terrascript.Resource):
pass
class heroku_space(terrascript.Resource):
pass
class heroku_space_app_access(terrascript.Resource):
pass
class heroku_space_inbound_ruleset(terrascript.Resource):
pass
class heroku_space_peering_connection_accepter(terrascript.Resource):
pass
class heroku_space_vpn_connection(terrascript.Resource):
pass
class heroku_ssl(terrascript.Resource):
pass
class heroku_team_collaborator(terrascript.Resource):
pass
class heroku_team_member(terrascript.Resource):
pass
| true | true |
1c45c283a6f9e86c7a63de4447cd5512835f3ca4 | 2,594 | py | Python | build/scripts/gen_linkkit_sdk.py | jinlongliu/AliOS-Things | ce051172a775f987183e7aca88bb6f3b809ea7b0 | [
"Apache-2.0"
] | 92 | 2020-02-25T11:16:15.000Z | 2021-09-20T14:45:49.000Z | build/scripts/gen_linkkit_sdk.py | IamBaoMouMou/AliOS-Things | 195a9160b871b3d78de6f8cf6c2ab09a71977527 | [
"Apache-2.0"
] | 12 | 2020-02-28T03:51:00.000Z | 2020-08-05T09:38:54.000Z | build/scripts/gen_linkkit_sdk.py | IamBaoMouMou/AliOS-Things | 195a9160b871b3d78de6f8cf6c2ab09a71977527 | [
"Apache-2.0"
] | 45 | 2020-02-26T04:31:08.000Z | 2021-10-09T17:17:23.000Z | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
import sys, os
import shutil
SDKNAME = "linkkit-sdk-c"
compdirs = [
['middleware/linkkit/sdk-c', ''],
['middleware/uagent/uota', 'src/services/uota'],
['utility/libc/compilers/armlibc', 'src/infra/stdc/armlibc'],
['utility/libc/compilers/iar', 'src/infra/stdc/iarlibc'],
]
# Example file list: collect [src, dest] pairs for every sample app.
# Each app maps to a distribution folder by its name prefix; example *.c
# files are copied directly, and the special "newapi"/"data" subfolders are
# copied file by file.
examples = ['linkkitapp', 'linkkit_gateway', 'mqttapp', 'coapapp']
example_files = []
for example in examples:
    for prefix, dist in (('linkkit', 'linkkit'), ('mqtt', 'mqtt'), ('coap', 'coap')):
        if example.startswith(prefix):
            dist_example_dir = dist
    example_root = 'app/example/' + example
    for filename in os.listdir(example_root):
        if 'example' in filename and filename.endswith('.c'):
            example_files.append([os.path.join('app/example', example, filename),
                                  os.path.join('examples', dist_example_dir, filename)])
        elif filename in ("newapi", "data"):
            for inner in os.listdir(example_root + "/" + filename):
                example_files.append([os.path.join('app/example', example, filename, inner),
                                      os.path.join('examples', dist_example_dir, filename, inner)])
def main(argv):
    """Export the Linkkit SDK.

    Copies the configured components and example files from ``source_dir``
    into ``<build_dir>/<SDKNAME>`` and packs the result as a .tar.gz.

    Args:
        argv: command-line vector ``[prog, source_dir, build_dir]``
              (normally ``sys.argv``).
    """
    # Bug fix: use the argv parameter instead of silently re-reading
    # sys.argv — callers passing a custom vector were previously ignored.
    source_dir = os.path.abspath(argv[1])
    build_dir = os.path.abspath(argv[2])
    sdk_dir = os.path.join(build_dir, SDKNAME)
    if os.path.isdir(sdk_dir):
        print("[INFO]: Removing existing %s" % sdk_dir)
        shutil.rmtree(sdk_dir)

    # Copy components to linkkit sdk dir; an empty dest means "sdk root".
    for src, dest in compdirs + example_files:
        tmp_src = os.path.join(source_dir, src)
        if dest:
            tmp_dest = os.path.join(build_dir, SDKNAME, dest)
        else:
            tmp_dest = os.path.join(build_dir, SDKNAME)
        dest_dir = os.path.dirname(tmp_dest)
        if not os.path.isdir(dest_dir):
            os.makedirs(dest_dir)
        print("[INFO]: Copy %s to %s" % (tmp_src, tmp_dest))
        if os.path.isdir(tmp_src):
            shutil.copytree(tmp_src, tmp_dest)
        else:
            shutil.copyfile(tmp_src, tmp_dest)

    # Generate tarball: archive <build_dir>/<SDKNAME> rooted at build_dir.
    print("[INFO]: Creating tarball ...")
    tarball = shutil.make_archive(sdk_dir, "gztar", build_dir, SDKNAME)
    print("[INFO]: Linkkit SDK exported at: %s" % tarball)
if __name__ == "__main__":
main(sys.argv)
| 33.25641 | 86 | 0.623362 |
import sys, os
import shutil
SDKNAME = "linkkit-sdk-c"
compdirs = [
['middleware/linkkit/sdk-c', ''],
['middleware/uagent/uota', 'src/services/uota'],
['utility/libc/compilers/armlibc', 'src/infra/stdc/armlibc'],
['utility/libc/compilers/iar', 'src/infra/stdc/iarlibc'],
]
examples = ['linkkitapp', 'linkkit_gateway', 'mqttapp', 'coapapp']
example_files = []
for example in examples:
if example.startswith('linkkit'):
dist_example_dir = "linkkit"
elif example.startswith('mqtt'):
dist_example_dir = "mqtt"
elif example.startswith('coap'):
dist_example_dir = "coap"
for filename in os.listdir('app/example/' + example):
if (filename.find('example') != -1 and filename.endswith('.c')):
src = os.path.join('app/example', example, filename)
dest = os.path.join('examples', dist_example_dir, filename)
example_files += [[src, dest]]
elif filename == "newapi" or filename == "data":
for filename2 in os.listdir('app/example/' + example +"/" +filename):
src = os.path.join('app/example', example, filename, filename2)
dest = os.path.join('examples', dist_example_dir, filename, filename2)
example_files += [[src, dest]]
def main(argv):
source_dir = os.path.abspath(sys.argv[1])
build_dir = os.path.abspath(sys.argv[2])
sdk_dir = os.path.join(build_dir, SDKNAME)
if os.path.isdir(sdk_dir):
print("[INFO]: Removing existing %s" % sdk_dir)
shutil.rmtree(sdk_dir)
for src, dest in compdirs + example_files:
tmp_src = os.path.join(source_dir, src)
if dest:
tmp_dest = os.path.join(build_dir, SDKNAME, dest)
else:
tmp_dest = os.path.join(build_dir, SDKNAME)
dest_dir = os.path.dirname(tmp_dest)
if not os.path.isdir(dest_dir):
os.makedirs(dest_dir)
print("[INFO]: Copy %s to %s" % (tmp_src, tmp_dest))
if os.path.isdir(tmp_src):
shutil.copytree(tmp_src, tmp_dest)
else:
shutil.copyfile(tmp_src, tmp_dest)
root_dir = build_dir
base_dir = SDKNAME
archive_name = sdk_dir
archive_format = "gztar"
print("[INFO]: Creating tarball ...")
tarball = shutil.make_archive(archive_name, archive_format, root_dir, base_dir)
print("[INFO]: Linkkit SDK exported at: %s" % tarball)
if __name__ == "__main__":
main(sys.argv)
| false | true |
1c45c374300575c38d0712283bbc628b33dfa7e8 | 20,243 | py | Python | save/tokyo202112_MemGCRN_c1to1_20220208115005_time/traintest_MemGCRN.py | deepkashiwa20/TrafficAccident | c5fb26106137a4e85e5b5aa1e8ffdbb672a61988 | [
"MIT"
] | null | null | null | save/tokyo202112_MemGCRN_c1to1_20220208115005_time/traintest_MemGCRN.py | deepkashiwa20/TrafficAccident | c5fb26106137a4e85e5b5aa1e8ffdbb672a61988 | [
"MIT"
] | null | null | null | save/tokyo202112_MemGCRN_c1to1_20220208115005_time/traintest_MemGCRN.py | deepkashiwa20/TrafficAccident | c5fb26106137a4e85e5b5aa1e8ffdbb672a61988 | [
"MIT"
] | null | null | null | import sys
import os
import shutil
import math
import numpy as np
import pandas as pd
import scipy.sparse as ss
from sklearn.preprocessing import StandardScaler, MinMaxScaler
from datetime import datetime
import time
import torch
import torch.nn as nn
import torch.nn.init as init
import torch.nn.functional as F
from torchsummary import summary
import argparse
from configparser import ConfigParser
import logging
import Metrics
from MemGCRN import MemGCRN
from Utils import *
def refineXSYS(XS, YS):
    """Split raw input/target tensors into data channels and covariates.

    The trailing channel of each tensor is the covariate (time or history);
    the leading `opt.channelin` / `opt.channelout` channels are the data.

    Returns:
        (XS, YS, XCov, YCov)
    """
    assert opt.time or opt.history, 'it should have one covariate time or history'
    x_cov = XS[..., -1:]
    y_cov = YS[..., -1:]
    x_data = XS[:, :, :, :opt.channelin]
    y_data = YS[:, :, :, :opt.channelout]
    return x_data, y_data, x_cov, y_cov
def print_params(model):
    """Log every trainable parameter (name, shape, element count) and the total.

    Only parameters with requires_grad=True are listed and counted.
    """
    param_count = 0
    logger.info('Trainable parameter list:')
    for name, param in model.named_parameters():
        if param.requires_grad:
            # Fix: the per-parameter lines used print() and so bypassed the
            # log file; route them through the logger with lazy %-args.
            logger.info('%s %s %d', name, param.shape, param.numel())
            param_count += param.numel()
    logger.info('\n In total: %d trainable parameters. \n', param_count)
    return
def getModel(mode):
    """Construct a MemGCRN on the target device.

    In 'train' mode additionally prints a model summary, logs the trainable
    parameters, and (re)initialises every weight: Xavier-uniform for matrices,
    plain uniform for vectors/biases.
    """
    model_kwargs = dict(
        num_nodes=num_variable,
        input_dim=opt.channelin,
        output_dim=opt.channelout,
        horizon=opt.seq_len,
        rnn_units=opt.hiddenunits,
        num_layers=opt.num_layers,
        mem_num=opt.mem_num,
        mem_dim=opt.mem_dim,
        decoder_type=opt.decoder,
        go_type=opt.go,
    )
    model = MemGCRN(**model_kwargs).to(device)
    if mode == 'train':
        summary(model,
                [(opt.his_len, num_variable, opt.channelin),
                 (opt.seq_len, num_variable, opt.channelout)],
                device=device)
        print_params(model)
        for p in model.parameters():
            init_fn = nn.init.xavier_uniform_ if p.dim() > 1 else nn.init.uniform_
            init_fn(p)
    return model
def evaluateModel(model, data_iter, ycov_flag):
    """Evaluate `model` over `data_iter` without gradient tracking.

    Batches are (x, y, y_cov) when `ycov_flag` is truthy, else (x, y). The
    combined loss is:
        loss1 (prediction, opt.loss: MSE or MAE)
        + opt.lamb  * loss2 (triplet margin, memory separation)
        + opt.lamb1 * loss3 (MSE compactness against the positive prototype)

    Returns:
        (loss, loss1, loss2, loss3, YS_pred): sample-weighted mean losses and
        the stacked predictions as a numpy array.
    """
    if opt.loss == 'MSE':
        criterion = nn.MSELoss()
    if opt.loss == 'MAE':
        criterion = nn.L1Loss()
    separate_loss = nn.TripletMarginLoss(margin=1.0)
    compact_loss = nn.MSELoss()
    model.eval()
    loss_sum, n, YS_pred = 0.0, 0, []
    loss_sum1, loss_sum2, loss_sum3 = 0.0, 0.0, 0.0
    with torch.no_grad():
        for batch in data_iter:
            # Single loop replaces the two copy-pasted ycov/no-ycov branches;
            # only the unpacking and the forward call differ.
            if ycov_flag:
                x, y, y_cov = batch
                y_pred, h_att, query, pos, neg = model(x, y_cov)
            else:
                x, y = batch
                y_pred, h_att, query, pos, neg = model(x)
            loss1 = criterion(y_pred, y)
            loss2 = separate_loss(query, pos.detach(), neg.detach())
            loss3 = compact_loss(query, pos.detach())
            loss = loss1 + opt.lamb * loss2 + opt.lamb1 * loss3
            batch_size = y.shape[0]
            loss_sum += loss.item() * batch_size
            loss_sum1 += loss1.item() * batch_size
            loss_sum2 += loss2.item() * batch_size
            loss_sum3 += loss3.item() * batch_size
            n += batch_size
            YS_pred.append(y_pred.cpu().numpy())
    loss = loss_sum / n
    loss1 = loss_sum1 / n
    loss2 = loss_sum2 / n
    loss3 = loss_sum3 / n
    YS_pred = np.vstack(YS_pred)
    return loss, loss1, loss2, loss3, YS_pred
def trainModel(name, mode, XS, YS, YCov):
    """Train a MemGCRN model with early stopping and log final trainval scores.

    Args:
        name: model name used in log/score lines.
        mode: passed to getModel(); 'train' triggers summary + re-init.
        XS, YS: numpy input/target tensors (already scaled).
        YCov: optional numpy y-covariate tensor; None disables the extra input.

    Side effects: writes the best checkpoint to `modelpt_path`, appends
    per-epoch lines to `epochlog_path`, and logs metrics via `logger`.
    Relies on module globals: opt, device, scaler, logger, Metrics.
    """
    logger.info('Model Training Started ...', time.ctime())
    logger.info('TIMESTEP_IN, TIMESTEP_OUT', opt.his_len, opt.seq_len)
    model = getModel(mode)
    XS_torch, YS_torch = torch.Tensor(XS).to(device), torch.Tensor(YS).to(device)
    logger.info('XS_torch.shape:  ', XS_torch.shape)
    logger.info('YS_torch.shape:  ', YS_torch.shape)
    if YCov is not None:
        YCov_torch = torch.Tensor(YCov).to(device)
        logger.info('YCov_torch.shape:  ', YCov_torch.shape)
        trainval_data = torch.utils.data.TensorDataset(XS_torch, YS_torch, YCov_torch)
    else:
        trainval_data = torch.utils.data.TensorDataset(XS_torch, YS_torch)
    # Chronological (unshuffled) head/tail split: first (1 - val_ratio) for
    # training, the remainder for validation.
    trainval_size = len(trainval_data)
    train_size = int(trainval_size * (1 - opt.val_ratio))
    train_data = torch.utils.data.Subset(trainval_data, list(range(0, train_size)))
    val_data = torch.utils.data.Subset(trainval_data, list(range(train_size, trainval_size)))
    train_iter = torch.utils.data.DataLoader(train_data, opt.batch_size, shuffle=False) # drop_last=True
    val_iter = torch.utils.data.DataLoader(val_data, opt.batch_size, shuffle=False) # drop_last=True
    trainval_iter = torch.utils.data.DataLoader(trainval_data, opt.batch_size, shuffle=False) # drop_last=True
    optimizer = torch.optim.Adam(model.parameters(), lr=opt.lr)
    # NOTE(review): an opt.loss other than 'MSE'/'MAE' leaves `criterion`
    # unbound and fails later with NameError — confirm intended choices.
    if opt.loss == 'MSE':
        criterion = nn.MSELoss()
    if opt.loss == 'MAE':
        criterion = nn.L1Loss()
    separate_loss = nn.TripletMarginLoss(margin=1.0)
    compact_loss = nn.MSELoss()
    min_val_loss = np.inf
    wait = 0  # epochs since the last validation improvement (early stopping)
    for epoch in range(opt.epoch):
        starttime = datetime.now()
        loss_sum, n = 0.0, 0
        loss_sum1, loss_sum2, loss_sum3 = 0.0, 0.0, 0.0
        model.train()
        if YCov is not None:
            for x, y, ycov in train_iter:
                optimizer.zero_grad()
                y_pred, h_att, query, pos, neg = model(x, ycov)
                # loss1: prediction; loss2: triplet separation of memory
                # queries; loss3: compactness to the positive prototype.
                loss1 = criterion(y_pred, y)
                loss2 = separate_loss(query, pos.detach(), neg.detach())
                loss3 = compact_loss(query, pos.detach())
                loss = loss1 + opt.lamb * loss2 + opt.lamb1 * loss3
                loss.backward()
                optimizer.step()
                # Sums are weighted by batch size so the epoch mean is exact.
                loss_sum += loss.item() * y.shape[0]
                loss_sum1 += loss1.item() * y.shape[0]
                loss_sum2 += loss2.item() * y.shape[0]
                loss_sum3 += loss3.item() * y.shape[0]
                n += y.shape[0]
        else:
            for x, y in train_iter:
                optimizer.zero_grad()
                y_pred, h_att, query, pos, neg = model(x)
                loss1 = criterion(y_pred, y)
                loss2 = separate_loss(query, pos.detach(), neg.detach())
                loss3 = compact_loss(query, pos.detach())
                loss = loss1 + opt.lamb * loss2 + opt.lamb1 * loss3
                loss.backward()
                optimizer.step()
                loss_sum += loss.item() * y.shape[0]
                loss_sum1 += loss1.item() * y.shape[0]
                loss_sum2 += loss2.item() * y.shape[0]
                loss_sum3 += loss3.item() * y.shape[0]
                n += y.shape[0]
        train_loss = loss_sum / n
        train_loss1 = loss_sum1 / n
        train_loss2 = loss_sum2 / n
        train_loss3 = loss_sum3 / n
        val_loss, val_loss1, val_loss2, val_loss3, _ = evaluateModel(model, val_iter, YCov is not None)
        if val_loss < min_val_loss:
            # New best: reset patience and checkpoint the weights.
            wait = 0
            min_val_loss = val_loss
            torch.save(model.state_dict(), modelpt_path)
        else:
            wait += 1
            if wait == opt.patience:
                logger.info('Early stopping at epoch: %d' % epoch)
                break
        endtime = datetime.now()
        epoch_time = (endtime - starttime).seconds
        logger.info("epoch", epoch, "time used:", epoch_time," seconds ", "train loss:", train_loss, train_loss1, train_loss2, train_loss3, "validation loss:", val_loss, val_loss1, val_loss2, val_loss3)
        with open(epochlog_path, 'a') as f:
            f.write("%s, %d, %s, %d, %s, %s, %.6f, %s, %.6f\n" % ("epoch", epoch, "time used", epoch_time, "seconds", "train loss", train_loss, "validation loss:", val_loss))
    # torch_score = train_loss
    # Final pass over the whole trainval split with the last (not necessarily
    # best-checkpoint) weights, then invert the scaling before scoring.
    loss, loss1, loss2, loss3, YS_pred = evaluateModel(model, trainval_iter, YCov is not None)
    logger.info('trainval loss, loss1, loss2, loss3', loss, loss1, loss2, loss3)
    logger.info('YS.shape, YS_pred.shape,', YS.shape, YS_pred.shape)
    YS = YS[:YS_pred.shape[0], ...]
    YS, YS_pred = np.squeeze(YS), np.squeeze(YS_pred)
    YS, YS_pred = YS.reshape(-1, YS.shape[-1]), YS_pred.reshape(-1, YS_pred.shape[-1])
    YS, YS_pred = scaler.inverse_transform(YS), scaler.inverse_transform(YS_pred)
    YS, YS_pred = YS.reshape(-1, opt.seq_len, YS.shape[-1]), YS_pred.reshape(-1, opt.seq_len, YS_pred.shape[-1])
    logger.info('YS.shape, YS_pred.shape,', YS.shape, YS_pred.shape)
    MSE, RMSE, MAE, MAPE = Metrics.evaluate(YS, YS_pred)
    logger.info('*' * 40)
    logger.info("%s, %s, Torch MSE, %.6f, %.6f, %.6f, %.6f" % (name, mode, train_loss, train_loss1, train_loss2, train_loss3))
    logger.info("%s, %s, MSE, RMSE, MAE, MAPE, %.6f, %.6f, %.6f, %.6f" % (name, mode, MSE, RMSE, MAE, MAPE))
    logger.info('Model Training Ended ...', time.ctime())
def testModel(name, mode, XS, YS, YCov, Mask=None):
    """Load the best checkpoint, predict on the test set, and score it.

    Scores are computed both on the full test set and on the `Mask`-selected
    subset (samples with an incident at the first prediction step t+1).
    Appends metric lines to `score_path` and logs via `logger`.
    Relies on module globals: opt, device, scaler, logger, Metrics,
    modelpt_path, score_path.
    """
    def testScore(YS, YS_pred, message):
        # Closure: reads loss/loss1/loss2/loss3 from the enclosing scope,
        # which are assigned by evaluateModel() below before any call here.
        MSE, RMSE, MAE, MAPE = Metrics.evaluate(YS, YS_pred)
        logger.info(message)
        logger.info('YS.shape, YS_pred.shape,', YS.shape, YS_pred.shape)
        logger.info("%s, %s, Torch MSE, %.6f, %.6f, %.6f, %.6f" % (name, mode, loss, loss1, loss2, loss3))
        logger.info("all pred steps, %s, %s, MSE, RMSE, MAE, MAPE, %.6f, %.6f, %.6f, %.6f" % (name, mode, MSE, RMSE, MAE, MAPE))
        with open(score_path, 'a') as f:
            f.write("all pred steps, %s, %s, MSE, RMSE, MAE, MAPE, %.6f, %.6f, %.6f, %.6f\n" % (name, mode, MSE, RMSE, MAE, MAPE))
            # Per-horizon-step breakdown (arrays are (B, N, seq_len) here).
            for i in range(opt.seq_len):
                MSE, RMSE, MAE, MAPE = Metrics.evaluate(YS[..., i], YS_pred[..., i])
                logger.info("%d step, %s, %s, MSE, RMSE, MAE, MAPE, %.6f, %.6f, %.6f, %.6f" % (i+1, name, mode, MSE, RMSE, MAE, MAPE))
                f.write("%d step, %s, %s, MSE, RMSE, MAE, MAPE, %.6f, %.6f, %.6f, %.6f\n" % (i+1, name, mode, MSE, RMSE, MAE, MAPE))
        return None
    logger.info('Model Testing Started ...', time.ctime())
    logger.info('TIMESTEP_IN, TIMESTEP_OUT', opt.his_len, opt.seq_len)
    model = getModel(mode)
    model.load_state_dict(torch.load(modelpt_path))
    XS_torch, YS_torch = torch.Tensor(XS).to(device), torch.Tensor(YS).to(device)
    if YCov is not None:
        YCov_torch = torch.Tensor(YCov).to(device)
        test_data = torch.utils.data.TensorDataset(XS_torch, YS_torch, YCov_torch)
    else:
        test_data = torch.utils.data.TensorDataset(XS_torch, YS_torch)
    test_iter = torch.utils.data.DataLoader(test_data, opt.batch_size, shuffle=False) # drop_last=True
    loss, loss1, loss2, loss3, YS_pred = evaluateModel(model, test_iter, YCov is not None)
    logger.info('test loss, loss1, loss2, loss3', loss, loss1, loss2, loss3)
    logger.info('YS.shape, YS_pred.shape,', YS.shape, YS_pred.shape)
    YS = YS[:YS_pred.shape[0], ...]
    # Undo the standard scaling in 2-D, then restore (B, seq_len, N) and
    # transpose to (B, N, seq_len) so Mask indexes the batch dimension.
    YS, YS_pred = np.squeeze(YS), np.squeeze(YS_pred)
    YS, YS_pred = YS.reshape(-1, YS.shape[-1]), YS_pred.reshape(-1, YS_pred.shape[-1])
    YS, YS_pred = scaler.inverse_transform(YS), scaler.inverse_transform(YS_pred)
    YS, YS_pred = YS.reshape(-1, opt.seq_len, YS.shape[-1]), YS_pred.reshape(-1, opt.seq_len, YS_pred.shape[-1])
    YS, YS_pred = YS.transpose(0, 2, 1), YS_pred.transpose(0, 2, 1)
    # np.save(path + f'/{name}_prediction.npy', YS_pred)
    # np.save(path + f'/{name}_groundtruth.npy', YS)
    # np.save(path + f'/{name}_Mask_t1.npy', Mask)
    testScore(YS, YS_pred, '********* Evaluation on the whole testing dataset *********')
    testScore(YS[Mask], YS_pred[Mask], '********* Evaluation on the selected testing dataset when incident happen at t+1 *********')
    logger.info('Model Testing Ended ...', time.ctime())
#########################################################################################
# Command-line options for the MemGCRN experiment runner.
parser = argparse.ArgumentParser()
parser.add_argument("--loss", type=str, default='MAE', help="MAE, MSE, SELF")
parser.add_argument("--epoch", type=int, default=200, help="number of epochs of training")
parser.add_argument("--batch_size", type=int, default=64, help="size of the batches")
parser.add_argument("--lr", type=float, default=0.001, help="adam: learning rate")
# NOTE(review): patience is declared float but used as an epoch counter
# (`wait == opt.patience`) — int would be the natural type; confirm.
parser.add_argument("--patience", type=float, default=10, help="patience used for early stop")
parser.add_argument('--val_ratio', type=float, default=0.25, help='the ratio of validation data among the trainval ratio')
parser.add_argument('--seed', type=int, default=1234, help='Random seed.')
parser.add_argument('--seq_len', type=int, default=6, help='sequence length of values, which should be even nums (2,4,6,12)')
parser.add_argument('--his_len', type=int, default=6, help='sequence length of observed historical values')
parser.add_argument('--month', type=str, default='202112', help='which experiment setting (month) to run')
parser.add_argument('--city', type=str, default='tokyo', help='which experiment setting (city) to run')
parser.add_argument('--channelin', type=int, default=1, help='number of input channel')
parser.add_argument('--channelout', type=int, default=1, help='number of output channel')
parser.add_argument('--time', type=bool, default=False, help='whether to use float time embedding')
parser.add_argument('--history', type=bool, default=False, help='whether to use historical data')
parser.add_argument('--num_layers', type=int, default=1, help='number of layers')
parser.add_argument('--hiddenunits', type=int, default=32, help='number of hidden units')
parser.add_argument('--mem_num', type=int, default=10, help='number of memory')
parser.add_argument('--mem_dim', type=int, default=32, help='dimension of memory')
# NOTE(review): help text repeats 'stepwise' — the second option name is
# presumably different (verify against MemGCRN's decoder_type choices).
parser.add_argument("--decoder", type=str, default='stepwise', help="which type of decoder: stepwise or stepwise")
parser.add_argument('--ycov', type=str, default='time', help='which ycov to use: time or history')
parser.add_argument('--go', type=str, default='random', help='which type of decoder go: random or last')
parser.add_argument('--model', type=str, default='MemGCRN', help='which model to use')
parser.add_argument('--gpu', type=int, default=3, help='which gpu to use')
parser.add_argument('--lamb', type=float, default=0.01, help='lamb value for separate loss')
parser.add_argument('--lamb1', type=float, default=0.01, help='lamb1 value for compact loss')
opt = parser.parse_args()
# optimal1: --ycov=history --go=random --lamb=0.01 --lamb1=0.01
# optimal2: --ycov=time --go=last --lamb=0.01 --lamb1=0.0
# Experiment configuration from params.txt (month/city-keyed sections).
config = ConfigParser()
config.read('params.txt', encoding='UTF-8')
# NOTE(review): eval() on config values executes arbitrary code from
# params.txt — acceptable for a trusted local file, but worth flagging.
train_month = eval(config[opt.month]['train_month'])
test_month = eval(config[opt.month]['test_month'])
traffic_path = config[opt.month]['traffic_path']
subroad_path = config[opt.city]['subroad_path']
road_path = config['common']['road_path']
adj_path = config['common']['adjdis_path']
# adj_path = config['common']['adj01_path']
num_variable = len(np.loadtxt(subroad_path).astype(int))
N_link = config.getint('common', 'N_link')
feature_list = ['speed_typea']
# Exactly one y-covariate source is enabled: float time-of-week embedding
# or historical speed values.
if opt.ycov=='time':
    opt.time = True
elif opt.ycov=='history':
    opt.history = True
else:
    assert False, 'ycov type must be float time or float history value'
if opt.time: feature_list.append('weekdaytime')
if opt.history: feature_list.append('speed_typea_y')
# opt.channelin = len(feature_list) # Here, input for the encoder is just speed, w/o xcov is better.
# feature_list = ['speed_typea', 'accident_flag', 'real_accident_flag', 'weekdaytime', 'speed_typea_y']
# Per-run output directory named after city/month/model/timestamp; the
# running script and the model source are snapshotted into it.
_, filename = os.path.split(os.path.abspath(sys.argv[0]))
filename = os.path.splitext(filename)[0]
model_name = opt.model
timestring = time.strftime('%Y%m%d%H%M%S', time.localtime())
path = f'../save/{opt.city}{opt.month}_{model_name}_c{opt.channelin}to{opt.channelout}_{timestring}_{opt.ycov}'
logging_path = f'{path}/{model_name}_{timestring}_logging.txt'
score_path = f'{path}/{model_name}_{timestring}_scores.txt'
epochlog_path = f'{path}/{model_name}_{timestring}_epochlog.txt'
modelpt_path = f'{path}/{model_name}_{timestring}.pt'
if not os.path.exists(path): os.makedirs(path)
shutil.copy2(sys.argv[0], path)
shutil.copy2(f'{model_name}.py', path)
# Module-level logger; file and console handlers are attached below.
logger = logging.getLogger(__name__)
logger.setLevel(level = logging.INFO)
class MyFormatter(logging.Formatter):
    """Formatter that appends all record args to the message, print-style.

    Lets callers write ``logger.info('label', value, other)`` without
    %-placeholders: each positional arg is str()-converted and joined onto
    the message with single spaces (a call with no args still gains one
    trailing space). Args are then cleared so the base formatter performs
    no %-interpolation of its own.
    """
    def format(self, record):
        joined_args = ' '.join(str(arg) for arg in record.args)
        record.msg = str(record.msg) + ' ' + joined_args
        record.args = tuple()
        return super().format(record)
# Wire the custom formatter to both a per-run log file and the console.
formatter = MyFormatter()
handler = logging.FileHandler(logging_path, mode='a')
handler.setLevel(logging.INFO)
handler.setFormatter(formatter)
console = logging.StreamHandler()
console.setLevel(logging.INFO)
console.setFormatter(formatter)
logger.addHandler(handler)
logger.addHandler(console)
# Dump the effective run configuration at startup for reproducibility.
logger.info('lamb', opt.lamb)
logger.info('lamb1', opt.lamb1)
logger.info('experiment_city', opt.city)
logger.info('experiment_month', opt.month)
logger.info('model_name', opt.model)
logger.info('mem_num', opt.mem_num)
logger.info('mem_dim', opt.mem_dim)
logger.info('decoder_type', opt.decoder)
logger.info('go_type', opt.go)
logger.info('ycov_type', opt.ycov)
logger.info('batch_size', opt.batch_size)
logger.info('rnn_units', opt.hiddenunits)
logger.info('num_layers', opt.num_layers)
logger.info('channnel_in', opt.channelin)
logger.info('channnel_out', opt.channelout)
logger.info('feature_time', opt.time)
logger.info('feature_history', opt.history)
#####################################################################################################
# Pin every BLAS/OpenMP backend (and torch) to a single CPU thread.
cpu_num = 1
os.environ ['OMP_NUM_THREADS'] = str(cpu_num)
os.environ ['OPENBLAS_NUM_THREADS'] = str(cpu_num)
os.environ ['MKL_NUM_THREADS'] = str(cpu_num)
os.environ ['VECLIB_MAXIMUM_THREADS'] = str(cpu_num)
os.environ ['NUMEXPR_NUM_THREADS'] = str(cpu_num)
torch.set_num_threads(cpu_num)
# Select the requested GPU (falling back to CPU) and seed all RNGs.
device = torch.device("cuda:{}".format(opt.gpu)) if torch.cuda.is_available() else torch.device("cpu")
np.random.seed(opt.seed)
torch.manual_seed(opt.seed)
if torch.cuda.is_available():
    torch.cuda.manual_seed(opt.seed)
# Shared scaler: fit on speed channels in main(), used by train/test.
scaler = StandardScaler()
def main():
    """Load data, fit the scaler, then train and test the model.

    Loads per-month traffic data for the configured train/test months,
    standard-scales the speed channel (channel 0) using statistics from
    train AND test speeds combined, builds windowed samples, and runs
    trainModel()/testModel(). Relies on module globals set up above.
    """
    train_data = [get_data(config[month]['traffic_path'], N_link, subroad_path, feature_list) for month in train_month]
    test_data = [get_data(config[month]['traffic_path'], N_link, subroad_path, feature_list) for month in test_month]
    test_flag = [get_data(config[month]['traffic_path'], N_link, subroad_path, ['accident_flag']) for month in test_month]
    # Fit the scaler on channel 0 (speed) of every month.
    # NOTE(review): test-month speeds are included in the fit — confirm this
    # leakage is intentional.
    speed_data = []
    for data in train_data:
        speed_data.append(data[:,:,0])
    for data in test_data:
        speed_data.append(data[:,:,0])
    speed_data = np.vstack(speed_data)
    scaler.fit(speed_data)
    # Scale the speed channel in place; covariate channels stay raw.
    for data in train_data:
        logger.info('train_data', data.shape)
        data[:,:,0] = scaler.transform(data[:,:,0])
    for data in test_data:
        logger.info('test_data', data.shape)
        data[:,:,0] = scaler.transform(data[:,:,0])
    logger.info(opt.city, opt.month, 'training started', time.ctime())
    trainXS, trainYS = getXSYS(train_data, opt.his_len, opt.seq_len)
    trainXS, trainYS, trainXCov, trainYCov = refineXSYS(trainXS, trainYS)
    logger.info('TRAIN XS.shape YS,shape, XCov.shape, YCov.shape', trainXS.shape, trainYS.shape, trainXCov.shape, trainYCov.shape)
    trainModel(model_name, 'train', trainXS, trainYS, trainYCov)
    logger.info(opt.city, opt.month, 'testing started', time.ctime())
    testXS, testYS = getXSYS(test_data, opt.his_len, opt.seq_len)
    _, testYSFlag = getXSYS(test_flag, opt.his_len, opt.seq_len)
    testYMask = testYSFlag[:, 0, :, 0] > 0 # (B, N) incident happen at the first prediction timeslot, t+1.
    testXS, testYS, testXCov, testYCov = refineXSYS(testXS, testYS)
    logger.info('TEST XS.shape, YS.shape, XCov.shape, YCov.shape, YMask.shape', testXS.shape, testYS.shape, testXCov.shape, testYCov.shape, testYMask.shape)
    testModel(model_name, 'test', testXS, testYS, testYCov, testYMask)
if __name__ == '__main__':
    main()
| 50.230769 | 202 | 0.644173 | import sys
import os
import shutil
import math
import numpy as np
import pandas as pd
import scipy.sparse as ss
from sklearn.preprocessing import StandardScaler, MinMaxScaler
from datetime import datetime
import time
import torch
import torch.nn as nn
import torch.nn.init as init
import torch.nn.functional as F
from torchsummary import summary
import argparse
from configparser import ConfigParser
import logging
import Metrics
from MemGCRN import MemGCRN
from Utils import *
def refineXSYS(XS, YS):
assert opt.time or opt.history, 'it should have one covariate time or history'
XCov, YCov = XS[..., -1:], YS[..., -1:]
XS, YS = XS[:, :, :, :opt.channelin], YS[:, :, :, :opt.channelout]
return XS, YS, XCov, YCov
def print_params(model):
param_count = 0
logger.info('Trainable parameter list:')
for name, param in model.named_parameters():
if param.requires_grad:
print(name, param.shape, param.numel())
param_count += param.numel()
logger.info(f'\n In total: {param_count} trainable parameters. \n')
return
def getModel(mode):
model = MemGCRN(num_nodes=num_variable, input_dim=opt.channelin, output_dim=opt.channelout, horizon=opt.seq_len,
rnn_units=opt.hiddenunits, num_layers=opt.num_layers, mem_num=opt.mem_num, mem_dim=opt.mem_dim,
decoder_type=opt.decoder, go_type=opt.go).to(device)
if mode == 'train':
summary(model, [(opt.his_len, num_variable, opt.channelin), (opt.seq_len, num_variable, opt.channelout)], device=device)
print_params(model)
for p in model.parameters():
if p.dim() > 1:
nn.init.xavier_uniform_(p)
else:
nn.init.uniform_(p)
return model
def evaluateModel(model, data_iter, ycov_flag):
if opt.loss == 'MSE':
criterion = nn.MSELoss()
if opt.loss == 'MAE':
criterion = nn.L1Loss()
separate_loss = nn.TripletMarginLoss(margin=1.0)
compact_loss = nn.MSELoss()
model.eval()
loss_sum, n, YS_pred = 0.0, 0, []
loss_sum1, loss_sum2, loss_sum3 = 0.0, 0.0, 0.0
with torch.no_grad():
if ycov_flag:
for x, y, y_cov in data_iter:
y_pred, h_att, query, pos, neg = model(x, y_cov)
loss1 = criterion(y_pred, y)
loss2 = separate_loss(query, pos.detach(), neg.detach())
loss3 = compact_loss(query, pos.detach())
loss = loss1 + opt.lamb * loss2 + opt.lamb1 * loss3
loss_sum += loss.item() * y.shape[0]
loss_sum1 += loss1.item() * y.shape[0]
loss_sum2 += loss2.item() * y.shape[0]
loss_sum3 += loss3.item() * y.shape[0]
n += y.shape[0]
YS_pred.append(y_pred.cpu().numpy())
else:
for x, y in data_iter:
y_pred, h_att, query, pos, neg = model(x)
loss1 = criterion(y_pred, y)
loss2 = separate_loss(query, pos.detach(), neg.detach())
loss3 = compact_loss(query, pos.detach())
loss = loss1 + opt.lamb * loss2 + opt.lamb1 * loss3
loss_sum += loss.item() * y.shape[0]
loss_sum1 += loss1.item() * y.shape[0]
loss_sum2 += loss2.item() * y.shape[0]
loss_sum3 += loss3.item() * y.shape[0]
n += y.shape[0]
YS_pred.append(y_pred.cpu().numpy())
loss = loss_sum / n
loss1 = loss_sum1 / n
loss2 = loss_sum2 / n
loss3 = loss_sum3 / n
YS_pred = np.vstack(YS_pred)
return loss, loss1, loss2, loss3, YS_pred
def trainModel(name, mode, XS, YS, YCov):
logger.info('Model Training Started ...', time.ctime())
logger.info('TIMESTEP_IN, TIMESTEP_OUT', opt.his_len, opt.seq_len)
model = getModel(mode)
XS_torch, YS_torch = torch.Tensor(XS).to(device), torch.Tensor(YS).to(device)
logger.info('XS_torch.shape: ', XS_torch.shape)
logger.info('YS_torch.shape: ', YS_torch.shape)
if YCov is not None:
YCov_torch = torch.Tensor(YCov).to(device)
logger.info('YCov_torch.shape: ', YCov_torch.shape)
trainval_data = torch.utils.data.TensorDataset(XS_torch, YS_torch, YCov_torch)
else:
trainval_data = torch.utils.data.TensorDataset(XS_torch, YS_torch)
trainval_size = len(trainval_data)
train_size = int(trainval_size * (1 - opt.val_ratio))
train_data = torch.utils.data.Subset(trainval_data, list(range(0, train_size)))
val_data = torch.utils.data.Subset(trainval_data, list(range(train_size, trainval_size)))
train_iter = torch.utils.data.DataLoader(train_data, opt.batch_size, shuffle=False)
val_iter = torch.utils.data.DataLoader(val_data, opt.batch_size, shuffle=False)
trainval_iter = torch.utils.data.DataLoader(trainval_data, opt.batch_size, shuffle=False)
optimizer = torch.optim.Adam(model.parameters(), lr=opt.lr)
if opt.loss == 'MSE':
criterion = nn.MSELoss()
if opt.loss == 'MAE':
criterion = nn.L1Loss()
separate_loss = nn.TripletMarginLoss(margin=1.0)
compact_loss = nn.MSELoss()
min_val_loss = np.inf
wait = 0
for epoch in range(opt.epoch):
starttime = datetime.now()
loss_sum, n = 0.0, 0
loss_sum1, loss_sum2, loss_sum3 = 0.0, 0.0, 0.0
model.train()
if YCov is not None:
for x, y, ycov in train_iter:
optimizer.zero_grad()
y_pred, h_att, query, pos, neg = model(x, ycov)
loss1 = criterion(y_pred, y)
loss2 = separate_loss(query, pos.detach(), neg.detach())
loss3 = compact_loss(query, pos.detach())
loss = loss1 + opt.lamb * loss2 + opt.lamb1 * loss3
loss.backward()
optimizer.step()
loss_sum += loss.item() * y.shape[0]
loss_sum1 += loss1.item() * y.shape[0]
loss_sum2 += loss2.item() * y.shape[0]
loss_sum3 += loss3.item() * y.shape[0]
n += y.shape[0]
else:
for x, y in train_iter:
optimizer.zero_grad()
y_pred, h_att, query, pos, neg = model(x)
loss1 = criterion(y_pred, y)
loss2 = separate_loss(query, pos.detach(), neg.detach())
loss3 = compact_loss(query, pos.detach())
loss = loss1 + opt.lamb * loss2 + opt.lamb1 * loss3
loss.backward()
optimizer.step()
loss_sum += loss.item() * y.shape[0]
loss_sum1 += loss1.item() * y.shape[0]
loss_sum2 += loss2.item() * y.shape[0]
loss_sum3 += loss3.item() * y.shape[0]
n += y.shape[0]
train_loss = loss_sum / n
train_loss1 = loss_sum1 / n
train_loss2 = loss_sum2 / n
train_loss3 = loss_sum3 / n
val_loss, val_loss1, val_loss2, val_loss3, _ = evaluateModel(model, val_iter, YCov is not None)
if val_loss < min_val_loss:
wait = 0
min_val_loss = val_loss
torch.save(model.state_dict(), modelpt_path)
else:
wait += 1
if wait == opt.patience:
logger.info('Early stopping at epoch: %d' % epoch)
break
endtime = datetime.now()
epoch_time = (endtime - starttime).seconds
logger.info("epoch", epoch, "time used:", epoch_time," seconds ", "train loss:", train_loss, train_loss1, train_loss2, train_loss3, "validation loss:", val_loss, val_loss1, val_loss2, val_loss3)
with open(epochlog_path, 'a') as f:
f.write("%s, %d, %s, %d, %s, %s, %.6f, %s, %.6f\n" % ("epoch", epoch, "time used", epoch_time, "seconds", "train loss", train_loss, "validation loss:", val_loss))
loss, loss1, loss2, loss3, YS_pred = evaluateModel(model, trainval_iter, YCov is not None)
logger.info('trainval loss, loss1, loss2, loss3', loss, loss1, loss2, loss3)
logger.info('YS.shape, YS_pred.shape,', YS.shape, YS_pred.shape)
YS = YS[:YS_pred.shape[0], ...]
YS, YS_pred = np.squeeze(YS), np.squeeze(YS_pred)
YS, YS_pred = YS.reshape(-1, YS.shape[-1]), YS_pred.reshape(-1, YS_pred.shape[-1])
YS, YS_pred = scaler.inverse_transform(YS), scaler.inverse_transform(YS_pred)
YS, YS_pred = YS.reshape(-1, opt.seq_len, YS.shape[-1]), YS_pred.reshape(-1, opt.seq_len, YS_pred.shape[-1])
logger.info('YS.shape, YS_pred.shape,', YS.shape, YS_pred.shape)
MSE, RMSE, MAE, MAPE = Metrics.evaluate(YS, YS_pred)
logger.info('*' * 40)
logger.info("%s, %s, Torch MSE, %.6f, %.6f, %.6f, %.6f" % (name, mode, train_loss, train_loss1, train_loss2, train_loss3))
logger.info("%s, %s, MSE, RMSE, MAE, MAPE, %.6f, %.6f, %.6f, %.6f" % (name, mode, MSE, RMSE, MAE, MAPE))
logger.info('Model Training Ended ...', time.ctime())
def testModel(name, mode, XS, YS, YCov, Mask=None):
def testScore(YS, YS_pred, message):
MSE, RMSE, MAE, MAPE = Metrics.evaluate(YS, YS_pred)
logger.info(message)
logger.info('YS.shape, YS_pred.shape,', YS.shape, YS_pred.shape)
logger.info("%s, %s, Torch MSE, %.6f, %.6f, %.6f, %.6f" % (name, mode, loss, loss1, loss2, loss3))
logger.info("all pred steps, %s, %s, MSE, RMSE, MAE, MAPE, %.6f, %.6f, %.6f, %.6f" % (name, mode, MSE, RMSE, MAE, MAPE))
with open(score_path, 'a') as f:
f.write("all pred steps, %s, %s, MSE, RMSE, MAE, MAPE, %.6f, %.6f, %.6f, %.6f\n" % (name, mode, MSE, RMSE, MAE, MAPE))
for i in range(opt.seq_len):
MSE, RMSE, MAE, MAPE = Metrics.evaluate(YS[..., i], YS_pred[..., i])
logger.info("%d step, %s, %s, MSE, RMSE, MAE, MAPE, %.6f, %.6f, %.6f, %.6f" % (i+1, name, mode, MSE, RMSE, MAE, MAPE))
f.write("%d step, %s, %s, MSE, RMSE, MAE, MAPE, %.6f, %.6f, %.6f, %.6f\n" % (i+1, name, mode, MSE, RMSE, MAE, MAPE))
return None
logger.info('Model Testing Started ...', time.ctime())
logger.info('TIMESTEP_IN, TIMESTEP_OUT', opt.his_len, opt.seq_len)
model = getModel(mode)
model.load_state_dict(torch.load(modelpt_path))
XS_torch, YS_torch = torch.Tensor(XS).to(device), torch.Tensor(YS).to(device)
if YCov is not None:
YCov_torch = torch.Tensor(YCov).to(device)
test_data = torch.utils.data.TensorDataset(XS_torch, YS_torch, YCov_torch)
else:
test_data = torch.utils.data.TensorDataset(XS_torch, YS_torch)
test_iter = torch.utils.data.DataLoader(test_data, opt.batch_size, shuffle=False)
loss, loss1, loss2, loss3, YS_pred = evaluateModel(model, test_iter, YCov is not None)
logger.info('test loss, loss1, loss2, loss3', loss, loss1, loss2, loss3)
logger.info('YS.shape, YS_pred.shape,', YS.shape, YS_pred.shape)
YS = YS[:YS_pred.shape[0], ...]
YS, YS_pred = np.squeeze(YS), np.squeeze(YS_pred)
YS, YS_pred = YS.reshape(-1, YS.shape[-1]), YS_pred.reshape(-1, YS_pred.shape[-1])
YS, YS_pred = scaler.inverse_transform(YS), scaler.inverse_transform(YS_pred)
YS, YS_pred = YS.reshape(-1, opt.seq_len, YS.shape[-1]), YS_pred.reshape(-1, opt.seq_len, YS_pred.shape[-1])
YS, YS_pred = YS.transpose(0, 2, 1), YS_pred.transpose(0, 2, 1)
testScore(YS, YS_pred, '********* Evaluation on the whole testing dataset *********')
testScore(YS[Mask], YS_pred[Mask], '********* Evaluation on the selected testing dataset when incident happen at t+1 *********')
logger.info('Model Testing Ended ...', time.ctime())
)
handler.setLevel(logging.INFO)
handler.setFormatter(formatter)
console = logging.StreamHandler()
console.setLevel(logging.INFO)
console.setFormatter(formatter)
logger.addHandler(handler)
logger.addHandler(console)
logger.info('lamb', opt.lamb)
logger.info('lamb1', opt.lamb1)
logger.info('experiment_city', opt.city)
logger.info('experiment_month', opt.month)
logger.info('model_name', opt.model)
logger.info('mem_num', opt.mem_num)
logger.info('mem_dim', opt.mem_dim)
logger.info('decoder_type', opt.decoder)
logger.info('go_type', opt.go)
logger.info('ycov_type', opt.ycov)
logger.info('batch_size', opt.batch_size)
logger.info('rnn_units', opt.hiddenunits)
logger.info('num_layers', opt.num_layers)
logger.info('channnel_in', opt.channelin)
logger.info('channnel_out', opt.channelout)
logger.info('feature_time', opt.time)
logger.info('feature_history', opt.history)
| true | true |
1c45c40bf2888f61ea031bfc6439f06b59f6dae5 | 664 | py | Python | manage.py | njiiri12/neighbourhood | e36f04f450c352f3947ff991118e4c06cc5bcb87 | [
"MIT"
] | null | null | null | manage.py | njiiri12/neighbourhood | e36f04f450c352f3947ff991118e4c06cc5bcb87 | [
"MIT"
] | null | null | null | manage.py | njiiri12/neighbourhood | e36f04f450c352f3947ff991118e4c06cc5bcb87 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run administrative tasks."""
    # Point Django at this project's settings module unless the caller
    # already set one in the environment.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'NH_watch.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Re-raise with a friendlier hint while keeping the original cause.
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    # Dispatch to the Django management command named on the CLI.
    execute_from_command_line(sys.argv)
if __name__ == '__main__':
    main()
| 28.869565 | 73 | 0.679217 |
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'NH_watch.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| true | true |
1c45c46dba085dae7fe79cedf838de675bd3c279 | 5,081 | py | Python | opps/articles/views.py | jeanmask/opps | 031c6136c38d43aa6d1ccb25a94f7bcd65ccbf87 | [
"MIT"
] | 159 | 2015-01-03T16:36:35.000Z | 2022-03-29T20:50:13.000Z | opps/articles/views.py | jeanmask/opps | 031c6136c38d43aa6d1ccb25a94f7bcd65ccbf87 | [
"MIT"
] | 81 | 2015-01-02T21:26:16.000Z | 2021-05-29T12:24:52.000Z | opps/articles/views.py | jeanmask/opps | 031c6136c38d43aa6d1ccb25a94f7bcd65ccbf87 | [
"MIT"
] | 75 | 2015-01-23T13:41:03.000Z | 2021-09-24T03:45:23.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib.sites.models import get_current_site
from django.utils import timezone
from django.conf import settings
from opps.views.generic.list import ListView
from opps.containers.views import ContainerList
from opps.containers.models import Container, ContainerBox
from opps.articles.models import Album
class AlbumList(ContainerList):
    # List view for Album containers within a channel.
    model = Album
    type = 'articles'
    def get_template_names(self):
        """Return candidate template paths for this album listing.

        Always includes the app-level ``articles/album/list.html`` fallback;
        when a paginated page is requested (and this view is not excluded by
        OPPS_PAGINATE_NOT_APP) a domain-specific ``*_paginated.html``
        candidate is appended as well.
        """
        templates = []
        domain_folder = self.get_template_folder()
        list_name = 'list'
        templates.append('{0}/{1}/{2}.html'.format(
            self.model._meta.app_label,
            self.model._meta.module_name, list_name))
        if self.request.GET.get('page') and\
                self.__class__.__name__ not in\
                settings.OPPS_PAGINATE_NOT_APP:
            templates.append('{0}/{1}/{2}/{3}_paginated.html'.format(
                domain_folder, self.model._meta.app_label,
                self.model._meta.module_name, list_name))
        return templates
    def get_queryset(self):
        """Build the published-Album queryset for the current channel.

        Excludes containers already shown in the channel's ContainerBoxes
        (only on the first, unpaginated page) and returns None when no
        channel slug can be resolved.
        """
        # TODO: refatoring, used super()
        self.site = get_current_site(self.request)
        self.long_slug = self.get_long_slug()
        if not self.long_slug:
            return None
        self.set_channel_rules()
        self.articleboxes = ContainerBox.objects.filter(
            channel__long_slug=self.long_slug)
        is_paginated = self.page_kwarg in self.request.GET
        if not is_paginated:
            # First page only: hide items already rendered inside boxes.
            for box in self.articleboxes:
                self.excluded_ids.update(
                    [a.pk for a in box.ordered_containers()])
        filters = {}
        filters['site_domain'] = self.site.domain
        filters['date_available__lte'] = timezone.now()
        filters['published'] = True
        filters['child_class'] = 'Album'
        if self.channel and self.channel.is_root_node() and not is_paginated:
            filters['show_on_root_channel'] = True
        queryset = Container.objects.filter(
            **filters).exclude(pk__in=self.excluded_ids)
        return queryset._clone()
class AlbumChannelList(ListView):
    # Channel-wide listing of published Albums (root-channel items only).
    model = Album
    type = 'articles'
    template_name_suffix = 'album'
    def get_template_list(self, domain_folder="containers"):
        """Build the ordered list of candidate templates for this view.

        Most specific first: parent-channel templates (for grouped
        channels), the channel's own template, each ancestor channel's
        template (nearest first), then the generic app-level fallback.
        ``*_paginated.html`` variants are interleaved whenever a ``page``
        query parameter is present and this view is not listed in
        OPPS_PAGINATE_NOT_APP.
        """
        templates = []
        list_name = 'list'
        if self.template_name_suffix:
            # e.g. 'album_list'
            list_fullname = "{0}_{1}".format(self.template_name_suffix,
                                             list_name)
        if self.channel:
            if self.channel.group and self.channel.parent:
                templates.append('{0}/{1}/{2}.html'.format(
                    domain_folder,
                    self.channel.parent.long_slug,
                    list_fullname))
                if self.request.GET.get('page') and\
                        self.__class__.__name__ not in\
                        settings.OPPS_PAGINATE_NOT_APP:
                    templates.append('{0}/{1}/{2}_paginated.html'.format(
                        domain_folder, self.channel.parent.long_slug,
                        list_fullname))
            if self.request.GET.get('page') and\
               self.__class__.__name__ not in settings.OPPS_PAGINATE_NOT_APP:
                templates.append('{0}/{1}/{2}_paginated.html'.format(
                    domain_folder, self.channel.long_slug, list_fullname))
            templates.append('{0}/{1}/{2}.html'.format(
                domain_folder, self.channel.long_slug, list_fullname))
            # Walk ancestors from nearest to root for fallback templates.
            for t in self.channel.get_ancestors()[::-1]:
                templates.append('{0}/{1}/{2}.html'.format(
                    domain_folder, t.long_slug, list_fullname))
                if self.request.GET.get('page') and\
                        self.__class__.__name__ not in\
                        settings.OPPS_PAGINATE_NOT_APP:
                    templates.append('{0}/{1}/{2}_paginated.html'.format(
                        domain_folder, t.long_slug, list_fullname))
        if self.request.GET.get('page') and\
           self.__class__.__name__ not in settings.OPPS_PAGINATE_NOT_APP:
            templates.append('{0}/{1}_paginated.html'.format(domain_folder,
                                                             list_fullname))
        templates.append('{0}/{1}/{2}.html'.format(
            self.model._meta.app_label,
            self.model._meta.module_name,
            list_name))
        return templates
    def get_template_names(self):
        # Resolve the domain folder, then delegate to get_template_list().
        domain_folder = self.get_template_folder()
        template_list = self.get_template_list(domain_folder)
        return template_list
    def get_queryset(self):
        """Return published, currently-available root-channel Albums
        for the current site."""
        self.site = get_current_site(self.request)
        queryset = super(AlbumChannelList, self).get_queryset()
        filters = {}
        filters['site_domain'] = self.site.domain
        filters['date_available__lte'] = timezone.now()
        filters['published'] = True
        filters['show_on_root_channel'] = True
        queryset = queryset.filter(**filters)
        return queryset._clone()
| 35.531469 | 77 | 0.597717 |
from django.contrib.sites.models import get_current_site
from django.utils import timezone
from django.conf import settings
from opps.views.generic.list import ListView
from opps.containers.views import ContainerList
from opps.containers.models import Container, ContainerBox
from opps.articles.models import Album
class AlbumList(ContainerList):
    """Container list view restricted to ``Album`` containers."""

    model = Album
    type = 'articles'  # opps container type used by templates/URLs

    def get_template_names(self):
        """Build the ordered list of candidate template names.

        Always includes ``<app>/<module>/list.html``; when the request is
        paginated and this view class is not excluded via
        ``OPPS_PAGINATE_NOT_APP``, a ``_paginated`` variant under the
        domain folder is appended as well.
        """
        templates = []
        domain_folder = self.get_template_folder()
        list_name = 'list'
        templates.append('{0}/{1}/{2}.html'.format(
            self.model._meta.app_label,
            self.model._meta.module_name, list_name))
        # Paginated requests get an extra, more specific template candidate.
        if self.request.GET.get('page') and\
                self.__class__.__name__ not in\
                settings.OPPS_PAGINATE_NOT_APP:
            templates.append('{0}/{1}/{2}/{3}_paginated.html'.format(
                domain_folder, self.model._meta.app_label,
                self.model._meta.module_name, list_name))
        return templates

    def get_queryset(self):
        """Return published Album containers for the current site/channel.

        Containers already shown inside one of the channel's boxes are
        excluded on non-paginated requests so they are not listed twice.
        Returns ``None`` when no channel slug could be resolved.
        """
        self.site = get_current_site(self.request)
        self.long_slug = self.get_long_slug()
        if not self.long_slug:
            return None
        self.set_channel_rules()
        self.articleboxes = ContainerBox.objects.filter(
            channel__long_slug=self.long_slug)
        is_paginated = self.page_kwarg in self.request.GET
        if not is_paginated:
            # Skip containers already rendered inside the channel's boxes.
            for box in self.articleboxes:
                self.excluded_ids.update(
                    [a.pk for a in box.ordered_containers()])
        filters = {}
        filters['site_domain'] = self.site.domain
        filters['date_available__lte'] = timezone.now()
        filters['published'] = True
        filters['child_class'] = 'Album'
        # Root channels only show containers flagged for the root on page 1.
        if self.channel and self.channel.is_root_node() and not is_paginated:
            filters['show_on_root_channel'] = True
        queryset = Container.objects.filter(
            **filters).exclude(pk__in=self.excluded_ids)
        # _clone() returns a fresh queryset so cached results are not reused.
        return queryset._clone()
class AlbumChannelList(ListView):
    """List view for albums within a channel tree."""

    model = Album
    type = 'articles'  # opps container type used by templates/URLs
    template_name_suffix = 'album'

    def get_template_list(self, domain_folder="containers"):
        """Build the ordered template-candidate list for this channel.

        Candidates go from most specific (parent channel, current channel,
        each ancestor channel) to least specific (the model's default
        ``list.html``), with ``_paginated`` variants inserted whenever the
        request carries a ``page`` parameter and this view class is not
        excluded via ``OPPS_PAGINATE_NOT_APP``.
        """
        templates = []
        list_name = 'list'
        # NOTE(review): list_fullname is only bound when template_name_suffix
        # is truthy; the class sets it to 'album', so this holds here.
        if self.template_name_suffix:
            list_fullname = "{0}_{1}".format(self.template_name_suffix,
                                             list_name)
        if self.channel:
            if self.channel.group and self.channel.parent:
                # Parent-channel specific templates come first.
                templates.append('{0}/{1}/{2}.html'.format(
                    domain_folder,
                    self.channel.parent.long_slug,
                    list_fullname))
                if self.request.GET.get('page') and\
                        self.__class__.__name__ not in\
                        settings.OPPS_PAGINATE_NOT_APP:
                    templates.append('{0}/{1}/{2}_paginated.html'.format(
                        domain_folder, self.channel.parent.long_slug,
                        list_fullname))
            if self.request.GET.get('page') and\
                    self.__class__.__name__ not in settings.OPPS_PAGINATE_NOT_APP:
                templates.append('{0}/{1}/{2}_paginated.html'.format(
                    domain_folder, self.channel.long_slug, list_fullname))
            templates.append('{0}/{1}/{2}.html'.format(
                domain_folder, self.channel.long_slug, list_fullname))
            # Walk up the channel tree from nearest ancestor to root.
            for t in self.channel.get_ancestors()[::-1]:
                templates.append('{0}/{1}/{2}.html'.format(
                    domain_folder, t.long_slug, list_fullname))
                if self.request.GET.get('page') and\
                        self.__class__.__name__ not in\
                        settings.OPPS_PAGINATE_NOT_APP:
                    templates.append('{0}/{1}/{2}_paginated.html'.format(
                        domain_folder, t.long_slug, list_fullname))
        if self.request.GET.get('page') and\
                self.__class__.__name__ not in settings.OPPS_PAGINATE_NOT_APP:
            templates.append('{0}/{1}_paginated.html'.format(domain_folder,
                                                             list_fullname))
        # Final fallback: the model's generic list template.
        templates.append('{0}/{1}/{2}.html'.format(
            self.model._meta.app_label,
            self.model._meta.module_name,
            list_name))
        return templates

    def get_template_names(self):
        """Return the candidate templates for the current domain folder."""
        domain_folder = self.get_template_folder()
        template_list = self.get_template_list(domain_folder)
        return template_list

    def get_queryset(self):
        """Return published, currently-available albums on the current site
        flagged for display on the root channel."""
        self.site = get_current_site(self.request)
        queryset = super(AlbumChannelList, self).get_queryset()
        filters = {}
        filters['site_domain'] = self.site.domain
        filters['date_available__lte'] = timezone.now()
        filters['published'] = True
        filters['show_on_root_channel'] = True
        queryset = queryset.filter(**filters)
        # _clone() returns a fresh queryset so cached results are not reused.
        return queryset._clone()
| true | true |
1c45c4b27799ee041b7c97394535e06eaba9dfb4 | 51,275 | py | Python | cogs/game/minigames/game_of_life/player.py | FellowHashbrown/omega-psi-py | 4ea33cdbef15ffaa537f2c9e382de508c58093fc | [
"MIT"
] | 4 | 2018-12-23T08:49:40.000Z | 2021-03-25T16:51:43.000Z | cogs/game/minigames/game_of_life/player.py | FellowHashbrown/omega-psi-py | 4ea33cdbef15ffaa537f2c9e382de508c58093fc | [
"MIT"
] | 23 | 2020-11-03T17:40:40.000Z | 2022-02-01T17:12:59.000Z | cogs/game/minigames/game_of_life/player.py | FellowHashbrown/omega-psi-py | 4ea33cdbef15ffaa537f2c9e382de508c58093fc | [
"MIT"
] | 1 | 2019-07-11T23:40:13.000Z | 2019-07-11T23:40:13.000Z | from asyncio import sleep
from discord import Embed
from math import ceil
from random import randint, choice
from cogs.globals import PRIMARY_EMBED_COLOR, NUMBER_EMOJIS, LEAVE
from cogs.game.minigames.base_game.player import Player
from cogs.game.minigames.game_of_life.functions import choose_house
from cogs.game.minigames.game_of_life.variables import (
MARRIED, GRADUATION, BRIEFCASE, SPIN, BABY, FAMILY, RISKY_ROAD, RETIRED, GIFTS,
BUY_HOUSE, SELL_HOUSE, DO_NOTHING, HOUSE, LOANS,
PAYDAY, GET_MONEY, PAY_MONEY, ACTION, PAYDAY_BONUS
)
from util.functions import get_embed_color
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
class GameOfLifePlayer(Player):
"""A GameOfLifePlayer object holds information regarding a player in the Game of Life minigame.
:param member: The discord.Member defining this GameOfLifePlayer object or
a str clarifying this GameOfLifePlayer object as an AI player
"""
    def __init__(self, member):
        """Create a Game of Life player wrapping *member*.

        :param member: discord.Member for a human player, or a str name
            marking this player as an AI (handled by the Player base class)
        """
        super().__init__(member = member)
        # Game player data (state flags toggled during play)
        self.is_married = False    # set on the "married" stop space
        self.is_retired = False
        self.is_college = False    # True once the player chose/finished college
        self.move_modify = False
        self.extra_turn = False    # consumed at the start of process_turn
        # Other player data
        self.space = "c0"          # board-space id; "c0" = college path start
        self.babies = 0
        self.pets = 1              # everyone starts with one pet
        self.cash = 200000         # starting cash in dollars
        self.career = None         # CareerCard once chosen
        self.action_cards = 0
        self.house_cards = []      # HouseCards currently owned
        self.pet_cards = 0
        self.loans = 0             # each loan is worth $50,000
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Getter
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
    # NOTE(review): the getters/setters below are pure pass-through
    # boilerplate over name-mangled attributes. Plain attributes would
    # behave identically for callers unless the Player base class relies
    # on these property objects — confirm before simplifying.
    @property
    def member(self):
        return self.__member
    @property
    def is_ai(self):
        return self.__is_ai
    # # # # # # # # # # # # # # # # # # # #
    @property
    def is_married(self):
        return self.__is_married
    @property
    def is_retired(self):
        return self.__is_retired
    @property
    def is_college(self):
        return self.__is_college
    @property
    def move_modify(self):
        return self.__move_modify
    @property
    def extra_turn(self):
        return self.__extra_turn
    # # # # # # # # # # # # # # # # # # # #
    @property
    def space(self):
        return self.__space
    @property
    def babies(self):
        return self.__babies
    @property
    def pets(self):
        return self.__pets
    @property
    def cash(self):
        return self.__cash
    @property
    def career(self):
        return self.__career
    @property
    def action_cards(self):
        return self.__action_cards
    @property
    def house_cards(self):
        return self.__house_cards
    @property
    def pet_cards(self):
        return self.__pet_cards
    @property
    def loans(self):
        return self.__loans
    # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
    # Setter
    # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
    @member.setter
    def member(self, member):
        self.__member = member
    @is_ai.setter
    def is_ai(self, is_ai):
        self.__is_ai = is_ai
    # # # # # # # # # # # # # # # # # # # #
    @is_married.setter
    def is_married(self, is_married):
        self.__is_married = is_married
    @is_retired.setter
    def is_retired(self, is_retired):
        self.__is_retired = is_retired
    @is_college.setter
    def is_college(self, is_college):
        self.__is_college = is_college
    @move_modify.setter
    def move_modify(self, move_modify):
        self.__move_modify = move_modify
    @extra_turn.setter
    def extra_turn(self, extra_turn):
        self.__extra_turn = extra_turn
    # # # # # # # # # # # # # # # # # # # #
    @space.setter
    def space(self, space):
        self.__space = space
    @babies.setter
    def babies(self, babies):
        self.__babies = babies
    @pets.setter
    def pets(self, pets):
        self.__pets = pets
    @cash.setter
    def cash(self, cash):
        self.__cash = cash
    @career.setter
    def career(self, career):
        self.__career = career
    @action_cards.setter
    def action_cards(self, action_cards):
        self.__action_cards = action_cards
    @house_cards.setter
    def house_cards(self, house_cards):
        self.__house_cards = house_cards
    @pet_cards.setter
    def pet_cards(self, pet_cards):
        self.__pet_cards = pet_cards
    @loans.setter
    def loans(self, loans):
        self.__loans = loans
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Play Methods
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
    async def setup(self, game):
        """Let the player decide between going to college or straight into a career.

        College costs $100,000 and places the player at space "c0"; a career
        places the player at "j0" and assigns a CareerCard immediately.

        :param game: The game object that this player is connected to
        """
        # AI players flip a virtual coin; sleep simulates "thinking".
        if self.is_ai:
            await sleep(2)
            college = randint(1, 10) % 2 == 0
        # The player is a real person: ask via reactions on a ctx message.
        else:
            college = False
            message = await game.ctx.send(
                self.member.mention,
                embed = Embed(
                    title = "College or Career?",
                    description = "If you want to go to college, react with {}\nIf you want to go straight into a career, react with {}".format(
                        GRADUATION, BRIEFCASE
                    ),
                    colour = await get_embed_color(self.member)
                )
            )
            await message.add_reaction(GRADUATION)
            await message.add_reaction(BRIEFCASE)
            # Only accept one of the two offered reactions, from this player,
            # on the prompt message itself.
            def check_reaction(reaction, user):
                return (
                    reaction.message.id == message.id and
                    str(reaction) in [GRADUATION, BRIEFCASE] and
                    user.id == self.member.id
                )
            reaction, user = await game.bot.wait_for("reaction_add", check = check_reaction)
            college = str(reaction) == GRADUATION
        # College path: charge tuition, mark the player, announce it.
        if college:
            self.cash -= 100000
            self.is_college = True
            self.space = "c0"
            await game.ctx.send(
                embed = Embed(
                    title = "{} is going to college!".format(
                        self.get_name()
                    ),
                    description = "{} has to pay $100,000 for tuition fees".format(
                        self.get_name()
                    ),
                    colour = PRIMARY_EMBED_COLOR if self.is_ai else await get_embed_color(self.member)
                )
            )
        # Career path: move to the job start space and pick a career now.
        else:
            self.space = "j0"
            self.career = await self.ask_for_career(game)
            await game.ctx.send(
                embed = Embed(
                    title = "{} chose a career!".format(self.get_name()),
                    description = str(self.career),
                    colour = PRIMARY_EMBED_COLOR if self.is_ai else await get_embed_color(self.member)
                )
            )
    async def process_turn(self, game):
        """Process one turn: spin, move, and resolve the landed-on space.

        :param game: The game object that this player is connected to
        :returns: ``LEAVE`` if the player left the game, ``False`` otherwise
        """
        # Any pending extra turn is consumed by taking this turn.
        self.extra_turn = False
        # Ask the player to spin (they may also choose to leave here).
        number = await self.ask_for_spin(game, allow_leave = True)
        if number == LEAVE:
            await game.add_action("{} left the game!".format(self.get_name()))
            return LEAVE
        # Record the spin in the turn log.
        await game.add_action(
            "{} {} spun a {}!".format(
                SPIN, self.get_name(),
                number
            )
        )
        # Pay any player whose bonus number matches the spin.
        await game.pay_bonus(number)
        # Advance along the board and record where the player ended up.
        board_space = self.next_space(game, number)
        self.space = board_space.current
        # Paydays passed while moving are announced (payout handled elsewhere
        # — presumably by next_space/game; TODO confirm).
        if board_space.paydays_passed > 0:
            await game.add_action(
                "{} {} got {} payday{}!".format(
                    PAYDAY,
                    self.get_name(), board_space.paydays_passed,
                    "s" if board_space.paydays_passed > 1 else ""
                )
            )
        # Dispatch on the type of the space that was landed on.
        if board_space.type == "pet":
            await self.process_pet_space(game)
        elif board_space.type == "action":
            await game.process_action_card(board_space)
        elif board_space.type == "house":
            await self.ask_for_house(game)
        elif board_space.type == "spin_to_win":
            await game.spin_to_win()
        elif board_space.stop:
            await self.process_stop_space(game, board_space)
        elif board_space.type in ["baby", "twins", "triplets"]:
            await self.process_baby_space(game, board_space)
        elif board_space.type == "pay_money":
            self.cash -= board_space.amount
            await game.add_action(
                "{} {} has to pay ${:0,}!".format(
                    PAY_MONEY,
                    self.get_name(), board_space.amount
                )
            )
        elif board_space.type == "get_money":
            self.cash += board_space.amount
            await game.add_action(
                "{} {} gets paid ${:0,}!".format(
                    GET_MONEY,
                    self.get_name(), board_space.amount
                )
            )
        elif board_space.type == "payday":
            # Landing exactly on a payday space yields a flat $100,000 bonus.
            self.cash += 100000
            await game.add_action(
                "{} {} landed on a payday and got a $100,000 bonus!".format(
                    PAYDAY_BONUS, self.get_name()
                )
            )
        # Pause so everyone can read the turn summary before continuing.
        await sleep(3)
        return False
async def ask_for_spin(self, game, *, is_color = False, allow_leave = False):
"""Let's the player spin for a number or a color
If getting color, when this: returns True, the color is black
returns False, the color is red
:param game: The game object this player is connected to
:param is_color: Whether or not to get the color of the result or just a number. (Defaults to False)
:param allow_leave: Whether or not to allow the player to leave during this spin. (Defaults to False)
:returns: The resulting number or whether the color is black or red
"""
# Check if the player is an AI, simulate waiting to spin
if self.is_ai:
await sleep(2)
# The player is a real person, wait for their reaction to spin
else:
# Send the message and add the valid reactions
message = await game.ctx.send(
embed = Embed(
title = "Spin!",
description = "{}, react with {} when you're ready to spin.{}".format(
self.get_name(), SPIN,
"\nIf you'd like to leave, react with {}".format(
LEAVE
) if allow_leave else ""
),
colour = await get_embed_color(self.member)
)
)
await message.add_reaction(SPIN)
if allow_leave:
await message.add_reaction(LEAVE)
# Wait for the user's reaction
def check_reaction(reaction, user):
return (
reaction.message.id == message.id and
user.id == self.member.id and
str(reaction) in (
[SPIN, LEAVE] if allow_leave else [SPIN]
)
)
reaction, user = await game.bot.wait_for("reaction_add", check = check_reaction)
await message.delete()
# Check if the player is leaving
if str(reaction) == LEAVE:
return LEAVE
# Choose a random number
number = None
for value in range(randint(1, 10)):
number = randint(1, 10)
is_black = number % 2 == 0
# Check if returning color or number
if is_color:
return is_black
return number
    async def ask_for_career(self, game, *, new_career = False):
        """Let the player choose their career given two cards.

        :param game: The game object this player is connected to
        :param new_career: When True, the player chooses between their
            current career and one new card; otherwise between two new cards
        :returns: The career the player chose
        :rtype: CareerCard
        """
        # College players draw from the college career deck.
        career_cards = game.career_cards if not self.is_college else game.college_career_cards
        # Draw the two options.
        # NOTE(review): the card that is NOT chosen is never returned to the
        # deck, so the deck shrinks by one or two cards per choice — confirm
        # this is intended.
        if not new_career:
            career_one = career_cards.pop(randint(0, len(career_cards) - 1))
            career_two = career_cards.pop(randint(0, len(career_cards) - 1))
        else:
            career_one = self.career
            career_two = career_cards.pop(randint(0, len(career_cards) - 1))
        # AI players flip a virtual coin between the two options.
        if self.is_ai:
            await sleep(2)
            return career_one if randint(1, 10) % 2 == 0 else career_two
        # Human players choose via DM so other players can't see the cards.
        else:
            await game.ctx.send(
                embed = Embed(
                    title = "Choose a Career!",
                    description = "Check your DMs for your career choices!",
                    colour = await get_embed_color(self.member)
                ),
                delete_after = 5
            )
            message = await self.member.send(
                embed = Embed(
                    title = "Choose a Career!",
                    description = "_ _",
                    colour = await get_embed_color(self.member)
                ).add_field(
                    name = NUMBER_EMOJIS[0],
                    value = str(career_one),
                    inline = False
                ).add_field(
                    name = NUMBER_EMOJIS[1],
                    value = str(career_two),
                    inline = False
                )
            )
            await message.add_reaction(NUMBER_EMOJIS[0])
            await message.add_reaction(NUMBER_EMOJIS[1])
            # Only accept reaction 1 or 2 from this player on the DM message.
            def check_reaction(reaction, user):
                return (
                    reaction.message.id == message.id and
                    user.id == self.member.id and
                    str(reaction) in NUMBER_EMOJIS[ : 2]
                )
            reaction, user = await game.bot.wait_for("reaction_add", check = check_reaction)
            if str(reaction) == NUMBER_EMOJIS[0]:
                return career_one
            return career_two
    async def ask_for_house(self, game, *, sell_house = False, house = None):
        """Let the player decide to buy a house, sell a house, or do nothing.

        :param game: The game that this player is connected to
        :param sell_house: Whether to directly sell a player's house
            (primarily used when finalizing the end of a game)
        :param house: The specific house to sell
            (primarily used when finalizing the end of a game)
        :returns: the sale amount when a house was sold, otherwise ``None``
        :rtype: int
        """
        # Only ask what to do when not forced into a direct sale.
        action = None if not sell_house else "sell"
        if not sell_house:
            # AI: pick a random action; re-roll "sell" if there is nothing to sell.
            if self.is_ai:
                actions = ["buy", "sell", "nothing"]
                action = choice(actions)
                while action == "sell" and len(self.house_cards) == 0:
                    action = choice(actions)
                await sleep(2)
            # Human: ask via reactions; "sell" is only offered if they own a house.
            else:
                message = await game.ctx.send(
                    embed = Embed(
                        title = "Buy, Sell, or do nothing?",
                        description = "{}{}{}".format(
                            "If you want to buy a house, react with {}\n".format(BUY_HOUSE),
                            "If you want to sell a house, react with {}\n".format(SELL_HOUSE) if len(self.house_cards) > 0 else "",
                            "If you want to do nothing, react with {}".format(DO_NOTHING)
                        ),
                        colour = await get_embed_color(self.member)
                    )
                )
                await message.add_reaction(BUY_HOUSE)
                if len(self.house_cards) > 0:
                    await message.add_reaction(SELL_HOUSE)
                await message.add_reaction(DO_NOTHING)
                # Only accept the offered reactions from this player.
                def check_reaction(reaction, user):
                    return (
                        reaction.message.id == message.id and
                        user.id == self.member.id and
                        str(reaction) in (
                            [BUY_HOUSE, SELL_HOUSE, DO_NOTHING]
                            if len(self.house_cards) > 0 else
                            [BUY_HOUSE, DO_NOTHING]
                        )
                    )
                reaction, user = await game.bot.wait_for("reaction_add", check = check_reaction)
                if str(reaction) == BUY_HOUSE:
                    action = "buy"
                elif str(reaction) == SELL_HOUSE:
                    action = "sell"
                else:
                    action = "nothing"
                await message.delete()
        # --- Buying a house -------------------------------------------------
        if action == "buy":
            # take_loans doubles as "the purchase is going ahead".
            take_loans = False
            chosen_house = None
            house_cards = game.house_cards
            house_one = house_cards.pop(0)
            house_two = house_cards.pop(0)
            # AI: pick (or decline) a house via the helper.
            # NOTE(review): on this path the unchosen house is never returned
            # to the deck (only the decline path below re-appends both), so
            # the house deck shrinks — confirm whether that is intended.
            if self.is_ai:
                chosen_house = choose_house(self, house_one = house_one, house_two = house_two)
                take_loans = chosen_house != None
            # Human: choose one of the two houses via DM reactions.
            else:
                message = await self.member.send(
                    embed = Embed(
                        title = "Choose a house!",
                        description = "_ _",
                        colour = await get_embed_color(self.member)
                    ).add_field(
                        name = NUMBER_EMOJIS[0],
                        value = str(house_one),
                        inline = False
                    ).add_field(
                        name = NUMBER_EMOJIS[1],
                        value = str(house_two),
                        inline = False
                    )
                )
                await message.add_reaction(NUMBER_EMOJIS[0])
                await message.add_reaction(NUMBER_EMOJIS[1])
                reaction, user = await game.bot.wait_for("reaction_add", check = lambda reaction, user: (
                    reaction.message.id == message.id and
                    user.id == self.member.id and
                    str(reaction) in NUMBER_EMOJIS[ : 2]
                ))
                if str(reaction) == NUMBER_EMOJIS[0]:
                    chosen_house = house_one
                elif str(reaction) == NUMBER_EMOJIS[1]:
                    chosen_house = house_two
                await message.delete()
                # Affordable outright: return the other house to the deck
                # and proceed with the purchase.
                if chosen_house.purchase <= self.cash:
                    if str(reaction) == NUMBER_EMOJIS[0]:
                        house_cards.append(house_two)
                    else:
                        house_cards.append(house_one)
                    take_loans = True
                # Not affordable: ask whether to take loans or cancel.
                else:
                    take_loans = await self.ask_for_split_path(
                        game,
                        title = "Loans Needed",
                        description = (
                            """
                            In order to buy that house, you need to take out some loans.
                            If you want to take loans out, react with {}.
                            If you want to cancel buying the house, react with {}.
                            """
                        ),
                        true_path = LOANS, false_path = LEAVE
                    )
            # Purchase confirmed: take out any needed $50k loans, pay, own it.
            if take_loans:
                loans_needed = ceil((chosen_house.purchase - self.cash) / 50000)
                if loans_needed > 0:
                    self.loans += loans_needed
                    self.cash += 50000 * loans_needed
                self.cash -= chosen_house.purchase
                self.house_cards.append(chosen_house)
                await game.add_action(
                    "{} {} bought a new house{}!\n{}".format(
                        HOUSE, self.get_name(),
                        " after taking out some loans" if loans_needed > 0 else "",
                        str(chosen_house)
                    )
                )
            # Purchase declined: both drawn houses go back to the deck.
            else:
                house_cards.append(house_one)
                house_cards.append(house_two)
                await game.add_action(
                    "{} {} did not want to take any loans! They will not buy the house.".format(
                        ACTION, self.get_name()
                    )
                )
        # --- Selling a house ------------------------------------------------
        elif action == "sell":
            await game.add_action(
                "{} {} is selling a house!".format(
                    HOUSE, self.get_name()
                )
            )
            # A specific house is only pre-chosen on the direct-sell path.
            chosen_house = house
            if not sell_house:
                # AI: let the helper pick which house to part with.
                if self.is_ai:
                    chosen_house = choose_house(self, buy = False)
                    self.house_cards.remove(chosen_house)
                # Human: choose from their owned houses via DM reactions.
                else:
                    embed = Embed(
                        title = "Choose a house to sell!",
                        description = "_ _",
                        colour = await get_embed_color(self.member)
                    )
                    for index in range(len(self.house_cards)):
                        house = self.house_cards[index]
                        embed.add_field(
                            name = NUMBER_EMOJIS[index],
                            value = str(house),
                            inline = False
                        )
                    message = await self.member.send(embed = embed)
                    for emoji in NUMBER_EMOJIS[ : len(self.house_cards)]:
                        await message.add_reaction(emoji)
                    reaction, user = await game.bot.wait_for("reaction_add", check = lambda reaction, user: (
                        reaction.message.id == message.id and
                        user.id == self.member.id and
                        str(reaction) in NUMBER_EMOJIS[ : len(self.house_cards)]
                    ))
                    await message.delete()
                    chosen_house = self.house_cards.pop(NUMBER_EMOJIS.index(str(reaction)))
            await game.add_action(
                "{} {} is selling the following house:\n{}".format(
                    HOUSE, self.get_name(), str(chosen_house)
                )
            )
            # Spin for color: black sells at spin_black, red at spin_red.
            is_black = await self.ask_for_spin(game, is_color = True)
            amount = chosen_house.spin_black if is_black else chosen_house.spin_red
            self.cash += amount
            await game.add_action(
                "{} {} sold their house for ${:0,}".format(
                    GET_MONEY, self.get_name(), amount
                )
            )
            return amount
        # --- Doing nothing --------------------------------------------------
        else:
            await game.add_action(
                "{} {} chose not to buy nor sell a house.".format(
                    ACTION,
                    self.get_name()
                )
            )
    async def ask_for_opponent(self, game, *, is_lawsuit = False):
        """Ask this player to choose an opponent for a competition or lawsuit.

        :param game: The game that this player is connected to
        :param is_lawsuit: Whether the opponent is for a lawsuit card; when
            True the choice is not announced in the turn log. (Defaults to False)
        :rtype: GameOfLifePlayer
        """
        # AI: target the player with the highest salary (never itself).
        if self.is_ai:
            opponent = max(game.players, key = lambda player: (
                player.career.salary
                if (player.career and player.id != self.id)
                else 0
            ))
            # Guard: if max still resolved to self (e.g. nobody has a
            # career), fall back to a random other player.
            while opponent.id == self.id:
                opponent = choice(game.players)
            await sleep(2)
        # Human: pick an opponent from a numbered list via reactions.
        else:
            embed = Embed(
                title = "Choose an opponent!",
                description = "_ _",
                colour = await get_embed_color(self.member)
            )
            # Everyone except this player is a valid opponent.
            opponents = list(filter(lambda player: self.id != player.id, game.players))
            for index in range(len(opponents)):
                embed.add_field(
                    name = NUMBER_EMOJIS[index],
                    value = opponents[index].get_name(),
                    inline = False
                )
            message = await game.ctx.send(embed = embed)
            for emoji in NUMBER_EMOJIS[ : len(opponents)]:
                await message.add_reaction(emoji)
            # Only accept one of the listed numbers, from this player.
            reaction, user = await game.bot.wait_for("reaction_add", check = lambda reaction, user: (
                reaction.message.id == message.id and
                user.id == self.id and
                str(reaction) in NUMBER_EMOJIS[ : len(opponents)]
            ))
            await message.delete()
            opponent = opponents[NUMBER_EMOJIS.index(str(reaction))]
        # Lawsuit picks stay quiet; ordinary picks are logged.
        if not is_lawsuit:
            await game.add_action(
                "{} {} chose {}!".format(
                    ACTION,
                    self.get_name(), opponent.get_name()
                )
            )
        return opponent
    async def ask_for_spot(self, game, message, spots, *, choose_from = 1):
        """Ask the player to claim spot(s) for their Spin to Win token.

        :param game: The game that this player is connected to
        :param message: The message used to track everyone's chosen spots
        :param spots: dict mapping already-claimed spot emoji -> player
        :param choose_from: How many spots this player may claim. (Default 1)
        :returns: the (mutated) ``spots`` dict with this player's claims added
        """
        # Claim one spot per allowed choice.
        for spot_choice in range(choose_from):
            # AI: random unclaimed spot; sleep simulates the decision.
            if self.is_ai:
                await sleep(2)
                spot = choice(NUMBER_EMOJIS)
                while str(spot) in spots:
                    spot = choice(NUMBER_EMOJIS)
            # Human: show what's taken and wait for an unclaimed number.
            else:
                embed = Embed(
                    title = "Spin to Win!",
                    description = "{}, choose your spot{}!".format(
                        self.get_name(), "s" if choose_from - spot_choice > 1 else ""
                    ),
                    colour = PRIMARY_EMBED_COLOR if game.get_current_player().is_ai else await get_embed_color(game.get_current_player().member)
                ).add_field(
                    name = "Spots Taken",
                    value = "\n".join([
                        "{} - {}".format(
                            str(spot), spots[spot].get_name()
                        )
                        for spot in spots
                    ]) if len(spots) > 0 else "None Taken Yet!",
                    inline = False
                )
                # Reuse the shared message: update the embed and only offer
                # reactions for spots that are still free.
                await message.edit(embed = embed)
                for emoji in NUMBER_EMOJIS:
                    if str(emoji) not in spots:
                        await message.add_reaction(emoji)
                reaction, user = await game.bot.wait_for("reaction_add", check = lambda reaction, user: (
                    reaction.message.id == message.id and
                    user.id == self.id and
                    str(reaction) in NUMBER_EMOJIS and
                    str(reaction) not in spots
                ))
                await message.clear_reactions()
                spot = str(reaction)
            # Record this claim before (possibly) choosing the next spot.
            spots[spot] = self
        return spots
async def process_pet_space(self, game):
"""Processes the pet space when a player lands on it
:param game: The game object that this player is connected to
"""
# Pull a card from the game's pet card deck
card = game.pet_cards.pop(0)
await game.add_action(str(card))
self.pet_cards += 1
# Check if the player is collecting money
if card.action == "collect":
# Give the player money and update the turn message
self.cash += card.amount
await game.add_action(
"{} {} collected ${:0,}".format(
GET_MONEY,
self.get_name(), card.amount
)
)
# Check if the player is collecting money for each pet
elif card.action == "collect_for_each":
# Give the player money for as many pets as they have
# and update the turn message
total = self.pets * card.amount
self.cash += total
await game.add_action(
"{} {} collected ${:0,}{}!".format(
GET_MONEY,
self.get_name(), card.amount,
" for each pet for a total of ${:0,}".format(total) if total != card.amount else ""
)
)
# Check if the player is collecting money from each player
elif card.action == "collect_from_each":
# Take money from each player that is not this player
# and give it to this player
total = (len(game.players) - 1) * card.amount
for player in game.players:
if player.id != self.id:
player.cash -= card.amount
# Update the turn message
await game.add_action(
"{} {} collected ${:0,}{}!".format(
GET_MONEY,
self.get_name(), card.amount,
" from everyone for a total of ${:0,}".format(total) if total != card.amount else ""
)
)
# Check if the player is paying money
elif card.action == "pay":
# Take money from the player and update the turn message
self.cash -= card.amount
await game.add_action(
"{} {} had to pay the bank ${:0,}!".format(
PAY_MONEY,
self.get_name(), card.amount
)
)
# Check if the player is paying money for each pet
elif card.action == "pay_for_each":
# Take money from the player for each pet they have
# and update the turn message
total = self.pets * card.amount
self.cash -= total
await game.add_action(
"{} {} had to pay the bank ${:0,}{}!".format(
PAY_MONEY,
self.get_name(), card.amount,
" for each pet for a total of ${:0,}".format(total) if total != card.amount else ""
)
)
# Check if the player is competing against another player
elif card.action == "compete":
# Have two players compete against each other and give the
# winning player the amount on this card
# then update the turn message
winner, _ = await game.compete(self)
winner.cash += card.amount
await game.add_action(
"{} {} collected ${:0,} for spinning higher!".format(
GET_MONEY,
self.get_name(), card.amount
)
)
async def process_stop_space(self, game, board_space):
"""Processes the stop space when this player lands on it
:param game: The game object that this player is connected to
:param board_space: The board space object where this player is current occupying
"""
# Check if the player is graduating, ask them to choose a career
if board_space.type == "graduation":
self.is_college = True
self.career = await self.ask_for_career(game)
await game.add_action(
"{} {} graduated and chose a career!\n{}".format(
GRADUATION,
self.get_name(), str(self.career)
)
)
# Check if the player is getting married
elif board_space.type == "married":
# Send a message saying the player got married
await game.add_action(
"{} {} got married!\n{}, spin for gifts from everyone!".format(
MARRIED, self.get_name(),
GIFTS, self.get_name()
)
)
# Ask the player to spin for gifs from everyone (apart from this player)
# and have each player give the gift amount
# depending on the color
is_black = await self.ask_for_spin(game, is_color = True)
amount = 100000 if is_black else 50000
total = amount * (len(game.players) - 1)
for player in filter(lambda player: player.id != self.id, game.players):
player.cash -= amount
self.cash += total
# Update the turn message saying how much money the player got in total
await game.add_action(
"{} {} collected ${:0,}{}!".format(
GET_MONEY,
self.get_name(), amount,
" from everyone for a total of ${:0,}".format(
total
) if total != amount else ""
)
)
# Check if the player is spinning for babies
elif board_space.type == "spin_for_babies":
# Update the turn message saying the player is spinning for babies
await game.add_action(
"{} {} is spinning to see if they have any more babies!".format(
BABY, self.get_name()
)
)
# Ask the player to spin to see how many babies they get
# and update the turn message
value = await self.ask_for_spin(game)
self.babies += board_space.spin[str(value)]
await game.add_action(
"{} {} had {} bab{}!".format(
BABY,
self.get_name(), board_space.spin[str(value)],
"ies" if board_space.spin[str(value)] != 1 else "y"
)
)
# Check if the player is deciding on night school
elif board_space.type == "night_school":
# Check if the player is an AI
# sleep for 2 seconds to simulate a decision
night_school = False
if self.is_ai:
night_school = randint(1, 10) % 2 == 0
await sleep(2)
# The player is a real person, let them decide
else:
# Send the message asking the player to decide
night_school = await self.ask_for_split_path(
game,
title = "Night School?",
description = (
"""
If you want to go to Night School, react with {}.
If you don't want to go, react with {}.
"""
),
true_path = GRADUATION, false_path = SPIN
)
# Make sure the player moves in the correct direction
self.move_modify = night_school
# Check if the player wants to go to night school
if night_school:
self.is_college = True
self.cash -= 100000
await game.add_action(
"{} {} had to pay ${:0,} to go to Night School!".format(
PAY_MONEY,
self.get_name(), 100000
)
)
# Ask for a new career
self.career = await self.ask_for_career(game, new_career = True)
# The player does not want to go to night school
else:
await game.add_action(
"{} {} chose not to go to Night School!".format(
ACTION, self.get_name()
)
)
# Check if the player is deciding on the family path
elif board_space.type == "family_path":
# Check if this player is an AI, sleep for 2 seconds to simulate a decision
family_path = False
if self.is_ai:
family_path = randint(1, 10) % 2 == 0
await sleep(2)
# This player is a real player, let them decide
else:
# Send the message asking the player to decide
family_path = await self.ask_for_split_path(
game,
title = "Family Path?",
description = (
"""
If you want to go down the Family Path, react with {}.
If you don't want to, react with {}.
"""
),
true_path = FAMILY, false_path = SPIN
)
# Update the turn message about the player's decision
self.move_modify = family_path
await game.add_action(
"{} {} is{} going down the Family Path!".format(
FAMILY if family_path else ACTION,
self.get_name(),
"" if family_path else " not"
)
)
# Check if the player is deciding on risky road
elif board_space.type == "risky_road":
# Check if this player is an AI, sleep for 2 seconds to simulate a decision
risky_road = False
if self.is_ai:
risky_road = randint(1, 10) % 2 == 0
await sleep(2)
# This player is a real player, let them decide
else:
# Send the message asking the player to decide
risky_road = await self.ask_for_split_path(
game,
title = "Risky Road?",
description = (
"""
If you want to go down the Risky Road, react with {}.
If you don't want to, react with {}.
"""
),
true_path = RISKY_ROAD, false_path = SPIN
)
# Update the turn message about the player's decision
self.move_modify = risky_road
await game.add_action(
"{} {} is{} going down the Risky Road!".format(
RISKY_ROAD if risky_road else ACTION,
self.get_name(),
"" if risky_road else " not"
)
)
# Check if the player is retiring
elif board_space.type == "retirement":
# Give the player their retirement money
# have the player retire
# and update the turn message
amount = 100000 * (5 - len(game.get_retired()))
self.cash += amount
self.is_retired = True
await game.add_action(
"{} {} has retired and collected ${:0,}".format(
RETIRED,
self.get_name(), amount
)
)
# The player takes another turn if the space is not the
# retirement space
if board_space.type != "retirement":
self.extra_turn = True
async def process_baby_space(self, game, board_space):
"""Processes the baby space when this player lands on it
:param game: The game object that this player is connected to
:param board_space: The board space object where this player is current occupying
"""
# Determine what action the baby space is
if board_space.type == "baby":
description = "{} {} had a baby!"
self.babies += 1
elif board_space.type == "twins":
description = "{} {} had twins!"
self.babies += 2
elif board_space.type == "triplets":
description = "{} {} had triplets!"
self.babies += 3
# Update the turn message
await game.add_action(description.format(BABY, self.get_name()))
async def ask_for_split_path(self, game, *, title = None, description = None, true_path = None, false_path = None):
"""Asks the player to decide on a split path
:param game: The game object that this player is connected to
:param title: The title of the embed to send
:param description: A formatted description of the embed to send
Note that this description must include format braces to take into account
the true_path and false_path emojis
:param true_path: An emoji for the player to go towards the new split path
:param false_path: An emoji for the player to stay on the same path
:rtype: bool
"""
# Send a message asking the player if they want to go down the new path or stay on
# the current path
message = await game.ctx.send(
embed = Embed(
title = title,
description = description.format(true_path, false_path),
colour = await get_embed_color(self.member)
)
)
await message.add_reaction(true_path)
await message.add_reaction(false_path)
# Wait for the user to react with their choice and delete the message
reaction, user = await game.bot.wait_for("reaction_add", check = lambda reaction, user: (
reaction.message.id == message.id and
user.id == self.id and
str(reaction) in [true_path, false_path]
))
await message.delete()
return str(reaction) == true_path
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Other Methods
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def next_space(self, game, number):
"""Moves the player as many moves as specfied by number
:param game: The game object that this player is connected to
:param number: The amount of spaces to move the player
"""
# Keep track of how many moves have been made,
# how many paydays were passed
# and the board of the game
moves = 0
paydays = 0
board = game.board
current = self.space
while True:
# Check if the player moves in a specific way
# For example, if the player comes to a stop sign
# and there are two different paths that can be taken,
# the player will have decided on which path to take
if board[current].next_true != None:
if self.move_modify:
current = board[current].next_true
else:
current = board[current].next_false
# The player moves normally
else:
current = board[current].next
# Check if the space reached is a stop sign or all the moves have been made
moves += 1
if board[current].stop or moves == number:
# Check if the current space is a payday
# give the player a bonus payday
if board[current].type == "payday":
paydays += 1
break
# The player passed a payday
if board[current].type == "payday":
paydays += 1
# Add the player's payday to their cash
for payday in range(paydays):
self.cash += self.career.salary
# Return a JSON object describing the player's current board state
board[current].paydays_passed = paydays
board[current].current = current
return board[current]
def get_name(self):
"""Returns the name of this Player.
If the player is a discord.Member object, the name
will be their username + discriminator
:rtype: str
"""
if self.is_ai:
return self.member
return str(self.member)
def give_payday(self, *, paydays_passed = 1):
"""Gives the player a payday from their career
:param paydays_passed: The amount of paydays to give to the player
"""
self.cash += self.career.salary * paydays_passed | 38.122677 | 144 | 0.505958 | from asyncio import sleep
from discord import Embed
from math import ceil
from random import randint, choice
from cogs.globals import PRIMARY_EMBED_COLOR, NUMBER_EMOJIS, LEAVE
from cogs.game.minigames.base_game.player import Player
from cogs.game.minigames.game_of_life.functions import choose_house
from cogs.game.minigames.game_of_life.variables import (
MARRIED, GRADUATION, BRIEFCASE, SPIN, BABY, FAMILY, RISKY_ROAD, RETIRED, GIFTS,
BUY_HOUSE, SELL_HOUSE, DO_NOTHING, HOUSE, LOANS,
PAYDAY, GET_MONEY, PAY_MONEY, ACTION, PAYDAY_BONUS
)
from util.functions import get_embed_color
PAY_MONEY,
self.get_name(), board_space.amount
)
)
elif board_space.type == "get_money":
self.cash += board_space.amount
await game.add_action(
"{} {} gets paid ${:0,}!".format(
GET_MONEY,
self.get_name(), board_space.amount
)
)
# Check if the player landed on a payday space
elif board_space.type == "payday":
self.cash += 100000
await game.add_action(
"{} {} landed on a payday and got a $100,000 bonus!".format(
PAYDAY_BONUS, self.get_name()
)
)
# Sleep for 3 seconds so everyone can read what happened
await sleep(3)
return False
async def ask_for_spin(self, game, *, is_color = False, allow_leave = False):
# Check if the player is an AI, simulate waiting to spin
if self.is_ai:
await sleep(2)
# The player is a real person, wait for their reaction to spin
else:
# Send the message and add the valid reactions
message = await game.ctx.send(
embed = Embed(
title = "Spin!",
description = "{}, react with {} when you're ready to spin.{}".format(
self.get_name(), SPIN,
"\nIf you'd like to leave, react with {}".format(
LEAVE
) if allow_leave else ""
),
colour = await get_embed_color(self.member)
)
)
await message.add_reaction(SPIN)
if allow_leave:
await message.add_reaction(LEAVE)
# Wait for the user's reaction
def check_reaction(reaction, user):
return (
reaction.message.id == message.id and
user.id == self.member.id and
str(reaction) in (
[SPIN, LEAVE] if allow_leave else [SPIN]
)
)
reaction, user = await game.bot.wait_for("reaction_add", check = check_reaction)
await message.delete()
if str(reaction) == LEAVE:
return LEAVE
number = None
for value in range(randint(1, 10)):
number = randint(1, 10)
is_black = number % 2 == 0
if is_color:
return is_black
return number
async def ask_for_career(self, game, *, new_career = False):
career_cards = game.career_cards if not self.is_college else game.college_career_cards
if not new_career:
career_one = career_cards.pop(randint(0, len(career_cards) - 1))
career_two = career_cards.pop(randint(0, len(career_cards) - 1))
else:
career_one = self.career
career_two = career_cards.pop(randint(0, len(career_cards) - 1))
if self.is_ai:
await sleep(2)
return career_one if randint(1, 10) % 2 == 0 else career_two
else:
await game.ctx.send(
embed = Embed(
title = "Choose a Career!",
description = "Check your DMs for your career choices!",
colour = await get_embed_color(self.member)
),
delete_after = 5
)
message = await self.member.send(
embed = Embed(
title = "Choose a Career!",
description = "_ _",
colour = await get_embed_color(self.member)
).add_field(
name = NUMBER_EMOJIS[0],
value = str(career_one),
inline = False
).add_field(
name = NUMBER_EMOJIS[1],
value = str(career_two),
inline = False
)
)
await message.add_reaction(NUMBER_EMOJIS[0])
await message.add_reaction(NUMBER_EMOJIS[1])
def check_reaction(reaction, user):
return (
reaction.message.id == message.id and
user.id == self.member.id and
str(reaction) in NUMBER_EMOJIS[ : 2]
)
reaction, user = await game.bot.wait_for("reaction_add", check = check_reaction)
if str(reaction) == NUMBER_EMOJIS[0]:
return career_one
return career_two
async def ask_for_house(self, game, *, sell_house = False, house = None):
action = None if not sell_house else "sell"
if not sell_house:
if self.is_ai:
actions = ["buy", "sell", "nothing"]
action = choice(actions)
while action == "sell" and len(self.house_cards) == 0:
action = choice(actions)
await sleep(2)
else:
message = await game.ctx.send(
embed = Embed(
title = "Buy, Sell, or do nothing?",
description = "{}{}{}".format(
"If you want to buy a house, react with {}\n".format(BUY_HOUSE),
"If you want to sell a house, react with {}\n".format(SELL_HOUSE) if len(self.house_cards) > 0 else "",
"If you want to do nothing, react with {}".format(DO_NOTHING)
),
colour = await get_embed_color(self.member)
)
)
await message.add_reaction(BUY_HOUSE)
if len(self.house_cards) > 0:
await message.add_reaction(SELL_HOUSE)
await message.add_reaction(DO_NOTHING)
def check_reaction(reaction, user):
return (
reaction.message.id == message.id and
user.id == self.member.id and
str(reaction) in (
[BUY_HOUSE, SELL_HOUSE, DO_NOTHING]
if len(self.house_cards) > 0 else
[BUY_HOUSE, DO_NOTHING]
)
)
reaction, user = await game.bot.wait_for("reaction_add", check = check_reaction)
if str(reaction) == BUY_HOUSE:
action = "buy"
elif str(reaction) == SELL_HOUSE:
action = "sell"
else:
action = "nothing"
await message.delete()
if action == "buy":
take_loans = False
chosen_house = None
house_cards = game.house_cards
house_one = house_cards.pop(0)
house_two = house_cards.pop(0)
if self.is_ai:
chosen_house = choose_house(self, house_one = house_one, house_two = house_two)
take_loans = chosen_house != None
else:
message = await self.member.send(
embed = Embed(
title = "Choose a house!",
description = "_ _",
colour = await get_embed_color(self.member)
).add_field(
name = NUMBER_EMOJIS[0],
value = str(house_one),
inline = False
).add_field(
name = NUMBER_EMOJIS[1],
value = str(house_two),
inline = False
)
)
await message.add_reaction(NUMBER_EMOJIS[0])
await message.add_reaction(NUMBER_EMOJIS[1])
reaction, user = await game.bot.wait_for("reaction_add", check = lambda reaction, user: (
reaction.message.id == message.id and
user.id == self.member.id and
str(reaction) in NUMBER_EMOJIS[ : 2]
))
if str(reaction) == NUMBER_EMOJIS[0]:
chosen_house = house_one
elif str(reaction) == NUMBER_EMOJIS[1]:
chosen_house = house_two
await message.delete()
if chosen_house.purchase <= self.cash:
if str(reaction) == NUMBER_EMOJIS[0]:
house_cards.append(house_two)
else:
house_cards.append(house_one)
take_loans = True
else:
take_loans = await self.ask_for_split_path(
game,
title = "Loans Needed",
description = (
"""
In order to buy that house, you need to take out some loans.
If you want to take loans out, react with {}.
If you want to cancel buying the house, react with {}.
"""
),
true_path = LOANS, false_path = LEAVE
)
if take_loans:
loans_needed = ceil((chosen_house.purchase - self.cash) / 50000)
if loans_needed > 0:
self.loans += loans_needed
self.cash += 50000 * loans_needed
self.cash -= chosen_house.purchase
self.house_cards.append(chosen_house)
await game.add_action(
"{} {} bought a new house{}!\n{}".format(
HOUSE, self.get_name(),
" after taking out some loans" if loans_needed > 0 else "",
str(chosen_house)
)
)
else:
house_cards.append(house_one)
house_cards.append(house_two)
await game.add_action(
"{} {} did not want to take any loans! They will not buy the house.".format(
ACTION, self.get_name()
)
)
elif action == "sell":
await game.add_action(
"{} {} is selling a house!".format(
HOUSE, self.get_name()
)
)
chosen_house = house
if not sell_house:
if self.is_ai:
chosen_house = choose_house(self, buy = False)
self.house_cards.remove(chosen_house)
else:
embed = Embed(
title = "Choose a house to sell!",
description = "_ _",
colour = await get_embed_color(self.member)
)
for index in range(len(self.house_cards)):
house = self.house_cards[index]
embed.add_field(
name = NUMBER_EMOJIS[index],
value = str(house),
inline = False
)
message = await self.member.send(embed = embed)
for emoji in NUMBER_EMOJIS[ : len(self.house_cards)]:
await message.add_reaction(emoji)
reaction, user = await game.bot.wait_for("reaction_add", check = lambda reaction, user: (
reaction.message.id == message.id and
user.id == self.member.id and
str(reaction) in NUMBER_EMOJIS[ : len(self.house_cards)]
))
await message.delete()
chosen_house = self.house_cards.pop(NUMBER_EMOJIS.index(str(reaction)))
await game.add_action(
"{} {} is selling the following house:\n{}".format(
HOUSE, self.get_name(), str(chosen_house)
)
)
is_black = await self.ask_for_spin(game, is_color = True)
amount = chosen_house.spin_black if is_black else chosen_house.spin_red
self.cash += amount
await game.add_action(
"{} {} sold their house for ${:0,}".format(
GET_MONEY, self.get_name(), amount
)
)
return amount
else:
await game.add_action(
"{} {} chose not to buy nor sell a house.".format(
ACTION,
self.get_name()
)
)
async def ask_for_opponent(self, game, *, is_lawsuit = False):
if self.is_ai:
opponent = max(game.players, key = lambda player: (
player.career.salary
if (player.career and player.id != self.id)
else 0
))
while opponent.id == self.id:
opponent = choice(game.players)
await sleep(2)
else:
embed = Embed(
title = "Choose an opponent!",
description = "_ _",
colour = await get_embed_color(self.member)
)
opponents = list(filter(lambda player: self.id != player.id, game.players))
for index in range(len(opponents)):
embed.add_field(
name = NUMBER_EMOJIS[index],
value = opponents[index].get_name(),
inline = False
)
# number emoji reactions for this player to react to
# in order to choose an opponent
message = await game.ctx.send(embed = embed)
for emoji in NUMBER_EMOJIS[ : len(opponents)]:
await message.add_reaction(emoji)
# Wait for the player to react with which opponent they want to choose
# and then delete the message
reaction, user = await game.bot.wait_for("reaction_add", check = lambda reaction, user: (
reaction.message.id == message.id and
user.id == self.id and
str(reaction) in NUMBER_EMOJIS[ : len(opponents)]
))
await message.delete()
# Get the opponent that they chose
opponent = opponents[NUMBER_EMOJIS.index(str(reaction))]
# If this is not for a lawsuit, update the turn message in the game
if not is_lawsuit:
await game.add_action(
"{} {} chose {}!".format(
ACTION,
self.get_name(), opponent.get_name()
)
)
return opponent
async def ask_for_spot(self, game, message, spots, *, choose_from = 1):
# Let the player decide on how many spots to take
for spot_choice in range(choose_from):
# Check if this player is an AI, have them choose a random spot after
# sleeping for 2 seconds to simulate a decision
if self.is_ai:
await sleep(2)
spot = choice(NUMBER_EMOJIS)
while str(spot) in spots:
spot = choice(NUMBER_EMOJIS)
# The player is a real person, have them decide on a spot as long as it's not taken yet
else:
embed = Embed(
title = "Spin to Win!",
description = "{}, choose your spot{}!".format(
self.get_name(), "s" if choose_from - spot_choice > 1 else ""
),
colour = PRIMARY_EMBED_COLOR if game.get_current_player().is_ai else await get_embed_color(game.get_current_player().member)
).add_field(
name = "Spots Taken",
value = "\n".join([
"{} - {}".format(
str(spot), spots[spot].get_name()
)
for spot in spots
]) if len(spots) > 0 else "None Taken Yet!",
inline = False
)
await message.edit(embed = embed)
for emoji in NUMBER_EMOJIS:
if str(emoji) not in spots:
await message.add_reaction(emoji)
reaction, user = await game.bot.wait_for("reaction_add", check = lambda reaction, user: (
reaction.message.id == message.id and
user.id == self.id and
str(reaction) in NUMBER_EMOJIS and
str(reaction) not in spots
))
await message.clear_reactions()
spot = str(reaction)
# Add the player's spot to the spots dictionary
spots[spot] = self
return spots
async def process_pet_space(self, game):
card = game.pet_cards.pop(0)
await game.add_action(str(card))
self.pet_cards += 1
# Check if the player is collecting money
if card.action == "collect":
# Give the player money and update the turn message
self.cash += card.amount
await game.add_action(
"{} {} collected ${:0,}".format(
GET_MONEY,
self.get_name(), card.amount
)
)
# Check if the player is collecting money for each pet
elif card.action == "collect_for_each":
# Give the player money for as many pets as they have
# and update the turn message
total = self.pets * card.amount
self.cash += total
await game.add_action(
"{} {} collected ${:0,}{}!".format(
GET_MONEY,
self.get_name(), card.amount,
" for each pet for a total of ${:0,}".format(total) if total != card.amount else ""
)
)
# Check if the player is collecting money from each player
elif card.action == "collect_from_each":
# Take money from each player that is not this player
# and give it to this player
total = (len(game.players) - 1) * card.amount
for player in game.players:
if player.id != self.id:
player.cash -= card.amount
# Update the turn message
await game.add_action(
"{} {} collected ${:0,}{}!".format(
GET_MONEY,
self.get_name(), card.amount,
" from everyone for a total of ${:0,}".format(total) if total != card.amount else ""
)
)
# Check if the player is paying money
elif card.action == "pay":
# Take money from the player and update the turn message
self.cash -= card.amount
await game.add_action(
"{} {} had to pay the bank ${:0,}!".format(
PAY_MONEY,
self.get_name(), card.amount
)
)
# Check if the player is paying money for each pet
elif card.action == "pay_for_each":
# Take money from the player for each pet they have
# and update the turn message
total = self.pets * card.amount
self.cash -= total
await game.add_action(
"{} {} had to pay the bank ${:0,}{}!".format(
PAY_MONEY,
self.get_name(), card.amount,
" for each pet for a total of ${:0,}".format(total) if total != card.amount else ""
)
)
# Check if the player is competing against another player
elif card.action == "compete":
# Have two players compete against each other and give the
# winning player the amount on this card
# then update the turn message
winner, _ = await game.compete(self)
winner.cash += card.amount
await game.add_action(
"{} {} collected ${:0,} for spinning higher!".format(
GET_MONEY,
self.get_name(), card.amount
)
)
async def process_stop_space(self, game, board_space):
# Check if the player is graduating, ask them to choose a career
if board_space.type == "graduation":
self.is_college = True
self.career = await self.ask_for_career(game)
await game.add_action(
"{} {} graduated and chose a career!\n{}".format(
GRADUATION,
self.get_name(), str(self.career)
)
)
# Check if the player is getting married
elif board_space.type == "married":
# Send a message saying the player got married
await game.add_action(
"{} {} got married!\n{}, spin for gifts from everyone!".format(
MARRIED, self.get_name(),
GIFTS, self.get_name()
)
)
# Ask the player to spin for gifs from everyone (apart from this player)
# and have each player give the gift amount
# depending on the color
is_black = await self.ask_for_spin(game, is_color = True)
amount = 100000 if is_black else 50000
total = amount * (len(game.players) - 1)
for player in filter(lambda player: player.id != self.id, game.players):
player.cash -= amount
self.cash += total
# Update the turn message saying how much money the player got in total
await game.add_action(
"{} {} collected ${:0,}{}!".format(
GET_MONEY,
self.get_name(), amount,
" from everyone for a total of ${:0,}".format(
total
) if total != amount else ""
)
)
# Check if the player is spinning for babies
elif board_space.type == "spin_for_babies":
# Update the turn message saying the player is spinning for babies
await game.add_action(
"{} {} is spinning to see if they have any more babies!".format(
BABY, self.get_name()
)
)
# Ask the player to spin to see how many babies they get
# and update the turn message
value = await self.ask_for_spin(game)
self.babies += board_space.spin[str(value)]
await game.add_action(
"{} {} had {} bab{}!".format(
BABY,
self.get_name(), board_space.spin[str(value)],
"ies" if board_space.spin[str(value)] != 1 else "y"
)
)
# Check if the player is deciding on night school
elif board_space.type == "night_school":
# Check if the player is an AI
# sleep for 2 seconds to simulate a decision
night_school = False
if self.is_ai:
night_school = randint(1, 10) % 2 == 0
await sleep(2)
# The player is a real person, let them decide
else:
# Send the message asking the player to decide
night_school = await self.ask_for_split_path(
game,
title = "Night School?",
description = (
"""
If you want to go to Night School, react with {}.
If you don't want to go, react with {}.
"""
),
true_path = GRADUATION, false_path = SPIN
)
self.move_modify = night_school
if night_school:
self.is_college = True
self.cash -= 100000
await game.add_action(
"{} {} had to pay ${:0,} to go to Night School!".format(
PAY_MONEY,
self.get_name(), 100000
)
)
self.career = await self.ask_for_career(game, new_career = True)
else:
await game.add_action(
"{} {} chose not to go to Night School!".format(
ACTION, self.get_name()
)
)
elif board_space.type == "family_path":
family_path = False
if self.is_ai:
family_path = randint(1, 10) % 2 == 0
await sleep(2)
else:
family_path = await self.ask_for_split_path(
game,
title = "Family Path?",
description = (
"""
If you want to go down the Family Path, react with {}.
If you don't want to, react with {}.
"""
),
true_path = FAMILY, false_path = SPIN
)
# Update the turn message about the player's decision
self.move_modify = family_path
await game.add_action(
"{} {} is{} going down the Family Path!".format(
FAMILY if family_path else ACTION,
self.get_name(),
"" if family_path else " not"
)
)
elif board_space.type == "risky_road":
risky_road = False
if self.is_ai:
risky_road = randint(1, 10) % 2 == 0
await sleep(2)
else:
risky_road = await self.ask_for_split_path(
game,
title = "Risky Road?",
description = (
"""
If you want to go down the Risky Road, react with {}.
If you don't want to, react with {}.
"""
),
true_path = RISKY_ROAD, false_path = SPIN
)
# Update the turn message about the player's decision
self.move_modify = risky_road
await game.add_action(
"{} {} is{} going down the Risky Road!".format(
RISKY_ROAD if risky_road else ACTION,
self.get_name(),
"" if risky_road else " not"
)
)
elif board_space.type == "retirement":
amount = 100000 * (5 - len(game.get_retired()))
self.cash += amount
self.is_retired = True
await game.add_action(
"{} {} has retired and collected ${:0,}".format(
RETIRED,
self.get_name(), amount
)
)
if board_space.type != "retirement":
self.extra_turn = True
async def process_baby_space(self, game, board_space):
if board_space.type == "baby":
description = "{} {} had a baby!"
self.babies += 1
elif board_space.type == "twins":
description = "{} {} had twins!"
self.babies += 2
elif board_space.type == "triplets":
description = "{} {} had triplets!"
self.babies += 3
await game.add_action(description.format(BABY, self.get_name()))
async def ask_for_split_path(self, game, *, title = None, description = None, true_path = None, false_path = None):
message = await game.ctx.send(
embed = Embed(
title = title,
description = description.format(true_path, false_path),
colour = await get_embed_color(self.member)
)
)
await message.add_reaction(true_path)
await message.add_reaction(false_path)
reaction, user = await game.bot.wait_for("reaction_add", check = lambda reaction, user: (
reaction.message.id == message.id and
user.id == self.id and
str(reaction) in [true_path, false_path]
))
await message.delete()
return str(reaction) == true_path
| true | true |
1c45c55d868ffd36fb6e4d51f703e1ffad0a1d37 | 12,262 | py | Python | apps/usuario/view/views_perfil.py | Ajerhy/proyectosigetebr | 5b63f194bbe06adb92d1cdbba93d1e0028b4164f | [
"MIT"
] | 1 | 2020-05-11T13:29:41.000Z | 2020-05-11T13:29:41.000Z | apps/usuario/view/views_perfil.py | Ajerhy/proyectosigetebr | 5b63f194bbe06adb92d1cdbba93d1e0028b4164f | [
"MIT"
] | 11 | 2020-02-12T03:19:44.000Z | 2022-03-12T00:10:31.000Z | apps/usuario/view/views_perfil.py | Ajerhy/proyectosigetebr | 5b63f194bbe06adb92d1cdbba93d1e0028b4164f | [
"MIT"
] | null | null | null | from django.shortcuts import redirect, render
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.messages.views import SuccessMessageMixin
from django.views.generic import (CreateView, UpdateView, DetailView, TemplateView, View, DeleteView,ListView)
from django.shortcuts import render, redirect, get_object_or_404
from django.http import (HttpResponseRedirect,JsonResponse, HttpResponse,Http404)
from django.contrib import messages
from django.contrib.auth.hashers import check_password
from django.contrib.auth import authenticate
from django.contrib.auth import login as login_django
from django.contrib.auth import logout as logout_django
from django.contrib.auth.decorators import login_required
from django.contrib.auth import update_session_auth_hash
from apps.usuario.templatetags.utils import get_ip
from django.urls import reverse_lazy, reverse
from django.contrib.auth.decorators import login_required
import json
from apps.usuario.form.forms_perfil import LoginUsuarioPerfilForm,\
PasswordUsuarioPerfilForm,EditarUsuarioPerfilForm,\
PerfilFrom
from django.db.models import Q
from apps.usuario.models import Perfil
from apps.contrato.models import Persona
from apps.contrato.models import Cliente
from apps.terreno.models import Manzano,Lote
#Login
class LoginPerfilView(TemplateView,LoginRequiredMixin):
    """Login page: renders the login form and authenticates POSTed credentials,
    redirecting authenticated users to the dashboard."""
    login_url = 'usuario:index'
    template_name = "sigetebr/apps/usuario/index.html"  # login page template
    success_url = reverse_lazy("usuario:dashboard")  # destination after a successful login
    def get_context_data(self, **kwargs):
        context = super(LoginPerfilView, self).get_context_data(**kwargs)
        return context
    def dispatch(self, request, *args, **kwargs):
        # Already-authenticated users skip the login form entirely
        if request.user.is_authenticated:
            return HttpResponseRedirect(self.success_url)
        return super(LoginPerfilView, self).dispatch(request, *args, **kwargs)
    def post(self, request, *args, **kwargs):
        """Validates the login form, then checks in order: account exists,
        account is active (estado), password matches. Each failure renders
        the login template with a user-facing error message."""
        form = LoginUsuarioPerfilForm(request.POST, request=request)
        if form.is_valid():
            #user = Perfil.objects.filter(usuario=request.POST.get('usuario')).first()
            # Look the account up first so inactive/missing accounts get
            # distinct error messages before the password is checked
            perfil = Perfil.objects.filter(usuario=request.POST.get('usuario')).first()
            if perfil is not None:
                if perfil.estado:
                    # Re-resolve through the auth backend to verify the password
                    perfil = authenticate(
                        usuario=request.POST.get('usuario'),
                        password=request.POST.get('password'))
                    if perfil is not None:
                        login_django(request, perfil)
                        return redirect('usuario:dashboard')
                        #return HttpResponseRedirect('usuarios:dashboard')
                    # Wrong password
                    return render(request, self.template_name, {
                        "error": True,
                        "message": "Tu nombre de usuario y contraseña no coinciden. Inténtalo de nuevo."}
                    )
                # Account exists but is flagged inactive
                return render(request, self.template_name, {
                    "error": True,
                    "message": "Su cuenta está inactiva. Por favor, póngase en contacto con el administrador"}
                )
            # No account matches the submitted username
            return render(request, self.template_name, {
                "error": True,
                "message": "Tu cuenta no se encuentra. Por favor, póngase en contacto con el administrador"}
            )
        # Invalid form: re-render with field errors
        return render(request, self.template_name, {
            # "error": True,
            # "message": "Tu nombre de Usuario y Contraseña no coinciden. Inténtalo de nuevo."
            "form": form
        })
#Dashboard
class DashboardView(LoginRequiredMixin,TemplateView):
    """Post-login dashboard: exposes full querysets and filtered "active"
    querysets for manzanos, lotes, usuarios, personas and clientes."""
    template_name = 'sigetebr/apps/dashboard.html'
    login_url = 'usuario:index'
    def get_context_data(self, **kwargs):
        """Builds the dashboard context.

        For each entity the template receives the full queryset plus a
        ``*_count`` queryset of records whose flag survived the exclude
        (the template presumably renders its length).

        NOTE(review): the excludes compare ``estado``/``is_active`` against
        the *strings* 'False'/'True'; confirm these model fields are
        CharFields, otherwise the filters match nothing.
        """
        context = super(DashboardView, self).get_context_data(**kwargs)
        manzanostodo = Manzano.objects.all()
        manzanosactiva = Manzano.objects.exclude(estado='False')
        context["manzanos"] = manzanostodo
        context["manzano_count"] = manzanosactiva
        lotestodo = Lote.objects.all()
        lotesactiva = Lote.objects.exclude(estado='False')
        context["lotes"] = lotestodo
        context["lote_count"] = lotesactiva
        usuariotodo = Perfil.objects.all()
        # (removed unused local 'usuariodmin' — the queryset was never
        # evaluated or placed in the context)
        usuarioactiva = Perfil.objects.exclude(is_active='True')
        context["usuario_count"] = usuarioactiva
        context["usuarios"] = usuariotodo
        personatodo = Persona.objects.all()
        personaactiva = Persona.objects.exclude(estado='False')
        context["persona_count"] = personaactiva
        context["personas"] = personatodo
        clientetodo = Cliente.objects.all()
        clienteactiva = Cliente.objects.exclude(estado='False')
        context["cliente_count"] = clienteactiva
        context["clientes"] = clientetodo
        return context
"""
Funciones
"""
# Logout: end the session and return to the login page.
@login_required(login_url='usuario:index')
def LogoutView(request):
    """Log the current user out and redirect to the login view."""
    logout_django(request)
    return redirect('usuario:index')
# Own-profile detail page.
class UsuarioPerfilDetalleView(LoginRequiredMixin,DetailView):
    """Show a Perfil, looked up by the ``usuario`` slug in the URL."""
    model = Perfil
    template_name = 'sigetebr/apps/usuario/configuracion/perfil_usuario.html'  # template path
    slug_field = 'usuario'  # model field matched against the URL slug
    slug_url_kwarg = 'usuario_url'  # URL kwarg that carries the slug
    login_url = 'usuarios:index'  # NOTE(review): other views use 'usuario:index' — confirm namespace
# Edit the logged-in user's own profile.
class UsuarioPerfilEditarView(SuccessMessageMixin,LoginRequiredMixin,UpdateView):
    """Update form always bound to ``request.user`` (see get_object)."""
    model = Perfil
    form_class = EditarUsuarioPerfilForm
    template_name = 'sigetebr/apps/usuario/configuracion/perfil_form.html'  # template path
    success_url = reverse_lazy('usuarios:perfil_actualizar')  # NOTE(review): 'usuarios:' vs 'usuario:' used elsewhere — confirm
    context_object_name = "user_obj"
    login_url = 'usuarios:index'
    def form_valid(self, form):
        # Flash the confirmation manually instead of via success_message.
        messages.success(self.request, "Tu Perfil Usuario ha sido actualizado")
        return super(UsuarioPerfilEditarView, self).form_valid(form)
    def get_object(self, queryset=None):
        # Always edit the authenticated user, ignoring any pk/slug in the URL.
        return self.request.user
# Change the logged-in user's password.
@login_required(login_url='usuarios:index')
def passwordusuarioview(request):
    """Let the authenticated user change their own password.

    Fixes over the previous version:
    * no longer prints the submitted passwords to stdout (they were
      leaking into the server logs);
    * the new password is read from the ``new_password`` field instead
      of re-reading the current ``password`` field;
    * the new password must match its confirmation before it is saved
      (previously the confirmation value was read but never checked,
      and ``set_password`` could receive ``None``).
    """
    template_name = 'sigetebr/apps/usuario/configuracion/perfil_password.html'
    form = PasswordUsuarioPerfilForm(request.POST or None)
    if request.method == 'POST':
        if form.is_valid():
            actual = request.POST.get('password')
            nuevo = request.POST.get('new_password')
            # Field name kept exactly as declared in the form ('confimar').
            confirma = request.POST.get('confimar_password')
            if not check_password(actual, request.user.password):
                messages.warning(request, 'Password Actual no coinciden!')
            elif not nuevo or nuevo != confirma:
                messages.warning(request, 'El nuevo password y su confirmación no coinciden!')
            elif authenticate(usuario=request.user.usuario, password=actual):
                request.user.set_password(nuevo)
                request.user.save()
                # Keep the current session valid after the password change.
                update_session_auth_hash(request, request.user)
                messages.success(request, 'Password Actualizado!')
            else:
                messages.error(request, 'Verifique su Password por favor!')
    context = {'form': form}
    return render(request, template_name, context)
# Column spec for the profile list table: header label + context field key,
# consumed by the listar.html template via context['fields'].
USUARIO_FIELDS = [
    {'string': 'N°', 'field': 'numero'},
    {'string': 'Usuario', 'field': 'usuario'},
    {'string': 'Nombres', 'field': 'nombre'},
    {'string': 'Email', 'field': 'email'},
    {'string': 'Roles', 'field': 'roles'},
    {'string': 'Estado', 'field': 'estado'},
    {'string': 'Acciones', 'field': 'acciones'},
]
# Profile list with optional search by usuario/email (submitted via POST).
class PerfilListarView(LoginRequiredMixin,TemplateView):
    """List Perfil records; the search form posts its filters back here.

    Fix: removed the debug ``print`` calls that dumped the POST payload
    and the resulting queryset to stdout on every request.
    """
    model = Perfil
    template_name = "sigetebr/apps/usuario/perfil/listar.html"
    login_url = 'usuario:index'
    def get_queryset(self):
        """Return all profiles, narrowed by any posted search fields."""
        queryset = self.model.objects.all()
        request_post = self.request.POST
        if request_post:
            if request_post.get('usuario'):
                queryset = queryset.filter(
                    usuario__icontains=request_post.get('usuario'))
            if request_post.get('email'):
                queryset = queryset.filter(
                    email__icontains=request_post.get('email'))
        return queryset
    def get_context_data(self, **kwargs):
        context = super(PerfilListarView, self).get_context_data(**kwargs)
        context["list_perfil"] = self.get_queryset()
        context['fields'] = USUARIO_FIELDS
        context["per_page"] = self.request.POST.get('per_page')
        # Tells the template whether any search filter was submitted.
        context["search"] = bool(
            self.request.POST.get('usuario') or
            self.request.POST.get('email')
        )
        return context
    def post(self, request, *args, **kwargs):
        # The search form posts here; render the same list template.
        context = self.get_context_data(**kwargs)
        return self.render_to_response(context)
# Create a new Perfil.
class PerfilCrearView(SuccessMessageMixin,LoginRequiredMixin,CreateView):
    """Creation form for Perfil; redirects to the list on success."""
    model = Perfil
    template_name = "sigetebr/apps/usuario/perfil/form.html"
    context_object_name = "obj"
    form_class = PerfilFrom  # NOTE(review): 'PerfilFrom' (sic) matches the name in the forms module
    success_url = reverse_lazy("usuario:listar_perfil")
    success_message = "Perfil de Usuario Creado Exitosamente"
    login_url = 'usuario:index'
# Edit an existing Perfil.
class PerfilEditarView(SuccessMessageMixin,LoginRequiredMixin,UpdateView):
    """Update form for a Perfil; redirects back to the list on success."""
    model = Perfil
    template_name = "sigetebr/apps/usuario/perfil/form.html"
    context_object_name = "obj_usuario"
    form_class = PerfilFrom  # NOTE(review): 'PerfilFrom' (sic) matches the name in the forms module
    success_url = reverse_lazy("usuario:listar_perfil")
    success_message = "Perfil de Usuario Actualizada Satisfactoriamente"
    login_url = 'usuario:index'
# Read-only detail page for a Perfil.
class PerfilDetallesView(LoginRequiredMixin,DetailView):
    """Show one Perfil, looked up by its ``usuario`` slug."""
    model = Perfil
    template_name = 'sigetebr/apps/usuario/perfil/detalle.html'  # template path
    slug_field = 'usuario'  # model field matched against the URL slug
    context_object_name = 'obj'
    slug_url_kwarg = 'usuario_url'  # URL kwarg that carries the slug
    login_url = 'usuario:index'
# Delete confirmation for a Perfil.
class PerfilEliminarView(SuccessMessageMixin,LoginRequiredMixin,DeleteView):
    """Confirm-and-delete view for Perfil records.

    NOTE(review): SuccessMessageMixin does not emit messages for
    DeleteView on Django < 4.0 — confirm the flash actually appears.
    """
    model = Perfil
    template_name='sigetebr/apps/usuario/perfil/eliminar.html'
    context_object_name='obj'
    success_url = reverse_lazy("usuario:listar_perfil")
    success_message="Perfil de Usuario Eliminada Exitosamente"
    login_url = 'usuario:index'
# Deactivate a profile: confirmation page on GET, state change on POST.
@login_required(login_url='usuario:index')
def perfildesactivar(request, id):
    """Show a confirmation page and, on POST, mark the profile inactive."""
    template_name = 'sigetebr/apps/usuario/perfil/estado_desactivar.html'
    perfil = Perfil.objects.filter(pk=id).first()
    if not perfil:
        # Unknown pk: silently return to the list.
        return redirect('usuario:listar_perfil')
    if request.method == 'POST':
        perfil.estado = False
        perfil.save()
        return redirect('usuario:listar_perfil')
    contexto = {'obj': perfil} if request.method == 'GET' else {}
    return render(request, template_name, contexto)
# Activate a profile: confirmation page on GET, state change on POST.
@login_required(login_url='usuario:index')
def perfilactivar(request, id):
    """Show a confirmation page and, on POST, mark the profile active."""
    template_name = 'sigetebr/apps/usuario/perfil/estado_activar.html'
    perfil = Perfil.objects.filter(pk=id).first()
    if not perfil:
        # Unknown pk: silently return to the list.
        return redirect('usuario:listar_perfil')
    if request.method == 'POST':
        perfil.estado = True
        perfil.save()
        return redirect('usuario:listar_perfil')
    contexto = {'obj': perfil} if request.method == 'GET' else {}
    return render(request, template_name, contexto)
# Toggle a profile's estado flag, flashing a message per direction.
@login_required(login_url='usuario:index')
def cambiar_estado_perfil(request, pk):
    """Flip ``estado`` on the given Perfil and return to the list."""
    perfil = get_object_or_404(Perfil, pk=pk)
    activar = not perfil.estado
    perfil.estado = activar
    if activar:
        messages.success(request, "Perfil de Usuario Activada")
    else:
        messages.error(request, "Perfil de Usuario Desactivada")
    # Record which user made the change.
    perfil.um = request.user.id
    perfil.save()
    return redirect('usuario:listar_perfil')
| 38.438871 | 110 | 0.676562 | from django.shortcuts import redirect, render
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.messages.views import SuccessMessageMixin
from django.views.generic import (CreateView, UpdateView, DetailView, TemplateView, View, DeleteView,ListView)
from django.shortcuts import render, redirect, get_object_or_404
from django.http import (HttpResponseRedirect,JsonResponse, HttpResponse,Http404)
from django.contrib import messages
from django.contrib.auth.hashers import check_password
from django.contrib.auth import authenticate
from django.contrib.auth import login as login_django
from django.contrib.auth import logout as logout_django
from django.contrib.auth.decorators import login_required
from django.contrib.auth import update_session_auth_hash
from apps.usuario.templatetags.utils import get_ip
from django.urls import reverse_lazy, reverse
from django.contrib.auth.decorators import login_required
import json
from apps.usuario.form.forms_perfil import LoginUsuarioPerfilForm,\
PasswordUsuarioPerfilForm,EditarUsuarioPerfilForm,\
PerfilFrom
from django.db.models import Q
from apps.usuario.models import Perfil
from apps.contrato.models import Persona
from apps.contrato.models import Cliente
from apps.terreno.models import Manzano,Lote
class LoginPerfilView(TemplateView,LoginRequiredMixin):
login_url = 'usuario:index'
template_name = "sigetebr/apps/usuario/index.html"
success_url = reverse_lazy("usuario:dashboard")
def get_context_data(self, **kwargs):
context = super(LoginPerfilView, self).get_context_data(**kwargs)
return context
def dispatch(self, request, *args, **kwargs):
if request.user.is_authenticated:
return HttpResponseRedirect(self.success_url)
return super(LoginPerfilView, self).dispatch(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
form = LoginUsuarioPerfilForm(request.POST, request=request)
if form.is_valid():
perfil = Perfil.objects.filter(usuario=request.POST.get('usuario')).first()
if perfil is not None:
if perfil.estado:
perfil = authenticate(
usuario=request.POST.get('usuario'),
password=request.POST.get('password'))
if perfil is not None:
login_django(request, perfil)
return redirect('usuario:dashboard')
return render(request, self.template_name, {
"error": True,
"message": "Tu nombre de usuario y contraseña no coinciden. Inténtalo de nuevo."}
)
return render(request, self.template_name, {
"error": True,
"message": "Su cuenta está inactiva. Por favor, póngase en contacto con el administrador"}
)
return render(request, self.template_name, {
"error": True,
"message": "Tu cuenta no se encuentra. Por favor, póngase en contacto con el administrador"}
)
return render(request, self.template_name, {
"form": form
})
class DashboardView(LoginRequiredMixin,TemplateView):
template_name = 'sigetebr/apps/dashboard.html'
login_url = 'usuario:index'
def get_context_data(self, **kwargs):
context = super(DashboardView, self).get_context_data(**kwargs)
manzanostodo = Manzano.objects.all()
manzanosactiva = Manzano.objects.exclude(estado='False')
context["manzanos"] = manzanostodo
context["manzano_count"] = manzanosactiva
lotestodo = Lote.objects.all()
lotesactiva = Lote.objects.exclude(estado='False')
context["lotes"] = lotestodo
context["lote_count"] = lotesactiva
usuariotodo = Perfil.objects.all()
usuariodmin = Perfil.objects.exclude(is_superuser='True')
usuarioactiva = Perfil.objects.exclude(is_active='True')
context["usuario_count"] = usuarioactiva
context["usuarios"] = usuariotodo
personatodo = Persona.objects.all()
personaactiva = Persona.objects.exclude(estado='False')
context["persona_count"] = personaactiva
context["personas"] = personatodo
clientetodo = Cliente.objects.all()
clienteactiva = Cliente.objects.exclude(estado='False')
context["cliente_count"] = clienteactiva
context["clientes"] = clientetodo
return context
@login_required(login_url='usuario:index')
def LogoutView(request):
logout_django(request)
return redirect('usuario:index')
class UsuarioPerfilDetalleView(LoginRequiredMixin,DetailView):
model = Perfil
template_name = 'sigetebr/apps/usuario/configuracion/perfil_usuario.html'
slug_field = 'usuario'
slug_url_kwarg = 'usuario_url'
login_url = 'usuarios:index'
class UsuarioPerfilEditarView(SuccessMessageMixin,LoginRequiredMixin,UpdateView):
model = Perfil
form_class = EditarUsuarioPerfilForm
template_name = 'sigetebr/apps/usuario/configuracion/perfil_form.html'
success_url = reverse_lazy('usuarios:perfil_actualizar')
context_object_name = "user_obj"
login_url = 'usuarios:index'
def form_valid(self, form):
messages.success(self.request, "Tu Perfil Usuario ha sido actualizado")
return super(UsuarioPerfilEditarView, self).form_valid(form)
def get_object(self, queryset=None):
return self.request.user
@login_required(login_url='usuarios:index')
def passwordusuarioview(request):
template_name = 'sigetebr/apps/usuario/configuracion/perfil_password.html'
form = PasswordUsuarioPerfilForm(request.POST or None)
if request.method == 'POST':
if form.is_valid():
actual = request.POST.get('password')
nuevo = request.POST.get('password')
confirma =request.POST.get('confimar_password')
print(actual)
print(nuevo)
print(confirma)
if not check_password(request.POST.get('password'), request.user.password):
messages.warning(request, 'Password Actual no coinciden!')
else:
if authenticate(usuario = request.user.usuario,password = request.POST.get('password')):
request.user.set_password(request.POST.get('new_password'))
request.user.save()
update_session_auth_hash(request, request.user)
messages.success(request, 'Password Actualizado!')
else:
messages.error(request, 'Verifique su Password por favor!')
context = {'form': form}
return render(request, template_name, context)
USUARIO_FIELDS = [
{'string': 'N°', 'field': 'numero'},
{'string': 'Usuario', 'field': 'usuario'},
{'string': 'Nombres', 'field': 'nombre'},
{'string': 'Email', 'field': 'email'},
{'string': 'Roles', 'field': 'roles'},
{'string': 'Estado', 'field': 'estado'},
{'string': 'Acciones', 'field': 'acciones'},
]
class PerfilListarView(LoginRequiredMixin,TemplateView):
model = Perfil
template_name = "sigetebr/apps/usuario/perfil/listar.html"
login_url = 'usuario:index'
def get_queryset(self):
queryset = self.model.objects.all()
request_post = self.request.POST
print(request_post,"Usuario")
if request_post:
if request_post.get('usuario'):
queryset = queryset.filter(
usuario__icontains=request_post.get('usuario'))
if request_post.get('email'):
queryset = queryset.filter(
email__icontains=request_post.get('email'))
print(queryset, "Resultado")
return queryset
def get_context_data(self, **kwargs):
context = super(PerfilListarView, self).get_context_data(**kwargs)
context["list_perfil"] = self.get_queryset()
context['fields'] = USUARIO_FIELDS
context["per_page"] = self.request.POST.get('per_page')
search = False
if (
self.request.POST.get('usuario') or
self.request.POST.get('email')
):
search = True
context["search"] = search
return context
def post(self, request, *args, **kwargs):
context = self.get_context_data(**kwargs)
return self.render_to_response(context)
class PerfilCrearView(SuccessMessageMixin,LoginRequiredMixin,CreateView):
model = Perfil
template_name = "sigetebr/apps/usuario/perfil/form.html"
context_object_name = "obj"
form_class = PerfilFrom
success_url = reverse_lazy("usuario:listar_perfil")
success_message = "Perfil de Usuario Creado Exitosamente"
login_url = 'usuario:index'
class PerfilEditarView(SuccessMessageMixin,LoginRequiredMixin,UpdateView):
model = Perfil
template_name = "sigetebr/apps/usuario/perfil/form.html"
context_object_name = "obj_usuario"
form_class = PerfilFrom
success_url = reverse_lazy("usuario:listar_perfil")
success_message = "Perfil de Usuario Actualizada Satisfactoriamente"
login_url = 'usuario:index'
class PerfilDetallesView(LoginRequiredMixin,DetailView):
model = Perfil
template_name = 'sigetebr/apps/usuario/perfil/detalle.html'
slug_field = 'usuario'
context_object_name = 'obj'
slug_url_kwarg = 'usuario_url'
login_url = 'usuario:index'
class PerfilEliminarView(SuccessMessageMixin,LoginRequiredMixin,DeleteView):
model = Perfil
template_name='sigetebr/apps/usuario/perfil/eliminar.html'
context_object_name='obj'
success_url = reverse_lazy("usuario:listar_perfil")
success_message="Perfil de Usuario Eliminada Exitosamente"
login_url = 'usuario:index'
@login_required(login_url='usuario:index')
def perfildesactivar(request, id):
perfil = Perfil.objects.filter(pk=id).first()
contexto={}
template_name = 'sigetebr/apps/usuario/perfil/estado_desactivar.html'
if not perfil:
return redirect('usuario:listar_perfil')
if request.method=='GET':
contexto={'obj':perfil}
if request.method=='POST':
perfil.estado=False
perfil.save()
return redirect('usuario:listar_perfil')
return render(request,template_name,contexto)
@login_required(login_url='usuario:index')
def perfilactivar(request, id):
perfil = Perfil.objects.filter(pk=id).first()
contexto={}
template_name = 'sigetebr/apps/usuario/perfil/estado_activar.html'
if not perfil:
return redirect('usuario:listar_perfil')
if request.method=='GET':
contexto={'obj':perfil}
if request.method=='POST':
perfil.estado=True
perfil.save()
return redirect('usuario:listar_perfil')
return render(request,template_name,contexto)
@login_required(login_url='usuario:index')
def cambiar_estado_perfil(request, pk):
perfil = get_object_or_404(Perfil, pk=pk)
if perfil.estado:
perfil.estado = False
messages.error(request, "Perfil de Usuario Desactivada")
else:
perfil.estado = True
messages.success(request, "Perfil de Usuario Activada")
perfil.um = request.user.id
perfil.save()
return redirect('usuario:listar_perfil')
| true | true |
1c45c58302360fe1ea1256f259b08d194601aee0 | 9,269 | py | Python | spookbot.py | carsuki/discord-spookbot | a6bd5b7e80860d7db65f3eb634bab68b9d4c50f1 | [
"BSD-3-Clause"
] | 1 | 2021-10-01T13:44:05.000Z | 2021-10-01T13:44:05.000Z | spookbot.py | carsuki/discord-spookbot | a6bd5b7e80860d7db65f3eb634bab68b9d4c50f1 | [
"BSD-3-Clause"
] | null | null | null | spookbot.py | carsuki/discord-spookbot | a6bd5b7e80860d7db65f3eb634bab68b9d4c50f1 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
import asyncio
import discord
import logging
import json
import random
# File logger for the bot; mode='w' truncates spookbot.log on each start.
logger = logging.getLogger('spookbot')
logger.setLevel(logging.INFO)
handler = logging.FileHandler(filename='spookbot.log', mode='w', encoding='utf-8')
handler.setFormatter(logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s'))
logger.addHandler(handler)
# Credentials: auth.json must provide "clientId" and "token" (used below).
with open("auth.json") as auth:
    auth = json.load(auth)
client = discord.Client()
# Registry mapping command name -> coroutine; filled by the @command decorator.
commands = {}
def command(func):
    """Register *func* in the global ``commands`` table under its own name."""
    commands[func.__name__] = func
    return func
@client.event
async def on_message(message):
    """Dispatch '.<name>' messages to the registered command handlers.

    Fixes over the previous version:
    * ignores messages authored by bots (including this one), the
      standard guard against command feedback loops;
    * the loop variable no longer shadows the ``command`` decorator.
    """
    if message.author.bot:
        return
    for name, handler in commands.items():
        # Prefix match on purpose: '.spookmeter not spooky' must still
        # trigger the 'spookmeter' handler.
        if message.content.startswith('.' + name):
            logger.info('Command `%s` called from message: %s', name, message.content)
            await handler(message)
# {{{ Commands
@command
async def doot(message):
    """Reply to the caller with the classic skeleton greeting."""
    reply = message.author.mention + ' doot doot'
    await message.channel.send(reply)
@command
async def funnycatchphrase(message):
    """Send one of a fixed set of catchphrases, chosen at random."""
    phrases = ("Doot", "AssFuck", "Shit Bruh")
    chosen = random.choice(phrases)
    await message.channel.send(chosen)
@command
async def calciumfix(message):
    """Send one random skeleton GIF from the hard-coded URL corpus below."""
    # NOTE: raw scraped data kept verbatim; entries include tracking and
    # Google-redirect URLs, and some links may be dead.
    messages = ["https://mbtskoudsalg.com/images/transparent-stuff-vaporwave-3.gif", "https://media.giphy.com/media/3ohhwqrNt7rd9yuj7O/source.gif", "https://tenor.com/view/waiting-skeleton-gif-6159814", "https://tenor.com/view/waiting-gif-9030040", "https://tenor.com/view/iphone-skeleton-gif-5452826", "https://tenor.com/view/skeleton-waiting-eating-bored-playing-around-gif-14558363", "https://tenor.com/view/shaking-skeleton-skeletons-gif-4757109", "https://tenor.com/view/skeleton-waiting-keyboard-bored-life-gif-14558359", "https://tenor.com/view/skeleton-tea-gif-10625213", "https://tenor.com/view/skeleton-ruby-swipe-hearts-gif-10625183", "https://media3.giphy.com/media/26BRDDhIt8oiyEjS0/source.gif", "https://media3.giphy.com/media/3o7TKpmHsAZiTTekve/source.gif", "https://media0.giphy.com/media/3o7TKqNtiUdqSfB6EM/source.gif", "https://media0.giphy.com/media/l3fQ6Fh6Ze3rMXn4A/source.gif", "https://media2.giphy.com/media/3o7TKJNbIxU09eccuI/source.gif", "https://media0.giphy.com/media/26BRxmqeqsRPBBOpy/source.gif", "https://media1.giphy.com/media/l46CpUQRyLgvVhvfW/source.gif", "https://media3.giphy.com/media/26BRCc2VNkdZ5tjvG/source.gif", "https://media2.giphy.com/media/l0MYzSbsaFfUZ1DTa/source.gif", "https://format-magazine-production-res.cloudinary.com/image/upload/c_limit,w_540,h_540,f_gif,f_auto/jjjjjohn-skeleton-4", "https://i.gifer.com/WYLS.gif", "https://media0.giphy.com/media/3owyp71e0oJZg3RQsw/source.gif", "https://www.google.com/url?sa=i&rct=j&q=&esrc=s&source=images&cd=&cad=rja&uact=8&ved=2ahUKEwi3o7_j3vvkAhXYGrkGHSQ1AKwQjRx6BAgBEAQ&url=%2Furl%3Fsa%3Di%26rct%3Dj%26q%3D%26esrc%3Ds%26source%3Dimages%26cd%3D%26ved%3D%26url%3Dhttps%253A%252F%252Fgiphy.com%252Fstickers%252Fskeleton-cds-making-it-rain-l0MYPqg7VQWLK9yWQ%26psig%3DAOvVaw2xm1MH0j04_SUAtE8P-Zw6%26ust%3D1570042508187445&psig=AOvVaw2xm1MH0j04_SUAtE8P-Zw6&ust=1570042508187445", "https://upload-assets.vice.com/files/2016/08/02/1470174545JohnKarel7.gif", 
"https://i.pinimg.com/originals/09/3e/4b/093e4b66b0a3d888db0184f4dc119204.gif", "https://media2.giphy.com/media/xTiTnpT1zjQ8msSQbS/source.gif", "https://i.pinimg.com/originals/c6/db/a2/c6dba2e9a5b48db157c6a2fea4a8b692.gif", "https://media3.giphy.com/media/26gJAECj4uH3zpjAQ/200w.gif", "https://pa1.narvii.com/6991/847644c6226d4e577370be3d4ac6c09b7159ac53r1-540-540_hq.gif", "http://artfcity.com/wp-content/uploads/2016/09/tumblr_o3lj2ehrmY1qza1qzo1_500.gif", "https://i.gifer.com/PaE.gif", "https://media2.giphy.com/media/l0MYR7ATNClP1GjcI/source.gif", "https://i.pinimg.com/originals/cd/e4/e2/cde4e242d5c3ace213a72d33cea9b16e.gif", "https://media1.giphy.com/media/3o6ZtaV6slZPhE0ftu/source.gif", "https://format-magazine-production-res.cloudinary.com/image/upload/c_limit,w_540,h_540,f_gif,f_auto/jjjjjohn-skeleton-9", "https://66.media.tumblr.com/d30560fbc829bcb17b9fd92844088487/tumblr_naes2zz8im1qza1qzo1_500.gif", "https://pa1.narvii.com/6991/533e5f5d561d5c58aff06092fbf12e6fdfb52ecar1-540-540_hq.gif", "https://upload-assets.vice.com/files/2016/08/02/1470174546JohnKarel10.gif", "https://i.kym-cdn.com/photos/images/original/001/186/745/526.gif", "https://media3.giphy.com/media/3o6ZtmOg5coyOIc3OU/source.gif", "https://upload-assets.vice.com/files/2016/08/02/1470174545JohnKarel6.gif", "https://i.pinimg.com/originals/b6/85/99/b6859978fb0af8249b58a52f4755647b.gif", "https://i.kym-cdn.com/photos/images/original/001/178/761/062.gif", "https://format-magazine-production-res.cloudinary.com/image/upload/c_limit,w_540,h_540,f_gif,f_auto/jjjjjohn-skeleton-6", "https://upload-assets.vice.com/files/2016/08/02/1470174545JohnKarel4.gif", "https://78.media.tumblr.com/a7411f14760a4d7978e735d55ed438a6/tumblr_nvdixx3Jxs1qac28vo1_r2_500.gif", "https://cdn.shopify.com/s/files/1/2128/8929/files/skeleton_pizzabites.gif?v=1559674098", "https://upload-assets.vice.com/files/2016/08/02/1470174546JohnKarel8.gif", "https://pa1.narvii.com/6991/1a26e49708a6234f5e34b495b0744ea2564a0623r1-540-540_hq.gif", 
"https://szx3iab.files.wordpress.com/2016/11/tumblr_n995vvy5dv1qza1qzo1_500.gif?w=352", "https://upload-assets.vice.com/files/2016/08/02/1470174544JohnKarel5.gif", "https://i.pinimg.com/originals/e7/1a/fb/e71afbdcda22ae75f71ddd438074504e.gif", "https://i.pinimg.com/originals/32/80/6f/32806fc20098726a64c8ff3021f80845.gif", "https://66.media.tumblr.com/e1dc70d9af348d26a9e9bae24fea7def/tumblr_p62tyfOY9a1qza1qzo1_540.gifv", "https://szx3iab.files.wordpress.com/2016/11/tumblr_nbvddjrphx1qza1qzo1_r1_500.gif?w=352", "https://i.kym-cdn.com/photos/images/original/001/181/074/55e.gif", "https://steamuserimages-a.akamaihd.net/ugc/922557282351284530/29A2D872D9A199B15B1E5CE6BFD2C49B3CC3A96A/?imw=512&imh=512&ima=fit&impolicy=Letterbox&imcolor=%23000000&letterbox=true", "https://i.pinimg.com/originals/ea/28/e5/ea28e5e9c44c07fa5cee1011a80162cd.gif", "https://66.media.tumblr.com/6a49a3078ea1b35fc676812bd59c7bf8/tumblr_pfov6xxunm1qza1qzo1_540.gif", "https://steamuserimages-a.akamaihd.net/ugc/922557282351282579/CDA6809E7AE096DB1E099E3E478FCB7AB970B2EF/", "https://pa1.narvii.com/6992/c80cbaad5797bcbbeb497db6f97317614959575br1-500-500_hq.gif", "https://66.media.tumblr.com/6fcc55ccbccd8cdad80a4c80eca8298a/tumblr_p9ihvfJ6Bg1qza1qzo1_540.gif", "https://414foto.com/image/289002-full_halloween-magic-gif-by-jjjjjohn-find-share-on-giphy.gif", "https://pa1.narvii.com/6991/145a1099ffab05aeefc24b787f3eaa91d5c245c5r1-540-540_hq.gif", "https://aws1.discourse-cdn.com/woot/original/3X/b/c/bcc6725e388cb4f1ddf0c242ca1f4b50169b912c.gif", "https://img.buzzfeed.com/buzzfeed-static/static/2015-11/18/21/enhanced/webdr03/anigif_original-921-1447900549-1.gif", "https://66.media.tumblr.com/f034b9b7e673fc704a946f845c4775d4/tumblr_nvzf29ynN71qza1qzo1_500.gif", "https://i2.wp.com/www.doperadcool.com/wp-content/uploads/2019/08/jjjjjohn-skeleton-plants-perfect-score.gif?fit=500%2C500&ssl=1", "https://i.pinimg.com/originals/a9/86/cc/a986cc2005b7be0f650f2b92a12a787e.gif", 
"https://pa1.narvii.com/6991/720fe80da48d6e095924cead73f1ba4da2218789r1-540-540_hq.gif", "https://media0.giphy.com/media/3o7TKWGiPEqIhF0Yrm/source.gif", "https://media.giphy.com/media/y6Xvxvx5Q37LW/giphy.gif", "https://media1.giphy.com/media/3oriNWxJAEYUt59Ego/source.gif", "https://aws1.discourse-cdn.com/woot/original/3X/7/1/71e6d474061c0a43bb671c0e2289fddfb6f81c97.gif", "https://66.media.tumblr.com/62ab226c367aa62d7f13d042486ff083/tumblr_pk23ycWYPE1qza1qzo1_r1_540.gif", "https://media2.giphy.com/media/5t4gL5cVbiN0nSyKkd/source.gif", "https://szx3iab.files.wordpress.com/2016/11/tumblr_nljo30cjit1qza1qzo1_500.gif?w=352", "https://pa1.narvii.com/6991/1823151f01188539eee8c67dfb806241abea6532r1-540-540_hq.gif", "https://media2.giphy.com/media/lJMgI0zIW8Wz49Qc13/source.gif", "https://aws1.discourse-cdn.com/woot/original/3X/b/7/b7c97a5d9f42ddfc429220fcc63a478010bec6d2.gif", "https://img.buzzfeed.com/buzzfeed-static/static/2019-09/23/3/asset/4d8c340c3380/anigif_sub-buzz-6141-1569209960-1.gif?output-quality=auto&output-format=auto&downsize=360:*", "https://aws1.discourse-cdn.com/woot/original/3X/d/b/dbbc1a1f1cbe907160e603da114603116b315252.gif", "https://media3.giphy.com/media/l46CxfeUUs4NV2KJ2/source.gif", "https://i.pinimg.com/originals/e8/e6/c6/e8e6c608e0346ffdcbc20a2344be62bd.gif", "https://i.pinimg.com/originals/ec/ca/89/ecca896e384db32a5b975a7c79741fa3.gif"]
    await message.channel.send(random.choice(messages))
@command
async def spookmeter(message):
    """Reply with a GIF matching the requested spookiness level (1-5).

    Fix: suffix checks are now ordered most-specific first.  Previously
    'spoopy' was tested before 'p spoopy' and 'spooky' before '2spooky',
    so levels 3 and 5 were unreachable by name (every message ending in
    'p spoopy' also ends in 'spoopy', and every '2spooky' ends in
    'spooky'); only the numeric aliases '3' and '5' worked.
    """
    endswith = message.content.lower().endswith
    async def send(url):
        await message.channel.send(url)
    if endswith('not spooky') or endswith('1'):
        await send('https://i.imgur.com/OtHOWy4.gif')
    elif endswith('2spooky') or endswith('5'):
        await send('https://i.imgur.com/FToVdJR.gif')
    elif endswith('p spoopy') or endswith('3'):
        await send('https://i.imgur.com/HmJXXfh.gif')
    elif endswith('spoopy') or endswith('2'):
        await send('https://i.imgur.com/UvoCUa0.gif')
    elif endswith('spooky') or endswith('4'):
        await send('https://i.imgur.com/o1aLBqG.gif')
# }}}
# Print the OAuth invite URL for this bot, then start it (blocks until shutdown).
print("https://discordapp.com/oauth2/authorize?&client_id=" + auth['clientId'] + "&scope=bot&permissions=0")
client.run(auth['token'])
| 130.549296 | 7,276 | 0.784659 |
import asyncio
import discord
import logging
import json
import random
logger = logging.getLogger('spookbot')
logger.setLevel(logging.INFO)
handler = logging.FileHandler(filename='spookbot.log', mode='w', encoding='utf-8')
handler.setFormatter(logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s'))
logger.addHandler(handler)
with open("auth.json") as auth:
auth = json.load(auth)
client = discord.Client()
commands = {}
def command(func):
commands[func.__name__] = func
return func
@client.event
async def on_message(message):
for command in commands.keys():
if message.content.startswith('.' + command):
logger.info('Command `%s` called from message: %s', command, message.content)
await commands[command](message)
@command
async def doot(message):
await message.channel.send(message.author.mention + ' doot doot')
@command
async def funnycatchphrase(message):
messages = ["Doot", "AssFuck", "Shit Bruh"]
await message.channel.send(random.choice(messages))
@command
async def calciumfix(message):
messages = ["https://mbtskoudsalg.com/images/transparent-stuff-vaporwave-3.gif", "https://media.giphy.com/media/3ohhwqrNt7rd9yuj7O/source.gif", "https://tenor.com/view/waiting-skeleton-gif-6159814", "https://tenor.com/view/waiting-gif-9030040", "https://tenor.com/view/iphone-skeleton-gif-5452826", "https://tenor.com/view/skeleton-waiting-eating-bored-playing-around-gif-14558363", "https://tenor.com/view/shaking-skeleton-skeletons-gif-4757109", "https://tenor.com/view/skeleton-waiting-keyboard-bored-life-gif-14558359", "https://tenor.com/view/skeleton-tea-gif-10625213", "https://tenor.com/view/skeleton-ruby-swipe-hearts-gif-10625183", "https://media3.giphy.com/media/26BRDDhIt8oiyEjS0/source.gif", "https://media3.giphy.com/media/3o7TKpmHsAZiTTekve/source.gif", "https://media0.giphy.com/media/3o7TKqNtiUdqSfB6EM/source.gif", "https://media0.giphy.com/media/l3fQ6Fh6Ze3rMXn4A/source.gif", "https://media2.giphy.com/media/3o7TKJNbIxU09eccuI/source.gif", "https://media0.giphy.com/media/26BRxmqeqsRPBBOpy/source.gif", "https://media1.giphy.com/media/l46CpUQRyLgvVhvfW/source.gif", "https://media3.giphy.com/media/26BRCc2VNkdZ5tjvG/source.gif", "https://media2.giphy.com/media/l0MYzSbsaFfUZ1DTa/source.gif", "https://format-magazine-production-res.cloudinary.com/image/upload/c_limit,w_540,h_540,f_gif,f_auto/jjjjjohn-skeleton-4", "https://i.gifer.com/WYLS.gif", "https://media0.giphy.com/media/3owyp71e0oJZg3RQsw/source.gif", "https://www.google.com/url?sa=i&rct=j&q=&esrc=s&source=images&cd=&cad=rja&uact=8&ved=2ahUKEwi3o7_j3vvkAhXYGrkGHSQ1AKwQjRx6BAgBEAQ&url=%2Furl%3Fsa%3Di%26rct%3Dj%26q%3D%26esrc%3Ds%26source%3Dimages%26cd%3D%26ved%3D%26url%3Dhttps%253A%252F%252Fgiphy.com%252Fstickers%252Fskeleton-cds-making-it-rain-l0MYPqg7VQWLK9yWQ%26psig%3DAOvVaw2xm1MH0j04_SUAtE8P-Zw6%26ust%3D1570042508187445&psig=AOvVaw2xm1MH0j04_SUAtE8P-Zw6&ust=1570042508187445", "https://upload-assets.vice.com/files/2016/08/02/1470174545JohnKarel7.gif", 
"https://i.pinimg.com/originals/09/3e/4b/093e4b66b0a3d888db0184f4dc119204.gif", "https://media2.giphy.com/media/xTiTnpT1zjQ8msSQbS/source.gif", "https://i.pinimg.com/originals/c6/db/a2/c6dba2e9a5b48db157c6a2fea4a8b692.gif", "https://media3.giphy.com/media/26gJAECj4uH3zpjAQ/200w.gif", "https://pa1.narvii.com/6991/847644c6226d4e577370be3d4ac6c09b7159ac53r1-540-540_hq.gif", "http://artfcity.com/wp-content/uploads/2016/09/tumblr_o3lj2ehrmY1qza1qzo1_500.gif", "https://i.gifer.com/PaE.gif", "https://media2.giphy.com/media/l0MYR7ATNClP1GjcI/source.gif", "https://i.pinimg.com/originals/cd/e4/e2/cde4e242d5c3ace213a72d33cea9b16e.gif", "https://media1.giphy.com/media/3o6ZtaV6slZPhE0ftu/source.gif", "https://format-magazine-production-res.cloudinary.com/image/upload/c_limit,w_540,h_540,f_gif,f_auto/jjjjjohn-skeleton-9", "https://66.media.tumblr.com/d30560fbc829bcb17b9fd92844088487/tumblr_naes2zz8im1qza1qzo1_500.gif", "https://pa1.narvii.com/6991/533e5f5d561d5c58aff06092fbf12e6fdfb52ecar1-540-540_hq.gif", "https://upload-assets.vice.com/files/2016/08/02/1470174546JohnKarel10.gif", "https://i.kym-cdn.com/photos/images/original/001/186/745/526.gif", "https://media3.giphy.com/media/3o6ZtmOg5coyOIc3OU/source.gif", "https://upload-assets.vice.com/files/2016/08/02/1470174545JohnKarel6.gif", "https://i.pinimg.com/originals/b6/85/99/b6859978fb0af8249b58a52f4755647b.gif", "https://i.kym-cdn.com/photos/images/original/001/178/761/062.gif", "https://format-magazine-production-res.cloudinary.com/image/upload/c_limit,w_540,h_540,f_gif,f_auto/jjjjjohn-skeleton-6", "https://upload-assets.vice.com/files/2016/08/02/1470174545JohnKarel4.gif", "https://78.media.tumblr.com/a7411f14760a4d7978e735d55ed438a6/tumblr_nvdixx3Jxs1qac28vo1_r2_500.gif", "https://cdn.shopify.com/s/files/1/2128/8929/files/skeleton_pizzabites.gif?v=1559674098", "https://upload-assets.vice.com/files/2016/08/02/1470174546JohnKarel8.gif", "https://pa1.narvii.com/6991/1a26e49708a6234f5e34b495b0744ea2564a0623r1-540-540_hq.gif", 
"https://szx3iab.files.wordpress.com/2016/11/tumblr_n995vvy5dv1qza1qzo1_500.gif?w=352", "https://upload-assets.vice.com/files/2016/08/02/1470174544JohnKarel5.gif", "https://i.pinimg.com/originals/e7/1a/fb/e71afbdcda22ae75f71ddd438074504e.gif", "https://i.pinimg.com/originals/32/80/6f/32806fc20098726a64c8ff3021f80845.gif", "https://66.media.tumblr.com/e1dc70d9af348d26a9e9bae24fea7def/tumblr_p62tyfOY9a1qza1qzo1_540.gifv", "https://szx3iab.files.wordpress.com/2016/11/tumblr_nbvddjrphx1qza1qzo1_r1_500.gif?w=352", "https://i.kym-cdn.com/photos/images/original/001/181/074/55e.gif", "https://steamuserimages-a.akamaihd.net/ugc/922557282351284530/29A2D872D9A199B15B1E5CE6BFD2C49B3CC3A96A/?imw=512&imh=512&ima=fit&impolicy=Letterbox&imcolor=%23000000&letterbox=true", "https://i.pinimg.com/originals/ea/28/e5/ea28e5e9c44c07fa5cee1011a80162cd.gif", "https://66.media.tumblr.com/6a49a3078ea1b35fc676812bd59c7bf8/tumblr_pfov6xxunm1qza1qzo1_540.gif", "https://steamuserimages-a.akamaihd.net/ugc/922557282351282579/CDA6809E7AE096DB1E099E3E478FCB7AB970B2EF/", "https://pa1.narvii.com/6992/c80cbaad5797bcbbeb497db6f97317614959575br1-500-500_hq.gif", "https://66.media.tumblr.com/6fcc55ccbccd8cdad80a4c80eca8298a/tumblr_p9ihvfJ6Bg1qza1qzo1_540.gif", "https://414foto.com/image/289002-full_halloween-magic-gif-by-jjjjjohn-find-share-on-giphy.gif", "https://pa1.narvii.com/6991/145a1099ffab05aeefc24b787f3eaa91d5c245c5r1-540-540_hq.gif", "https://aws1.discourse-cdn.com/woot/original/3X/b/c/bcc6725e388cb4f1ddf0c242ca1f4b50169b912c.gif", "https://img.buzzfeed.com/buzzfeed-static/static/2015-11/18/21/enhanced/webdr03/anigif_original-921-1447900549-1.gif", "https://66.media.tumblr.com/f034b9b7e673fc704a946f845c4775d4/tumblr_nvzf29ynN71qza1qzo1_500.gif", "https://i2.wp.com/www.doperadcool.com/wp-content/uploads/2019/08/jjjjjohn-skeleton-plants-perfect-score.gif?fit=500%2C500&ssl=1", "https://i.pinimg.com/originals/a9/86/cc/a986cc2005b7be0f650f2b92a12a787e.gif", 
"https://pa1.narvii.com/6991/720fe80da48d6e095924cead73f1ba4da2218789r1-540-540_hq.gif", "https://media0.giphy.com/media/3o7TKWGiPEqIhF0Yrm/source.gif", "https://media.giphy.com/media/y6Xvxvx5Q37LW/giphy.gif", "https://media1.giphy.com/media/3oriNWxJAEYUt59Ego/source.gif", "https://aws1.discourse-cdn.com/woot/original/3X/7/1/71e6d474061c0a43bb671c0e2289fddfb6f81c97.gif", "https://66.media.tumblr.com/62ab226c367aa62d7f13d042486ff083/tumblr_pk23ycWYPE1qza1qzo1_r1_540.gif", "https://media2.giphy.com/media/5t4gL5cVbiN0nSyKkd/source.gif", "https://szx3iab.files.wordpress.com/2016/11/tumblr_nljo30cjit1qza1qzo1_500.gif?w=352", "https://pa1.narvii.com/6991/1823151f01188539eee8c67dfb806241abea6532r1-540-540_hq.gif", "https://media2.giphy.com/media/lJMgI0zIW8Wz49Qc13/source.gif", "https://aws1.discourse-cdn.com/woot/original/3X/b/7/b7c97a5d9f42ddfc429220fcc63a478010bec6d2.gif", "https://img.buzzfeed.com/buzzfeed-static/static/2019-09/23/3/asset/4d8c340c3380/anigif_sub-buzz-6141-1569209960-1.gif?output-quality=auto&output-format=auto&downsize=360:*", "https://aws1.discourse-cdn.com/woot/original/3X/d/b/dbbc1a1f1cbe907160e603da114603116b315252.gif", "https://media3.giphy.com/media/l46CxfeUUs4NV2KJ2/source.gif", "https://i.pinimg.com/originals/e8/e6/c6/e8e6c608e0346ffdcbc20a2344be62bd.gif", "https://i.pinimg.com/originals/ec/ca/89/ecca896e384db32a5b975a7c79741fa3.gif"]
await message.channel.send(random.choice(messages))
@command
async def spookmeter(message):
    """Reply with a GIF matching the spookiness level at the end of the message.

    Levels are matched in order, so earlier suffixes win (e.g. 'not spooky'
    is tested before the bare 'spooky', and 'spooky' before '2spooky').
    """
    text = message.content.lower()
    # (suffixes, gif) pairs; first match wins, so keep this exact order.
    levels = (
        (('not spooky', '1'), 'https://i.imgur.com/OtHOWy4.gif'),
        (('spoopy', '2'), 'https://i.imgur.com/UvoCUa0.gif'),
        (('p spoopy', '3'), 'https://i.imgur.com/HmJXXfh.gif'),
        (('spooky', '4'), 'https://i.imgur.com/o1aLBqG.gif'),
        (('2spooky', '5'), 'https://i.imgur.com/FToVdJR.gif'),
    )
    for suffixes, gif in levels:
        # str.endswith accepts a tuple: equivalent to the original 'or' pair.
        if text.endswith(suffixes):
            await message.channel.send(gif)
            break
# Print the OAuth2 invite URL for this bot, then start the client.
# client.run blocks until the bot is stopped.
print("https://discordapp.com/oauth2/authorize?&client_id=" + auth['clientId'] + "&scope=bot&permissions=0")
client.run(auth['token'])
| true | true |
1c45c59d5474af1e72f1993635d141aeccc75b6e | 416 | py | Python | products/migrations/0002_auto_20210110_1353.py | ashishkr619/dukaan_main | b236b498b95f62160959b5e84bb642a0be6063b0 | [
"MIT"
] | null | null | null | products/migrations/0002_auto_20210110_1353.py | ashishkr619/dukaan_main | b236b498b95f62160959b5e84bb642a0be6063b0 | [
"MIT"
] | null | null | null | products/migrations/0002_auto_20210110_1353.py | ashishkr619/dukaan_main | b236b498b95f62160959b5e84bb642a0be6063b0 | [
"MIT"
] | null | null | null | # Generated by Django 2.2.17 on 2021-01-10 13:53
from django.db import migrations, models
class Migration(migrations.Migration):
    """Redefine ``product.category`` as a 120-char ``CharField`` labelled
    'Product Categories' (auto-generated; follows ``0001_initial``)."""
    dependencies = [
        ('products', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='product',
            name='category',
            # AlterField swaps in this column definition for 'category'.
            field=models.CharField(max_length=120, verbose_name='Product Categories'),
        ),
    ]
| 21.894737 | 86 | 0.612981 |
from django.db import migrations, models
class Migration(migrations.Migration):
    """Redefine ``product.category`` as a 120-char ``CharField`` labelled
    'Product Categories' (follows ``0001_initial``)."""
    dependencies = [
        ('products', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='product',
            name='category',
            # AlterField swaps in this column definition for 'category'.
            field=models.CharField(max_length=120, verbose_name='Product Categories'),
        ),
    ]
| true | true |
1c45c5a180008bb7c796a6f16b8559ac7395f0c5 | 1,094 | py | Python | test/fx2trt/converters/acc_op/test_relu.py | steffenerickson/pytorch | 0b656c4c69ce77ecd9aace486e471917e4660746 | [
"Intel"
] | 1 | 2022-01-31T14:15:35.000Z | 2022-01-31T14:15:35.000Z | test/fx2trt/converters/acc_op/test_relu.py | steffenerickson/pytorch | 0b656c4c69ce77ecd9aace486e471917e4660746 | [
"Intel"
] | 1 | 2022-02-03T12:43:23.000Z | 2022-02-03T12:47:53.000Z | test/fx2trt/converters/acc_op/test_relu.py | steffenerickson/pytorch | 0b656c4c69ce77ecd9aace486e471917e4660746 | [
"Intel"
] | null | null | null | # Owner(s): ["oncall: aiacc"]
import torch
import torch.fx.experimental.fx_acc.acc_ops as acc_ops
import torch.nn as nn
from torch.testing._internal.common_fx2trt import AccTestCase, InputTensorSpec
from torch.testing._internal.common_utils import run_tests
class TestReLUConverter(AccTestCase):
    """Conversion tests for ``nn.functional.relu`` traced to ``acc_ops.relu``."""

    @staticmethod
    def _relu_module():
        # Minimal module whose forward is a single functional relu call.
        class Relu(nn.Module):
            def forward(self, x):
                return nn.functional.relu(x)

        return Relu()

    def test_relu(self):
        # Static-shape path: one random (1, 10) input.
        self.run_test(
            self._relu_module(), [torch.randn(1, 10)], expected_ops={acc_ops.relu}
        )

    def test_relu_with_dynamic_shape(self):
        # Dynamic-shape path: rank-3 input with (min, opt, max) shape ranges.
        specs = [
            InputTensorSpec(
                shape=(-1, -1, -1),
                dtype=torch.float32,
                shape_ranges=[((1, 1, 1), (1, 2, 3), (3, 3, 3))],
            ),
        ]
        self.run_test_with_dynamic_shape(
            self._relu_module(), specs, expected_ops={acc_ops.relu}
        )
if __name__ == '__main__':
    # run_tests comes from torch.testing._internal.common_utils (imported
    # above) and runs the TestCase classes defined in this module.
    run_tests()
| 29.567568 | 78 | 0.606947 |
import torch
import torch.fx.experimental.fx_acc.acc_ops as acc_ops
import torch.nn as nn
from torch.testing._internal.common_fx2trt import AccTestCase, InputTensorSpec
from torch.testing._internal.common_utils import run_tests
class TestReLUConverter(AccTestCase):
    """Conversion tests for ``nn.functional.relu`` traced to ``acc_ops.relu``."""
    def test_relu(self):
        # Static-shape path: one random (1, 10) input.
        class TestModule(nn.Module):
            def forward(self, x):
                return nn.functional.relu(x)
        inputs = [torch.randn(1, 10)]
        self.run_test(TestModule(), inputs, expected_ops={acc_ops.relu})
    def test_relu_with_dynamic_shape(self):
        # Dynamic-shape path: rank-3 input with (min, opt, max) shape ranges.
        class TestModule(nn.Module):
            def forward(self, x):
                return nn.functional.relu(x)
        input_specs = [
            InputTensorSpec(
                shape=(-1, -1, -1),
                dtype=torch.float32,
                shape_ranges=[((1, 1, 1), (1, 2, 3), (3, 3, 3))],
            ),
        ]
        self.run_test_with_dynamic_shape(
            TestModule(), input_specs, expected_ops={acc_ops.relu}
        )
if __name__ == '__main__':
    # run_tests comes from torch.testing._internal.common_utils (imported
    # above) and runs the TestCase classes defined in this module.
    run_tests()
| true | true |
1c45c79ba7fba114d9c50c58c0dee7cf69a990c6 | 36,332 | py | Python | scripts/validate_docstrings.py | kpflugshaupt/pandas | c9e3883c630c48b17218e6bcc5593720c1402bf1 | [
"BSD-3-Clause"
] | 80 | 2015-01-01T17:32:11.000Z | 2022-01-24T07:17:47.000Z | scripts/validate_docstrings.py | sanjusci/pandas | a1fee9199eba7ebf423880243936b9f1501d3d3a | [
"BSD-3-Clause"
] | null | null | null | scripts/validate_docstrings.py | sanjusci/pandas | a1fee9199eba7ebf423880243936b9f1501d3d3a | [
"BSD-3-Clause"
] | 28 | 2015-01-30T16:07:48.000Z | 2022-02-11T18:41:13.000Z | #!/usr/bin/env python
"""
Analyze docstrings to detect errors.
If no argument is provided, it does a quick check of docstrings and returns
a csv with all API functions and results of basic checks.
If a function or method is provided in the form "pandas.function",
"pandas.module.class.method", etc. a list of all errors in the docstring for
the specified function or method.
Usage::
$ ./validate_docstrings.py
$ ./validate_docstrings.py pandas.DataFrame.head
"""
import os
import sys
import json
import re
import glob
import functools
import collections
import argparse
import pydoc
import inspect
import importlib
import doctest
import tempfile
import ast
import textwrap
import flake8.main.application
try:
from io import StringIO
except ImportError:
from cStringIO import StringIO
# Template backend makes matplotlib to not plot anything. This is useful
# to avoid that plot windows are open from the doctests while running the
# script. Setting here before matplotlib is loaded.
# We don't warn for the number of open plots, as none is actually being opened
os.environ['MPLBACKEND'] = 'Template'
import matplotlib
matplotlib.rc('figure', max_open_warning=10000)
import numpy
BASE_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.join(BASE_PATH))
import pandas
from pandas.compat import signature
sys.path.insert(1, os.path.join(BASE_PATH, 'doc', 'sphinxext'))
from numpydoc.docscrape import NumpyDocString
from pandas.io.formats.printing import pprint_thing
# Class names that must never appear in public docstrings (checked via GL04).
PRIVATE_CLASSES = ['NDFrame', 'IndexOpsMixin']
# Sphinx directives that may appear inside a parameter description; anything
# after one of these is stripped before the description is validated.
DIRECTIVES = ['versionadded', 'versionchanged', 'deprecated']
# Section titles a numpydoc docstring may contain, in the required order
# (used by GL06 for unknown sections and GL07 for ordering).
ALLOWED_SECTIONS = ['Parameters', 'Attributes', 'Methods', 'Returns', 'Yields',
                    'Other Parameters', 'Raises', 'Warns', 'See Also', 'Notes',
                    'References', 'Examples']
# Error code -> message template; '{...}' placeholders are filled by error().
ERROR_MSGS = {
    'GL01': 'Docstring text (summary) should start in the line immediately '
            'after the opening quotes (not in the same line, or leaving a '
            'blank line in between)',
    'GL02': 'Closing quotes should be placed in the line after the last text '
            'in the docstring (do not close the quotes in the same line as '
            'the text, or leave a blank line between the last text and the '
            'quotes)',
    'GL03': 'Double line break found; please use only one blank line to '
            'separate sections or paragraphs, and do not leave blank lines '
            'at the end of docstrings',
    'GL04': 'Private classes ({mentioned_private_classes}) should not be '
            'mentioned in public docstrings',
    'GL05': 'Tabs found at the start of line "{line_with_tabs}", please use '
            'whitespace only',
    'GL06': 'Found unknown section "{section}". Allowed sections are: '
            '{allowed_sections}',
    'GL07': 'Sections are in the wrong order. Correct order is: '
            '{correct_sections}',
    'GL08': 'The object does not have a docstring',
    'GL09': 'Deprecation warning should precede extended summary',
    'SS01': 'No summary found (a short summary in a single line should be '
            'present at the beginning of the docstring)',
    'SS02': 'Summary does not start with a capital letter',
    'SS03': 'Summary does not end with a period',
    'SS04': 'Summary contains heading whitespaces',
    'SS05': 'Summary must start with infinitive verb, not third person '
            '(e.g. use "Generate" instead of "Generates")',
    'SS06': 'Summary should fit in a single line',
    'ES01': 'No extended summary found',
    'PR01': 'Parameters {missing_params} not documented',
    'PR02': 'Unknown parameters {unknown_params}',
    'PR03': 'Wrong parameters order. Actual: {actual_params}. '
            'Documented: {documented_params}',
    'PR04': 'Parameter "{param_name}" has no type',
    'PR05': 'Parameter "{param_name}" type should not finish with "."',
    'PR06': 'Parameter "{param_name}" type should use "{right_type}" instead '
            'of "{wrong_type}"',
    'PR07': 'Parameter "{param_name}" has no description',
    'PR08': 'Parameter "{param_name}" description should start with a '
            'capital letter',
    'PR09': 'Parameter "{param_name}" description should finish with "."',
    'PR10': 'Parameter "{param_name}" requires a space before the colon '
            'separating the parameter name and type',
    'RT01': 'No Returns section found',
    'RT02': 'The first line of the Returns section should contain only the '
            'type, unless multiple values are being returned',
    'RT03': 'Return value has no description',
    'RT04': 'Return value description should start with a capital letter',
    'RT05': 'Return value description should finish with "."',
    'YD01': 'No Yields section found',
    'SA01': 'See Also section not found',
    'SA02': 'Missing period at end of description for See Also '
            '"{reference_name}" reference',
    'SA03': 'Description should be capitalized for See Also '
            '"{reference_name}" reference',
    'SA04': 'Missing description for See Also "{reference_name}" reference',
    'SA05': '{reference_name} in `See Also` section does not need `pandas` '
            'prefix, use {right_reference} instead.',
    'EX01': 'No examples section found',
    'EX02': 'Examples do not pass tests:\n{doctest_log}',
    'EX03': 'flake8 error: {error_code} {error_message}{times_happening}',
    'EX04': 'Do not import {imported_library}, as it is imported '
            'automatically for the examples (numpy as np, pandas as pd)',
}
def error(code, **kwargs):
    """
    Build an ``(error_code, message)`` tuple for the given code.

    Syntactic sugar over ``ERROR_MSGS``: instead of
    ``('EX02', ERROR_MSGS['EX02'].format(doctest_log=log))`` callers simply
    write ``error('EX02', doctest_log=log)``.

    Parameters
    ----------
    code : str
        Error code.
    **kwargs
        Values for the variables in the error message template.

    Returns
    -------
    code : str
        Error code.
    message : str
        Error message with variables replaced.
    """
    message = ERROR_MSGS[code].format(**kwargs)
    return code, message
def get_api_items(api_doc_fd):
    """
    Yield information about all public API items.

    Parse api.rst file from the documentation, and extract all the functions,
    methods, classes, attributes... This should include all pandas public API.

    Parameters
    ----------
    api_doc_fd : file descriptor
        A file descriptor of the API documentation page, containing the table
        of contents with all the public API.

    Yields
    ------
    name : str
        The name of the object (e.g. 'pandas.Series.str.upper).
    func : function
        The object itself. In most cases this will be a function or method,
        but it can also be classes, properties, cython objects...
    section : str
        The name of the section in the API page where the object item is
        located.
    subsection : str
        The name of the subsection in the API page where the object item is
        located.
    """
    current_module = 'pandas'
    previous_line = current_section = current_subsection = ''
    # State machine: None -> 'autosummary' (directive seen) -> 'items'
    # (blank line after the directive; following lines are item names).
    position = None
    for line in api_doc_fd:
        line = line.strip()
        # An rST title underline has the same length as the title above it:
        # all '-' marks a section, all '~' marks a subsection.
        if len(line) == len(previous_line):
            if set(line) == set('-'):
                current_section = previous_line
                continue
            if set(line) == set('~'):
                current_subsection = previous_line
                continue
        if line.startswith('.. currentmodule::'):
            current_module = line.replace('.. currentmodule::', '').strip()
            continue
        if line == '.. autosummary::':
            position = 'autosummary'
            continue
        if position == 'autosummary':
            if line == '':
                position = 'items'
                continue
        if position == 'items':
            # A blank line ends the autosummary item list.
            if line == '':
                position = None
                continue
            item = line.strip()
            # Resolve the dotted item name relative to current_module.
            func = importlib.import_module(current_module)
            for part in item.split('.'):
                func = getattr(func, part)
            yield ('.'.join([current_module, item]), func,
                   current_section, current_subsection)
        previous_line = line
class Docstring(object):
    """
    Introspection wrapper around one API object and its docstring.

    Resolves the object from its dotted name, keeps both the raw and the
    cleaned docstring, and exposes the numpydoc-parsed structure plus many
    derived properties that the validation checks consume.
    """

    def __init__(self, name):
        self.name = name
        obj = self._load_obj(name)
        self.obj = obj
        # The underlying callable holding the source (may be None for
        # objects that cannot be introspected, e.g. cython objects).
        self.code_obj = self._to_original_callable(obj)
        self.raw_doc = obj.__doc__ or ''
        self.clean_doc = pydoc.getdoc(obj)
        self.doc = NumpyDocString(self.clean_doc)

    def __len__(self):
        return len(self.raw_doc)

    @staticmethod
    def _load_obj(name):
        """
        Import Python object from its name as string.

        Parameters
        ----------
        name : str
            Object name to import (e.g. pandas.Series.str.upper)

        Returns
        -------
        object
            Python object that can be a class, method, function...

        Examples
        --------
        >>> Docstring._load_obj('pandas.Series')
        <class 'pandas.core.series.Series'>
        """
        # Try progressively shorter module prefixes; the last importable
        # prefix wins and the remaining parts are resolved via getattr.
        for maxsplit in range(1, name.count('.') + 1):
            # TODO when py3 only replace by: module, *func_parts = ...
            func_name_split = name.rsplit('.', maxsplit)
            module = func_name_split[0]
            func_parts = func_name_split[1:]
            try:
                obj = importlib.import_module(module)
            except ImportError:
                pass
            else:
                continue
        if 'obj' not in locals():
            raise ImportError('No module can be imported '
                              'from "{}"'.format(name))
        for part in func_parts:
            obj = getattr(obj, part)
        return obj

    @staticmethod
    def _to_original_callable(obj):
        """
        Find the Python object that contains the source code of the object.

        This is useful to find the place in the source code (file and line
        number) where a docstring is defined. It does not currently work for
        all cases, but it should help find some (properties...).
        """
        while True:
            if inspect.isfunction(obj) or inspect.isclass(obj):
                f = inspect.getfile(obj)
                # Dynamically created objects report pseudo-files like
                # '<string>'; treat them as having no source file.
                if f.startswith('<') and f.endswith('>'):
                    return None
                return obj
            if inspect.ismethod(obj):
                obj = obj.__func__
            elif isinstance(obj, functools.partial):
                obj = obj.func
            elif isinstance(obj, property):
                obj = obj.fget
            else:
                return None

    @property
    def type(self):
        # Type name of the wrapped object (e.g. 'function', 'type').
        return type(self.obj).__name__

    @property
    def is_function_or_method(self):
        # TODO(py27): remove ismethod
        return (inspect.isfunction(self.obj)
                or inspect.ismethod(self.obj))

    @property
    def source_file_name(self):
        """
        File name where the object is implemented (e.g. pandas/core/frame.py).
        """
        try:
            fname = inspect.getsourcefile(self.code_obj)
        except TypeError:
            # In some cases the object is something complex like a cython
            # object that can't be easily introspected. An it's better to
            # return the source code file of the object as None, than crash
            pass
        else:
            if fname:
                fname = os.path.relpath(fname, BASE_PATH)
                return fname

    @property
    def source_file_def_line(self):
        """
        Number of line where the object is defined in its file.
        """
        try:
            return inspect.getsourcelines(self.code_obj)[-1]
        except (OSError, TypeError):
            # In some cases the object is something complex like a cython
            # object that can't be easily introspected. An it's better to
            # return the line number as None, than crash
            pass

    @property
    def github_url(self):
        # Link to the definition on GitHub's master branch.
        url = 'https://github.com/pandas-dev/pandas/blob/master/'
        url += '{}#L{}'.format(self.source_file_name,
                               self.source_file_def_line)
        return url

    @property
    def start_blank_lines(self):
        # Number of blank lines before the first text (index of the first
        # non-blank line); None if there is no docstring at all.
        i = None
        if self.raw_doc:
            for i, row in enumerate(self.raw_doc.split('\n')):
                if row.strip():
                    break
        return i

    @property
    def end_blank_lines(self):
        # Same as start_blank_lines but counted from the end.
        i = None
        if self.raw_doc:
            for i, row in enumerate(reversed(self.raw_doc.split('\n'))):
                if row.strip():
                    break
        return i

    @property
    def double_blank_lines(self):
        # True if two consecutive blank lines appear anywhere in the
        # docstring (prev starts True so leading blanks are not counted).
        prev = True
        for row in self.raw_doc.split('\n'):
            if not prev and not row.strip():
                return True
            prev = row.strip()
        return False

    @property
    def section_titles(self):
        # Section titles in the order they appear, detected by their
        # all-dashes underline of matching length.
        sections = []
        self.doc._doc.reset()
        while not self.doc._doc.eof():
            content = self.doc._read_to_next_section()
            if (len(content) > 1
                    and len(content[0]) == len(content[1])
                    and set(content[1]) == {'-'}):
                sections.append(content[0])
        return sections

    @property
    def summary(self):
        return ' '.join(self.doc['Summary'])

    @property
    def num_summary_lines(self):
        return len(self.doc['Summary'])

    @property
    def extended_summary(self):
        # A multi-line summary with no separate extended summary is treated
        # as the extended summary.
        if not self.doc['Extended Summary'] and len(self.doc['Summary']) > 1:
            return ' '.join(self.doc['Summary'])
        return ' '.join(self.doc['Extended Summary'])

    @property
    def needs_summary(self):
        return not (bool(self.summary) and bool(self.extended_summary))

    @property
    def doc_parameters(self):
        # name -> (type, description) in documented order.
        return collections.OrderedDict((name, (type_, ''.join(desc)))
                                       for name, type_, desc
                                       in self.doc['Parameters'])

    @property
    def signature_parameters(self):
        # Parameter names from the actual signature, excluding self/cls;
        # *args/**kwargs keep their stars so they match the documented form.
        if inspect.isclass(self.obj):
            if hasattr(self.obj, '_accessors') and (
                    self.name.split('.')[-1] in
                    self.obj._accessors):
                # accessor classes have a signature but don't want to show this
                return tuple()
        try:
            sig = signature(self.obj)
        except (TypeError, ValueError):
            # Some objects, mainly in C extensions do not support introspection
            # of the signature
            return tuple()
        params = sig.args
        if sig.varargs:
            params.append("*" + sig.varargs)
        if sig.keywords:
            params.append("**" + sig.keywords)
        params = tuple(params)
        if params and params[0] in ('self', 'cls'):
            return params[1:]
        return params

    @property
    def parameter_mismatches(self):
        # PR01/PR02/PR03: signature vs documented parameter differences.
        errs = []
        signature_params = self.signature_parameters
        doc_params = tuple(self.doc_parameters)
        missing = set(signature_params) - set(doc_params)
        if missing:
            errs.append(error('PR01', missing_params=pprint_thing(missing)))
        extra = set(doc_params) - set(signature_params)
        if extra:
            errs.append(error('PR02', unknown_params=pprint_thing(extra)))
        if (not missing and not extra and signature_params != doc_params
                and not (not signature_params and not doc_params)):
            errs.append(error('PR03',
                              actual_params=signature_params,
                              documented_params=doc_params))
        return errs

    @property
    def correct_parameters(self):
        return not bool(self.parameter_mismatches)

    def parameter_type(self, param):
        return self.doc_parameters[param][0]

    def parameter_desc(self, param):
        desc = self.doc_parameters[param][1]
        # Find and strip out any sphinx directives
        for directive in DIRECTIVES:
            full_directive = '.. {}'.format(directive)
            if full_directive in desc:
                # Only retain any description before the directive
                desc = desc[:desc.index(full_directive)]
        return desc

    @property
    def see_also(self):
        # reference name -> joined description, in documented order.
        return collections.OrderedDict((name, ''.join(desc))
                                       for name, desc, _
                                       in self.doc['See Also'])

    @property
    def examples(self):
        return self.doc['Examples']

    @property
    def returns(self):
        return self.doc['Returns']

    @property
    def yields(self):
        return self.doc['Yields']

    @property
    def method_source(self):
        # Dedented source of the object, '' if the source is unavailable.
        try:
            source = inspect.getsource(self.obj)
        except TypeError:
            return ''
        return textwrap.dedent(source)

    @property
    def method_returns_something(self):
        '''
        Check if the docstrings method can return something.

        Bare returns, returns valued None and returns from nested functions are
        disconsidered.

        Returns
        -------
        bool
            Whether the docstrings method can return something.
        '''

        def get_returns_not_on_nested_functions(node):
            returns = [node] if isinstance(node, ast.Return) else []
            for child in ast.iter_child_nodes(node):
                # Ignore nested functions and its subtrees.
                if not isinstance(child, ast.FunctionDef):
                    child_returns = get_returns_not_on_nested_functions(child)
                    returns.extend(child_returns)
            return returns

        tree = ast.parse(self.method_source).body
        if tree:
            returns = get_returns_not_on_nested_functions(tree[0])
            return_values = [r.value for r in returns]
            # Replace NameConstant nodes valued None for None.
            for i, v in enumerate(return_values):
                if isinstance(v, ast.NameConstant) and v.value is None:
                    return_values[i] = None
            return any(return_values)
        else:
            return False

    @property
    def first_line_ends_in_dot(self):
        # NOTE(review): self.doc is a NumpyDocString, not a str, so .split
        # here would raise; this was probably meant to use self.clean_doc
        # (or raw_doc). Confirm whether this property is actually used.
        if self.doc:
            return self.doc.split('\n')[0][-1] == '.'

    @property
    def deprecated_with_directive(self):
        return '.. deprecated:: ' in (self.summary + self.extended_summary)

    @property
    def deprecated(self):
        # Panel is treated as deprecated regardless of its docstring.
        return (self.name.startswith('pandas.Panel')
                or self.deprecated_with_directive)

    @property
    def mentioned_private_classes(self):
        return [klass for klass in PRIVATE_CLASSES if klass in self.raw_doc]

    @property
    def examples_errors(self):
        # Run the docstring examples as doctests (np/pd pre-imported) and
        # collect the failure report; empty string means all passed.
        flags = doctest.NORMALIZE_WHITESPACE | doctest.IGNORE_EXCEPTION_DETAIL
        finder = doctest.DocTestFinder()
        runner = doctest.DocTestRunner(optionflags=flags)
        context = {'np': numpy, 'pd': pandas}
        error_msgs = ''
        for test in finder.find(self.raw_doc, self.name, globs=context):
            f = StringIO()
            runner.run(test, out=f.write)
            error_msgs += f.getvalue()
        return error_msgs

    @property
    def examples_source_code(self):
        lines = doctest.DocTestParser().get_examples(self.raw_doc)
        return [line.source for line in lines]

    def validate_pep8(self):
        # Generator of flake8 statistics for the examples' source code.
        if not self.examples:
            return
        # F401 is needed to not generate flake8 errors in examples
        # that do not user numpy or pandas
        content = ''.join(('import numpy as np # noqa: F401\n',
                           'import pandas as pd # noqa: F401\n',
                           *self.examples_source_code))
        application = flake8.main.application.Application()
        application.initialize(["--quiet"])
        with tempfile.NamedTemporaryFile(mode='w') as file:
            file.write(content)
            file.flush()
            application.run_checks([file.name])
        # We need this to avoid flake8 printing the names of the files to
        # the standard output
        application.formatter.write = lambda line, source: None
        application.report()
        yield from application.guide.stats.statistics_for('')
def get_validation_data(doc):
    """
    Validate the docstring.

    Parameters
    ----------
    doc : Docstring
        A Docstring object with the given function name.

    Returns
    -------
    tuple
        errors : list of tuple
            Errors occurred during validation.
        warnings : list of tuple
            Warnings occurred during validation.
        examples_errs : str
            Examples usage displayed along the error, otherwise empty string.

    Notes
    -----
    The errors codes are defined as:
    - First two characters: Section where the error happens:
       * GL: Global (no section, like section ordering errors)
       * SS: Short summary
       * ES: Extended summary
       * PR: Parameters
       * RT: Returns
       * YD: Yields
       * RS: Raises
       * WN: Warns
       * SA: See Also
       * NT: Notes
       * RF: References
       * EX: Examples
    - Last two characters: Numeric error code inside the section

    For example, EX02 is the second codified error in the Examples section
    (which in this case is assigned to examples that do not pass the tests).

    The error codes, their corresponding error messages, and the details on how
    they are validated, are not documented more than in the source code of this
    function.
    """
    errs = []
    wrns = []
    # No docstring at all: nothing else can be checked.
    if not doc.raw_doc:
        errs.append(error('GL08'))
        return errs, wrns, ''

    # Global (GL) checks: blank lines, tabs, private classes, sections.
    if doc.start_blank_lines != 1:
        errs.append(error('GL01'))
    if doc.end_blank_lines != 1:
        errs.append(error('GL02'))
    if doc.double_blank_lines:
        errs.append(error('GL03'))
    mentioned_errs = doc.mentioned_private_classes
    if mentioned_errs:
        errs.append(error('GL04',
                          mentioned_private_classes=', '.join(mentioned_errs)))
    for line in doc.raw_doc.splitlines():
        if re.match("^ *\t", line):
            errs.append(error('GL05', line_with_tabs=line.lstrip()))

    unexpected_sections = [section for section in doc.section_titles
                           if section not in ALLOWED_SECTIONS]
    for section in unexpected_sections:
        errs.append(error('GL06',
                          section=section,
                          allowed_sections=', '.join(ALLOWED_SECTIONS)))

    correct_order = [section for section in ALLOWED_SECTIONS
                     if section in doc.section_titles]
    if correct_order != doc.section_titles:
        errs.append(error('GL07',
                          correct_sections=', '.join(correct_order)))

    if (doc.deprecated_with_directive
            and not doc.extended_summary.startswith('.. deprecated:: ')):
        errs.append(error('GL09'))

    # Short summary (SS) checks.
    if not doc.summary:
        errs.append(error('SS01'))
    else:
        if not doc.summary[0].isupper():
            errs.append(error('SS02'))
        if doc.summary[-1] != '.':
            errs.append(error('SS03'))
        if doc.summary != doc.summary.lstrip():
            errs.append(error('SS04'))
        elif (doc.is_function_or_method
                and doc.summary.split(' ')[0][-1] == 's'):
            errs.append(error('SS05'))
        if doc.num_summary_lines > 1:
            errs.append(error('SS06'))

    if not doc.extended_summary:
        # Use error() like every other check so the code/message pair always
        # comes from ERROR_MSGS (the resulting tuple is unchanged).
        wrns.append(error('ES01'))

    # PR01: Parameters not documented
    # PR02: Unknown parameters
    # PR03: Wrong parameters order
    errs += doc.parameter_mismatches

    for param in doc.doc_parameters:
        if not param.startswith("*"):  # Check can ignore var / kwargs
            if not doc.parameter_type(param):
                if ':' in param:
                    errs.append(error('PR10',
                                      param_name=param.split(':')[0]))
                else:
                    errs.append(error('PR04', param_name=param))
            else:
                if doc.parameter_type(param)[-1] == '.':
                    errs.append(error('PR05', param_name=param))
                common_type_errors = [('integer', 'int'),
                                      ('boolean', 'bool'),
                                      ('string', 'str')]
                for wrong_type, right_type in common_type_errors:
                    if wrong_type in doc.parameter_type(param):
                        errs.append(error('PR06',
                                          param_name=param,
                                          right_type=right_type,
                                          wrong_type=wrong_type))
            if not doc.parameter_desc(param):
                errs.append(error('PR07', param_name=param))
            else:
                if not doc.parameter_desc(param)[0].isupper():
                    errs.append(error('PR08', param_name=param))
                if doc.parameter_desc(param)[-1] != '.':
                    errs.append(error('PR09', param_name=param))

    # Returns (RT) and Yields (YD) checks, only for functions/methods.
    if doc.is_function_or_method:
        if not doc.returns:
            if doc.method_returns_something:
                errs.append(error('RT01'))
        else:
            if len(doc.returns) == 1 and doc.returns[0][1]:
                errs.append(error('RT02'))
            for name_or_type, type_, desc in doc.returns:
                if not desc:
                    errs.append(error('RT03'))
                else:
                    desc = ' '.join(desc)
                    if not desc[0].isupper():
                        errs.append(error('RT04'))
                    if not desc.endswith('.'):
                        errs.append(error('RT05'))

        if not doc.yields and 'yield' in doc.method_source:
            errs.append(error('YD01'))

    # See Also (SA) checks.
    if not doc.see_also:
        wrns.append(error('SA01'))
    else:
        for rel_name, rel_desc in doc.see_also.items():
            if rel_desc:
                if not rel_desc.endswith('.'):
                    errs.append(error('SA02', reference_name=rel_name))
                if not rel_desc[0].isupper():
                    errs.append(error('SA03', reference_name=rel_name))
            else:
                errs.append(error('SA04', reference_name=rel_name))
            if rel_name.startswith('pandas.'):
                errs.append(error('SA05',
                                  reference_name=rel_name,
                                  right_reference=rel_name[len('pandas.'):]))

    # Examples (EX) checks: run doctests, flake8 the example code, and
    # reject explicit numpy/pandas imports (np/pd are pre-imported).
    examples_errs = ''
    if not doc.examples:
        wrns.append(error('EX01'))
    else:
        examples_errs = doc.examples_errors
        if examples_errs:
            errs.append(error('EX02', doctest_log=examples_errs))
        for err in doc.validate_pep8():
            errs.append(error('EX03',
                              error_code=err.error_code,
                              error_message=err.message,
                              times_happening=' ({} times)'.format(err.count)
                                              if err.count > 1 else ''))
        examples_source_code = ''.join(doc.examples_source_code)
        for wrong_import in ('numpy', 'pandas'):
            if 'import {}'.format(wrong_import) in examples_source_code:
                errs.append(error('EX04', imported_library=wrong_import))
    return errs, wrns, examples_errs
def validate_one(func_name):
    """
    Validate the docstring for the given func_name.

    Parameters
    ----------
    func_name : function
        Function whose docstring will be evaluated (e.g. pandas.read_csv).

    Returns
    -------
    dict
        A dictionary containing all the information obtained from validating
        the docstring.
    """
    doc = Docstring(func_name)
    errs, wrns, examples_errs = get_validation_data(doc)
    return dict(type=doc.type,
                docstring=doc.clean_doc,
                deprecated=doc.deprecated,
                file=doc.source_file_name,
                file_line=doc.source_file_def_line,
                github_link=doc.github_url,
                errors=errs,
                warnings=wrns,
                examples_errors=examples_errs)
def validate_all(prefix, ignore_deprecated=False):
    """
    Execute the validation of all docstrings, and return a dict with the
    results.

    Parameters
    ----------
    prefix : str or None
        If provided, only the docstrings that start with this pattern will be
        validated. If None, all docstrings will be validated.
    ignore_deprecated: bool, default False
        If True, deprecated objects are ignored when validating docstrings.

    Returns
    -------
    dict
        A dictionary with an item for every function/method... containing
        all the validation information.
    """
    result = {}
    # (file, line) -> first func_name seen there, to detect docstrings
    # shared between objects defined at the same source location.
    seen = {}
    # functions from the API docs
    api_doc_fnames = os.path.join(
        BASE_PATH, 'doc', 'source', 'reference', '*.rst')
    api_items = []
    for api_doc_fname in glob.glob(api_doc_fnames):
        with open(api_doc_fname) as f:
            api_items += list(get_api_items(f))
    for func_name, func_obj, section, subsection in api_items:
        if prefix and not func_name.startswith(prefix):
            continue
        doc_info = validate_one(func_name)
        if ignore_deprecated and doc_info['deprecated']:
            continue
        result[func_name] = doc_info
        shared_code_key = doc_info['file'], doc_info['file_line']
        shared_code = seen.get(shared_code_key, '')
        result[func_name].update({'in_api': True,
                                  'section': section,
                                  'subsection': subsection,
                                  'shared_code_with': shared_code})
        seen[shared_code_key] = func_name
    # functions from introspecting Series, DataFrame and Panel
    api_item_names = set(list(zip(*api_items))[0])
    for class_ in (pandas.Series, pandas.DataFrame, pandas.Panel):
        for member in inspect.getmembers(class_):
            func_name = 'pandas.{}.{}'.format(class_.__name__, member[0])
            # Only public members that are not already listed in the docs.
            if (not member[0].startswith('_')
                    and func_name not in api_item_names):
                if prefix and not func_name.startswith(prefix):
                    continue
                doc_info = validate_one(func_name)
                if ignore_deprecated and doc_info['deprecated']:
                    continue
                result[func_name] = doc_info
                result[func_name]['in_api'] = False
    return result
def main(func_name, prefix, errors, output_format, ignore_deprecated):
    """
    Entry point: validate one docstring (if func_name is given, report to
    stderr) or all docstrings (report to stdout in the requested format).

    Returns the number of errors reported in validate-all mode (0 in
    single-docstring mode), suitable for use as the process exit status.
    """
    def header(title, width=80, char='#'):
        # Render a title framed by two full lines of `char`.
        full_line = char * width
        side_len = (width - len(title) - 2) // 2
        adj = '' if len(title) % 2 == 0 else ' '
        title_line = '{side} {title}{adj} {side}'.format(side=char * side_len,
                                                         title=title,
                                                         adj=adj)
        return '\n{full_line}\n{title_line}\n{full_line}\n\n'.format(
            full_line=full_line, title_line=title_line)

    exit_status = 0
    if func_name is None:
        result = validate_all(prefix, ignore_deprecated)
        if output_format == 'json':
            output = json.dumps(result)
        else:
            # Note: output_format is reassigned here from the mode name
            # ('default'/'azure') to the per-error template string.
            if output_format == 'default':
                output_format = '{text}\n'
            elif output_format == 'azure':
                output_format = ('##vso[task.logissue type=error;'
                                 'sourcepath={path};'
                                 'linenumber={row};'
                                 'code={code};'
                                 ']{text}\n')
            else:
                raise ValueError('Unknown output_format "{}"'.format(
                    output_format))
            output = ''
            for name, res in result.items():
                for err_code, err_desc in res['errors']:
                    # The script would be faster if instead of filtering the
                    # errors after validating them, it didn't validate them
                    # initially. But that would complicate the code too much
                    if errors and err_code not in errors:
                        continue
                    exit_status += 1
                    output += output_format.format(
                        name=name,
                        path=res['file'],
                        row=res['file_line'],
                        code=err_code,
                        text='{}: {}'.format(name, err_desc))
        sys.stdout.write(output)
    else:
        result = validate_one(func_name)
        sys.stderr.write(header('Docstring ({})'.format(func_name)))
        sys.stderr.write('{}\n'.format(result['docstring']))
        sys.stderr.write(header('Validation'))
        if result['errors']:
            sys.stderr.write('{} Errors found:\n'.format(
                len(result['errors'])))
            for err_code, err_desc in result['errors']:
                # Failing examples are printed at the end
                if err_code == 'EX02':
                    sys.stderr.write('\tExamples do not pass tests\n')
                    continue
                sys.stderr.write('\t{}\n'.format(err_desc))
        if result['warnings']:
            sys.stderr.write('{} Warnings found:\n'.format(
                len(result['warnings'])))
            for wrn_code, wrn_desc in result['warnings']:
                sys.stderr.write('\t{}\n'.format(wrn_desc))
        if not result['errors']:
            sys.stderr.write('Docstring for "{}" correct. :)\n'.format(
                func_name))
        if result['examples_errors']:
            sys.stderr.write(header('Doctests'))
            sys.stderr.write(result['examples_errors'])
    return exit_status
if __name__ == '__main__':
    # CLI: optional positional function name plus filtering/format options;
    # the exit status is the number of errors found (see main()).
    format_opts = 'default', 'json', 'azure'
    func_help = ('function or method to validate (e.g. pandas.DataFrame.head) '
                 'if not provided, all docstrings are validated and returned '
                 'as JSON')
    argparser = argparse.ArgumentParser(
        description='validate pandas docstrings')
    argparser.add_argument('function',
                           nargs='?',
                           default=None,
                           help=func_help)
    argparser.add_argument('--format', default='default', choices=format_opts,
                           help='format of the output when validating '
                           'multiple docstrings (ignored when validating one).'
                           'It can be {}'.format(str(format_opts)[1:-1]))
    argparser.add_argument('--prefix', default=None, help='pattern for the '
                           'docstring names, in order to decide which ones '
                           'will be validated. A prefix "pandas.Series.str.'
                           'will make the script validate all the docstrings'
                           'of methods starting by this pattern. It is '
                           'ignored if parameter function is provided')
    argparser.add_argument('--errors', default=None, help='comma separated '
                           'list of error codes to validate. By default it '
                           'validates all errors (ignored when validating '
                           'a single docstring)')
    argparser.add_argument('--ignore_deprecated', default=False,
                           action='store_true', help='if this flag is set, '
                           'deprecated objects are ignored when validating '
                           'all docstrings')
    args = argparser.parse_args()
    sys.exit(main(args.function, args.prefix,
                  args.errors.split(',') if args.errors else None,
                  args.format,
                  args.ignore_deprecated))
| 36.588117 | 79 | 0.572168 |
import os
import sys
import json
import re
import glob
import functools
import collections
import argparse
import pydoc
import inspect
import importlib
import doctest
import tempfile
import ast
import textwrap
import flake8.main.application
try:
from io import StringIO
except ImportError:
from cStringIO import StringIO
# Force a non-interactive matplotlib backend (must be set before matplotlib
# is imported) so doctests that plot do not open GUI windows, and raise the
# open-figure warning threshold for long validation runs.
os.environ['MPLBACKEND'] = 'Template'
import matplotlib
matplotlib.rc('figure', max_open_warning=10000)
import numpy
# Repository root: two directory levels above this script.
BASE_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.join(BASE_PATH))
import pandas
from pandas.compat import signature
# Make the vendored numpydoc sphinx extension importable.
sys.path.insert(1, os.path.join(BASE_PATH, 'doc', 'sphinxext'))
from numpydoc.docscrape import NumpyDocString
from pandas.io.formats.printing import pprint_thing
# Private classes that must never be mentioned in public docstrings (GL04).
PRIVATE_CLASSES = ['NDFrame', 'IndexOpsMixin']
# Sphinx directives that can appear inside a parameter description; any text
# after one of these is stripped when extracting the description.
DIRECTIVES = ['versionadded', 'versionchanged', 'deprecated']
# Canonical docstring section names, in the required order (GL06/GL07).
ALLOWED_SECTIONS = ['Parameters', 'Attributes', 'Methods', 'Returns', 'Yields',
                    'Other Parameters', 'Raises', 'Warns', 'See Also', 'Notes',
                    'References', 'Examples']
# Error code -> message template. Codes are grouped by prefix:
# GL (general layout), SS (short summary), ES (extended summary),
# PR (parameters), RT (returns), YD (yields), SA (see also), EX (examples).
ERROR_MSGS = {
    'GL01': 'Docstring text (summary) should start in the line immediately '
            'after the opening quotes (not in the same line, or leaving a '
            'blank line in between)',
    'GL02': 'Closing quotes should be placed in the line after the last text '
            'in the docstring (do not close the quotes in the same line as '
            'the text, or leave a blank line between the last text and the '
            'quotes)',
    'GL03': 'Double line break found; please use only one blank line to '
            'separate sections or paragraphs, and do not leave blank lines '
            'at the end of docstrings',
    'GL04': 'Private classes ({mentioned_private_classes}) should not be '
            'mentioned in public docstrings',
    'GL05': 'Tabs found at the start of line "{line_with_tabs}", please use '
            'whitespace only',
    'GL06': 'Found unknown section "{section}". Allowed sections are: '
            '{allowed_sections}',
    'GL07': 'Sections are in the wrong order. Correct order is: '
            '{correct_sections}',
    'GL08': 'The object does not have a docstring',
    'GL09': 'Deprecation warning should precede extended summary',
    'SS01': 'No summary found (a short summary in a single line should be '
            'present at the beginning of the docstring)',
    'SS02': 'Summary does not start with a capital letter',
    'SS03': 'Summary does not end with a period',
    'SS04': 'Summary contains heading whitespaces',
    'SS05': 'Summary must start with infinitive verb, not third person '
            '(e.g. use "Generate" instead of "Generates")',
    'SS06': 'Summary should fit in a single line',
    'ES01': 'No extended summary found',
    'PR01': 'Parameters {missing_params} not documented',
    'PR02': 'Unknown parameters {unknown_params}',
    'PR03': 'Wrong parameters order. Actual: {actual_params}. '
            'Documented: {documented_params}',
    'PR04': 'Parameter "{param_name}" has no type',
    'PR05': 'Parameter "{param_name}" type should not finish with "."',
    'PR06': 'Parameter "{param_name}" type should use "{right_type}" instead '
            'of "{wrong_type}"',
    'PR07': 'Parameter "{param_name}" has no description',
    'PR08': 'Parameter "{param_name}" description should start with a '
            'capital letter',
    'PR09': 'Parameter "{param_name}" description should finish with "."',
    'PR10': 'Parameter "{param_name}" requires a space before the colon '
            'separating the parameter name and type',
    'RT01': 'No Returns section found',
    'RT02': 'The first line of the Returns section should contain only the '
            'type, unless multiple values are being returned',
    'RT03': 'Return value has no description',
    'RT04': 'Return value description should start with a capital letter',
    'RT05': 'Return value description should finish with "."',
    'YD01': 'No Yields section found',
    'SA01': 'See Also section not found',
    'SA02': 'Missing period at end of description for See Also '
            '"{reference_name}" reference',
    'SA03': 'Description should be capitalized for See Also '
            '"{reference_name}" reference',
    'SA04': 'Missing description for See Also "{reference_name}" reference',
    'SA05': '{reference_name} in `See Also` section does not need `pandas` '
            'prefix, use {right_reference} instead.',
    'EX01': 'No examples section found',
    'EX02': 'Examples do not pass tests:\n{doctest_log}',
    'EX03': 'flake8 error: {error_code} {error_message}{times_happening}',
    'EX04': 'Do not import {imported_library}, as it is imported '
            'automatically for the examples (numpy as np, pandas as pd)',
}
def error(code, **values):
    """Return an ``(error_code, message)`` tuple for *code*.

    The message template is looked up in ``ERROR_MSGS`` and filled in
    with the given keyword arguments.
    """
    message = ERROR_MSGS[code].format(**values)
    return code, message
def get_api_items(api_doc_fd):
    """Yield information about every API item listed in an RST doc page.

    Parses the file tracking ``.. currentmodule::`` and
    ``.. autosummary::`` directives, and yields one tuple per listed
    item: ``(qualified_name, object, section, subsection)``.

    Parameters
    ----------
    api_doc_fd : file-like
        Open file object of one of the ``.rst`` API reference pages.

    Yields
    ------
    tuple of (str, object, str, str)
        Fully qualified name, the imported object, and the section /
        subsection titles the item appears under.
    """
    current_module = 'pandas'
    previous_line = current_section = current_subsection = ''
    position = None
    for line in api_doc_fd:
        line = line.strip()
        # Section/subsection titles are recognized by an underline of
        # '-' or '~' characters of the same length as the previous line.
        if len(line) == len(previous_line):
            if set(line) == set('-'):
                current_section = previous_line
                continue
            if set(line) == set('~'):
                current_subsection = previous_line
                continue
        if line.startswith('.. currentmodule::'):
            current_module = line.replace('.. currentmodule::', '').strip()
            continue
        if line == '.. autosummary::':
            position = 'autosummary'
            continue
        if position == 'autosummary':
            # Directive options follow the directive line; the item list
            # starts after the first blank line.
            if line == '':
                position = 'items'
                continue
        if position == 'items':
            if line == '':
                position = None
                continue
            item = line.strip()
            # Resolve the dotted item name relative to the current module.
            func = importlib.import_module(current_module)
            for part in item.split('.'):
                func = getattr(func, part)
            yield ('.'.join([current_module, item]), func,
                   current_section, current_subsection)
        previous_line = line
class Docstring(object):
    def __init__(self, name):
        # *name* is the fully qualified dotted path of the object whose
        # docstring is validated (e.g. "pandas.DataFrame.head").
        self.name = name
        obj = self._load_obj(name)
        self.obj = obj
        # Underlying function/class after unwrapping methods, partials and
        # properties; None when the source cannot be introspected.
        self.code_obj = self._to_original_callable(obj)
        self.raw_doc = obj.__doc__ or ''
        self.clean_doc = pydoc.getdoc(obj)
        # Parsed numpydoc representation of the cleaned docstring.
        self.doc = NumpyDocString(self.clean_doc)
    def __len__(self):
        # Length of the raw (unprocessed) docstring text.
        return len(self.raw_doc)
@staticmethod
def _load_obj(name):
for maxsplit in range(1, name.count('.') + 1):
# TODO when py3 only replace by: module, *func_parts = ...
func_name_split = name.rsplit('.', maxsplit)
module = func_name_split[0]
func_parts = func_name_split[1:]
try:
obj = importlib.import_module(module)
except ImportError:
pass
else:
continue
if 'obj' not in locals():
raise ImportError('No module can be imported '
'from "{}"'.format(name))
for part in func_parts:
obj = getattr(obj, part)
return obj
    @staticmethod
    def _to_original_callable(obj):
        # Unwrap methods, functools.partial objects and properties until a
        # plain function or class is reached; return None for objects whose
        # source file cannot be located (e.g. ones defined in "<...>").
        while True:
            if inspect.isfunction(obj) or inspect.isclass(obj):
                f = inspect.getfile(obj)
                if f.startswith('<') and f.endswith('>'):
                    return None
                return obj
            if inspect.ismethod(obj):
                obj = obj.__func__
            elif isinstance(obj, functools.partial):
                obj = obj.func
            elif isinstance(obj, property):
                obj = obj.fget
            else:
                return None
    @property
    def type(self):
        # Name of the Python type of the validated object (e.g. "function").
        return type(self.obj).__name__
    @property
    def is_function_or_method(self):
        # TODO(py27): remove ismethod
        return (inspect.isfunction(self.obj)
                or inspect.ismethod(self.obj))
    @property
    def source_file_name(self):
        # Path of the file where the object is defined, relative to the
        # repository root, or None when it cannot be determined.
        try:
            fname = inspect.getsourcefile(self.code_obj)
        except TypeError:
            # In some cases the object is something complex like a cython
            # object that can't be easily introspected. And it's better to
            # return the source code file of the object as None, than crash
            pass
        else:
            if fname:
                fname = os.path.relpath(fname, BASE_PATH)
            return fname
    @property
    def source_file_def_line(self):
        # Line number where the object definition starts, or None when the
        # source cannot be introspected. inspect.getsourcelines() returns
        # (lines, lnum); [-1] selects the line number.
        try:
            return inspect.getsourcelines(self.code_obj)[-1]
        except (OSError, TypeError):
            # In some cases the object is something complex like a cython
            # object that can't be easily introspected. And it's better to
            # return the line number as None, than crash
            pass
@property
def github_url(self):
url = 'https://github.com/pandas-dev/pandas/blob/master/'
url += '{}
self.source_file_def_line)
return url
    @property
    def start_blank_lines(self):
        # Index of the first non-blank line of the raw docstring (None when
        # there is no docstring). A value of 1 means the text starts on the
        # line right after the opening quotes, as required by GL01.
        i = None
        if self.raw_doc:
            for i, row in enumerate(self.raw_doc.split('\n')):
                if row.strip():
                    break
        return i
    @property
    def end_blank_lines(self):
        # Same as start_blank_lines but counted from the end: 1 means the
        # closing quotes sit on the line right after the last text (GL02).
        i = None
        if self.raw_doc:
            for i, row in enumerate(reversed(self.raw_doc.split('\n'))):
                if row.strip():
                    break
        return i
    @property
    def double_blank_lines(self):
        # True when the raw docstring contains two consecutive blank lines
        # (GL03). `prev` holds the stripped previous row; it starts truthy
        # so leading blanks alone do not trigger.
        prev = True
        for row in self.raw_doc.split('\n'):
            if not prev and not row.strip():
                return True
            prev = row.strip()
        return False
    @property
    def section_titles(self):
        # Titles of the docstring sections, in order of appearance.
        sections = []
        self.doc._doc.reset()
        while not self.doc._doc.eof():
            content = self.doc._read_to_next_section()
            # A section is a title line followed by a dash underline of
            # exactly the same length.
            if (len(content) > 1
                    and len(content[0]) == len(content[1])
                    and set(content[1]) == {'-'}):
                sections.append(content[0])
        return sections
    @property
    def summary(self):
        # Short summary, joined into a single string.
        return ' '.join(self.doc['Summary'])
    @property
    def num_summary_lines(self):
        # Number of lines the short summary spans (should be 1; SS06).
        return len(self.doc['Summary'])
    @property
    def extended_summary(self):
        # numpydoc reports a multi-line first paragraph entirely as the
        # summary; in that case treat it as the extended summary instead.
        if not self.doc['Extended Summary'] and len(self.doc['Summary']) > 1:
            return ' '.join(self.doc['Summary'])
        return ' '.join(self.doc['Extended Summary'])
    @property
    def needs_summary(self):
        # True when either the short or the extended summary is missing.
        return not (bool(self.summary) and bool(self.extended_summary))
    @property
    def doc_parameters(self):
        # Mapping of documented parameter name -> (type, description),
        # preserving documentation order.
        return collections.OrderedDict((name, (type_, ''.join(desc)))
                                       for name, type_, desc
                                       in self.doc['Parameters'])
    @property
    def signature_parameters(self):
        # Parameter names from the real Python signature, excluding
        # self/cls, with varargs/kwargs rendered as "*name"/"**name".
        if inspect.isclass(self.obj):
            if hasattr(self.obj, '_accessors') and (
                    self.name.split('.')[-1] in
                    self.obj._accessors):
                # accessor classes have a signature but don't want to show this
                return tuple()
        try:
            sig = signature(self.obj)
        except (TypeError, ValueError):
            # Builtins and some extension objects expose no signature.
            return tuple()
        params = sig.args
        if sig.varargs:
            params.append("*" + sig.varargs)
        if sig.keywords:
            params.append("**" + sig.keywords)
        params = tuple(params)
        if params and params[0] in ('self', 'cls'):
            return params[1:]
        return params
    @property
    def parameter_mismatches(self):
        # Compare documented parameters against the real signature and
        # return the corresponding PR01/PR02/PR03 errors.
        errs = []
        signature_params = self.signature_parameters
        doc_params = tuple(self.doc_parameters)
        missing = set(signature_params) - set(doc_params)
        if missing:
            errs.append(error('PR01', missing_params=pprint_thing(missing)))
        extra = set(doc_params) - set(signature_params)
        if extra:
            errs.append(error('PR02', unknown_params=pprint_thing(extra)))
        if (not missing and not extra and signature_params != doc_params
                and not (not signature_params and not doc_params)):
            # Same parameter sets, different order.
            errs.append(error('PR03',
                              actual_params=signature_params,
                              documented_params=doc_params))
        return errs
    @property
    def correct_parameters(self):
        # True when documented parameters exactly match the signature.
        return not bool(self.parameter_mismatches)
    def parameter_type(self, param):
        # Documented type string of parameter *param*.
        return self.doc_parameters[param][0]
    def parameter_desc(self, param):
        # Documented description of *param*, truncated before any sphinx
        # directive (e.g. ".. versionadded").
        desc = self.doc_parameters[param][1]
        for directive in DIRECTIVES:
            full_directive = '.. {}'.format(directive)
            if full_directive in desc:
                desc = desc[:desc.index(full_directive)]
        return desc
    @property
    def see_also(self):
        # Mapping of "See Also" reference name -> joined description.
        return collections.OrderedDict((name, ''.join(desc))
                                       for name, desc, _
                                       in self.doc['See Also'])
    @property
    def examples(self):
        # Raw lines of the Examples section.
        return self.doc['Examples']
    @property
    def returns(self):
        # Parsed entries of the Returns section.
        return self.doc['Returns']
    @property
    def yields(self):
        # Parsed entries of the Yields section.
        return self.doc['Yields']
    @property
    def method_source(self):
        # Dedented source code of the object, or '' when it cannot be
        # retrieved (e.g. extension objects).
        try:
            source = inspect.getsource(self.obj)
        except TypeError:
            return ''
        return textwrap.dedent(source)
    @property
    def method_returns_something(self):
        # True when the body contains a value-returning `return`; bare
        # returns, `return None` and returns inside nested functions do
        # not count.
        def get_returns_not_on_nested_functions(node):
            # Collect ast.Return nodes reachable from *node* without
            # descending into nested function definitions.
            returns = [node] if isinstance(node, ast.Return) else []
            for child in ast.iter_child_nodes(node):
                if not isinstance(child, ast.FunctionDef):
                    child_returns = get_returns_not_on_nested_functions(child)
                    returns.extend(child_returns)
            return returns
        tree = ast.parse(self.method_source).body
        if tree:
            returns = get_returns_not_on_nested_functions(tree[0])
            return_values = [r.value for r in returns]
            # Replace explicit `return None` with None so any() treats it
            # like a bare return.
            for i, v in enumerate(return_values):
                if isinstance(v, ast.NameConstant) and v.value is None:
                    return_values[i] = None
            return any(return_values)
        else:
            return False
    @property
    def first_line_ends_in_dot(self):
        # NOTE(review): self.doc is a NumpyDocString, which does not define
        # .split(); calling this would raise AttributeError. Presumably it
        # was meant to operate on clean_doc/raw_doc -- confirm (the
        # property is not referenced by get_validation_data).
        if self.doc:
            return self.doc.split('\n')[0][-1] == '.'
    @property
    def deprecated_with_directive(self):
        # True when a ".. deprecated::" directive appears in the summary
        # or extended summary.
        return '.. deprecated:: ' in (self.summary + self.extended_summary)
    @property
    def deprecated(self):
        # Panel objects are treated as deprecated regardless of directive.
        return (self.name.startswith('pandas.Panel')
                or self.deprecated_with_directive)
    @property
    def mentioned_private_classes(self):
        # Private classes (e.g. NDFrame) mentioned anywhere in the raw
        # docstring (GL04).
        return [klass for klass in PRIVATE_CLASSES if klass in self.raw_doc]
    @property
    def examples_errors(self):
        # Run the doctests with np/pd preloaded and return the accumulated
        # failure output ('' when everything passes).
        flags = doctest.NORMALIZE_WHITESPACE | doctest.IGNORE_EXCEPTION_DETAIL
        finder = doctest.DocTestFinder()
        runner = doctest.DocTestRunner(optionflags=flags)
        context = {'np': numpy, 'pd': pandas}
        error_msgs = ''
        for test in finder.find(self.raw_doc, self.name, globs=context):
            f = StringIO()
            runner.run(test, out=f.write)
            error_msgs += f.getvalue()
        return error_msgs
    @property
    def examples_source_code(self):
        # Source lines of every doctest example in the docstring.
        lines = doctest.DocTestParser().get_examples(self.raw_doc)
        return [line.source for line in lines]
    def validate_pep8(self):
        # Run flake8 over the example code (with np/pd pre-imported) and
        # yield one statistics record per distinct error code.
        if not self.examples:
            return
        content = ''.join(('import numpy as np  # noqa: F401\n',
                           'import pandas as pd  # noqa: F401\n',
                           *self.examples_source_code))
        application = flake8.main.application.Application()
        application.initialize(["--quiet"])
        with tempfile.NamedTemporaryFile(mode='w') as file:
            file.write(content)
            file.flush()
            application.run_checks([file.name])
            # Silence flake8's own output; results are read from stats.
            application.formatter.write = lambda line, source: None
            application.report()
        yield from application.guide.stats.statistics_for('')
def get_validation_data(doc):
    """Apply all validation rules to a ``Docstring``.

    Parameters
    ----------
    doc : Docstring
        Parsed docstring wrapper to validate.

    Returns
    -------
    tuple of (list, list, str)
        ``(errors, warnings, examples_errors)`` where the first two are
        lists of ``(code, message)`` tuples and the last is the raw
        doctest failure output.
    """
    errs = []
    wrns = []
    # GL08: nothing else can be checked without a docstring.
    if not doc.raw_doc:
        errs.append(error('GL08'))
        return errs, wrns, ''
    # General layout checks (GL01-GL09).
    if doc.start_blank_lines != 1:
        errs.append(error('GL01'))
    if doc.end_blank_lines != 1:
        errs.append(error('GL02'))
    if doc.double_blank_lines:
        errs.append(error('GL03'))
    mentioned_errs = doc.mentioned_private_classes
    if mentioned_errs:
        errs.append(error('GL04',
                          mentioned_private_classes=', '.join(mentioned_errs)))
    for line in doc.raw_doc.splitlines():
        if re.match("^ *\t", line):
            errs.append(error('GL05', line_with_tabs=line.lstrip()))
    unexpected_sections = [section for section in doc.section_titles
                           if section not in ALLOWED_SECTIONS]
    for section in unexpected_sections:
        errs.append(error('GL06',
                          section=section,
                          allowed_sections=', '.join(ALLOWED_SECTIONS)))
    correct_order = [section for section in ALLOWED_SECTIONS
                     if section in doc.section_titles]
    if correct_order != doc.section_titles:
        errs.append(error('GL07',
                          correct_sections=', '.join(correct_order)))
    if (doc.deprecated_with_directive
            and not doc.extended_summary.startswith('.. deprecated:: ')):
        errs.append(error('GL09'))
    # Short summary checks (SS01-SS06).
    if not doc.summary:
        errs.append(error('SS01'))
    else:
        if not doc.summary[0].isupper():
            errs.append(error('SS02'))
        if doc.summary[-1] != '.':
            errs.append(error('SS03'))
        if doc.summary != doc.summary.lstrip():
            errs.append(error('SS04'))
        elif (doc.is_function_or_method
                and doc.summary.split(' ')[0][-1] == 's'):
            errs.append(error('SS05'))
        if doc.num_summary_lines > 1:
            errs.append(error('SS06'))
    if not doc.extended_summary:
        wrns.append(('ES01', 'No extended summary found'))
    # Parameter checks (PR01-PR10).
    errs += doc.parameter_mismatches
    for param in doc.doc_parameters:
        if not param.startswith("*"):
            if not doc.parameter_type(param):
                if ':' in param:
                    errs.append(error('PR10',
                                      param_name=param.split(':')[0]))
                else:
                    errs.append(error('PR04', param_name=param))
            else:
                if doc.parameter_type(param)[-1] == '.':
                    errs.append(error('PR05', param_name=param))
                common_type_errors = [('integer', 'int'),
                                      ('boolean', 'bool'),
                                      ('string', 'str')]
                for wrong_type, right_type in common_type_errors:
                    if wrong_type in doc.parameter_type(param):
                        errs.append(error('PR06',
                                          param_name=param,
                                          right_type=right_type,
                                          wrong_type=wrong_type))
            if not doc.parameter_desc(param):
                errs.append(error('PR07', param_name=param))
            else:
                if not doc.parameter_desc(param)[0].isupper():
                    errs.append(error('PR08', param_name=param))
                if doc.parameter_desc(param)[-1] != '.':
                    errs.append(error('PR09', param_name=param))
    # Returns / Yields checks (RT01-RT05, YD01).
    if doc.is_function_or_method:
        if not doc.returns:
            if doc.method_returns_something:
                errs.append(error('RT01'))
        else:
            if len(doc.returns) == 1 and doc.returns[0][1]:
                errs.append(error('RT02'))
            for name_or_type, type_, desc in doc.returns:
                if not desc:
                    errs.append(error('RT03'))
                else:
                    desc = ' '.join(desc)
                    if not desc[0].isupper():
                        errs.append(error('RT04'))
                    if not desc.endswith('.'):
                        errs.append(error('RT05'))
        if not doc.yields and 'yield' in doc.method_source:
            errs.append(error('YD01'))
    # See Also checks (SA01-SA05).
    if not doc.see_also:
        wrns.append(error('SA01'))
    else:
        for rel_name, rel_desc in doc.see_also.items():
            if rel_desc:
                if not rel_desc.endswith('.'):
                    errs.append(error('SA02', reference_name=rel_name))
                if not rel_desc[0].isupper():
                    errs.append(error('SA03', reference_name=rel_name))
            else:
                errs.append(error('SA04', reference_name=rel_name))
            if rel_name.startswith('pandas.'):
                errs.append(error('SA05',
                                  reference_name=rel_name,
                                  right_reference=rel_name[len('pandas.'):]))
    # Examples checks (EX01-EX04): doctests, flake8, redundant imports.
    examples_errs = ''
    if not doc.examples:
        wrns.append(error('EX01'))
    else:
        examples_errs = doc.examples_errors
        if examples_errs:
            errs.append(error('EX02', doctest_log=examples_errs))
        for err in doc.validate_pep8():
            errs.append(error('EX03',
                              error_code=err.error_code,
                              error_message=err.message,
                              times_happening=' ({} times)'.format(err.count)
                              if err.count > 1 else ''))
        examples_source_code = ''.join(doc.examples_source_code)
        for wrong_import in ('numpy', 'pandas'):
            if 'import {}'.format(wrong_import) in examples_source_code:
                errs.append(error('EX04', imported_library=wrong_import))
    return errs, wrns, examples_errs
def validate_one(func_name):
    """Validate the docstring of *func_name* and return a result dict.

    The dict contains the object's type, cleaned docstring, file
    location, GitHub link, the errors/warnings found, and the doctest
    output for failing examples.
    """
    doc = Docstring(func_name)
    errs, wrns, examples_errs = get_validation_data(doc)
    return {'type': doc.type,
            'docstring': doc.clean_doc,
            'deprecated': doc.deprecated,
            'file': doc.source_file_name,
            'file_line': doc.source_file_def_line,
            'github_link': doc.github_url,
            'errors': errs,
            'warnings': wrns,
            'examples_errors': examples_errs}
def validate_all(prefix, ignore_deprecated=False):
    """Validate the docstrings of every public object.

    Objects come from the API reference ``.rst`` pages plus any public
    Series/DataFrame/Panel member not listed there.

    Parameters
    ----------
    prefix : str or None
        If provided, only names starting with this prefix are validated.
    ignore_deprecated : bool, default False
        If True, deprecated objects are skipped from the result.

    Returns
    -------
    dict of str -> dict
        Mapping of object name to its ``validate_one`` result, extended
        with 'in_api', 'section', 'subsection' and 'shared_code_with'.
    """
    result = {}
    # Maps (file, line) -> first name seen there, to detect docstrings
    # shared by several accessors.
    seen = {}
    api_doc_fnames = os.path.join(
        BASE_PATH, 'doc', 'source', 'reference', '*.rst')
    api_items = []
    for api_doc_fname in glob.glob(api_doc_fnames):
        with open(api_doc_fname) as f:
            api_items += list(get_api_items(f))
    for func_name, func_obj, section, subsection in api_items:
        if prefix and not func_name.startswith(prefix):
            continue
        doc_info = validate_one(func_name)
        if ignore_deprecated and doc_info['deprecated']:
            continue
        result[func_name] = doc_info
        shared_code_key = doc_info['file'], doc_info['file_line']
        shared_code = seen.get(shared_code_key, '')
        result[func_name].update({'in_api': True,
                                  'section': section,
                                  'subsection': subsection,
                                  'shared_code_with': shared_code})
        seen[shared_code_key] = func_name
    # Also validate public members that are missing from the API docs.
    api_item_names = set(list(zip(*api_items))[0])
    for class_ in (pandas.Series, pandas.DataFrame, pandas.Panel):
        for member in inspect.getmembers(class_):
            func_name = 'pandas.{}.{}'.format(class_.__name__, member[0])
            if (not member[0].startswith('_')
                    and func_name not in api_item_names):
                if prefix and not func_name.startswith(prefix):
                    continue
                doc_info = validate_one(func_name)
                if ignore_deprecated and doc_info['deprecated']:
                    continue
                result[func_name] = doc_info
                result[func_name]['in_api'] = False
    return result
def main(func_name, prefix, errors, output_format, ignore_deprecated):
    """Run docstring validation and report the results.

    Parameters
    ----------
    func_name : str or None
        Name of a single object to validate; None validates everything.
    prefix : str or None
        Name prefix filter when validating all docstrings.
    errors : list of str or None
        Error codes to report; None reports all of them.
    output_format : str
        'default', 'json' or 'azure' (only used when validating all).
    ignore_deprecated : bool
        Skip deprecated objects when validating all docstrings.

    Returns
    -------
    int
        Number of errors found (0 in single-docstring/json modes unless
        counted below), suitable as a process exit status.
    """
    def header(title, width=80, char='#'):
        # Render a centered banner used to separate report sections.
        full_line = char * width
        side_len = (width - len(title) - 2) // 2
        adj = '' if len(title) % 2 == 0 else ' '
        title_line = '{side} {title}{adj} {side}'.format(side=char * side_len,
                                                         title=title,
                                                         adj=adj)
        return '\n{full_line}\n{title_line}\n{full_line}\n\n'.format(
            full_line=full_line, title_line=title_line)
    exit_status = 0
    if func_name is None:
        result = validate_all(prefix, ignore_deprecated)
        if output_format == 'json':
            output = json.dumps(result)
        else:
            if output_format == 'default':
                output_format = '{text}\n'
            elif output_format == 'azure':
                # Azure Pipelines logging command format.
                output_format = ('##vso[task.logissue type=error;'
                                 'sourcepath={path};'
                                 'linenumber={row};'
                                 'code={code};'
                                 ']{text}\n')
            else:
                raise ValueError('Unknown output_format "{}"'.format(
                    output_format))
            output = ''
            for name, res in result.items():
                for err_code, err_desc in res['errors']:
                    # The script would be faster if, instead of filtering
                    # the errors after validating them, it did not validate
                    # them initially. But that would complicate the code
                    # too much.
                    if errors and err_code not in errors:
                        continue
                    exit_status += 1
                    output += output_format.format(
                        name=name,
                        path=res['file'],
                        row=res['file_line'],
                        code=err_code,
                        text='{}: {}'.format(name, err_desc))
        sys.stdout.write(output)
    else:
        # Single-docstring mode: print a human-readable report to stderr.
        result = validate_one(func_name)
        sys.stderr.write(header('Docstring ({})'.format(func_name)))
        sys.stderr.write('{}\n'.format(result['docstring']))
        sys.stderr.write(header('Validation'))
        if result['errors']:
            sys.stderr.write('{} Errors found:\n'.format(
                len(result['errors'])))
            for err_code, err_desc in result['errors']:
                # Failing examples are printed at the end
                if err_code == 'EX02':
                    sys.stderr.write('\tExamples do not pass tests\n')
                    continue
                sys.stderr.write('\t{}\n'.format(err_desc))
        if result['warnings']:
            sys.stderr.write('{} Warnings found:\n'.format(
                len(result['warnings'])))
            for wrn_code, wrn_desc in result['warnings']:
                sys.stderr.write('\t{}\n'.format(wrn_desc))
        if not result['errors']:
            sys.stderr.write('Docstring for "{}" correct. :)\n'.format(
                func_name))
        if result['examples_errors']:
            sys.stderr.write(header('Doctests'))
            sys.stderr.write(result['examples_errors'])
    return exit_status
# Command-line entry point. With a function name, validate that single
# docstring and print a human-readable report to stderr; without one,
# validate every docstring in the API docs and print the errors in the
# requested format. The process exit status is the number of errors found.
if __name__ == '__main__':
    format_opts = 'default', 'json', 'azure'
    func_help = ('function or method to validate (e.g. pandas.DataFrame.head) '
                 'if not provided, all docstrings are validated and returned '
                 'as JSON')
    argparser = argparse.ArgumentParser(
        description='validate pandas docstrings')
    argparser.add_argument('function',
                           nargs='?',
                           default=None,
                           help=func_help)
    # 'azure' emits machine-readable ##vso log lines for Azure Pipelines.
    argparser.add_argument('--format', default='default', choices=format_opts,
                           help='format of the output when validating '
                                'multiple docstrings (ignored when validating one).'
                                'It can be {}'.format(str(format_opts)[1:-1]))
    argparser.add_argument('--prefix', default=None, help='pattern for the '
                           'docstring names, in order to decide which ones '
                           'will be validated. A prefix "pandas.Series.str.'
                           'will make the script validate all the docstrings'
                           'of methods starting by this pattern. It is '
                           'ignored if parameter function is provided')
    argparser.add_argument('--errors', default=None, help='comma separated '
                           'list of error codes to validate. By default it '
                           'validates all errors (ignored when validating '
                           'a single docstring)')
    argparser.add_argument('--ignore_deprecated', default=False,
                           action='store_true', help='if this flag is set, '
                           'deprecated objects are ignored when validating '
                           'all docstrings')
    args = argparser.parse_args()
    sys.exit(main(args.function, args.prefix,
                  args.errors.split(',') if args.errors else None,
                  args.format,
                  args.ignore_deprecated))
| true | true |
1c45c79cdc783d17fc365a898f4c6a3109e9d344 | 2,508 | py | Python | api_app/models/schemas/workspace.py | tanya-borisova/AzureTRE | 02e1745785a75a7dc676d9b9853ae4d4de7d87af | [
"MIT"
] | null | null | null | api_app/models/schemas/workspace.py | tanya-borisova/AzureTRE | 02e1745785a75a7dc676d9b9853ae4d4de7d87af | [
"MIT"
] | 1 | 2022-02-02T14:52:06.000Z | 2022-02-02T15:00:01.000Z | api_app/models/schemas/workspace.py | tanya-borisova/AzureTRE | 02e1745785a75a7dc676d9b9853ae4d4de7d87af | [
"MIT"
] | null | null | null | from enum import Enum
from typing import List
from pydantic import BaseModel, Field
from models.domain.resource import ResourceType
from models.domain.workspace import Workspace
def get_sample_workspace(workspace_id: str, spec_workspace_id: str = "0001") -> dict:
    """Build an example workspace payload used in the OpenAPI schema docs."""
    sample_properties = {
        "azure_location": "westeurope",
        "workspace_id": spec_workspace_id,
        "tre_id": "mytre-dev-1234",
        "address_space_size": "small",
    }
    sample = {
        "id": workspace_id,
        "isActive": True,
        "templateName": "tre-workspace-base",
        "templateVersion": "0.1.0",
        "properties": sample_properties,
        "resourceType": ResourceType.Workspace,
        "workspaceURL": "",
        "authInformation": {},
    }
    return sample
# Supported authentication providers for a workspace.
class AuthProvider(str, Enum):
    """
    Auth Provider
    """
    AAD = "AAD"
# Authentication settings attached to a workspace response.
class AuthenticationConfiguration(BaseModel):
    # Provider selection; AAD (Azure Active Directory) is the default.
    provider: AuthProvider = Field(AuthProvider.AAD, title="Authentication Provider")
    # Provider-specific authentication payload.
    data: dict = Field({}, title="Authentication information")
# Response body wrapping a single workspace.
class WorkspaceInResponse(BaseModel):
    workspace: Workspace
    class Config:
        schema_extra = {
            "example": {
                "workspace": get_sample_workspace("933ad738-7265-4b5f-9eae-a1a62928772e")
            }
        }
# Response body wrapping a list of workspaces.
class WorkspacesInList(BaseModel):
    workspaces: List[Workspace]
    class Config:
        schema_extra = {
            "example": {
                "workspaces": [
                    get_sample_workspace("933ad738-7265-4b5f-9eae-a1a62928772e", "0001"),
                    get_sample_workspace("2fdc9fba-726e-4db6-a1b8-9018a2165748", "0002"),
                ]
            }
        }
# Request body for creating a workspace.
class WorkspaceInCreate(BaseModel):
    templateName: str = Field(title="Workspace type", description="Bundle name")
    properties: dict = Field({}, title="Workspace parameters", description="Values for the parameters required by the workspace resource specification")
    class Config:
        schema_extra = {
            "example": {
                "templateName": "tre-workspace-base",
                "properties": {
                    "display_name": "the workspace display name",
                    "description": "workspace description",
                    "app_id": "9d52b04f-89cf-47b4-868a-e12be7133b36"
                }
            }
        }
# PATCH body toggling a workspace's enabled state.
class WorkspacePatchEnabled(BaseModel):
    enabled: bool
    class Config:
        schema_extra = {
            "example": {
                "enabled": False
            }
        }
| 27.56044 | 152 | 0.585726 | from enum import Enum
from typing import List
from pydantic import BaseModel, Field
from models.domain.resource import ResourceType
from models.domain.workspace import Workspace
# Example workspace payload used by the schema_extra examples below.
def get_sample_workspace(workspace_id: str, spec_workspace_id: str = "0001") -> dict:
    return {
        "id": workspace_id,
        "isActive": True,
        "templateName": "tre-workspace-base",
        "templateVersion": "0.1.0",
        "properties": {
            "azure_location": "westeurope",
            "workspace_id": spec_workspace_id,
            "tre_id": "mytre-dev-1234",
            "address_space_size": "small",
        },
        "resourceType": ResourceType.Workspace,
        "workspaceURL": "",
        "authInformation": {}
    }
# Supported authentication providers for a workspace.
class AuthProvider(str, Enum):
    AAD = "AAD"
# Authentication settings attached to a workspace response.
class AuthenticationConfiguration(BaseModel):
    # Provider selection; AAD (Azure Active Directory) is the default.
    provider: AuthProvider = Field(AuthProvider.AAD, title="Authentication Provider")
    # Provider-specific authentication payload.
    data: dict = Field({}, title="Authentication information")
# Response body wrapping a single workspace.
class WorkspaceInResponse(BaseModel):
    workspace: Workspace
    class Config:
        schema_extra = {
            "example": {
                "workspace": get_sample_workspace("933ad738-7265-4b5f-9eae-a1a62928772e")
            }
        }
# Response body wrapping a list of workspaces.
class WorkspacesInList(BaseModel):
    workspaces: List[Workspace]
    class Config:
        schema_extra = {
            "example": {
                "workspaces": [
                    get_sample_workspace("933ad738-7265-4b5f-9eae-a1a62928772e", "0001"),
                    get_sample_workspace("2fdc9fba-726e-4db6-a1b8-9018a2165748", "0002"),
                ]
            }
        }
# Request body for creating a workspace.
class WorkspaceInCreate(BaseModel):
    templateName: str = Field(title="Workspace type", description="Bundle name")
    properties: dict = Field({}, title="Workspace parameters", description="Values for the parameters required by the workspace resource specification")
    class Config:
        schema_extra = {
            "example": {
                "templateName": "tre-workspace-base",
                "properties": {
                    "display_name": "the workspace display name",
                    "description": "workspace description",
                    "app_id": "9d52b04f-89cf-47b4-868a-e12be7133b36"
                }
            }
        }
# PATCH body toggling a workspace's enabled state.
class WorkspacePatchEnabled(BaseModel):
    enabled: bool
    class Config:
        schema_extra = {
            "example": {
                "enabled": False
            }
        }
| true | true |
1c45c8088030d2b6425eb6a785a0705fba310bdf | 694 | py | Python | Menu/BaseScripts/updateBlock.py | fortiersteven/Narikiri-Dungeon-X | 49e5716fa5aa81a25048bcbe212eb74828cf0e10 | [
"MIT"
] | 10 | 2021-06-04T10:17:48.000Z | 2022-01-23T13:23:37.000Z | Menu/BaseScripts/updateBlock.py | fortiersteven/Narikiri-Dungeon-X | 49e5716fa5aa81a25048bcbe212eb74828cf0e10 | [
"MIT"
] | 1 | 2021-06-05T17:05:04.000Z | 2021-06-05T17:05:04.000Z | Menu/BaseScripts/updateBlock.py | fortiersteven/Narikiri-Dungeon-X | 49e5716fa5aa81a25048bcbe212eb74828cf0e10 | [
"MIT"
] | 4 | 2021-05-21T11:21:04.000Z | 2022-01-06T18:50:12.000Z | from HelperfunctionsNew import *
import sys
import os
if __name__ == "__main__":
    # CLI entry point: argv[1] names the text block to (re)build, or
    # "All" to rebuild every block.
    blockDesc = sys.argv[1]
    helper = Helper()
    # Removed stray statement `herlper.get`: `herlper` was a typo for
    # `helper` (a guaranteed NameError at runtime) and the bare attribute
    # access had no effect anyway.
    if blockDesc in ["Skit Name", "Synopsis", "Minigame"]:
        # Multi-entry blocks use a dedicated builder.
        helper.createBlock_Multi(blockDesc)
    elif blockDesc != "All":
        print("Create the script based on google sheet")
        helper.createAtlasScript_Block(blockDesc)
        print("Create the SLPS for this block")
        helper.reinsertText_Block(blockDesc)
    else:
        # Rebuild every block.
        helper.createAtlasScript_All()
        print("Create the SLPS for this block")
        helper.reinsertText_All(blockDesc)
import sys
import os
if __name__ == "__main__":
    # CLI entry point: argv[1] names the text block to (re)build, or
    # "All" to rebuild every block.
    blockDesc = sys.argv[1]
    helper = Helper()
    # Removed stray statement `herlper.get`: `herlper` was a typo for
    # `helper` (a guaranteed NameError at runtime) and the bare attribute
    # access had no effect anyway.
    if blockDesc in ["Skit Name", "Synopsis", "Minigame"]:
        # Multi-entry blocks use a dedicated builder.
        helper.createBlock_Multi(blockDesc)
    elif blockDesc != "All":
        print("Create the script based on google sheet")
        helper.createAtlasScript_Block(blockDesc)
        print("Create the SLPS for this block")
        helper.reinsertText_Block(blockDesc)
    else:
        # Rebuild every block.
        helper.createAtlasScript_All()
        print("Create the SLPS for this block")
        helper.reinsertText_All(blockDesc)
1c45c86d301eb86719539dd517c54c2d7968b0d2 | 2,061 | py | Python | sdk/python/pulumi_aws/__init__.py | Charliekenney23/pulumi-aws | 55bd0390160d27350b297834026fee52114a2d41 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_aws/__init__.py | Charliekenney23/pulumi-aws | 55bd0390160d27350b297834026fee52114a2d41 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_aws/__init__.py | Charliekenney23/pulumi-aws | 55bd0390160d27350b297834026fee52114a2d41 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import importlib
# Make subpackages available:
__all__ = ['acm', 'acmpca', 'apigateway', 'appautoscaling', 'applicationloadbalancing', 'appmesh', 'appsync', 'athena', 'autoscaling', 'backup', 'batch', 'budgets', 'cfg', 'cloud9', 'cloudformation', 'cloudfront', 'cloudhsmv2', 'cloudtrail', 'cloudwatch', 'codebuild', 'codecommit', 'codedeploy', 'codepipeline', 'cognito', 'config', 'cur', 'datasync', 'dax', 'devicefarm', 'directconnect', 'directoryservice', 'dlm', 'dms', 'docdb', 'dynamodb', 'ebs', 'ec2', 'ec2clientvpn', 'ec2transitgateway', 'ecr', 'ecs', 'efs', 'eks', 'elasticache', 'elasticbeanstalk', 'elasticloadbalancing', 'elasticloadbalancingv2', 'elasticsearch', 'elastictranscoder', 'emr', 'gamelift', 'glacier', 'globalaccelerator', 'glue', 'guardduty', 'iam', 'inspector', 'iot', 'kinesis', 'kms', 'lambda_', 'licensemanager', 'lightsail', 'macie', 'mediapackage', 'mediastore', 'mq', 'msk', 'neptune', 'opsworks', 'organizations', 'pinpoint', 'pricing', 'ram', 'rds', 'redshift', 'resourcegroups', 'route53', 's3', 'sagemaker', 'secretsmanager', 'securityhub', 'servicecatalog', 'servicediscovery', 'ses', 'sfn', 'shield', 'simpledb', 'sns', 'sqs', 'ssm', 'storagegateway', 'swf', 'transfer', 'waf', 'wafregional', 'worklink', 'workspaces', 'xray']
# Eagerly import every generated subpackage so its resources are registered
# when the top-level package is imported. `config` is excluded -- presumably
# to avoid import-time side effects; confirm against the code generator.
for pkg in __all__:
    if pkg != 'config':
        importlib.import_module(f'{__name__}.{pkg}')
# Export this package's modules as members:
from .get_ami import *
from .get_ami_ids import *
from .get_arn import *
from .get_autoscaling_groups import *
from .get_availability_zone import *
from .get_availability_zones import *
from .get_billing_service_account import *
from .get_caller_identity import *
from .get_canonical_user_id import *
from .get_elastic_ip import *
from .get_ip_ranges import *
from .get_partition import *
from .get_prefix_list import *
from .get_region import *
from .provider import *
| 73.607143 | 1,216 | 0.706938 |
import importlib
# Make subpackages available:
__all__ = ['acm', 'acmpca', 'apigateway', 'appautoscaling', 'applicationloadbalancing', 'appmesh', 'appsync', 'athena', 'autoscaling', 'backup', 'batch', 'budgets', 'cfg', 'cloud9', 'cloudformation', 'cloudfront', 'cloudhsmv2', 'cloudtrail', 'cloudwatch', 'codebuild', 'codecommit', 'codedeploy', 'codepipeline', 'cognito', 'config', 'cur', 'datasync', 'dax', 'devicefarm', 'directconnect', 'directoryservice', 'dlm', 'dms', 'docdb', 'dynamodb', 'ebs', 'ec2', 'ec2clientvpn', 'ec2transitgateway', 'ecr', 'ecs', 'efs', 'eks', 'elasticache', 'elasticbeanstalk', 'elasticloadbalancing', 'elasticloadbalancingv2', 'elasticsearch', 'elastictranscoder', 'emr', 'gamelift', 'glacier', 'globalaccelerator', 'glue', 'guardduty', 'iam', 'inspector', 'iot', 'kinesis', 'kms', 'lambda_', 'licensemanager', 'lightsail', 'macie', 'mediapackage', 'mediastore', 'mq', 'msk', 'neptune', 'opsworks', 'organizations', 'pinpoint', 'pricing', 'ram', 'rds', 'redshift', 'resourcegroups', 'route53', 's3', 'sagemaker', 'secretsmanager', 'securityhub', 'servicecatalog', 'servicediscovery', 'ses', 'sfn', 'shield', 'simpledb', 'sns', 'sqs', 'ssm', 'storagegateway', 'swf', 'transfer', 'waf', 'wafregional', 'worklink', 'workspaces', 'xray']
# Eagerly import every subpackage listed in __all__; 'config' is skipped
# here and must be imported explicitly by the caller.
for pkg in __all__:
    if pkg != 'config':
        importlib.import_module(f'{__name__}.{pkg}')
# Export this package's modules as members:
from .get_ami import *
from .get_ami_ids import *
from .get_arn import *
from .get_autoscaling_groups import *
from .get_availability_zone import *
from .get_availability_zones import *
from .get_billing_service_account import *
from .get_caller_identity import *
from .get_canonical_user_id import *
from .get_elastic_ip import *
from .get_ip_ranges import *
from .get_partition import *
from .get_prefix_list import *
from .get_region import *
from .provider import *
| true | true |
1c45c9928167414ac58f3e156afea4d5426540e6 | 122 | py | Python | ecommerce/api/category/admin.py | jigyasudhingra/E-commerce-Store-Using-React-And-Django | 128e0e3d78dd7aca309c851eff2d02e2452d4d1f | [
"MIT"
] | 1 | 2021-12-04T08:47:29.000Z | 2021-12-04T08:47:29.000Z | ecommerce/api/category/admin.py | jigyasudhingra/E-commerce-Store-Using-React-And-Django | 128e0e3d78dd7aca309c851eff2d02e2452d4d1f | [
"MIT"
] | null | null | null | ecommerce/api/category/admin.py | jigyasudhingra/E-commerce-Store-Using-React-And-Django | 128e0e3d78dd7aca309c851eff2d02e2452d4d1f | [
"MIT"
] | 1 | 2021-05-15T07:23:37.000Z | 2021-05-15T07:23:37.000Z | from django.contrib import admin
from .models import Category
# Expose the Category model in the Django admin site with the default ModelAdmin.
admin.site.register(Category)
| 20.333333 | 32 | 0.811475 | from django.contrib import admin
from .models import Category
# Expose the Category model in the Django admin site with the default ModelAdmin.
admin.site.register(Category)
| true | true |
1c45ca09ffeae3aabe2a3a4553e18bbea7714321 | 595 | py | Python | pyaz/eventgrid/topic_type/__init__.py | py-az-cli/py-az-cli | 9a7dc44e360c096a5a2f15595353e9dad88a9792 | [
"MIT"
] | null | null | null | pyaz/eventgrid/topic_type/__init__.py | py-az-cli/py-az-cli | 9a7dc44e360c096a5a2f15595353e9dad88a9792 | [
"MIT"
] | null | null | null | pyaz/eventgrid/topic_type/__init__.py | py-az-cli/py-az-cli | 9a7dc44e360c096a5a2f15595353e9dad88a9792 | [
"MIT"
] | 1 | 2022-02-03T09:12:01.000Z | 2022-02-03T09:12:01.000Z | from ... pyaz_utils import _call_az
def list():
    '''
    List registered topic types.
    '''
    # NOTE: the name intentionally mirrors the CLI verb and shadows the
    # built-in list() inside this module.
    # locals() is forwarded verbatim to _call_az; keep the body a single
    # return so no stray locals end up in the forwarded mapping.
    return _call_az("az eventgrid topic-type list", locals())
def show(name):
    '''
    Get the details for a topic type.
    Required Parameters:
    - name -- Name of the topic type.
    '''
    # locals() here is just {'name': name}, forwarded unchanged to _call_az.
    return _call_az("az eventgrid topic-type show", locals())
def list_event_types(name):
    '''
    List the event types supported by a topic type.
    Required Parameters:
    - name -- Name of the topic type.
    '''
    # locals() here is just {'name': name}, forwarded unchanged to _call_az.
    return _call_az("az eventgrid topic-type list-event-types", locals())
| 20.517241 | 73 | 0.636975 | from ... pyaz_utils import _call_az
def list():
    '''List registered topic types (wraps ``az eventgrid topic-type list``).'''
    return _call_az("az eventgrid topic-type list", locals())
def show(name):
    '''Get the details for the topic type *name*; locals() is forwarded to _call_az.'''
    return _call_az("az eventgrid topic-type show", locals())
def list_event_types(name):
    '''List the event types supported by the topic type *name*.'''
    return _call_az("az eventgrid topic-type list-event-types", locals())
| true | true |
1c45ca0d1bfde8aae6ad6466c099bac46b3121a0 | 4,702 | py | Python | sdk/python/pulumi_aws/iot/thing_principal_attachment.py | sibuthomasmathew/pulumi-aws | 6351f2182eb6f693d4e09e4136c385adfa0ab674 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_aws/iot/thing_principal_attachment.py | sibuthomasmathew/pulumi-aws | 6351f2182eb6f693d4e09e4136c385adfa0ab674 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_aws/iot/thing_principal_attachment.py | sibuthomasmathew/pulumi-aws | 6351f2182eb6f693d4e09e4136c385adfa0ab674 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
__all__ = ['ThingPrincipalAttachment']
class ThingPrincipalAttachment(pulumi.CustomResource):
    # Generated Pulumi resource wrapper (see file header); do not edit by hand.
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 principal: Optional[pulumi.Input[str]] = None,
                 thing: Optional[pulumi.Input[str]] = None,
                 __props__=None,
                 __name__=None,
                 __opts__=None):
        """
        Attaches Principal to AWS IoT Thing.
        ## Example Usage
        ```python
        import pulumi
        import pulumi_aws as aws
        example = aws.iot.Thing("example")
        cert = aws.iot.Certificate("cert",
            csr=(lambda path: open(path).read())("csr.pem"),
            active=True)
        att = aws.iot.ThingPrincipalAttachment("att",
            principal=cert.arn,
            thing=example.name)
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] principal: The AWS IoT Certificate ARN or Amazon Cognito Identity ID.
        :param pulumi.Input[str] thing: The name of the thing.
        """
        # __name__/__opts__ are deprecated positional aliases for
        # resource_name/opts kept for backwards compatibility.
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # opts.id is only set when rehydrating an existing resource (see get());
        # for a brand-new resource both required inputs must be supplied.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()
            if principal is None and not opts.urn:
                raise TypeError("Missing required property 'principal'")
            __props__['principal'] = principal
            if thing is None and not opts.urn:
                raise TypeError("Missing required property 'thing'")
            __props__['thing'] = thing
        super(ThingPrincipalAttachment, __self__).__init__(
            'aws:iot/thingPrincipalAttachment:ThingPrincipalAttachment',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            principal: Optional[pulumi.Input[str]] = None,
            thing: Optional[pulumi.Input[str]] = None) -> 'ThingPrincipalAttachment':
        """
        Get an existing ThingPrincipalAttachment resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] principal: The AWS IoT Certificate ARN or Amazon Cognito Identity ID.
        :param pulumi.Input[str] thing: The name of the thing.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = dict()
        __props__["principal"] = principal
        __props__["thing"] = thing
        return ThingPrincipalAttachment(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter
    def principal(self) -> pulumi.Output[str]:
        """
        The AWS IoT Certificate ARN or Amazon Cognito Identity ID.
        """
        return pulumi.get(self, "principal")
    @property
    @pulumi.getter
    def thing(self) -> pulumi.Output[str]:
        """
        The name of the thing.
        """
        return pulumi.get(self, "thing")
    def translate_output_property(self, prop):
        # Map provider camelCase property names to Python snake_case.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
    def translate_input_property(self, prop):
        # Map Python snake_case property names back to provider camelCase.
        return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 38.859504 | 134 | 0.630795 |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
__all__ = ['ThingPrincipalAttachment']
class ThingPrincipalAttachment(pulumi.CustomResource):
    """Attaches a principal to an AWS IoT Thing (generated Pulumi resource; do not edit by hand)."""
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 principal: Optional[pulumi.Input[str]] = None,
                 thing: Optional[pulumi.Input[str]] = None,
                 __props__=None,
                 __name__=None,
                 __opts__=None):
        """
        Create a ThingPrincipalAttachment resource.
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] principal: The AWS IoT Certificate ARN or Amazon Cognito Identity ID.
        :param pulumi.Input[str] thing: The name of the thing.
        """
        # __name__/__opts__ are deprecated aliases for resource_name/opts.
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # opts.id is only set when rehydrating an existing resource (see get()).
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()
            if principal is None and not opts.urn:
                raise TypeError("Missing required property 'principal'")
            __props__['principal'] = principal
            if thing is None and not opts.urn:
                raise TypeError("Missing required property 'thing'")
            __props__['thing'] = thing
        super(ThingPrincipalAttachment, __self__).__init__(
            'aws:iot/thingPrincipalAttachment:ThingPrincipalAttachment',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            principal: Optional[pulumi.Input[str]] = None,
            thing: Optional[pulumi.Input[str]] = None) -> 'ThingPrincipalAttachment':
        """Look up an existing ThingPrincipalAttachment by provider *id*."""
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = dict()
        __props__["principal"] = principal
        __props__["thing"] = thing
        return ThingPrincipalAttachment(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter
    def principal(self) -> pulumi.Output[str]:
        """The AWS IoT Certificate ARN or Amazon Cognito Identity ID."""
        return pulumi.get(self, "principal")
    @property
    @pulumi.getter
    def thing(self) -> pulumi.Output[str]:
        """The name of the thing."""
        return pulumi.get(self, "thing")
    def translate_output_property(self, prop):
        # Map provider camelCase property names to Python snake_case.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
    def translate_input_property(self, prop):
        # Map Python snake_case property names back to provider camelCase.
        return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| true | true |
1c45ca1a60b4a3f9502678a3a66a9ba2d024ce6a | 3,966 | py | Python | coursera-compinvest1-master/coursera-compinvest1-master/homework/Homework4.py | telefar/stockEye | 2556c951af5e813460ee44951542cd918986578b | [
"BSD-3-Clause"
] | null | null | null | coursera-compinvest1-master/coursera-compinvest1-master/homework/Homework4.py | telefar/stockEye | 2556c951af5e813460ee44951542cd918986578b | [
"BSD-3-Clause"
] | null | null | null | coursera-compinvest1-master/coursera-compinvest1-master/homework/Homework4.py | telefar/stockEye | 2556c951af5e813460ee44951542cd918986578b | [
"BSD-3-Clause"
] | null | null | null | '''
(c) 2011, 2012 Georgia Tech Research Corporation
This source code is released under the New BSD license. Please see
http://wiki.quantsoftware.org/index.php?title=QSTK_License
for license details.
'''
import pandas as pd
import numpy as np
import math
import copy
import QSTK.qstkutil.qsdateutil as du
import datetime as dt
import QSTK.qstkutil.DataAccess as da
import QSTK.qstkutil.tsutil as tsu
import csv
import QSTK.qstkstudy.EventProfiler as ep
"""
Accepts a list of symbols along with start and end date
Returns the Event Matrix which is a pandas Datamatrix
Event matrix has the following structure :
|IBM |GOOG|XOM |MSFT| GS | JP |
(d1)|nan |nan | 1 |nan |nan | 1 |
(d2)|nan | 1 |nan |nan |nan |nan |
(d3)| 1 |nan | 1 |nan | 1 |nan |
(d4)|nan | 1 |nan | 1 |nan |nan |
...................................
...................................
Also, d1 = start date
nan = no information about any event.
1 = status bit(positively confirms the event occurence)
"""
def find_events(ls_symbols, d_data):
    '''Scan actual-close prices for "dropped below $10" events.
    For every symbol, an event fires on a day where the price closes below
    10.0 after closing at or above 10.0 the previous day.  Each event writes
    a Buy order (that day) and a Sell order (5 trading days later, clamped to
    the last timestamp) of 100 shares into orders.csv, and marks a 1 in the
    returned event dataframe.  Python 2 / legacy pandas (.ix) code.
    '''
    df_close = d_data['actual_close']
    ts_market = df_close['SPY']
    print "Finding Events"
    # Creating an empty dataframe
    df_events = copy.deepcopy(df_close)
    df_events = df_events * np.NAN
    # Time stamps for the event range
    ldt_timestamps = df_close.index
    # NOTE(review): this file handle is never closed; rows may be lost if the
    # process exits before the buffer flushes — wrap in a with-block.
    writer = csv.writer(open('orders.csv', 'wb'), delimiter=',')
    for s_sym in ls_symbols:
        for i in range(1, len(ldt_timestamps)):
            # Calculating the returns for this timestamp (computed but only
            # used by the commented-out market-relative event rule below).
            f_symprice_today = df_close[s_sym].ix[ldt_timestamps[i]]
            f_symprice_yest = df_close[s_sym].ix[ldt_timestamps[i - 1]]
            f_marketprice_today = ts_market.ix[ldt_timestamps[i]]
            f_marketprice_yest = ts_market.ix[ldt_timestamps[i - 1]]
            f_symreturn_today = (f_symprice_today / f_symprice_yest) - 1
            f_marketreturn_today = (f_marketprice_today / f_marketprice_yest) - 1
            i_shares = 100
            # Event is found if the symbol is down more then 3% while the
            # market is up more then 2%
            # if f_symreturn_today <= -0.03 and f_marketreturn_today >= 0.02:
            #     df_events[s_sym].ix[ldt_timestamps[i]] = 1
            f_cutoff = 10.0
            # Active rule: price crossed below the $10 cutoff since yesterday.
            if f_symprice_today < f_cutoff and f_symprice_yest >= f_cutoff:
                df_events[s_sym].ix[ldt_timestamps[i]] = 1
                row_to_enter = [str(ldt_timestamps[i].year), str(ldt_timestamps[i].month), \
                    str(ldt_timestamps[i].day), s_sym, 'Buy', i_shares]
                writer.writerow(row_to_enter)
                # Sell 5 trading days later, or on the last available day.
                # NOTE(review): bare except — should be "except IndexError".
                try:
                    time_n = ldt_timestamps[i + 5]
                except:
                    time_n = ldt_timestamps[-1]
                row_to_enter = [str(time_n.year), str(time_n.month), \
                    str(time_n.day), s_sym, 'Sell', i_shares]
                writer.writerow(row_to_enter)
    return df_events
if __name__ == '__main__':
    # Study window: NYSE trading days (16:00 close) in 2008-2009.
    dt_start = dt.datetime(2008, 1, 1)
    dt_end = dt.datetime(2009, 12, 31)
    ldt_timestamps = du.getNYSEdays(dt_start, dt_end, dt.timedelta(hours=16))
    dataobj = da.DataAccess('Yahoo')
    ls_symbols = dataobj.get_symbols_from_list('sp5002012')
    ls_symbols.append('SPY')
    ls_keys = ['open', 'high', 'low', 'close', 'volume', 'actual_close']
    ldf_data = dataobj.get_data(ldt_timestamps, ls_symbols, ls_keys)
    d_data = dict(zip(ls_keys, ldf_data))
    # Fill data holes: forward-fill, then back-fill, then default to 1.0.
    for s_key in ls_keys:
        d_data[s_key] = d_data[s_key].fillna(method = 'ffill')
        d_data[s_key] = d_data[s_key].fillna(method = 'bfill')
        d_data[s_key] = d_data[s_key].fillna(1.0)
    df_events = find_events(ls_symbols, d_data)
    # print "Creating Study"
    # ep.eventprofiler(df_events, d_data, i_lookback=20, i_lookforward=20,
    #                  s_filename='MyEventStudy.pdf', b_market_neutral=True, b_errorbars=True,
    #                  s_market_sym='SPY')
| 37.771429 | 92 | 0.631367 | '''
(c) 2011, 2012 Georgia Tech Research Corporation
This source code is released under the New BSD license. Please see
http://wiki.quantsoftware.org/index.php?title=QSTK_License
for license details.
'''
import pandas as pd
import numpy as np
import math
import copy
import QSTK.qstkutil.qsdateutil as du
import datetime as dt
import QSTK.qstkutil.DataAccess as da
import QSTK.qstkutil.tsutil as tsu
import csv
import QSTK.qstkstudy.EventProfiler as ep
"""
Accepts a list of symbols along with start and end date
Returns the Event Matrix which is a pandas Datamatrix
Event matrix has the following structure :
|IBM |GOOG|XOM |MSFT| GS | JP |
(d1)|nan |nan | 1 |nan |nan | 1 |
(d2)|nan | 1 |nan |nan |nan |nan |
(d3)| 1 |nan | 1 |nan | 1 |nan |
(d4)|nan | 1 |nan | 1 |nan |nan |
...................................
...................................
Also, d1 = start date
nan = no information about any event.
1 = status bit(positively confirms the event occurence)
"""
def find_events(ls_symbols, d_data):
    ''' Finding the event dataframe '''
    df_close = d_data['actual_close']
    ts_market = df_close['SPY']
    print "Finding Events"
    # Event matrix starts as all-NaN copy of the close-price frame.
    df_events = copy.deepcopy(df_close)
    df_events = df_events * np.NAN
    ldt_timestamps = df_close.index
    # NOTE(review): file handle is never closed — wrap in a with-block.
    writer = csv.writer(open('orders.csv', 'wb'), delimiter=',')
    for s_sym in ls_symbols:
        for i in range(1, len(ldt_timestamps)):
            # Daily returns (only used by the alternative event rule).
            f_symprice_today = df_close[s_sym].ix[ldt_timestamps[i]]
            f_symprice_yest = df_close[s_sym].ix[ldt_timestamps[i - 1]]
            f_marketprice_today = ts_market.ix[ldt_timestamps[i]]
            f_marketprice_yest = ts_market.ix[ldt_timestamps[i - 1]]
            f_symreturn_today = (f_symprice_today / f_symprice_yest) - 1
            f_marketreturn_today = (f_marketprice_today / f_marketprice_yest) - 1
            i_shares = 100
            f_cutoff = 10.0
            # Event: close crossed below the $10 cutoff since yesterday.
            if f_symprice_today < f_cutoff and f_symprice_yest >= f_cutoff:
                df_events[s_sym].ix[ldt_timestamps[i]] = 1
                row_to_enter = [str(ldt_timestamps[i].year), str(ldt_timestamps[i].month), \
                    str(ldt_timestamps[i].day), s_sym, 'Buy', i_shares]
                writer.writerow(row_to_enter)
                # Sell 5 trading days later, or on the final day if out of range.
                try:
                    time_n = ldt_timestamps[i + 5]
                except:
                    time_n = ldt_timestamps[-1]
                row_to_enter = [str(time_n.year), str(time_n.month), \
                    str(time_n.day), s_sym, 'Sell', i_shares]
                writer.writerow(row_to_enter)
    return df_events
if __name__ == '__main__':
    # Study window: NYSE trading days (16:00 close) in 2008-2009.
    dt_start = dt.datetime(2008, 1, 1)
    dt_end = dt.datetime(2009, 12, 31)
    ldt_timestamps = du.getNYSEdays(dt_start, dt_end, dt.timedelta(hours=16))
    dataobj = da.DataAccess('Yahoo')
    ls_symbols = dataobj.get_symbols_from_list('sp5002012')
    ls_symbols.append('SPY')
    ls_keys = ['open', 'high', 'low', 'close', 'volume', 'actual_close']
    ldf_data = dataobj.get_data(ldt_timestamps, ls_symbols, ls_keys)
    d_data = dict(zip(ls_keys, ldf_data))
    # Fill data holes: forward-fill, then back-fill, then default to 1.0.
    for s_key in ls_keys:
        d_data[s_key] = d_data[s_key].fillna(method = 'ffill')
        d_data[s_key] = d_data[s_key].fillna(method = 'bfill')
        d_data[s_key] = d_data[s_key].fillna(1.0)
    df_events = find_events(ls_symbols, d_data)
| false | true |
1c45cb5a65393e34123182c95336941251f13bac | 4,748 | py | Python | preGame.py | Unf-Team/Hang-Man | 330ce502de95da48d127cad1b9c4d6c0328864ca | [
"Apache-2.0"
] | 15 | 2015-11-19T19:42:43.000Z | 2019-08-16T05:49:02.000Z | preGame.py | Unf-Team/Hang-Man | 330ce502de95da48d127cad1b9c4d6c0328864ca | [
"Apache-2.0"
] | 3 | 2016-11-06T19:12:26.000Z | 2017-10-27T00:51:35.000Z | preGame.py | Unf-Team/Hang-Man | 330ce502de95da48d127cad1b9c4d6c0328864ca | [
"Apache-2.0"
] | 29 | 2015-12-19T06:51:15.000Z | 2020-04-07T18:10:35.000Z | #-*- coding: utf-8 -*-
#Contém a lógica do preGame
#Importa os comandos
import comandos as c
import bds
import game as g
from random import randint, shuffle
def categorias(chat_id, u_id, message_id):
    """Start category selection; only the game admin may trigger it (returns None otherwise)."""
    lang = c.getLanguage(chat_id)
    if not bds.checkAdm(chat_id, u_id):
        return None
    bds.setCategorias(chat_id, True)
    force_reply = c.makeFr(True, selective=True)
    return [c.toDict(chat_id, lang.cat_msg, replyTo=message_id, replyMarkup=force_reply)]
def setCategorias(chat_id, text, message_id, u_id):
    """Parse the space-separated category numbers sent by the admin.
    Each token must be an integer in [0, len(l.palavras)] and not repeated;
    otherwise the error reply (with a new ForceReply) is sent back.  On
    success the categories are stored and the confirmation is returned.
    """
    l = c.getLanguage(chat_id)
    cats = []
    try:
        for token in text.split(' '):
            num = int(token)  # raises ValueError for non-numeric input
            # Reject out-of-range or duplicate category numbers explicitly
            # (replaces the old int('a') trick used to force an exception).
            if num < 0 or num > len(l.palavras) or num in cats:
                raise ValueError('invalid category number')
            cats.append(num)
    except ValueError:
        return [c.toDict(chat_id, l.cat_erro_msg, replyTo = message_id, replyMarkup = c.makeFr(True, selective = True))]
    bds.setCats(chat_id,cats)
    kb = c.makeKb(c.getKb(chat_id,'main',u_id = u_id)[1], selective = True, resize_keyboard = True)
    return [c.toDict(chat_id, l.categorias_msg, replyTo = message_id, replyMarkup = kb)]
def entrar(chat_id, u_id, u_name, message_id):
    """Add a player to the pre-game lobby; answer differently if already joined."""
    lang = c.getLanguage(chat_id)
    if not bds.addPlayer(chat_id, u_id, u_name, message_id):
        # Player was already in the game.
        return [c.toDict(chat_id, lang.esta_dentro_msg)]
    main_kb = c.makeKb(c.getKb(chat_id, 'main', u_id=u_id)[0],
                       selective=True, resize_keyboard=True)
    return [c.toDict(chat_id, lang.entrarMsg(u_name), replyTo=message_id, replyMarkup=main_kb)]
def sair(chat_id, u_id, u_name, message_id):
    """Remove a player from the lobby.
    bds.rmPlayer signals the outcome through its return value, handled by
    the branches below:
      False       -> lobby became empty
      'setAdm'    -> the leaving player was admin; promote the next player
      True        -> normal removal
      'semPlayer' -> the user was not in the game
    """
    aux = bds.rmPlayer(chat_id, u_id, message_id)
    l = c.getLanguage(chat_id)
    if aux == False:
        kb = c.makeKb(c.getKb(chat_id, 'main')[0], resize_keyboard = True)
        return [c.toDict(chat_id, l.sem_jogador_msg, replyMarkup = kb)]
    elif aux == 'setAdm':
        # Announce the quit, then notify the promoted admin with the admin keyboard.
        rpl = []
        pl = bds.getPlayers(chat_id)
        kb = c.makeKb(c.getKb(chat_id, 'main', u_id = u_id)[0], selective = True, resize_keyboard = True)
        rpl.append(c.toDict(chat_id, l.playerQuitMsg(u_name), replyTo = message_id, replyMarkup = kb))
        kb = c.makeKb(c.getKb(chat_id,'main')[1], resize_keyboard = True, selective = True)
        rpl.append(c.toDict(chat_id, l.novoAdmMsg(pl[1][0]), replyTo = pl[2][0], replyMarkup = kb))
        return rpl
    elif aux == True:
        kb = c.makeKb(c.getKb(chat_id,'main',u_id = u_id)[0], selective = True, resize_keyboard = True)
        return [c.toDict(chat_id, l.playerQuitMsg(u_name), replyTo = message_id, replyMarkup = kb)]
    elif aux == 'semPlayer':
        return [c.toDict(chat_id, l.is_out_msg)]
def fecharJogo(chat_id, u_id, message_id, date):
    """Close the lobby and start the match (admin-only).
    Picks a category (random when none/0 was selected), draws a word from it,
    stores the masked word and lives, shuffles the turn order and announces
    the game start.  Python 2 code (str.decode).
    """
    l = c.getLanguage(chat_id)
    rpl = []
    if bds.checkAdm(chat_id, u_id):
        bds.setPreGame(chat_id, False)
        bds.setInGame(chat_id, True)
        lista = bds.getCats(chat_id)
        # Category 0 (or an empty selection) means "any category".
        if ((len(lista) == 0) or (0 in lista)):
            categoria = randint(0, (len(l.palavras)-1))
        else:
            cat = randint(0, len(lista)-1)
            categoria = lista[cat]-1
        # Index 0 of each category list holds its display name; words start at 1.
        palavra = randint(1, (len(l.palavras[categoria])-1))
        palavra = l.palavras[categoria][palavra].decode('utf-8')
        categoria = l.palavras[categoria][0]
        mascara = bds.setCP(chat_id, categoria, palavra)
        vidas = bds.setVidas(chat_id)
        bds.shufflePlayers(chat_id, date)
        u_names = bds.getPlayers(chat_id)[1]
        message_ids = bds.getPlayers(chat_id)[2]
        ordem = ''
        for i in range(len(u_names)):
            ordem = ordem+u_names[i]+'\n'
        kb = c.makeKb(c.getKb(chat_id, 'main')[0], resize_keyboard = True, selective = True)
        rpl.append(c.toDict(chat_id, l.close_game_msg, replyMarkup = c.makeKbh(True)))
        rpl.append(c.toDict(chat_id, ordem + '\n' + l.categoria_msg+categoria +
                            '\n' + l.palavra_msg+mascara + '\n' +
                            l.vidas_msg+g.vidasEmoji(chat_id),
                            replyTo = message_ids[0], replyMarkup = kb))
        # NOTE(review): the triple-quoted block below is dead code kept as a
        # bare string expression.
        """rpl.append(c.toDict(chat_id, l.categoria_msg+categoria))
        rpl.append(c.toDict(chat_id, l.palavra_msg+mascara))
        rpl.append(c.toDict(chat_id, l.vidas_msg+g.vidasEmoji(chat_id)))"""
        return rpl
    return [c.toDict(chat_id, l.cantdo_msg)]
def cancelarJogo(chat_id, u_id):
    """Cancel the current game; only the admin may do it."""
    lang = c.getLanguage(chat_id)
    if not bds.checkAdm(chat_id, u_id):
        return [c.toDict(chat_id, lang.cantdo_msg)]
    bds.delGame(chat_id)
    main_kb = c.makeKb(c.getKb(chat_id, 'main')[0], resize_keyboard=True)
    return [c.toDict(chat_id, lang.cancelar_jogo_msg, replyMarkup=main_kb)]
| 45.653846 | 131 | 0.625527 |
import comandos as c
import bds
import game as g
from random import randint, shuffle
def categorias(chat_id, u_id, message_id):
    """Ask the admin which word categories to play with (admin-only; returns None otherwise)."""
    l = c.getLanguage(chat_id)
    if bds.checkAdm(chat_id, u_id):
        bds.setCategorias(chat_id,True)
        return [c.toDict(chat_id, l.cat_msg, replyTo = message_id, replyMarkup = c.makeFr(True, selective = True))]
def setCategorias(chat_id, text, message_id, u_id):
    """Parse the space-separated category numbers sent by the admin."""
    l = c.getLanguage(chat_id)
    cats = []
    text = text.split(' ')
    try:
        for i in range(len(text)):
            # Accept only in-range, non-duplicate integers.
            if ((int(text[i]) <= len(l.palavras)) and (int(text[i]) >= 0) and (int(text[i]) not in cats)):
                cats.append(int(text[i]))
            else:
                # Deliberately forces a ValueError so the except below replies
                # with the error message (NOTE(review): raise directly instead).
                int('a')
    except Exception, e:
        # Python 2 except syntax; reached for any non-numeric/out-of-range token.
        return [c.toDict(chat_id, l.cat_erro_msg, replyTo = message_id, replyMarkup = c.makeFr(True, selective = True))]
    bds.setCats(chat_id,cats)
    kb = c.makeKb(c.getKb(chat_id,'main',u_id = u_id)[1], selective = True, resize_keyboard = True)
    return [c.toDict(chat_id, l.categorias_msg, replyTo = message_id, replyMarkup = kb)]
def entrar(chat_id, u_id, u_name, message_id):
    """Add a player to the pre-game lobby; answer differently if already joined."""
    l = c.getLanguage(chat_id)
    if bds.addPlayer(chat_id, u_id, u_name, message_id):
        kb = c.makeKb(c.getKb(chat_id,'main',u_id = u_id)[0], selective = True, resize_keyboard = True)
        return [c.toDict(chat_id, l.entrarMsg(u_name), replyTo = message_id, replyMarkup = kb )]
    return [c.toDict(chat_id, l.esta_dentro_msg)]
def sair(chat_id, u_id, u_name, message_id):
    """Remove a player from the lobby.
    bds.rmPlayer return values, handled below: False (lobby now empty),
    'setAdm' (leaving player was admin; promote next), True (normal removal),
    'semPlayer' (user was not in the game).
    """
    aux = bds.rmPlayer(chat_id, u_id, message_id)
    l = c.getLanguage(chat_id)
    if aux == False:
        kb = c.makeKb(c.getKb(chat_id, 'main')[0], resize_keyboard = True)
        return [c.toDict(chat_id, l.sem_jogador_msg, replyMarkup = kb)]
    elif aux == 'setAdm':
        # Announce the quit, then notify the promoted admin with the admin keyboard.
        rpl = []
        pl = bds.getPlayers(chat_id)
        kb = c.makeKb(c.getKb(chat_id, 'main', u_id = u_id)[0], selective = True, resize_keyboard = True)
        rpl.append(c.toDict(chat_id, l.playerQuitMsg(u_name), replyTo = message_id, replyMarkup = kb))
        kb = c.makeKb(c.getKb(chat_id,'main')[1], resize_keyboard = True, selective = True)
        rpl.append(c.toDict(chat_id, l.novoAdmMsg(pl[1][0]), replyTo = pl[2][0], replyMarkup = kb))
        return rpl
    elif aux == True:
        kb = c.makeKb(c.getKb(chat_id,'main',u_id = u_id)[0], selective = True, resize_keyboard = True)
        return [c.toDict(chat_id, l.playerQuitMsg(u_name), replyTo = message_id, replyMarkup = kb)]
    elif aux == 'semPlayer':
        return [c.toDict(chat_id, l.is_out_msg)]
def fecharJogo(chat_id, u_id, message_id, date):
    """Close the lobby and start the match (admin-only); Python 2 code."""
    l = c.getLanguage(chat_id)
    rpl = []
    if bds.checkAdm(chat_id, u_id):
        bds.setPreGame(chat_id, False)
        bds.setInGame(chat_id, True)
        lista = bds.getCats(chat_id)
        # Category 0 (or empty selection) means "any category".
        if ((len(lista) == 0) or (0 in lista)):
            categoria = randint(0, (len(l.palavras)-1))
        else:
            cat = randint(0, len(lista)-1)
            categoria = lista[cat]-1
        # Index 0 of each category list holds its display name; words start at 1.
        palavra = randint(1, (len(l.palavras[categoria])-1))
        palavra = l.palavras[categoria][palavra].decode('utf-8')
        categoria = l.palavras[categoria][0]
        mascara = bds.setCP(chat_id, categoria, palavra)
        vidas = bds.setVidas(chat_id)
        bds.shufflePlayers(chat_id, date)
        u_names = bds.getPlayers(chat_id)[1]
        message_ids = bds.getPlayers(chat_id)[2]
        ordem = ''
        for i in range(len(u_names)):
            ordem = ordem+u_names[i]+'\n'
        kb = c.makeKb(c.getKb(chat_id, 'main')[0], resize_keyboard = True, selective = True)
        rpl.append(c.toDict(chat_id, l.close_game_msg, replyMarkup = c.makeKbh(True)))
        rpl.append(c.toDict(chat_id, ordem + '\n' + l.categoria_msg+categoria +
                            '\n' + l.palavra_msg+mascara + '\n' +
                            l.vidas_msg+g.vidasEmoji(chat_id),
                            replyTo = message_ids[0], replyMarkup = kb))
        # NOTE(review): dead code kept as a bare string expression.
        """rpl.append(c.toDict(chat_id, l.categoria_msg+categoria))
        rpl.append(c.toDict(chat_id, l.palavra_msg+mascara))
        rpl.append(c.toDict(chat_id, l.vidas_msg+g.vidasEmoji(chat_id)))"""
        return rpl
    return [c.toDict(chat_id, l.cantdo_msg)]
def cancelarJogo(chat_id, u_id):
    """Cancel the current game; only the admin may do it."""
    l = c.getLanguage(chat_id)
    if bds.checkAdm(chat_id, u_id):
        bds.delGame(chat_id)
        keyboard = c.makeKb(c.getKb(chat_id, 'main')[0], resize_keyboard = True)
        return [c.toDict(chat_id, l.cancelar_jogo_msg, replyMarkup = keyboard)]
    return [c.toDict(chat_id, l.cantdo_msg)]
| false | true |
1c45cb82a5676f74fee689944bd273eb30fc85e0 | 4,151 | py | Python | Tests/Plot/LamWind/test_Slot_LSRPM_plot.py | tobsen2code/pyleecan | 5b1ded9e389e0c79ed7b7c878b6e939f2d9962e9 | [
"Apache-2.0"
] | 95 | 2019-01-23T04:19:45.000Z | 2022-03-17T18:22:10.000Z | Tests/Plot/LamWind/test_Slot_LSRPM_plot.py | Eomys/Pyleecan | 4d7f0cbabf0311006963e7a2f435db2ecd901118 | [
"Apache-2.0"
] | 366 | 2019-02-20T07:15:08.000Z | 2022-03-31T13:37:23.000Z | Tests/Plot/LamWind/test_Slot_LSRPM_plot.py | Eomys/Pyleecan | 4d7f0cbabf0311006963e7a2f435db2ecd901118 | [
"Apache-2.0"
] | 74 | 2019-01-24T01:47:31.000Z | 2022-02-25T05:44:42.000Z | # -*- coding: utf-8 -*-
from os.path import join
import pytest
import matplotlib.pyplot as plt
from numpy import array, pi, zeros
from pyleecan.Classes.Frame import Frame
from pyleecan.Classes.LamSlotWind import LamSlotWind
from pyleecan.Classes.LamSquirrelCage import LamSquirrelCage
from pyleecan.Classes.MachineDFIM import MachineDFIM
from pyleecan.Classes.Shaft import Shaft
from pyleecan.Classes.VentilationCirc import VentilationCirc
from pyleecan.Classes.VentilationPolar import VentilationPolar
from pyleecan.Classes.VentilationTrap import VentilationTrap
from pyleecan.Classes.Winding import Winding
from pyleecan.Classes.WindingUD import WindingUD
from pyleecan.Classes.SlotWLSRPM import SlotWLSRPM
from Tests import save_plot_path as save_path
# from Tests.Plot.LamWind import wind_mat, wind_mat2
"""unittest for Lamination with winding plot"""
@pytest.fixture
def machine():
    """Build a fresh outer-stator LamSlotWind with an LSRPM slot for each test."""
    # Close any figure left over by a previous test before plotting again.
    plt.close("all")
    test_obj = LamSlotWind(
        Rint=50.7e-3,
        Rext=72.5e-3,
        is_internal=False,
        is_stator=True,
        L1=0.95,
        Nrvd=0,
        Wrvd=0,
    )
    test_obj.slot = SlotWLSRPM(
        Zs=12, W1=8e-3, W3=11.6e-3, H2=14.8e-3, R1=0.75e-3, H3=2e-3
    )
    return test_obj
# wind_mat = zeros((2, 2, 6, 4)) # Nrad, Ntan, Zs, qs
# wind_mat[0, 0, :, :] = array(
# [[1, 0, 1, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, -1, -1, 0], [0, -1, 0, 0, 0, 1]]
# ).T
# wind_mat[1, 0, :, :] = array(
# [[0, 0, 0, 0, 0, 0], [-1, 0, -1, 0, 0, -1], [0, 0, 0, 0, 1, 0], [0, 1, 0, 1, 0, 0]]
# ).T
# wind_mat[0, 1, :, :] = array(
# [[-1, 0, 0, 0, 1, 0], [0, 0, 0, 1, 0, 0], [0, 1, 0, 0, 0, 0], [0, 0, -1, 0, 0, -1]]
# ).T
# wind_mat[1, 1, :, :] = array(
# [[0, 0, 0, -1, -1, 0], [1, 0, 0, 0, 0, 1], [0, -1, 1, 0, 0, 0], [0, 0, 0, 0, 0, 0]]
# ).T
###
wind_mat_LSRPM = zeros((2, 2, 12, 6)) # Nrad, Ntan, Zs, qs
wind_mat_LSRPM[0, 0, :, :] = array(
[
[-1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0],
[0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0],
[0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
]
).T
wind_mat_LSRPM[1, 0, :, :] = array(
[
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[-1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0],
[0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0],
[0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1],
]
).T
wind_mat_LSRPM[0, 1, :, :] = array(
[
[0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1],
[1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
]
).T
wind_mat_LSRPM[1, 1, :, :] = array(
[
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1],
[1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0],
]
).T
def test_Lam_Wind_LSRPM_wind_tan(machine):
    """Test machine plot with Slot LSRPM and winding rad=1, tan=2 (smoke test: saves the figure, no assertion)."""
    machine.winding = WindingUD(wind_mat=wind_mat_LSRPM, qs=6, p=4, Lewout=0)
    machine.plot(is_show_fig=False)
    fig = plt.gcf()
    fig.savefig(join(save_path, "test_Lam_Wind_sLSRPM_2-tan-wind.png"))
    # 2 for lam + Zs*2 for wind
    # assert len(fig.axes[0].patches) == 26
def test_stator_slot_angle_opening(machine):
    """Smoke test: comp_angle_opening runs without error on the LSRPM slot."""
    machine.slot.comp_angle_opening()
def test_stator_slot_height_damper(machine):
    """Smoke test: comp_height_damper runs without error on the LSRPM slot."""
    machine.slot.comp_height_damper()
def test_stator_slot_height_wind(machine):
    """Smoke test: comp_height_wind runs without error on the LSRPM slot."""
    machine.slot.comp_height_wind()
def test_stator_slot_height(machine):
    """Smoke test: comp_height runs without error on the LSRPM slot."""
    machine.slot.comp_height()
| 29.863309 | 89 | 0.530956 |
from os.path import join
import pytest
import matplotlib.pyplot as plt
from numpy import array, pi, zeros
from pyleecan.Classes.Frame import Frame
from pyleecan.Classes.LamSlotWind import LamSlotWind
from pyleecan.Classes.LamSquirrelCage import LamSquirrelCage
from pyleecan.Classes.MachineDFIM import MachineDFIM
from pyleecan.Classes.Shaft import Shaft
from pyleecan.Classes.VentilationCirc import VentilationCirc
from pyleecan.Classes.VentilationPolar import VentilationPolar
from pyleecan.Classes.VentilationTrap import VentilationTrap
from pyleecan.Classes.Winding import Winding
from pyleecan.Classes.WindingUD import WindingUD
from pyleecan.Classes.SlotWLSRPM import SlotWLSRPM
from Tests import save_plot_path as save_path
@pytest.fixture
def machine():
    """Build a fresh outer-stator LamSlotWind with an LSRPM slot for each test."""
    # Close any figure left over by a previous test before plotting again.
    plt.close("all")
    test_obj = LamSlotWind(
        Rint=50.7e-3,
        Rext=72.5e-3,
        is_internal=False,
        is_stator=True,
        L1=0.95,
        Nrvd=0,
        Wrvd=0,
    )
    test_obj.slot = SlotWLSRPM(
        Zs=12, W1=8e-3, W3=11.6e-3, H2=14.8e-3, R1=0.75e-3, H3=2e-3
    )
    return test_obj
at_LSRPM = zeros((2, 2, 12, 6))
wind_mat_LSRPM[0, 0, :, :] = array(
[
[-1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0],
[0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0],
[0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
]
).T
wind_mat_LSRPM[1, 0, :, :] = array(
[
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[-1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0],
[0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0],
[0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1],
]
).T
wind_mat_LSRPM[0, 1, :, :] = array(
[
[0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1],
[1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
]
).T
wind_mat_LSRPM[1, 1, :, :] = array(
[
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1],
[1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0],
[0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0],
]
).T
def test_Lam_Wind_LSRPM_wind_tan(machine):
machine.winding = WindingUD(wind_mat=wind_mat_LSRPM, qs=6, p=4, Lewout=0)
machine.plot(is_show_fig=False)
fig = plt.gcf()
fig.savefig(join(save_path, "test_Lam_Wind_sLSRPM_2-tan-wind.png"))
def test_stator_slot_angle_opening(machine):
machine.slot.comp_angle_opening()
def test_stator_slot_height_damper(machine):
machine.slot.comp_height_damper()
def test_stator_slot_height_wind(machine):
machine.slot.comp_height_wind()
def test_stator_slot_height(machine):
machine.slot.comp_height()
| true | true |
1c45cc01daf7a254c2fe16fed376b3fc58df574f | 1,643 | py | Python | src/nucleotide/component/windows/msvc/atom/version.py | dmilos/nucleotide | aad5d60508c9e4baf4888069284f2cb5c9fd7c55 | [
"Apache-2.0"
] | 1 | 2020-09-04T13:00:04.000Z | 2020-09-04T13:00:04.000Z | src/nucleotide/component/windows/msvc/atom/version.py | dmilos/nucleotide | aad5d60508c9e4baf4888069284f2cb5c9fd7c55 | [
"Apache-2.0"
] | 1 | 2020-04-10T01:52:32.000Z | 2020-04-10T09:11:29.000Z | src/nucleotide/component/windows/msvc/atom/version.py | dmilos/nucleotide | aad5d60508c9e4baf4888069284f2cb5c9fd7c55 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python2
# Copyright 2015 Dejan D. M. Milosavljevic
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import nucleotide
import nucleotide.component
import nucleotide.component.function
def _windows_version_MSVC_VERSION( P_data ):
if( False == ( 'msvc' in P_data ) ):
return None
print( " ||" + str( P_data ) + "||" )
return P_data[ 'msvc' ][0]
# Atom describing the MSVC compiler-version probe for Windows host/guest builds.
atom_windows_CCVERSION = {
    'platform': {
        'host': 'Windows',
        'guest': 'Windows',
    },
    'cc': {
        'vendor': 'Microsoft',
        'name': 'msvc',
        # 'X' appears to be a placeholder/wildcard version -- TODO confirm.
        'version': 'X',
    },
    'config': {
        # Resolved at configure time from the probe data.
        'MSVC_VERSION': _windows_version_MSVC_VERSION,
    },
    'name': 'compiler:version',
    'class': ['compiler:version', 'windows:compiler:version'],
}
class Version:
    """Hooks the MSVC compiler-version atom into the component registry."""

    def __init__(self):
        pass

    @staticmethod
    def extend(P_option):
        # Register the atom under the 'windows:compiler:version' class key.
        nucleotide.component.function.extend(
            P_option, 'windows:compiler:version', atom_windows_CCVERSION)

    @staticmethod
    def check(self):
        # NOTE(review): declared @staticmethod yet takes 'self' -- callers must
        # pass one positional argument; confirm the intended signature.
        pass
| 27.847458 | 109 | 0.625685 |
import os
import subprocess
import nucleotide
import nucleotide.component
import nucleotide.component.function
def _windows_version_MSVC_VERSION( P_data ):
if( False == ( 'msvc' in P_data ) ):
return None
print( " ||" + str( P_data ) + "||" )
return P_data[ 'msvc' ][0]
atom_windows_CCVERSION = {
'platform' : {
'host' : 'Windows',
'guest' : 'Windows'
},
'cc' : {
'vendor': 'Microsoft',
'name': 'msvc',
'version': 'X'
},
'config' : {
'MSVC_VERSION' : _windows_version_MSVC_VERSION
},
'name' :'compiler:version',
'class': [ 'compiler:version', 'windows:compiler:version' ]
}
class Version:
def __init__(self):
pass
@staticmethod
def extend(P_option):
nucleotide.component.function.extend( P_option, 'windows:compiler:version', atom_windows_CCVERSION )
@staticmethod
def check(self):
pass
| true | true |
1c45ccb173f55ce3b5f37cb85aa9fc13c1fdd831 | 791 | py | Python | david/modules/event/admin.py | ktmud/david | 4b8d6f804b73cdfa1a8ddf784077fa9a39f1e36f | [
"MIT"
] | 2 | 2016-04-07T08:21:32.000Z | 2020-11-26T11:49:20.000Z | david/modules/event/admin.py | ktmud/david | 4b8d6f804b73cdfa1a8ddf784077fa9a39f1e36f | [
"MIT"
] | null | null | null | david/modules/event/admin.py | ktmud/david | 4b8d6f804b73cdfa1a8ddf784077fa9a39f1e36f | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from david.core.article.admin import ArticleAdmin, ModelAdmin
from david.ext.admin import _
from .model import Event
class EventAdmin(ArticleAdmin):
    """Admin model view for Event, specializing the generic article admin."""

    # Localized display labels for every exposed column.
    column_labels = dict(
        title=_('Title'),
        slug=_('Slug'),
        id=_('ID'),
        tags=_('Tags'),
        create_at=_('Create at'),
        update_at=_('Update at'),
        summary=_('Summary'),
        link=_('Out link'),
        content=_('Content')
    )
    # Columns shown in the list view, and which of them allow sorting.
    column_list = ('id', 'title', 'create_at')
    column_sortable_list = ('id', 'title')
    # Fields editable in the create/edit form.
    form_columns = ('title', 'content', 'link', 'create_at',)
    form_widget_args = dict(
        link=dict(placeholder='http://...')
    )
# Admin views exported by this module as (view instance, number) pairs;
# the second element (20) is presumably an ordering weight -- confirm with
# the module that consumes `views`.
views = [
    (EventAdmin(Event, name=_('Event')), 20)
]
| 27.275862 | 61 | 0.542351 |
from david.core.article.admin import ArticleAdmin, ModelAdmin
from david.ext.admin import _
from .model import Event
class EventAdmin(ArticleAdmin):
column_labels = dict(
title=_('Title'),
slug=_('Slug'),
id=_('ID'),
tags=_('Tags'),
create_at=_('Create at'),
update_at=_('Update at'),
summary=_('Summary'),
link=_('Out link'),
content=_('Content')
)
column_list = ('id', 'title', 'create_at')
column_sortable_list = ('id', 'title')
form_columns = ('title', 'content', 'link', 'create_at',)
form_widget_args = dict(
link=dict(placeholder='http://...')
)
views = [
(EventAdmin(Event, name=_('Event')), 20)
]
| true | true |
1c45ccf0ef4a8a26e47030e104f785132e53c97d | 31,133 | py | Python | flexmock_test.py | sagara-/flexmock | 0b24b769cd04e234d4921089053707a5565aa007 | [
"BSD-2-Clause"
] | null | null | null | flexmock_test.py | sagara-/flexmock | 0b24b769cd04e234d4921089053707a5565aa007 | [
"BSD-2-Clause"
] | null | null | null | flexmock_test.py | sagara-/flexmock | 0b24b769cd04e234d4921089053707a5565aa007 | [
"BSD-2-Clause"
] | null | null | null | #-*- coding: utf8 -*-
from flexmock import FlexMock
from flexmock import AlreadyMocked
from flexmock import AndExecuteNotSupportedForClassMocks
from flexmock import AttemptingToMockBuiltin
from flexmock import Expectation
from flexmock import FlexmockContainer
from flexmock import FlexmockException
from flexmock import InvalidMethodSignature
from flexmock import InvalidExceptionClass
from flexmock import InvalidExceptionMessage
from flexmock import MethodDoesNotExist
from flexmock import MethodNotCalled
from flexmock import MethodCalledOutOfOrder
from flexmock import ReturnValue
from flexmock import flexmock
from flexmock import flexmock_nose
from flexmock import _format_args
import sys
import unittest
def module_level_function(some, args):
  """Render the two arguments as a single '<some>, <args>' string."""
  return '{0}, {1}'.format(some, args)
def _tear_down(runner):
  # Unbound call to the base-class tearDown, bypassing any override on
  # runner's own class -- presumably so the teardown hook installed by
  # flexmock can be triggered explicitly mid-test (confirm against flexmock).
  return unittest.TestCase.tearDown(runner)
def assertRaises(exception, method, *kargs, **kwargs):
  """Assert that method(*kargs, **kwargs) raises `exception`.

  Returns None when the expected exception type is raised; otherwise raises
  a plain Exception describing what actually happened.
  """
  try:
    method(*kargs, **kwargs)
  except exception:
    return
  except Exception as err:
    # The original bare "except: pass" swallowed unrelated failures
    # (including SystemExit/KeyboardInterrupt) and mislabeled them as
    # "not raised"; report the actual exception instead.
    raise Exception('%s not raised (%r raised instead)' %
                    (exception.__name__, err))
  # No exception at all.
  raise Exception('%s not raised' % exception.__name__)
class TestFlexmock(unittest.TestCase):
  def test_flexmock_should_create_mock_object(self):
    """flexmock() with no arguments returns a plain FlexMock instance."""
    mock = flexmock()
    assert isinstance(mock, FlexMock)
def test_flexmock_should_create_mock_object_from_dict(self):
mock = flexmock(foo='foo', bar='bar')
assert 'foo' == mock.foo
assert 'bar' == mock.bar
def test_flexmock_should_add_expectations(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo')
assert 'method_foo' in [x.method for x in mock._flexmock_expectations]
def test_flexmock_should_return_value(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar')
mock.should_receive('method_bar').and_return('value_baz')
assert 'value_bar' == mock.method_foo()
assert 'value_baz' == mock.method_bar()
def test_flexmock_should_accept_shortcuts_for_creating_mock_object(self):
mock = flexmock(attr1='value 1', attr2=lambda: 'returning 2')
assert 'value 1' == mock.attr1
assert 'returning 2' == mock.attr2()
def test_flexmock_should_accept_shortcuts_for_creating_expectations(self):
class Foo:
def method1(self): pass
def method2(self): pass
foo = Foo()
flexmock(foo, method1='returning 1', method2='returning 2')
assert 'returning 1' == foo.method1()
assert 'returning 2' == foo.method2()
assert 'returning 2' == foo.method2()
def test_flexmock_expectations_returns_all(self):
mock = flexmock(name='temp')
assert 0 == len(mock._flexmock_expectations)
mock.should_receive('method_foo')
mock.should_receive('method_bar')
assert 2 == len(mock._flexmock_expectations)
def test_flexmock_expectations_returns_named_expectation(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo')
assert 'method_foo' == mock._get_flexmock_expectation('method_foo').method
def test_flexmock_expectations_returns_none_if_not_found(self):
mock = flexmock(name='temp')
assert mock._get_flexmock_expectation('method_foo') is None
def test_flexmock_should_check_parameters(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').with_args('bar').and_return(1)
mock.should_receive('method_foo').with_args('baz').and_return(2)
assert 1 == mock.method_foo('bar')
assert 2 == mock.method_foo('baz')
  def test_flexmock_should_keep_track_of_calls(self):
    """Each (method, args) expectation records its own call count."""
    mock = flexmock(name='temp')
    mock.should_receive('method_foo').with_args('foo').and_return(0)
    mock.should_receive('method_foo').with_args('bar').and_return(1)
    mock.should_receive('method_foo').with_args('baz').and_return(2)
    mock.method_foo('bar')
    mock.method_foo('bar')
    mock.method_foo('baz')
    # The 'foo' variant was never invoked.
    expectation = mock._get_flexmock_expectation('method_foo', ('foo',))
    assert 0 == expectation.times_called
    expectation = mock._get_flexmock_expectation('method_foo', ('bar',))
    assert 2 == expectation.times_called
    expectation = mock._get_flexmock_expectation('method_foo', ('baz',))
    assert 1 == expectation.times_called
def test_flexmock_should_set_expectation_call_numbers(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').times(1)
expectation = mock._get_flexmock_expectation('method_foo')
assertRaises(MethodNotCalled, expectation.verify)
mock.method_foo()
expectation.verify()
def test_flexmock_should_check_raised_exceptions(self):
mock = flexmock(name='temp')
class FakeException(Exception):
pass
mock.should_receive('method_foo').and_raise(FakeException)
assertRaises(FakeException, mock.method_foo)
assert 1 == mock._get_flexmock_expectation('method_foo').times_called
def test_flexmock_should_check_raised_exceptions_instance_with_args(self):
mock = flexmock(name='temp')
class FakeException(Exception):
def __init__(self, arg, arg2):
pass
mock.should_receive('method_foo').and_raise(FakeException(1, arg2=2))
assertRaises(FakeException, mock.method_foo)
assert 1 == mock._get_flexmock_expectation('method_foo').times_called
def test_flexmock_should_check_raised_exceptions_class_with_args(self):
mock = flexmock(name='temp')
class FakeException(Exception):
def __init__(self, arg, arg2):
pass
mock.should_receive('method_foo').and_raise(FakeException, 1, arg2=2)
assertRaises(FakeException, mock.method_foo)
assert 1 == mock._get_flexmock_expectation('method_foo').times_called
def test_flexmock_should_match_any_args_by_default(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('bar')
mock.should_receive('method_foo').with_args('baz').and_return('baz')
assert 'bar' == mock.method_foo()
assert 'bar' == mock.method_foo(1)
assert 'bar', mock.method_foo('foo' == 'bar')
assert 'baz' == mock.method_foo('baz')
def test_expectation_dot_mock_should_return_mock(self):
mock = flexmock(name='temp')
assert mock == mock.should_receive('method_foo').mock
def test_flexmock_should_create_partial_new_style_object_mock(self):
class User(object):
def __init__(self, name=None):
self.name = name
def get_name(self):
return self.name
def set_name(self, name):
self.name = name
user = User()
flexmock(user)
user.should_receive('get_name').and_return('john')
user.set_name('mike')
assert 'john' == user.get_name()
def test_flexmock_should_create_partial_old_style_object_mock(self):
class User:
def __init__(self, name=None):
self.name = name
def get_name(self):
return self.name
def set_name(self, name):
self.name = name
user = User()
flexmock(user)
user.should_receive('get_name').and_return('john')
user.set_name('mike')
assert 'john' == user.get_name()
def test_flexmock_should_create_partial_new_style_class_mock(self):
class User(object):
def __init__(self): pass
def get_name(self): pass
flexmock(User)
User.should_receive('get_name').and_return('mike')
user = User()
assert 'mike' == user.get_name()
def test_flexmock_should_create_partial_old_style_class_mock(self):
class User:
def __init__(self): pass
def get_name(self): pass
flexmock(User)
User.should_receive('get_name').and_return('mike')
user = User()
assert 'mike' == user.get_name()
def test_flexmock_should_match_expectations_against_builtin_classes(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').with_args(str).and_return('got a string')
mock.should_receive('method_foo').with_args(int).and_return('got an int')
assert 'got a string' == mock.method_foo('string!')
assert 'got an int' == mock.method_foo(23)
assertRaises(InvalidMethodSignature, mock.method_foo, 2.0)
def test_flexmock_should_match_expectations_against_user_defined_classes(self):
mock = flexmock(name='temp')
class Foo:
pass
mock.should_receive('method_foo').with_args(Foo).and_return('got a Foo')
assert 'got a Foo' == mock.method_foo(Foo())
assertRaises(InvalidMethodSignature, mock.method_foo, 1)
def test_flexmock_configures_global_mocks_dict(self):
mock = flexmock(name='temp')
for expectations in FlexmockContainer.flexmock_objects.values():
assert 0 == len(expectations)
mock.should_receive('method_foo')
for expectations in FlexmockContainer.flexmock_objects.values():
assert 1 == len(expectations)
def test_flexmock_teardown_verifies_mocks(self):
mock = flexmock(name='temp')
mock.should_receive('verify_expectations').times(1)
assertRaises(MethodNotCalled, _tear_down, self)
def test_flexmock_teardown_does_not_verify_stubs(self):
mock = flexmock(name='temp')
mock.should_receive('verify_expectations')
_tear_down(self)
def test_flexmock_preserves_stubbed_object_methods_between_tests(self):
class User:
def get_name(self):
return 'mike'
user = User()
flexmock(user).should_receive('get_name').and_return('john')
assert 'john' == user.get_name()
_tear_down(self)
assert 'mike' == user.get_name()
def test_flexmock_preserves_stubbed_class_methods_between_tests(self):
class User:
def get_name(self):
return 'mike'
user = User()
flexmock(User).should_receive('get_name').and_return('john')
assert 'john' == user.get_name()
_tear_down(self)
assert 'mike' == user.get_name()
def test_flexmock_removes_new_stubs_from_objects_after_tests(self):
class User:
def get_name(self): pass
user = User()
saved = user.get_name
flexmock(user).should_receive('get_name').and_return('john')
assert saved != user.get_name
assert 'john' == user.get_name()
_tear_down(self)
assert saved == user.get_name
def test_flexmock_removes_new_stubs_from_classes_after_tests(self):
class User:
def get_name(self): pass
user = User()
saved = user.get_name
flexmock(User).should_receive('get_name').and_return('john')
assert saved != user.get_name
assert 'john' == user.get_name()
_tear_down(self)
assert saved == user.get_name
def test_flexmock_removes_stubs_from_multiple_objects_on_teardown(self):
class User:
def get_name(self): pass
class Group:
def get_name(self): pass
user = User()
group = User()
saved1 = user.get_name
saved2 = group.get_name
flexmock(user).should_receive('get_name').and_return('john').once
flexmock(group).should_receive('get_name').and_return('john').once
assert saved1 != user.get_name
assert saved2 != group.get_name
assert 'john' == user.get_name()
assert 'john' == group.get_name()
_tear_down(self)
assert saved1 == user.get_name
assert saved2 == group.get_name
def test_flexmock_removes_stubs_from_multiple_classes_on_teardown(self):
class User:
def get_name(self): pass
class Group:
def get_name(self): pass
user = User()
group = User()
saved1 = user.get_name
saved2 = group.get_name
flexmock(User).should_receive('get_name').and_return('john')
flexmock(Group).should_receive('get_name').and_return('john')
assert saved1 != user.get_name
assert saved2 != group.get_name
assert 'john' == user.get_name()
assert 'john' == group.get_name()
_tear_down(self)
assert saved1 == user.get_name
assert saved2 == group.get_name
def test_flexmock_respects_at_least_when_called_less_than_requested(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('bar').at_least.twice
expectation = mock._get_flexmock_expectation('method_foo')
assert Expectation.AT_LEAST == expectation.modifier
mock.method_foo()
assertRaises(MethodNotCalled, _tear_down, self)
def test_flexmock_respects_at_least_when_called_requested_number(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar').at_least.once
expectation = mock._get_flexmock_expectation('method_foo')
assert Expectation.AT_LEAST == expectation.modifier
mock.method_foo()
_tear_down(self)
def test_flexmock_respects_at_least_when_called_more_than_requested(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar').at_least.once
expectation = mock._get_flexmock_expectation('method_foo')
assert Expectation.AT_LEAST == expectation.modifier
mock.method_foo()
mock.method_foo()
_tear_down(self)
def test_flexmock_respects_at_most_when_called_less_than_requested(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('bar').at_most.twice
expectation = mock._get_flexmock_expectation('method_foo')
assert Expectation.AT_MOST == expectation.modifier
mock.method_foo()
_tear_down(self)
def test_flexmock_respects_at_most_when_called_requested_number(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar').at_most.once
expectation = mock._get_flexmock_expectation('method_foo')
assert Expectation.AT_MOST == expectation.modifier
mock.method_foo()
_tear_down(self)
def test_flexmock_respects_at_most_when_called_more_than_requested(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar').at_most.once
expectation = mock._get_flexmock_expectation('method_foo')
assert Expectation.AT_MOST == expectation.modifier
mock.method_foo()
mock.method_foo()
assertRaises(MethodNotCalled, _tear_down, self)
def test_flexmock_treats_once_as_times_one(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar').once
expectation = mock._get_flexmock_expectation('method_foo')
assert 1 == expectation.expected_calls
assertRaises(MethodNotCalled, _tear_down, self)
def test_flexmock_treats_twice_as_times_two(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').twice.and_return('value_bar')
expectation = mock._get_flexmock_expectation('method_foo')
assert 2 == expectation.expected_calls
assertRaises(MethodNotCalled, _tear_down, self)
def test_flexmock_works_with_never_when_true(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar').never
expectation = mock._get_flexmock_expectation('method_foo')
assert 0 == expectation.expected_calls
_tear_down(self)
def test_flexmock_works_with_never_when_false(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar').never
mock.method_foo()
assertRaises(MethodNotCalled, _tear_down, self)
def test_flexmock_get_flexmock_expectation_should_work_with_args(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').with_args('value_bar')
assert mock._get_flexmock_expectation('method_foo', 'value_bar')
def test_flexmock_function_should_return_previously_mocked_object(self):
class User(object): pass
user = User()
foo = flexmock(user)
assert foo._mock == flexmock(user)
def test_flexmock_should_not_return_class_object_if_mocking_instance(self):
class User:
def method(self): pass
user = User()
user2 = User()
class_mock = flexmock(User).should_receive(
'method').and_return('class').mock
user_mock = flexmock(user).should_receive(
'method').and_return('instance').mock
assert class_mock is not user_mock
assert 'instance' == user.method()
assert 'class' == user2.method()
def test_flexmock_should_blow_up_on_and_execute_for_class_mock(self):
class User:
def foo(self):
return 'class'
try:
flexmock(User).should_receive('foo').and_execute
raise Exception('and_execute should have raised an exception')
except AndExecuteNotSupportedForClassMocks:
pass
  def test_flexmock_should_mock_new_instances(self):
    """new_instances= makes Group() construction yield the given object."""
    class User(object): pass
    class Group(object): pass
    user = User()
    flexmock(Group, new_instances=user)
    assert user is Group()
def test_flexmock_should_mock_new_instances_with_multiple_params(self):
class User(object): pass
class Group(object):
def __init__(self, arg, arg2):
pass
user = User()
flexmock(Group, new_instances=user)
assert user is Group(1, 2)
def test_flexmock_should_revert_new_instances_on_teardown(self):
class User(object): pass
class Group(object): pass
user = User()
group = Group()
flexmock(Group, new_instances=user)
assert user is Group()
_tear_down(self)
assert group.__class__ == Group().__class__
def test_flexmock_should_cleanup_added_methods_and_attributes(self):
class Group(object): pass
flexmock(Group)
_tear_down(self)
for method in FlexMock.UPDATED_ATTRS:
assert method not in dir(Group)
def test_flexmock_should_cleanup_after_exception(self):
class User:
def method2(self): pass
class Group:
def method1(self): pass
flexmock(Group)
flexmock(User)
Group.should_receive('method1').once
User.should_receive('method2').once
assertRaises(MethodNotCalled, _tear_down, self)
for method in FlexMock.UPDATED_ATTRS:
assert method not in dir(Group)
for method in FlexMock.UPDATED_ATTRS:
assert method not in dir(User)
def test_flexmock_and_execute_respects_matched_expectations(self):
class Group(object):
def method1(self, arg1, arg2='b'):
return '%s:%s' % (arg1, arg2)
def method2(self, arg):
return arg
group = Group()
flexmock(group).should_receive('method1').twice.and_execute
assert 'a:c' == group.method1('a', arg2='c')
assert 'a:b' == group.method1('a')
group.should_receive('method2').once.with_args('c').and_execute
assert 'c' == group.method2('c')
_tear_down(self)
def test_flexmock_and_execute_respects_unmatched_expectations(self):
class Group(object):
def method1(self, arg1, arg2='b'):
return '%s:%s' % (arg1, arg2)
def method2(self): pass
group = Group()
flexmock(group).should_receive('method1').at_least.once.and_execute
assertRaises(MethodNotCalled, _tear_down, self)
flexmock(group)
group.should_receive('method2').with_args('a').once.and_execute
group.should_receive('method2').with_args('not a')
group.method2('not a')
assertRaises(MethodNotCalled, _tear_down, self)
def test_flexmock_doesnt_error_on_properly_ordered_expectations(self):
class Foo(object):
def foo(self): pass
def method1(self): pass
def bar(self): pass
def baz(self): pass
flexmock(Foo).should_receive('foo')
flexmock(Foo).should_receive('method1').with_args('a').ordered
flexmock(Foo).should_receive('bar')
flexmock(Foo).should_receive('method1').with_args('b').ordered
flexmock(Foo).should_receive('baz')
Foo.bar()
Foo.method1('a')
Foo.method1('b')
Foo.baz()
Foo.foo()
def test_flexmock_errors_on_improperly_ordered_expectations(self):
class Foo(object):
def foo(self): pass
def method1(self): pass
def bar(self): pass
def baz(self): pass
flexmock(Foo)
Foo.should_receive('foo')
Foo.should_receive('method1').with_args('a').ordered
Foo.should_receive('bar')
Foo.should_receive('method1').with_args('b').ordered
Foo.should_receive('baz')
Foo.bar()
Foo.bar()
Foo.foo()
assertRaises(MethodCalledOutOfOrder, Foo.method1, 'b')
def test_flexmock_should_accept_multiple_return_values(self):
class Foo:
def method1(self): pass
foo = Foo()
flexmock(foo).should_receive('method1').and_return(1, 5).and_return(2)
assert (1, 5) == foo.method1()
assert 2 == foo.method1()
assert (1, 5) == foo.method1()
assert 2 == foo.method1()
def test_flexmock_should_accept_multiple_return_values_with_shortcut(self):
class Foo:
def method1(self): pass
foo = Foo()
flexmock(foo).should_receive('method1').and_return(1, 2).one_by_one
assert 1 == foo.method1()
assert 2 == foo.method1()
assert 1 == foo.method1()
assert 2 == foo.method1()
def test_flexmock_should_mix_multiple_return_values_with_exceptions(self):
class Foo:
def method1(self): pass
foo = Foo()
flexmock(foo).should_receive('method1').and_return(1).and_raise(Exception)
assert 1 == foo.method1()
assertRaises(Exception, foo.method1)
assert 1 == foo.method1()
assertRaises(Exception, foo.method1)
def test_flexmock_should_match_types_on_multiple_arguments(self):
class Foo:
def method1(self): pass
foo = Foo()
flexmock(foo).should_receive('method1').with_args(str, int).and_return('ok')
assert 'ok', foo.method1('some string' == 12)
assertRaises(InvalidMethodSignature, foo.method1, 12, 32)
assertRaises(InvalidMethodSignature, foo.method1, 12, 'some string')
assertRaises(InvalidMethodSignature, foo.method1, 'string', 12, 14)
def test_flexmock_should_match_types_on_multiple_arguments_generic(self):
class Foo:
def method1(self): pass
foo = Foo()
flexmock(foo).should_receive('method1').with_args(
object, object, object).and_return('ok')
assert 'ok', foo.method1('some string', None == 12)
assert 'ok', foo.method1((1,), None == 12)
assert 'ok', foo.method1(12, 14 == [])
assert 'ok', foo.method1('some string', 'another one' == False)
assertRaises(InvalidMethodSignature, foo.method1, 'string', 12)
assertRaises(InvalidMethodSignature, foo.method1, 'string', 12, 13, 14)
def test_flexmock_should_match_types_on_multiple_arguments_classes(self):
class Foo:
def method1(self): pass
class Bar: pass
foo = Foo()
bar = Bar()
flexmock(foo).should_receive('method1').with_args(
object, Bar).and_return('ok')
assert 'ok', foo.method1('some string' == bar)
assertRaises(InvalidMethodSignature, foo.method1, bar, 'some string')
assertRaises(InvalidMethodSignature, foo.method1, 12, 'some string')
def test_flexmock_should_match_keyword_arguments(self):
class Foo:
def method1(self): pass
foo = Foo()
flexmock(foo).should_receive('method1').with_args(1, arg3=3, arg2=2).twice
foo.method1(1, arg2=2, arg3=3)
foo.method1(1, arg3=3, arg2=2)
_tear_down(self)
flexmock(foo).should_receive('method1').with_args(1, arg3=3, arg2=2)
assertRaises(InvalidMethodSignature, foo.method1, arg2=2, arg3=3)
assertRaises(InvalidMethodSignature, foo.method1, 1, arg2=2, arg3=4)
assertRaises(InvalidMethodSignature, foo.method1, 1)
def test_flexmock_should_match_keyword_arguments_works_with_and_execute(self):
class Foo:
def method1(self, arg1, arg2=None, arg3=None):
return '%s%s%s' % (arg1, arg2, arg3)
foo = Foo()
flexmock(foo).should_receive('method1').with_args(
1, arg3=3, arg2=2).and_execute.once
assert '123' == foo.method1(1, arg2=2, arg3=3)
def test_flexmock_should_mock_private_methods(self):
class Foo:
def __private_method(self):
return 'foo'
def public_method(self):
return self.__private_method()
foo = Foo()
flexmock(foo).should_receive('__private_method').and_return('bar')
assert 'bar' == foo.public_method()
def test_flexmock_should_mock_private_class_methods(self):
class Foo:
pass
flexmock(Foo).should_receive('__iter__').and_yield(1, 2, 3)
assert [1, 2, 3] == [x for x in Foo()]
  def test_flexmock_should_mock_generators(self):
    """and_yield() turns the stubbed method into a generator of the values."""
    class Gen:
      def foo(self): pass
    gen = Gen()
    flexmock(gen).should_receive('foo').and_yield(*range(1, 10))
    output = [val for val in gen.foo()]
    assert [val for val in range(1, 10)] == output
def test_flexmock_should_verify_correct_spy_return_values(self):
class User:
def get_stuff(self): return 'real', 'stuff'
user = User()
flexmock(user).should_receive(
'get_stuff').and_execute.and_return('real', 'stuff')
assert ('real', 'stuff') == user.get_stuff()
def test_flexmock_should_verify_spy_raises_correct_exception_class(self):
class FakeException(Exception):
def __init__(self, param, param2):
self.message = '%s, %s' % (param, param2)
Exception.__init__(self)
class User:
def get_stuff(self): raise FakeException(1, 2)
user = User()
flexmock(user).should_receive(
'get_stuff').and_execute.and_raise(FakeException, 1, 2)
user.get_stuff()
def test_flexmock_should_verify_spy_matches_exception_message(self):
class FakeException(Exception):
def __init__(self, param, param2):
self.p1 = param
self.p2 = param2
Exception.__init__(self, param)
def __str__(self):
return '%s, %s' % (self.p1, self.p2)
class User:
def get_stuff(self): raise FakeException(1, 2)
user = User()
flexmock(user).should_receive(
'get_stuff').and_execute.and_raise(FakeException, 2, 1)
assertRaises(InvalidExceptionMessage, user.get_stuff)
def test_flexmock_should_blow_up_on_wrong_exception_type(self):
class User:
def get_stuff(self): raise AlreadyMocked('foo')
user = User()
flexmock(user).should_receive(
'get_stuff').and_execute.and_raise(MethodNotCalled)
assertRaises(InvalidExceptionClass, user.get_stuff)
def test_flexmock_should_blow_up_on_wrong_spy_return_values(self):
class User:
def get_stuff(self): return 'real', 'stuff'
def get_more_stuff(self): return 'other', 'stuff'
user = User()
flexmock(user).should_receive(
'get_stuff').and_execute.and_return('other', 'stuff')
assertRaises(InvalidMethodSignature, user.get_stuff)
flexmock(user).should_receive(
'get_more_stuff').and_execute.and_return()
assertRaises(InvalidMethodSignature, user.get_more_stuff)
def test_flexmock_should_mock_same_class_twice(self):
class Foo: pass
flexmock(Foo)
flexmock(Foo)
def test_flexmock_and_execute_should_not_clobber_original_method(self):
class User:
def get_stuff(self): return 'real', 'stuff'
user = User()
flexmock(user).should_receive('get_stuff').and_execute
flexmock(user).should_receive('get_stuff').and_execute
assert ('real', 'stuff') == user.get_stuff()
def test_flexmock_should_properly_restore_static_methods(self):
class User:
@staticmethod
def get_stuff(): return 'ok!'
assert 'ok!' == User.get_stuff()
flexmock(User).should_receive('get_stuff')
assert User.get_stuff() is None
_tear_down(self)
assert 'ok!' == User.get_stuff()
def test_flexmock_should_properly_restore_undecorated_static_methods(self):
class User:
def get_stuff(): return 'ok!'
get_stuff = staticmethod(get_stuff)
assert 'ok!' == User.get_stuff()
flexmock(User).should_receive('get_stuff')
assert User.get_stuff() is None
_tear_down(self)
assert 'ok!' == User.get_stuff()
def test_flexmock_should_properly_restore_module_level_functions(self):
if 'flexmock_test' in sys.modules:
mod = sys.modules['flexmock_test']
else:
mod = sys.modules['__main__']
flexmock(mod).should_receive('module_level_function')
assert None == module_level_function(1, 2)
_tear_down(self)
assert '1, 2' == module_level_function(1, 2)
def test_flexmock_should_properly_restore_class_methods(self):
class User:
@classmethod
def get_stuff(cls):
return cls.__name__
assert 'User' == User.get_stuff()
flexmock(User).should_receive('get_stuff').and_return('foo')
assert 'foo' == User.get_stuff()
_tear_down(self)
assert 'User' == User.get_stuff()
def test_and_execute_should_match_return_value_class(self):
class User: pass
user = User()
foo = flexmock(foo=lambda: ('bar', 'baz'),
bar=lambda: user,
baz=lambda: None,
bax=lambda: None)
foo.should_receive('foo').and_execute.and_return(str, str)
foo.should_receive('bar').and_execute.and_return(User)
foo.should_receive('baz').and_execute.and_return(object)
foo.should_receive('bax').and_execute.and_return(None)
assert ('bar', 'baz') == foo.foo()
assert user == foo.bar()
assert None == foo.baz()
assert None == foo.bax()
def test_new_instances_should_blow_up_on_should_receive(self):
class User(object): pass
mock = flexmock(User, new_instances=None)
assertRaises(FlexmockException, mock.should_receive, 'foo')
def test_should_call_alias_should_receive_and_execute(self):
class Foo:
def get_stuff(self):
return 'yay'
foo = Foo()
flexmock(foo).should_call('get_stuff').and_return('yay').once
assertRaises(MethodNotCalled, _tear_down, self)
def test_flexmock_should_fail_mocking_nonexistent_methods(self):
class User: pass
user = User()
assertRaises(MethodDoesNotExist,
flexmock(user).should_receive, 'nonexistent')
def test_flexmock_should_not_explode_on_unicode_formatting(self):
if sys.version_info >= (3, 0):
formatted = _format_args(
'method', {'kargs' : (chr(0x86C7),), 'kwargs' : {}})
assert formatted == 'method("蛇")'
else:
formatted = _format_args(
'method', {'kargs' : (unichr(0x86C7),), 'kwargs' : {}})
assert formatted == 'method("%s")' % unichr(0x86C7)
def test_return_value_should_not_explode_on_unicode_values(self):
class Foo:
def method(self): pass
if sys.version_info >= (3, 0):
return_value = ReturnValue(chr(0x86C7))
assert '%s' % return_value == '蛇'
else:
return_value = ReturnValue(unichr(0x86C7))
assert unicode(return_value) == unichr(0x86C7)
def test_pass_thru_should_call_original_method_only_once(self):
class Nyan(object):
def __init__(self):
self.n = 0
def method(self):
self.n += 1
obj = Nyan()
flexmock(obj)
obj.should_call('method')
obj.method()
self.assertEqual(obj.n, 1)
def test_should_call_works_for_same_method_with_different_args(self):
class Foo:
def method(self, arg):
pass
foo = Foo()
flexmock(foo).should_call('method').with_args('foo').once
flexmock(foo).should_call('method').with_args('bar').once
foo.method('foo')
foo.method('bar')
_tear_down(self)
def test_should_call_fails_properly_for_same_method_with_different_args(self):
class Foo:
def method(self, arg):
pass
foo = Foo()
flexmock(foo).should_call('method').with_args('foo').once
flexmock(foo).should_call('method').with_args('bar').once
foo.method('foo')
assertRaises(MethodNotCalled, _tear_down, self)
def test_flexmock_should_give_reasonable_error_for_builtins(self):
assertRaises(AttemptingToMockBuiltin, flexmock, object)
if __name__ == '__main__':
unittest.main()
| 36.201163 | 81 | 0.710886 |
from flexmock import FlexMock
from flexmock import AlreadyMocked
from flexmock import AndExecuteNotSupportedForClassMocks
from flexmock import AttemptingToMockBuiltin
from flexmock import Expectation
from flexmock import FlexmockContainer
from flexmock import FlexmockException
from flexmock import InvalidMethodSignature
from flexmock import InvalidExceptionClass
from flexmock import InvalidExceptionMessage
from flexmock import MethodDoesNotExist
from flexmock import MethodNotCalled
from flexmock import MethodCalledOutOfOrder
from flexmock import ReturnValue
from flexmock import flexmock
from flexmock import flexmock_nose
from flexmock import _format_args
import sys
import unittest
def module_level_function(some, args):
return "%s, %s" % (some, args)
def _tear_down(runner):
return unittest.TestCase.tearDown(runner)
def assertRaises(exception, method, *kargs, **kwargs):
try:
method(*kargs, **kwargs)
except exception:
assert True
return
except:
pass
raise Exception('%s not raised' % exception.__name__)
class TestFlexmock(unittest.TestCase):
def test_flexmock_should_create_mock_object(self):
mock = flexmock()
assert isinstance(mock, FlexMock)
def test_flexmock_should_create_mock_object_from_dict(self):
mock = flexmock(foo='foo', bar='bar')
assert 'foo' == mock.foo
assert 'bar' == mock.bar
def test_flexmock_should_add_expectations(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo')
assert 'method_foo' in [x.method for x in mock._flexmock_expectations]
def test_flexmock_should_return_value(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar')
mock.should_receive('method_bar').and_return('value_baz')
assert 'value_bar' == mock.method_foo()
assert 'value_baz' == mock.method_bar()
def test_flexmock_should_accept_shortcuts_for_creating_mock_object(self):
mock = flexmock(attr1='value 1', attr2=lambda: 'returning 2')
assert 'value 1' == mock.attr1
assert 'returning 2' == mock.attr2()
def test_flexmock_should_accept_shortcuts_for_creating_expectations(self):
class Foo:
def method1(self): pass
def method2(self): pass
foo = Foo()
flexmock(foo, method1='returning 1', method2='returning 2')
assert 'returning 1' == foo.method1()
assert 'returning 2' == foo.method2()
assert 'returning 2' == foo.method2()
def test_flexmock_expectations_returns_all(self):
mock = flexmock(name='temp')
assert 0 == len(mock._flexmock_expectations)
mock.should_receive('method_foo')
mock.should_receive('method_bar')
assert 2 == len(mock._flexmock_expectations)
def test_flexmock_expectations_returns_named_expectation(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo')
assert 'method_foo' == mock._get_flexmock_expectation('method_foo').method
def test_flexmock_expectations_returns_none_if_not_found(self):
mock = flexmock(name='temp')
assert mock._get_flexmock_expectation('method_foo') is None
def test_flexmock_should_check_parameters(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').with_args('bar').and_return(1)
mock.should_receive('method_foo').with_args('baz').and_return(2)
assert 1 == mock.method_foo('bar')
assert 2 == mock.method_foo('baz')
def test_flexmock_should_keep_track_of_calls(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').with_args('foo').and_return(0)
mock.should_receive('method_foo').with_args('bar').and_return(1)
mock.should_receive('method_foo').with_args('baz').and_return(2)
mock.method_foo('bar')
mock.method_foo('bar')
mock.method_foo('baz')
expectation = mock._get_flexmock_expectation('method_foo', ('foo',))
assert 0 == expectation.times_called
expectation = mock._get_flexmock_expectation('method_foo', ('bar',))
assert 2 == expectation.times_called
expectation = mock._get_flexmock_expectation('method_foo', ('baz',))
assert 1 == expectation.times_called
def test_flexmock_should_set_expectation_call_numbers(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').times(1)
expectation = mock._get_flexmock_expectation('method_foo')
assertRaises(MethodNotCalled, expectation.verify)
mock.method_foo()
expectation.verify()
def test_flexmock_should_check_raised_exceptions(self):
mock = flexmock(name='temp')
class FakeException(Exception):
pass
mock.should_receive('method_foo').and_raise(FakeException)
assertRaises(FakeException, mock.method_foo)
assert 1 == mock._get_flexmock_expectation('method_foo').times_called
def test_flexmock_should_check_raised_exceptions_instance_with_args(self):
mock = flexmock(name='temp')
class FakeException(Exception):
def __init__(self, arg, arg2):
pass
mock.should_receive('method_foo').and_raise(FakeException(1, arg2=2))
assertRaises(FakeException, mock.method_foo)
assert 1 == mock._get_flexmock_expectation('method_foo').times_called
def test_flexmock_should_check_raised_exceptions_class_with_args(self):
mock = flexmock(name='temp')
class FakeException(Exception):
def __init__(self, arg, arg2):
pass
mock.should_receive('method_foo').and_raise(FakeException, 1, arg2=2)
assertRaises(FakeException, mock.method_foo)
assert 1 == mock._get_flexmock_expectation('method_foo').times_called
def test_flexmock_should_match_any_args_by_default(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('bar')
mock.should_receive('method_foo').with_args('baz').and_return('baz')
assert 'bar' == mock.method_foo()
assert 'bar' == mock.method_foo(1)
assert 'bar', mock.method_foo('foo' == 'bar')
assert 'baz' == mock.method_foo('baz')
def test_expectation_dot_mock_should_return_mock(self):
mock = flexmock(name='temp')
assert mock == mock.should_receive('method_foo').mock
def test_flexmock_should_create_partial_new_style_object_mock(self):
class User(object):
def __init__(self, name=None):
self.name = name
def get_name(self):
return self.name
def set_name(self, name):
self.name = name
user = User()
flexmock(user)
user.should_receive('get_name').and_return('john')
user.set_name('mike')
assert 'john' == user.get_name()
def test_flexmock_should_create_partial_old_style_object_mock(self):
class User:
def __init__(self, name=None):
self.name = name
def get_name(self):
return self.name
def set_name(self, name):
self.name = name
user = User()
flexmock(user)
user.should_receive('get_name').and_return('john')
user.set_name('mike')
assert 'john' == user.get_name()
def test_flexmock_should_create_partial_new_style_class_mock(self):
class User(object):
def __init__(self): pass
def get_name(self): pass
flexmock(User)
User.should_receive('get_name').and_return('mike')
user = User()
assert 'mike' == user.get_name()
def test_flexmock_should_create_partial_old_style_class_mock(self):
class User:
def __init__(self): pass
def get_name(self): pass
flexmock(User)
User.should_receive('get_name').and_return('mike')
user = User()
assert 'mike' == user.get_name()
def test_flexmock_should_match_expectations_against_builtin_classes(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').with_args(str).and_return('got a string')
mock.should_receive('method_foo').with_args(int).and_return('got an int')
assert 'got a string' == mock.method_foo('string!')
assert 'got an int' == mock.method_foo(23)
assertRaises(InvalidMethodSignature, mock.method_foo, 2.0)
def test_flexmock_should_match_expectations_against_user_defined_classes(self):
mock = flexmock(name='temp')
class Foo:
pass
mock.should_receive('method_foo').with_args(Foo).and_return('got a Foo')
assert 'got a Foo' == mock.method_foo(Foo())
assertRaises(InvalidMethodSignature, mock.method_foo, 1)
def test_flexmock_configures_global_mocks_dict(self):
mock = flexmock(name='temp')
for expectations in FlexmockContainer.flexmock_objects.values():
assert 0 == len(expectations)
mock.should_receive('method_foo')
for expectations in FlexmockContainer.flexmock_objects.values():
assert 1 == len(expectations)
def test_flexmock_teardown_verifies_mocks(self):
mock = flexmock(name='temp')
mock.should_receive('verify_expectations').times(1)
assertRaises(MethodNotCalled, _tear_down, self)
def test_flexmock_teardown_does_not_verify_stubs(self):
mock = flexmock(name='temp')
mock.should_receive('verify_expectations')
_tear_down(self)
def test_flexmock_preserves_stubbed_object_methods_between_tests(self):
class User:
def get_name(self):
return 'mike'
user = User()
flexmock(user).should_receive('get_name').and_return('john')
assert 'john' == user.get_name()
_tear_down(self)
assert 'mike' == user.get_name()
def test_flexmock_preserves_stubbed_class_methods_between_tests(self):
class User:
def get_name(self):
return 'mike'
user = User()
flexmock(User).should_receive('get_name').and_return('john')
assert 'john' == user.get_name()
_tear_down(self)
assert 'mike' == user.get_name()
def test_flexmock_removes_new_stubs_from_objects_after_tests(self):
class User:
def get_name(self): pass
user = User()
saved = user.get_name
flexmock(user).should_receive('get_name').and_return('john')
assert saved != user.get_name
assert 'john' == user.get_name()
_tear_down(self)
assert saved == user.get_name
def test_flexmock_removes_new_stubs_from_classes_after_tests(self):
class User:
def get_name(self): pass
user = User()
saved = user.get_name
flexmock(User).should_receive('get_name').and_return('john')
assert saved != user.get_name
assert 'john' == user.get_name()
_tear_down(self)
assert saved == user.get_name
def test_flexmock_removes_stubs_from_multiple_objects_on_teardown(self):
class User:
def get_name(self): pass
class Group:
def get_name(self): pass
user = User()
group = User()
saved1 = user.get_name
saved2 = group.get_name
flexmock(user).should_receive('get_name').and_return('john').once
flexmock(group).should_receive('get_name').and_return('john').once
assert saved1 != user.get_name
assert saved2 != group.get_name
assert 'john' == user.get_name()
assert 'john' == group.get_name()
_tear_down(self)
assert saved1 == user.get_name
assert saved2 == group.get_name
def test_flexmock_removes_stubs_from_multiple_classes_on_teardown(self):
class User:
def get_name(self): pass
class Group:
def get_name(self): pass
user = User()
group = User()
saved1 = user.get_name
saved2 = group.get_name
flexmock(User).should_receive('get_name').and_return('john')
flexmock(Group).should_receive('get_name').and_return('john')
assert saved1 != user.get_name
assert saved2 != group.get_name
assert 'john' == user.get_name()
assert 'john' == group.get_name()
_tear_down(self)
assert saved1 == user.get_name
assert saved2 == group.get_name
def test_flexmock_respects_at_least_when_called_less_than_requested(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('bar').at_least.twice
expectation = mock._get_flexmock_expectation('method_foo')
assert Expectation.AT_LEAST == expectation.modifier
mock.method_foo()
assertRaises(MethodNotCalled, _tear_down, self)
def test_flexmock_respects_at_least_when_called_requested_number(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar').at_least.once
expectation = mock._get_flexmock_expectation('method_foo')
assert Expectation.AT_LEAST == expectation.modifier
mock.method_foo()
_tear_down(self)
def test_flexmock_respects_at_least_when_called_more_than_requested(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar').at_least.once
expectation = mock._get_flexmock_expectation('method_foo')
assert Expectation.AT_LEAST == expectation.modifier
mock.method_foo()
mock.method_foo()
_tear_down(self)
def test_flexmock_respects_at_most_when_called_less_than_requested(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('bar').at_most.twice
expectation = mock._get_flexmock_expectation('method_foo')
assert Expectation.AT_MOST == expectation.modifier
mock.method_foo()
_tear_down(self)
def test_flexmock_respects_at_most_when_called_requested_number(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar').at_most.once
expectation = mock._get_flexmock_expectation('method_foo')
assert Expectation.AT_MOST == expectation.modifier
mock.method_foo()
_tear_down(self)
def test_flexmock_respects_at_most_when_called_more_than_requested(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar').at_most.once
expectation = mock._get_flexmock_expectation('method_foo')
assert Expectation.AT_MOST == expectation.modifier
mock.method_foo()
mock.method_foo()
assertRaises(MethodNotCalled, _tear_down, self)
def test_flexmock_treats_once_as_times_one(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar').once
expectation = mock._get_flexmock_expectation('method_foo')
assert 1 == expectation.expected_calls
assertRaises(MethodNotCalled, _tear_down, self)
def test_flexmock_treats_twice_as_times_two(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').twice.and_return('value_bar')
expectation = mock._get_flexmock_expectation('method_foo')
assert 2 == expectation.expected_calls
assertRaises(MethodNotCalled, _tear_down, self)
def test_flexmock_works_with_never_when_true(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar').never
expectation = mock._get_flexmock_expectation('method_foo')
assert 0 == expectation.expected_calls
_tear_down(self)
def test_flexmock_works_with_never_when_false(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').and_return('value_bar').never
mock.method_foo()
assertRaises(MethodNotCalled, _tear_down, self)
def test_flexmock_get_flexmock_expectation_should_work_with_args(self):
mock = flexmock(name='temp')
mock.should_receive('method_foo').with_args('value_bar')
assert mock._get_flexmock_expectation('method_foo', 'value_bar')
def test_flexmock_function_should_return_previously_mocked_object(self):
class User(object): pass
user = User()
foo = flexmock(user)
assert foo._mock == flexmock(user)
def test_flexmock_should_not_return_class_object_if_mocking_instance(self):
class User:
def method(self): pass
user = User()
user2 = User()
class_mock = flexmock(User).should_receive(
'method').and_return('class').mock
user_mock = flexmock(user).should_receive(
'method').and_return('instance').mock
assert class_mock is not user_mock
assert 'instance' == user.method()
assert 'class' == user2.method()
def test_flexmock_should_blow_up_on_and_execute_for_class_mock(self):
class User:
def foo(self):
return 'class'
try:
flexmock(User).should_receive('foo').and_execute
raise Exception('and_execute should have raised an exception')
except AndExecuteNotSupportedForClassMocks:
pass
def test_flexmock_should_mock_new_instances(self):
class User(object): pass
class Group(object): pass
user = User()
flexmock(Group, new_instances=user)
assert user is Group()
def test_flexmock_should_mock_new_instances_with_multiple_params(self):
class User(object): pass
class Group(object):
def __init__(self, arg, arg2):
pass
user = User()
flexmock(Group, new_instances=user)
assert user is Group(1, 2)
def test_flexmock_should_revert_new_instances_on_teardown(self):
class User(object): pass
class Group(object): pass
user = User()
group = Group()
flexmock(Group, new_instances=user)
assert user is Group()
_tear_down(self)
assert group.__class__ == Group().__class__
def test_flexmock_should_cleanup_added_methods_and_attributes(self):
class Group(object): pass
flexmock(Group)
_tear_down(self)
for method in FlexMock.UPDATED_ATTRS:
assert method not in dir(Group)
def test_flexmock_should_cleanup_after_exception(self):
class User:
def method2(self): pass
class Group:
def method1(self): pass
flexmock(Group)
flexmock(User)
Group.should_receive('method1').once
User.should_receive('method2').once
assertRaises(MethodNotCalled, _tear_down, self)
for method in FlexMock.UPDATED_ATTRS:
assert method not in dir(Group)
for method in FlexMock.UPDATED_ATTRS:
assert method not in dir(User)
def test_flexmock_and_execute_respects_matched_expectations(self):
class Group(object):
def method1(self, arg1, arg2='b'):
return '%s:%s' % (arg1, arg2)
def method2(self, arg):
return arg
group = Group()
flexmock(group).should_receive('method1').twice.and_execute
assert 'a:c' == group.method1('a', arg2='c')
assert 'a:b' == group.method1('a')
group.should_receive('method2').once.with_args('c').and_execute
assert 'c' == group.method2('c')
_tear_down(self)
def test_flexmock_and_execute_respects_unmatched_expectations(self):
class Group(object):
def method1(self, arg1, arg2='b'):
return '%s:%s' % (arg1, arg2)
def method2(self): pass
group = Group()
flexmock(group).should_receive('method1').at_least.once.and_execute
assertRaises(MethodNotCalled, _tear_down, self)
flexmock(group)
group.should_receive('method2').with_args('a').once.and_execute
group.should_receive('method2').with_args('not a')
group.method2('not a')
assertRaises(MethodNotCalled, _tear_down, self)
def test_flexmock_doesnt_error_on_properly_ordered_expectations(self):
class Foo(object):
def foo(self): pass
def method1(self): pass
def bar(self): pass
def baz(self): pass
flexmock(Foo).should_receive('foo')
flexmock(Foo).should_receive('method1').with_args('a').ordered
flexmock(Foo).should_receive('bar')
flexmock(Foo).should_receive('method1').with_args('b').ordered
flexmock(Foo).should_receive('baz')
Foo.bar()
Foo.method1('a')
Foo.method1('b')
Foo.baz()
Foo.foo()
def test_flexmock_errors_on_improperly_ordered_expectations(self):
class Foo(object):
def foo(self): pass
def method1(self): pass
def bar(self): pass
def baz(self): pass
flexmock(Foo)
Foo.should_receive('foo')
Foo.should_receive('method1').with_args('a').ordered
Foo.should_receive('bar')
Foo.should_receive('method1').with_args('b').ordered
Foo.should_receive('baz')
Foo.bar()
Foo.bar()
Foo.foo()
assertRaises(MethodCalledOutOfOrder, Foo.method1, 'b')
def test_flexmock_should_accept_multiple_return_values(self):
class Foo:
def method1(self): pass
foo = Foo()
flexmock(foo).should_receive('method1').and_return(1, 5).and_return(2)
assert (1, 5) == foo.method1()
assert 2 == foo.method1()
assert (1, 5) == foo.method1()
assert 2 == foo.method1()
def test_flexmock_should_accept_multiple_return_values_with_shortcut(self):
class Foo:
def method1(self): pass
foo = Foo()
flexmock(foo).should_receive('method1').and_return(1, 2).one_by_one
assert 1 == foo.method1()
assert 2 == foo.method1()
assert 1 == foo.method1()
assert 2 == foo.method1()
def test_flexmock_should_mix_multiple_return_values_with_exceptions(self):
class Foo:
def method1(self): pass
foo = Foo()
flexmock(foo).should_receive('method1').and_return(1).and_raise(Exception)
assert 1 == foo.method1()
assertRaises(Exception, foo.method1)
assert 1 == foo.method1()
assertRaises(Exception, foo.method1)
def test_flexmock_should_match_types_on_multiple_arguments(self):
class Foo:
def method1(self): pass
foo = Foo()
flexmock(foo).should_receive('method1').with_args(str, int).and_return('ok')
assert 'ok', foo.method1('some string' == 12)
assertRaises(InvalidMethodSignature, foo.method1, 12, 32)
assertRaises(InvalidMethodSignature, foo.method1, 12, 'some string')
assertRaises(InvalidMethodSignature, foo.method1, 'string', 12, 14)
def test_flexmock_should_match_types_on_multiple_arguments_generic(self):
class Foo:
def method1(self): pass
foo = Foo()
flexmock(foo).should_receive('method1').with_args(
object, object, object).and_return('ok')
assert 'ok', foo.method1('some string', None == 12)
assert 'ok', foo.method1((1,), None == 12)
assert 'ok', foo.method1(12, 14 == [])
assert 'ok', foo.method1('some string', 'another one' == False)
assertRaises(InvalidMethodSignature, foo.method1, 'string', 12)
assertRaises(InvalidMethodSignature, foo.method1, 'string', 12, 13, 14)
def test_flexmock_should_match_types_on_multiple_arguments_classes(self):
class Foo:
def method1(self): pass
class Bar: pass
foo = Foo()
bar = Bar()
flexmock(foo).should_receive('method1').with_args(
object, Bar).and_return('ok')
assert 'ok', foo.method1('some string' == bar)
assertRaises(InvalidMethodSignature, foo.method1, bar, 'some string')
assertRaises(InvalidMethodSignature, foo.method1, 12, 'some string')
def test_flexmock_should_match_keyword_arguments(self):
class Foo:
def method1(self): pass
foo = Foo()
flexmock(foo).should_receive('method1').with_args(1, arg3=3, arg2=2).twice
foo.method1(1, arg2=2, arg3=3)
foo.method1(1, arg3=3, arg2=2)
_tear_down(self)
flexmock(foo).should_receive('method1').with_args(1, arg3=3, arg2=2)
assertRaises(InvalidMethodSignature, foo.method1, arg2=2, arg3=3)
assertRaises(InvalidMethodSignature, foo.method1, 1, arg2=2, arg3=4)
assertRaises(InvalidMethodSignature, foo.method1, 1)
def test_flexmock_should_match_keyword_arguments_works_with_and_execute(self):
class Foo:
def method1(self, arg1, arg2=None, arg3=None):
return '%s%s%s' % (arg1, arg2, arg3)
foo = Foo()
flexmock(foo).should_receive('method1').with_args(
1, arg3=3, arg2=2).and_execute.once
assert '123' == foo.method1(1, arg2=2, arg3=3)
def test_flexmock_should_mock_private_methods(self):
class Foo:
def __private_method(self):
return 'foo'
def public_method(self):
return self.__private_method()
foo = Foo()
flexmock(foo).should_receive('__private_method').and_return('bar')
assert 'bar' == foo.public_method()
def test_flexmock_should_mock_private_class_methods(self):
class Foo:
pass
flexmock(Foo).should_receive('__iter__').and_yield(1, 2, 3)
assert [1, 2, 3] == [x for x in Foo()]
def test_flexmock_should_mock_generators(self):
class Gen:
def foo(self): pass
gen = Gen()
flexmock(gen).should_receive('foo').and_yield(*range(1, 10))
output = [val for val in gen.foo()]
assert [val for val in range(1, 10)] == output
def test_flexmock_should_verify_correct_spy_return_values(self):
class User:
def get_stuff(self): return 'real', 'stuff'
user = User()
flexmock(user).should_receive(
'get_stuff').and_execute.and_return('real', 'stuff')
assert ('real', 'stuff') == user.get_stuff()
def test_flexmock_should_verify_spy_raises_correct_exception_class(self):
class FakeException(Exception):
def __init__(self, param, param2):
self.message = '%s, %s' % (param, param2)
Exception.__init__(self)
class User:
def get_stuff(self): raise FakeException(1, 2)
user = User()
flexmock(user).should_receive(
'get_stuff').and_execute.and_raise(FakeException, 1, 2)
user.get_stuff()
def test_flexmock_should_verify_spy_matches_exception_message(self):
class FakeException(Exception):
def __init__(self, param, param2):
self.p1 = param
self.p2 = param2
Exception.__init__(self, param)
def __str__(self):
return '%s, %s' % (self.p1, self.p2)
class User:
def get_stuff(self): raise FakeException(1, 2)
user = User()
flexmock(user).should_receive(
'get_stuff').and_execute.and_raise(FakeException, 2, 1)
assertRaises(InvalidExceptionMessage, user.get_stuff)
def test_flexmock_should_blow_up_on_wrong_exception_type(self):
class User:
def get_stuff(self): raise AlreadyMocked('foo')
user = User()
flexmock(user).should_receive(
'get_stuff').and_execute.and_raise(MethodNotCalled)
assertRaises(InvalidExceptionClass, user.get_stuff)
def test_flexmock_should_blow_up_on_wrong_spy_return_values(self):
class User:
def get_stuff(self): return 'real', 'stuff'
def get_more_stuff(self): return 'other', 'stuff'
user = User()
flexmock(user).should_receive(
'get_stuff').and_execute.and_return('other', 'stuff')
assertRaises(InvalidMethodSignature, user.get_stuff)
flexmock(user).should_receive(
'get_more_stuff').and_execute.and_return()
assertRaises(InvalidMethodSignature, user.get_more_stuff)
def test_flexmock_should_mock_same_class_twice(self):
class Foo: pass
flexmock(Foo)
flexmock(Foo)
def test_flexmock_and_execute_should_not_clobber_original_method(self):
class User:
def get_stuff(self): return 'real', 'stuff'
user = User()
flexmock(user).should_receive('get_stuff').and_execute
flexmock(user).should_receive('get_stuff').and_execute
assert ('real', 'stuff') == user.get_stuff()
def test_flexmock_should_properly_restore_static_methods(self):
class User:
@staticmethod
def get_stuff(): return 'ok!'
assert 'ok!' == User.get_stuff()
flexmock(User).should_receive('get_stuff')
assert User.get_stuff() is None
_tear_down(self)
assert 'ok!' == User.get_stuff()
def test_flexmock_should_properly_restore_undecorated_static_methods(self):
class User:
def get_stuff(): return 'ok!'
get_stuff = staticmethod(get_stuff)
assert 'ok!' == User.get_stuff()
flexmock(User).should_receive('get_stuff')
assert User.get_stuff() is None
_tear_down(self)
assert 'ok!' == User.get_stuff()
def test_flexmock_should_properly_restore_module_level_functions(self):
if 'flexmock_test' in sys.modules:
mod = sys.modules['flexmock_test']
else:
mod = sys.modules['__main__']
flexmock(mod).should_receive('module_level_function')
assert None == module_level_function(1, 2)
_tear_down(self)
assert '1, 2' == module_level_function(1, 2)
def test_flexmock_should_properly_restore_class_methods(self):
class User:
@classmethod
def get_stuff(cls):
return cls.__name__
assert 'User' == User.get_stuff()
flexmock(User).should_receive('get_stuff').and_return('foo')
assert 'foo' == User.get_stuff()
_tear_down(self)
assert 'User' == User.get_stuff()
def test_and_execute_should_match_return_value_class(self):
class User: pass
user = User()
foo = flexmock(foo=lambda: ('bar', 'baz'),
bar=lambda: user,
baz=lambda: None,
bax=lambda: None)
foo.should_receive('foo').and_execute.and_return(str, str)
foo.should_receive('bar').and_execute.and_return(User)
foo.should_receive('baz').and_execute.and_return(object)
foo.should_receive('bax').and_execute.and_return(None)
assert ('bar', 'baz') == foo.foo()
assert user == foo.bar()
assert None == foo.baz()
assert None == foo.bax()
def test_new_instances_should_blow_up_on_should_receive(self):
class User(object): pass
mock = flexmock(User, new_instances=None)
assertRaises(FlexmockException, mock.should_receive, 'foo')
def test_should_call_alias_should_receive_and_execute(self):
class Foo:
def get_stuff(self):
return 'yay'
foo = Foo()
flexmock(foo).should_call('get_stuff').and_return('yay').once
assertRaises(MethodNotCalled, _tear_down, self)
def test_flexmock_should_fail_mocking_nonexistent_methods(self):
class User: pass
user = User()
assertRaises(MethodDoesNotExist,
flexmock(user).should_receive, 'nonexistent')
def test_flexmock_should_not_explode_on_unicode_formatting(self):
if sys.version_info >= (3, 0):
formatted = _format_args(
'method', {'kargs' : (chr(0x86C7),), 'kwargs' : {}})
assert formatted == 'method("蛇")'
else:
formatted = _format_args(
'method', {'kargs' : (unichr(0x86C7),), 'kwargs' : {}})
assert formatted == 'method("%s")' % unichr(0x86C7)
def test_return_value_should_not_explode_on_unicode_values(self):
class Foo:
def method(self): pass
if sys.version_info >= (3, 0):
return_value = ReturnValue(chr(0x86C7))
assert '%s' % return_value == '蛇'
else:
return_value = ReturnValue(unichr(0x86C7))
assert unicode(return_value) == unichr(0x86C7)
def test_pass_thru_should_call_original_method_only_once(self):
class Nyan(object):
def __init__(self):
self.n = 0
def method(self):
self.n += 1
obj = Nyan()
flexmock(obj)
obj.should_call('method')
obj.method()
self.assertEqual(obj.n, 1)
def test_should_call_works_for_same_method_with_different_args(self):
class Foo:
def method(self, arg):
pass
foo = Foo()
flexmock(foo).should_call('method').with_args('foo').once
flexmock(foo).should_call('method').with_args('bar').once
foo.method('foo')
foo.method('bar')
_tear_down(self)
def test_should_call_fails_properly_for_same_method_with_different_args(self):
class Foo:
def method(self, arg):
pass
foo = Foo()
flexmock(foo).should_call('method').with_args('foo').once
flexmock(foo).should_call('method').with_args('bar').once
foo.method('foo')
assertRaises(MethodNotCalled, _tear_down, self)
def test_flexmock_should_give_reasonable_error_for_builtins(self):
assertRaises(AttemptingToMockBuiltin, flexmock, object)
if __name__ == '__main__':
unittest.main()
| true | true |
1c45ccf6c4e027fc171552bcb089538da702ede9 | 3,773 | py | Python | dataviz/timeline_gibraltar.py | Udzu/pudzu | 5a0302830b052fc54feba891eb7bf634957a9d90 | [
"MIT"
] | 119 | 2017-07-22T15:02:30.000Z | 2021-08-02T10:42:59.000Z | dataviz/timeline_gibraltar.py | Udzu/pudzu | 5a0302830b052fc54feba891eb7bf634957a9d90 | [
"MIT"
] | null | null | null | dataviz/timeline_gibraltar.py | Udzu/pudzu | 5a0302830b052fc54feba891eb7bf634957a9d90 | [
"MIT"
] | 28 | 2017-08-04T14:28:41.000Z | 2019-11-27T23:46:14.000Z | from pudzu.charts import *
from pudzu.dates import *
from collections import defaultdict
df = pd.read_csv("datasets/timeline_gibraltar.csv")
df_events = pd.read_csv("datasets/timeline_gibraltar_events.csv")
START, END, INTERVAL = 1000, 2000, 250
PHOTOS = ["http://www.kindredgroup.com/wp-content/uploads/2016/11/gibralta.jpg", "http://www.visitgibraltar.gi/images/site_images/HcZOv_gugu.jpg"]
COLORS = { 'm': VegaPalette10.GREEN,
's': VegaPalette10.ORANGE,
'u': VegaPalette10.RED }
ICONS = { 's': MaskUnion(..., "white", masks=Image.open("icons/trebuchet.png").to_rgba().resize_fixed_aspect(width=15)) }
# timeline
def colorfn(d, w, h):
return COLORS[d['type']]
def labelfn(d, w):
s = d['period'].replace("\\n", "\n")
if "Medina" in d['period'] and w < 20: return None
if "Medina" in d['period'] and w < 50: s = "MS"
if "Granada" in d['period'] and w < 50: s = "Gra'da"
return Image.from_text(s, arial(10), "white", align="center", padding=2, beard_line=True)
def eventfn(d):
return ICONS[d['type']].pad(2,0)
ilabels = {(TimeChartLabelPosition.BELOW, TimeChartLabelPosition.INSIDE): labelfn,
TimeChartLabelPosition.ABOVE: lambda: " ", TimeChartLabelPosition.BELOW: lambda: " "}
llabels = ["AD {}".format(start) for start in range(START, END, INTERVAL)]
data = [df.filter_rows("start<{} and end>{}".format(start+INTERVAL, start)).update_columns(start=lambda v: v-start, end=lambda v: v-start) for start in range(START, END, INTERVAL)]
event_data = [df_events.filter_rows("time<{} and time>={}".format(time+INTERVAL, time)).update_columns(time=lambda v: v-time) for time in range(START, END, INTERVAL)]
chart = time_chart(800, 40, data, "start", "end", colorfn, label_font=arial(10), interval_label_key=ilabels,
event_data=event_data, event_label_key={TimeChartLabelPosition.ABOVE: eventfn},
xmin=0, xmax=INTERVAL, grid_font=arial(10), grid_labels=lambda v: "+{}".format(v), grid_interval=50, labels_left=llabels).pad(2, bg="black")
# legend
footer_text = ["control: ",
Rectangle(20, COLORS['m']), " Muslim ",
Rectangle(20, COLORS['s']), " Spanish ",
Rectangle(20, COLORS['u']), " British ",
" ",
ICONS['s'].pad((0,0,0,3),0), " under siege"
]
footer = Image.from_multitext(footer_text, [arial(12, bold=True)] + [arial(12)]*(len(footer_text)-1), "white", img_offset=-5)
chart = Image.from_column([chart, footer.pad((0,10), 0)], bg="black")
# photos
images = Image.from_column([Image.from_url_with_cache(u).crop_to_aspect(320,200).resize_fixed_aspect(height=180) for u in PHOTOS])
# bar chart
totals = defaultdict(lambda: 0)
for _,d in df.iterrows(): totals[d['type']] += d['end'] - d['start']
tot_data = pd.DataFrame([ totals[c] for c in 'msu' ], index=["Muslim", "Spanish", "British"])
tot_palette = [COLORS[c] for c in 'msu']
tot_bar = bar_chart(tot_data, 40, 300, fg="white", bg="black", label_font=arial(12), clabels=BarChartLabelPosition.INSIDE, grid_interval=100,
colors=lambda c,r: tot_palette[r], spacing=5)
tot_title = Image.from_text("# years of control", arial(16, bold=True), "white")
tot_img = Image.from_column([tot_title, tot_bar], padding=5, bg="black")
chart = Image.from_row([chart, images, tot_img], bg="black", padding=5)
title = Image.from_text("The rock + under siege: a political timeline of Gibraltar".upper(), arial(36, bold=True), "white")
img = Image.from_column([title.pad((0,10), 0), chart], bg="black")
img.place(Image.from_text("/u/Udzu", font("arial", 12), fg="white", bg="black", padding=3).pad((1,1,0,0), "white"), align=1, padding=(5,5), copy=False)
img.save("output/timeline_gibraltar.png")
| 49.644737 | 181 | 0.662073 | from pudzu.charts import *
from pudzu.dates import *
from collections import defaultdict
df = pd.read_csv("datasets/timeline_gibraltar.csv")
df_events = pd.read_csv("datasets/timeline_gibraltar_events.csv")
START, END, INTERVAL = 1000, 2000, 250
PHOTOS = ["http://www.kindredgroup.com/wp-content/uploads/2016/11/gibralta.jpg", "http://www.visitgibraltar.gi/images/site_images/HcZOv_gugu.jpg"]
COLORS = { 'm': VegaPalette10.GREEN,
's': VegaPalette10.ORANGE,
'u': VegaPalette10.RED }
ICONS = { 's': MaskUnion(..., "white", masks=Image.open("icons/trebuchet.png").to_rgba().resize_fixed_aspect(width=15)) }
def colorfn(d, w, h):
return COLORS[d['type']]
def labelfn(d, w):
s = d['period'].replace("\\n", "\n")
if "Medina" in d['period'] and w < 20: return None
if "Medina" in d['period'] and w < 50: s = "MS"
if "Granada" in d['period'] and w < 50: s = "Gra'da"
return Image.from_text(s, arial(10), "white", align="center", padding=2, beard_line=True)
def eventfn(d):
return ICONS[d['type']].pad(2,0)
ilabels = {(TimeChartLabelPosition.BELOW, TimeChartLabelPosition.INSIDE): labelfn,
TimeChartLabelPosition.ABOVE: lambda: " ", TimeChartLabelPosition.BELOW: lambda: " "}
llabels = ["AD {}".format(start) for start in range(START, END, INTERVAL)]
data = [df.filter_rows("start<{} and end>{}".format(start+INTERVAL, start)).update_columns(start=lambda v: v-start, end=lambda v: v-start) for start in range(START, END, INTERVAL)]
event_data = [df_events.filter_rows("time<{} and time>={}".format(time+INTERVAL, time)).update_columns(time=lambda v: v-time) for time in range(START, END, INTERVAL)]
chart = time_chart(800, 40, data, "start", "end", colorfn, label_font=arial(10), interval_label_key=ilabels,
event_data=event_data, event_label_key={TimeChartLabelPosition.ABOVE: eventfn},
xmin=0, xmax=INTERVAL, grid_font=arial(10), grid_labels=lambda v: "+{}".format(v), grid_interval=50, labels_left=llabels).pad(2, bg="black")
# legend
footer_text = ["control: ",
Rectangle(20, COLORS['m']), " Muslim ",
Rectangle(20, COLORS['s']), " Spanish ",
Rectangle(20, COLORS['u']), " British ",
" ",
ICONS['s'].pad((0,0,0,3),0), " under siege"
]
footer = Image.from_multitext(footer_text, [arial(12, bold=True)] + [arial(12)]*(len(footer_text)-1), "white", img_offset=-5)
chart = Image.from_column([chart, footer.pad((0,10), 0)], bg="black")
# photos
images = Image.from_column([Image.from_url_with_cache(u).crop_to_aspect(320,200).resize_fixed_aspect(height=180) for u in PHOTOS])
# bar chart
totals = defaultdict(lambda: 0)
for _,d in df.iterrows(): totals[d['type']] += d['end'] - d['start']
tot_data = pd.DataFrame([ totals[c] for c in 'msu' ], index=["Muslim", "Spanish", "British"])
tot_palette = [COLORS[c] for c in 'msu']
tot_bar = bar_chart(tot_data, 40, 300, fg="white", bg="black", label_font=arial(12), clabels=BarChartLabelPosition.INSIDE, grid_interval=100,
colors=lambda c,r: tot_palette[r], spacing=5)
tot_title = Image.from_text("# years of control", arial(16, bold=True), "white")
tot_img = Image.from_column([tot_title, tot_bar], padding=5, bg="black")
chart = Image.from_row([chart, images, tot_img], bg="black", padding=5)
title = Image.from_text("The rock + under siege: a political timeline of Gibraltar".upper(), arial(36, bold=True), "white")
img = Image.from_column([title.pad((0,10), 0), chart], bg="black")
img.place(Image.from_text("/u/Udzu", font("arial", 12), fg="white", bg="black", padding=3).pad((1,1,0,0), "white"), align=1, padding=(5,5), copy=False)
img.save("output/timeline_gibraltar.png")
| true | true |
1c45cda6072debbc7728300318b2edc0c33241ba | 1,070 | py | Python | software/tbd/set_ids.py | mayhem/led-chandelier | 899caa8d81e6aac6e954f78b4f5b4ab101bf5257 | [
"MIT"
] | 2 | 2018-09-20T08:36:11.000Z | 2019-08-25T20:06:11.000Z | software/tbd/set_ids.py | mayhem/led-chandelier | 899caa8d81e6aac6e954f78b4f5b4ab101bf5257 | [
"MIT"
] | null | null | null | software/tbd/set_ids.py | mayhem/led-chandelier | 899caa8d81e6aac6e954f78b4f5b4ab101bf5257 | [
"MIT"
] | 1 | 2020-12-12T18:21:18.000Z | 2020-12-12T18:21:18.000Z | #!/usr/bin/python
import os
import sys
import math
from chandelier import Chandelier, BROADCAST
import generator as g
import filter as f
import random
import common
import function as s
from time import sleep, time
from color import Color
device = "/dev/ttyAMA0"
confirm = raw_input("This will clear all ids of all the bottles currently plugged in. Are you sure you want this? Type YES>")
if confirm != "YES":
print "abort!"
sys.exit(-1)
start_id = 1
if len(sys.argv) == 2:
start_id = int(sys.argv[1])
else:
print "Usage: %s <start_id>" % (sys.argv[0])
sys.exit(-1)
print "Starting with id %d" % start_id
red = s.ConstantColor(Color(255, 0, 0))
red.chain(f.Brightness(g.Sin(.25)))
ch = Chandelier()
ch.open(device)
ch.off(BROADCAST)
id = start_id
while True:
inp = raw_input("Hit enter to program node %d" % id)
if inp.startswith("q"):
break
ch.clear_ids()
ch.set_id(id)
ch.set_classes([[id]])
ch.send_pattern_to_class(0, red)
ch.next_pattern(id, 0)
sleep(1)
ch.off(id)
id += 1
print "done"
| 19.814815 | 125 | 0.669159 |
import os
import sys
import math
from chandelier import Chandelier, BROADCAST
import generator as g
import filter as f
import random
import common
import function as s
from time import sleep, time
from color import Color
device = "/dev/ttyAMA0"
confirm = raw_input("This will clear all ids of all the bottles currently plugged in. Are you sure you want this? Type YES>")
if confirm != "YES":
print "abort!"
sys.exit(-1)
start_id = 1
if len(sys.argv) == 2:
start_id = int(sys.argv[1])
else:
print "Usage: %s <start_id>" % (sys.argv[0])
sys.exit(-1)
print "Starting with id %d" % start_id
red = s.ConstantColor(Color(255, 0, 0))
red.chain(f.Brightness(g.Sin(.25)))
ch = Chandelier()
ch.open(device)
ch.off(BROADCAST)
id = start_id
while True:
inp = raw_input("Hit enter to program node %d" % id)
if inp.startswith("q"):
break
ch.clear_ids()
ch.set_id(id)
ch.set_classes([[id]])
ch.send_pattern_to_class(0, red)
ch.next_pattern(id, 0)
sleep(1)
ch.off(id)
id += 1
print "done"
| false | true |
1c45cf4bdee098de3ed2c46a413ab004e8e94cbf | 1,436 | py | Python | tests/test_tools_jobinfo.py | NERSC/pytokio | 22244718cf82567c50620cbe0e635dfc990de36b | [
"BSD-3-Clause-LBNL"
] | 22 | 2017-11-14T01:30:48.000Z | 2022-01-01T21:51:00.000Z | tests/test_tools_jobinfo.py | glennklockwood/pytokio | 22244718cf82567c50620cbe0e635dfc990de36b | [
"BSD-3-Clause-LBNL"
] | 39 | 2017-12-20T01:42:19.000Z | 2020-05-28T21:17:26.000Z | tests/test_tools_jobinfo.py | glennklockwood/pytokio | 22244718cf82567c50620cbe0e635dfc990de36b | [
"BSD-3-Clause-LBNL"
] | 5 | 2018-02-06T19:39:19.000Z | 2019-07-10T01:20:26.000Z | """Test jobinfo and all supported backends
"""
import tokio.tools.jobinfo
import tokiotest
def test_get_job_startend_slurm():
"""tools.jobinfo.get_job_startend, Slurm
"""
tokio.config.CONFIG["jobinfo_jobid_providers"] = ["slurm"]
start, end = tokio.tools.jobinfo.get_job_startend(
jobid=tokiotest.SAMPLE_DARSHAN_JOBID,
cache_file=tokiotest.SAMPLE_SLURM_CACHE_FILE)
print(start, end)
print(type(start), type(end))
assert start
assert end
assert start <= end
def test_get_job_startend_nerscjobsdb():
"""tools.jobinfo.get_job_startend, NerscJobsDb
"""
tokio.config.CONFIG["jobinfo_jobid_providers"] = ["nersc_jobsdb"]
start, end = tokio.tools.jobinfo.get_job_startend(
jobid=tokiotest.SAMPLE_DARSHAN_JOBID,
cache_file=tokiotest.SAMPLE_NERSCJOBSDB_FILE)
print(start, end)
print(type(start), type(end))
assert start
assert end
assert start <= end
def test_get_job_nodes_slurm():
"""tools.jobinfo.get_job_nodes, Slurm
"""
tokio.config.CONFIG["jobinfo_jobnodes_providers"] = ["slurm"]
jobnodes = tokio.tools.jobinfo.get_job_nodes(
jobid=tokiotest.SAMPLE_DARSHAN_JOBID,
cache_file=tokiotest.SAMPLE_SLURM_CACHE_FILE)
print(type(jobnodes), jobnodes)
assert jobnodes
if __name__ == "__main__":
test_get_job_startend_slurm()
test_get_job_startend_nerscjobsdb()
test_get_job_nodes_slurm()
| 31.217391 | 69 | 0.722145 | import tokio.tools.jobinfo
import tokiotest
def test_get_job_startend_slurm():
tokio.config.CONFIG["jobinfo_jobid_providers"] = ["slurm"]
start, end = tokio.tools.jobinfo.get_job_startend(
jobid=tokiotest.SAMPLE_DARSHAN_JOBID,
cache_file=tokiotest.SAMPLE_SLURM_CACHE_FILE)
print(start, end)
print(type(start), type(end))
assert start
assert end
assert start <= end
def test_get_job_startend_nerscjobsdb():
tokio.config.CONFIG["jobinfo_jobid_providers"] = ["nersc_jobsdb"]
start, end = tokio.tools.jobinfo.get_job_startend(
jobid=tokiotest.SAMPLE_DARSHAN_JOBID,
cache_file=tokiotest.SAMPLE_NERSCJOBSDB_FILE)
print(start, end)
print(type(start), type(end))
assert start
assert end
assert start <= end
def test_get_job_nodes_slurm():
tokio.config.CONFIG["jobinfo_jobnodes_providers"] = ["slurm"]
jobnodes = tokio.tools.jobinfo.get_job_nodes(
jobid=tokiotest.SAMPLE_DARSHAN_JOBID,
cache_file=tokiotest.SAMPLE_SLURM_CACHE_FILE)
print(type(jobnodes), jobnodes)
assert jobnodes
if __name__ == "__main__":
test_get_job_startend_slurm()
test_get_job_startend_nerscjobsdb()
test_get_job_nodes_slurm()
| true | true |
1c45cf7be7c3d2e904239c5a45cec80098ce6554 | 78 | py | Python | wmf/dump/__init__.py | maribelacosta/wikiwho | 5c53f129b018541aad0cc63be5e03a862e6183a1 | [
"MIT"
] | 17 | 2015-01-04T15:17:15.000Z | 2019-09-17T15:38:43.000Z | wmf/dump/__init__.py | maribelacosta/wikiwho | 5c53f129b018541aad0cc63be5e03a862e6183a1 | [
"MIT"
] | 5 | 2015-06-03T09:07:40.000Z | 2017-03-31T16:36:13.000Z | wmf/dump/__init__.py | maribelacosta/wikiwho | 5c53f129b018541aad0cc63be5e03a862e6183a1 | [
"MIT"
] | 10 | 2015-02-11T11:50:11.000Z | 2021-07-28T02:17:16.000Z | from .iterator import Iterator
from .map import map
from .map import dumpFile
| 19.5 | 30 | 0.807692 | from .iterator import Iterator
from .map import map
from .map import dumpFile
| true | true |
1c45cfcca17602a353dfd446b147a8f1cf0251e7 | 4,479 | py | Python | tests/samples.py | chikko80/bit | af557cde90c9021ee16024ab89a000961c6062b4 | [
"MIT"
] | 1,173 | 2016-11-30T19:45:44.000Z | 2022-03-31T15:43:58.000Z | tests/samples.py | chikko80/bit | af557cde90c9021ee16024ab89a000961c6062b4 | [
"MIT"
] | 155 | 2017-03-17T13:06:42.000Z | 2022-02-28T16:59:14.000Z | tests/samples.py | chikko80/bit | af557cde90c9021ee16024ab89a000961c6062b4 | [
"MIT"
] | 197 | 2017-02-16T04:30:29.000Z | 2022-03-24T09:38:29.000Z | import os
BINARY_ADDRESS = b'\x00\x92F\x1b\xdeb\x83\xb4a\xec\xe7\xdd\xf4\xdb\xf1\xe0\xa4\x8b\xd1\x13\xd8&E\xb4\xbf'
BITCOIN_ADDRESS = '1ELReFsTCUY2mfaDTy32qxYiT49z786eFg'
BITCOIN_ADDRESS_COMPRESSED = '1ExJJsNLQDNVVM1s1sdyt1o5P3GC5r32UG'
BITCOIN_ADDRESS_NP2WKH = '3291hXxutb58vbDVVumaJpopanmfxjVpgJ'
BITCOIN_ADDRESS_PAY2SH = '39SrGQEfFXcTYJhBvjZeQja66Cpz82EEUn'
BITCOIN_ADDRESS_P2SH_MULTISIG = '3LtE4jSa7MDD1GEJGEBYjntYQB9KcqzRCG'
BITCOIN_ADDRESS_NP2SH_MULTISIG = '32syayPneML4mTuMfimt6ZSLPz87Fya1r6'
BITCOIN_ADDRESS_TEST = 'mtrNwJxS1VyHYn3qBY1Qfsm3K3kh1mGRMS'
BITCOIN_ADDRESS_TEST_COMPRESSED = 'muUFbvTKDEokGTVUjScMhw1QF2rtv5hxCz'
BITCOIN_ADDRESS_TEST_NP2WKH = '2MshDmGtwW3aV8Nr3B3PSvmo5o8yqhwJFKZ'
BITCOIN_ADDRESS_TEST_PAY2SH = '2NFKbBHzzh32q5DcZJNgZE9sF7gYmtPbawk'
BITCOIN_ADDRESS_TEST_P2SH_MULTISIG = '2NFEYyojDi7eFSZPwBRxSQPn1NPnS9p5upP'
BITCOIN_ADDRESS_TEST_NP2SH_MULTISIG = '2NCWeVbWmaUp92dSFP3RddPk6r3GTd6cDd6'
BITCOIN_SEGWIT_ADDRESS = 'bc1qar0srrr7xfkvy5l643lydnw9re59gtzzwf5mdq'
BITCOIN_SEGWIT_HASH = 'e8df018c7e326cc253faac7e46cdc51e68542c42'
BITCOIN_SEGWIT_ADDRESS_TEST = 'tb1qw508d6qejxtdg4y5r3zarvary0c5xw7kxpjzsx'
BITCOIN_SEGWIT_HASH_TEST = '751e76e8199196d454941c45d1b3a323f1433bd6'
BITCOIN_SEGWIT_ADDRESS_PAY2SH = 'bc1qc7slrfxkknqcq2jevvvkdgvrt8080852dfjewde450xdlk4ugp7szw5tk9'
BITCOIN_SEGWIT_HASH_PAY2SH = 'c7a1f1a4d6b4c1802a59631966a18359de779e8a6a65973735a3ccdfdabc407d'
BITCOIN_SEGWIT_ADDRESS_TEST_PAY2SH = 'tb1qrp33g0q5c5txsp9arysrx4k6zdkfs4nce4xj0gdcccefvpysxf3q0sl5k7'
BITCOIN_SEGWIT_HASH_TEST_PAY2SH = '1863143c14c5166804bd19203356da136c985678cd4d27a1b8c6329604903262'
PAY2SH_HASH = b'U\x13\x1e\xfbz\x0e\xddLv\xcc;\xbe\x83;\xfcY\xa6\xf7<k'
PAY2SH_TEST_HASH = b'\xf2&\x1e\x95d\xc9\xdf\xff\xa8\x15\x05\xc1S\xfb\x95\xbf\x93\x99C\x08'
PRIVATE_KEY_BYTES = b'\xc2\x8a\x9f\x80s\x8fw\rRx\x03\xa5f\xcfo\xc3\xed\xf6\xce\xa5\x86\xc4\xfcJR#\xa5\xady~\x1a\xc3'
PRIVATE_KEY_DER = (
b"0\x81\x84\x02\x01\x000\x10\x06\x07*\x86H\xce=\x02\x01\x06"
b"\x05+\x81\x04\x00\n\x04m0k\x02\x01\x01\x04 \xc2\x8a\x9f"
b"\x80s\x8fw\rRx\x03\xa5f\xcfo\xc3\xed\xf6\xce\xa5\x86\xc4"
b"\xfcJR#\xa5\xady~\x1a\xc3\xa1D\x03B\x00\x04=\\(u\xc9\xbd"
b"\x11hu\xa7\x1a]\xb6L\xff\xcb\x139k\x16=\x03\x9b\x1d\x93'"
b"\x82H\x91\x80C4v\xa45**\xdd\x00\xeb\xb0\xd5\xc9LQ[r\xeb"
b"\x10\xf1\xfd\x8f?\x03\xb4/J+%[\xfc\x9a\xa9\xe3"
)
PRIVATE_KEY_HEX = 'c28a9f80738f770d527803a566cf6fc3edf6cea586c4fc4a5223a5ad797e1ac3'
PRIVATE_KEY_NUM = 87993618360805341115891506172036624893404292644470266399436498750715784469187
PRIVATE_KEY_PEM = (
b'-----BEGIN PRIVATE KEY-----\n'
b'MIGEAgEAMBAGByqGSM49AgEGBSuBBAAKBG0wawIBAQQgwoqfgHOPdw1SeAOlZs9v\n'
b'w+32zqWGxPxKUiOlrXl+GsOhRANCAAQ9XCh1yb0RaHWnGl22TP/LEzlrFj0Dmx2T\n'
b'J4JIkYBDNHakNSoq3QDrsNXJTFFbcusQ8f2PPwO0L0orJVv8mqnj\n'
b'-----END PRIVATE KEY-----\n'
)
PUBKEY_HASH = b'\x92F\x1b\xdeb\x83\xb4a\xec\xe7\xdd\xf4\xdb\xf1\xe0\xa4\x8b\xd1\x13\xd8'
PUBKEY_HASH_HEX = '043d5c2875c9bd116875a71a5db64cffcb13396b163d039b1d932782489180433476a4352a2add00ebb0d5c94c515b72eb10f1fd8f3f03b42f4a2b255bfc9aa9e3'
PUBKEY_HASH_COMPRESSED = b'\x99\x0e\xf6\rc\xb5\xb5\x96J\x1c"\x82\x06\x1a\xf4Q#\xe9?\xcb'
PUBLIC_KEY_COMPRESSED = b"\x03=\\(u\xc9\xbd\x11hu\xa7\x1a]\xb6L\xff\xcb\x139k\x16=\x03\x9b\x1d\x93'\x82H\x91\x80C4"
PUBLIC_KEY_UNCOMPRESSED = (
b"\x04=\\(u\xc9\xbd\x11hu\xa7\x1a]\xb6L\xff\xcb\x139k\x16=\x03"
b"\x9b\x1d\x93'\x82H\x91\x80C4v\xa45**\xdd\x00\xeb\xb0\xd5\xc9"
b"LQ[r\xeb\x10\xf1\xfd\x8f?\x03\xb4/J+%[\xfc\x9a\xa9\xe3"
)
PUBLIC_KEY_X = 27753912938952041417634381842191885283234814940840273460372041880794577257268
PUBLIC_KEY_Y = 53663045980837260634637807506183816949039230809110041985901491152185762425315
WALLET_FORMAT_COMPRESSED_MAIN = 'L3jsepcttyuJK3HKezD4qqRKGtwc8d2d1Nw6vsoPDX9cMcUxqqMv'
WALLET_FORMAT_COMPRESSED_TEST = 'cU6s7jckL3bZUUkb3Q2CD9vNu8F1o58K5R5a3JFtidoccMbhEGKZ'
WALLET_FORMAT_COMPRESSED_SEND_TEST = os.environ.get('WALLET_FORMAT_COMPRESSED_SEND_TEST', '')
WALLET_FORMAT_MAIN = '5KHxtARu5yr1JECrYGEA2YpCPdh1i9ciEgQayAF8kcqApkGzT9s'
WALLET_FORMAT_MAIN_1 = 'L3MWPaUtPpnBx7QZtMTrcKz437JFCLzz8GQ916UAZtk3P51w7cpo'
WALLET_FORMAT_MAIN_2 = 'Kxat4TMUAv3f7H4g52NKbAyEncj72h3wujGVfoGhU52WdQxrZuFd'
WALLET_FORMAT_TEST = '934bTuFSgCv9GHi9Ac84u9NA3J3isK9uadGY3nbe6MaDbnQdcbn'
WALLET_FORMAT_SEND_TEST = os.environ.get('WALLET_FORMAT_SEND_TEST', '')
WALLET_FORMAT_TEST_1 = 'KzDFE5K1Mb6cP2SAUySLUMx8F8KWn5RYiUFTFXcDMY5X22Jp1MvH'
WALLET_FORMAT_TEST_2 = 'KxhDjpU1TWDbD2ukWpTcj9T55bkKVJsyn2hcDucCecpjApetCRG9'
| 67.863636 | 150 | 0.841036 | import os
BINARY_ADDRESS = b'\x00\x92F\x1b\xdeb\x83\xb4a\xec\xe7\xdd\xf4\xdb\xf1\xe0\xa4\x8b\xd1\x13\xd8&E\xb4\xbf'
BITCOIN_ADDRESS = '1ELReFsTCUY2mfaDTy32qxYiT49z786eFg'
BITCOIN_ADDRESS_COMPRESSED = '1ExJJsNLQDNVVM1s1sdyt1o5P3GC5r32UG'
BITCOIN_ADDRESS_NP2WKH = '3291hXxutb58vbDVVumaJpopanmfxjVpgJ'
BITCOIN_ADDRESS_PAY2SH = '39SrGQEfFXcTYJhBvjZeQja66Cpz82EEUn'
BITCOIN_ADDRESS_P2SH_MULTISIG = '3LtE4jSa7MDD1GEJGEBYjntYQB9KcqzRCG'
BITCOIN_ADDRESS_NP2SH_MULTISIG = '32syayPneML4mTuMfimt6ZSLPz87Fya1r6'
BITCOIN_ADDRESS_TEST = 'mtrNwJxS1VyHYn3qBY1Qfsm3K3kh1mGRMS'
BITCOIN_ADDRESS_TEST_COMPRESSED = 'muUFbvTKDEokGTVUjScMhw1QF2rtv5hxCz'
BITCOIN_ADDRESS_TEST_NP2WKH = '2MshDmGtwW3aV8Nr3B3PSvmo5o8yqhwJFKZ'
BITCOIN_ADDRESS_TEST_PAY2SH = '2NFKbBHzzh32q5DcZJNgZE9sF7gYmtPbawk'
BITCOIN_ADDRESS_TEST_P2SH_MULTISIG = '2NFEYyojDi7eFSZPwBRxSQPn1NPnS9p5upP'
BITCOIN_ADDRESS_TEST_NP2SH_MULTISIG = '2NCWeVbWmaUp92dSFP3RddPk6r3GTd6cDd6'
BITCOIN_SEGWIT_ADDRESS = 'bc1qar0srrr7xfkvy5l643lydnw9re59gtzzwf5mdq'
BITCOIN_SEGWIT_HASH = 'e8df018c7e326cc253faac7e46cdc51e68542c42'
BITCOIN_SEGWIT_ADDRESS_TEST = 'tb1qw508d6qejxtdg4y5r3zarvary0c5xw7kxpjzsx'
BITCOIN_SEGWIT_HASH_TEST = '751e76e8199196d454941c45d1b3a323f1433bd6'
BITCOIN_SEGWIT_ADDRESS_PAY2SH = 'bc1qc7slrfxkknqcq2jevvvkdgvrt8080852dfjewde450xdlk4ugp7szw5tk9'
BITCOIN_SEGWIT_HASH_PAY2SH = 'c7a1f1a4d6b4c1802a59631966a18359de779e8a6a65973735a3ccdfdabc407d'
BITCOIN_SEGWIT_ADDRESS_TEST_PAY2SH = 'tb1qrp33g0q5c5txsp9arysrx4k6zdkfs4nce4xj0gdcccefvpysxf3q0sl5k7'
BITCOIN_SEGWIT_HASH_TEST_PAY2SH = '1863143c14c5166804bd19203356da136c985678cd4d27a1b8c6329604903262'
PAY2SH_HASH = b'U\x13\x1e\xfbz\x0e\xddLv\xcc;\xbe\x83;\xfcY\xa6\xf7<k'
PAY2SH_TEST_HASH = b'\xf2&\x1e\x95d\xc9\xdf\xff\xa8\x15\x05\xc1S\xfb\x95\xbf\x93\x99C\x08'
PRIVATE_KEY_BYTES = b'\xc2\x8a\x9f\x80s\x8fw\rRx\x03\xa5f\xcfo\xc3\xed\xf6\xce\xa5\x86\xc4\xfcJR#\xa5\xady~\x1a\xc3'
PRIVATE_KEY_DER = (
b"0\x81\x84\x02\x01\x000\x10\x06\x07*\x86H\xce=\x02\x01\x06"
b"\x05+\x81\x04\x00\n\x04m0k\x02\x01\x01\x04 \xc2\x8a\x9f"
b"\x80s\x8fw\rRx\x03\xa5f\xcfo\xc3\xed\xf6\xce\xa5\x86\xc4"
b"\xfcJR#\xa5\xady~\x1a\xc3\xa1D\x03B\x00\x04=\\(u\xc9\xbd"
b"\x11hu\xa7\x1a]\xb6L\xff\xcb\x139k\x16=\x03\x9b\x1d\x93'"
b"\x82H\x91\x80C4v\xa45**\xdd\x00\xeb\xb0\xd5\xc9LQ[r\xeb"
b"\x10\xf1\xfd\x8f?\x03\xb4/J+%[\xfc\x9a\xa9\xe3"
)
PRIVATE_KEY_HEX = 'c28a9f80738f770d527803a566cf6fc3edf6cea586c4fc4a5223a5ad797e1ac3'
PRIVATE_KEY_NUM = 87993618360805341115891506172036624893404292644470266399436498750715784469187
PRIVATE_KEY_PEM = (
b'-----BEGIN PRIVATE KEY-----\n'
b'MIGEAgEAMBAGByqGSM49AgEGBSuBBAAKBG0wawIBAQQgwoqfgHOPdw1SeAOlZs9v\n'
b'w+32zqWGxPxKUiOlrXl+GsOhRANCAAQ9XCh1yb0RaHWnGl22TP/LEzlrFj0Dmx2T\n'
b'J4JIkYBDNHakNSoq3QDrsNXJTFFbcusQ8f2PPwO0L0orJVv8mqnj\n'
b'-----END PRIVATE KEY-----\n'
)
PUBKEY_HASH = b'\x92F\x1b\xdeb\x83\xb4a\xec\xe7\xdd\xf4\xdb\xf1\xe0\xa4\x8b\xd1\x13\xd8'
PUBKEY_HASH_HEX = '043d5c2875c9bd116875a71a5db64cffcb13396b163d039b1d932782489180433476a4352a2add00ebb0d5c94c515b72eb10f1fd8f3f03b42f4a2b255bfc9aa9e3'
PUBKEY_HASH_COMPRESSED = b'\x99\x0e\xf6\rc\xb5\xb5\x96J\x1c"\x82\x06\x1a\xf4Q#\xe9?\xcb'
PUBLIC_KEY_COMPRESSED = b"\x03=\\(u\xc9\xbd\x11hu\xa7\x1a]\xb6L\xff\xcb\x139k\x16=\x03\x9b\x1d\x93'\x82H\x91\x80C4"
PUBLIC_KEY_UNCOMPRESSED = (
b"\x04=\\(u\xc9\xbd\x11hu\xa7\x1a]\xb6L\xff\xcb\x139k\x16=\x03"
b"\x9b\x1d\x93'\x82H\x91\x80C4v\xa45**\xdd\x00\xeb\xb0\xd5\xc9"
b"LQ[r\xeb\x10\xf1\xfd\x8f?\x03\xb4/J+%[\xfc\x9a\xa9\xe3"
)
PUBLIC_KEY_X = 27753912938952041417634381842191885283234814940840273460372041880794577257268
PUBLIC_KEY_Y = 53663045980837260634637807506183816949039230809110041985901491152185762425315
WALLET_FORMAT_COMPRESSED_MAIN = 'L3jsepcttyuJK3HKezD4qqRKGtwc8d2d1Nw6vsoPDX9cMcUxqqMv'
WALLET_FORMAT_COMPRESSED_TEST = 'cU6s7jckL3bZUUkb3Q2CD9vNu8F1o58K5R5a3JFtidoccMbhEGKZ'
WALLET_FORMAT_COMPRESSED_SEND_TEST = os.environ.get('WALLET_FORMAT_COMPRESSED_SEND_TEST', '')
WALLET_FORMAT_MAIN = '5KHxtARu5yr1JECrYGEA2YpCPdh1i9ciEgQayAF8kcqApkGzT9s'
WALLET_FORMAT_MAIN_1 = 'L3MWPaUtPpnBx7QZtMTrcKz437JFCLzz8GQ916UAZtk3P51w7cpo'
WALLET_FORMAT_MAIN_2 = 'Kxat4TMUAv3f7H4g52NKbAyEncj72h3wujGVfoGhU52WdQxrZuFd'
WALLET_FORMAT_TEST = '934bTuFSgCv9GHi9Ac84u9NA3J3isK9uadGY3nbe6MaDbnQdcbn'
WALLET_FORMAT_SEND_TEST = os.environ.get('WALLET_FORMAT_SEND_TEST', '')
WALLET_FORMAT_TEST_1 = 'KzDFE5K1Mb6cP2SAUySLUMx8F8KWn5RYiUFTFXcDMY5X22Jp1MvH'
WALLET_FORMAT_TEST_2 = 'KxhDjpU1TWDbD2ukWpTcj9T55bkKVJsyn2hcDucCecpjApetCRG9'
| true | true |
1c45cff204c2083e87e0c5b405062242ac5f4e29 | 2,254 | py | Python | .config/qutebrowser/config.py | SqrtMinusOne/dotfiles | 1121bd865cb9ed019e9e4c257155e2fb483d98c5 | [
"Apache-2.0"
] | 12 | 2021-05-01T11:08:55.000Z | 2022-03-27T05:57:02.000Z | .config/qutebrowser/config.py | SqrtMinusOne/dotfiles | 1121bd865cb9ed019e9e4c257155e2fb483d98c5 | [
"Apache-2.0"
] | 1 | 2022-02-13T14:54:29.000Z | 2022-02-13T15:42:55.000Z | .config/qutebrowser/config.py | SqrtMinusOne/dotfiles | 1121bd865cb9ed019e9e4c257155e2fb483d98c5 | [
"Apache-2.0"
] | 4 | 2021-05-22T21:31:28.000Z | 2022-03-30T21:28:33.000Z | import os
import dracula.draw
from qutebrowser.api import interceptor
def filter_yt(info: interceptor.Request):
"""Block the given request if necessary."""
url = info.request_url
if (url.host() == 'www.youtube.com' and
url.path() == '/get_video_info' and
'&adformat=' in url.query()):
info.block()
interceptor.register(filter_yt)
config.load_autoconfig()
config.unbind('gt', mode='normal')
config.bind('gt', 'tab-next')
config.bind('gT', 'tab-prev')
config.bind('gN', 'tab-close')
config.bind('gn', 'tab-clone')
c.fonts.default_size = '10pt'
c.fonts.default_family = 'monospace'
c.fonts.web.size.default_fixed = 13
# config.unbind('T', mode='normal')
# config.bind('T', 'set-cmd-text -s :buffer')
# c.content.javascript.enabled = False
config.bind('\\t', 'set-cmd-text -s :buffer')
config.bind('\\b', 'set-cmd-text -s :bookmark-load')
config.bind('\\ww', ':open file:///home/pavel/Documents/org-mode/Bookmarks/bookmarks.html')
config.bind('\\z1', 'set zoom.default 100 ;; set fonts.default_size 10pt')
config.bind('\\z2', 'set zoom.default 125 ;; set fonts.default_size 12pt')
if c.colors.webpage.darkmode.enabled:
config.bind('\\d', 'set colors.webpage.darkmode.enabled False ;; restart')
else:
config.bind('\\d', 'set colors.webpage.darkmode.enabled True ;; restart')
# config.unbind('<Escape>', mode='insert')
config.bind('<Shift-Escape>', 'fake-key <Escape>', mode='insert')
RUSSIAN = 'йцукенгшщзхъфывапролджэячсмитьбю.'
ENGLISH = 'qwertyuiop[]asdfghjkl;\'zxcvbnm,./'
c.bindings.key_mappings = {
**{r: e for r, e in zip(RUSSIAN, ENGLISH)},
**{r.upper(): e.upper() for r, e in zip(RUSSIAN, ENGLISH)}
}
c.editor.command = [
'nvim',
'-f',
'{file}',
'-c',
'normal {line}G{column0}l'
]
c.scrolling.bar = 'always'
c.url.searchengines = {
"DEFAULT": "https://duckduckgo.com/?q={}",
"g": "https://www.google.com/search?hl=en&q={}"
}
c.zoom.levels = ['25%', '33%', '50%', '67%', '75%', '90%', '100%', '110%',
'125%', '133%', '150%', '175%', '200%', '250%', '300%',
'400%', '500%']
if os.uname().nodename == 'pavel-ntk':
c.zoom.default = '133%'
dracula.draw.blood(c, {
'spacing': {
'vertical': 6,
'horizontal': 8
}
})
| 26.517647 | 91 | 0.624667 | import os
import dracula.draw
from qutebrowser.api import interceptor
def filter_yt(info: interceptor.Request):
url = info.request_url
if (url.host() == 'www.youtube.com' and
url.path() == '/get_video_info' and
'&adformat=' in url.query()):
info.block()
interceptor.register(filter_yt)
config.load_autoconfig()
config.unbind('gt', mode='normal')
config.bind('gt', 'tab-next')
config.bind('gT', 'tab-prev')
config.bind('gN', 'tab-close')
config.bind('gn', 'tab-clone')
c.fonts.default_size = '10pt'
c.fonts.default_family = 'monospace'
c.fonts.web.size.default_fixed = 13
config.bind('\\t', 'set-cmd-text -s :buffer')
config.bind('\\b', 'set-cmd-text -s :bookmark-load')
config.bind('\\ww', ':open file:///home/pavel/Documents/org-mode/Bookmarks/bookmarks.html')
config.bind('\\z1', 'set zoom.default 100 ;; set fonts.default_size 10pt')
config.bind('\\z2', 'set zoom.default 125 ;; set fonts.default_size 12pt')
if c.colors.webpage.darkmode.enabled:
config.bind('\\d', 'set colors.webpage.darkmode.enabled False ;; restart')
else:
config.bind('\\d', 'set colors.webpage.darkmode.enabled True ;; restart')
config.bind('<Shift-Escape>', 'fake-key <Escape>', mode='insert')
RUSSIAN = 'йцукенгшщзхъфывапролджэячсмитьбю.'
ENGLISH = 'qwertyuiop[]asdfghjkl;\'zxcvbnm,./'
c.bindings.key_mappings = {
**{r: e for r, e in zip(RUSSIAN, ENGLISH)},
**{r.upper(): e.upper() for r, e in zip(RUSSIAN, ENGLISH)}
}
c.editor.command = [
'nvim',
'-f',
'{file}',
'-c',
'normal {line}G{column0}l'
]
c.scrolling.bar = 'always'
c.url.searchengines = {
"DEFAULT": "https://duckduckgo.com/?q={}",
"g": "https://www.google.com/search?hl=en&q={}"
}
c.zoom.levels = ['25%', '33%', '50%', '67%', '75%', '90%', '100%', '110%',
'125%', '133%', '150%', '175%', '200%', '250%', '300%',
'400%', '500%']
if os.uname().nodename == 'pavel-ntk':
c.zoom.default = '133%'
dracula.draw.blood(c, {
'spacing': {
'vertical': 6,
'horizontal': 8
}
})
| true | true |
1c45d042e89a5bb966939c08622d51ac265a1ecd | 3,127 | py | Python | tmt/steps/report/junit.py | KwisatzHaderach/tmt | 75ff90a543240d39c45baa849e6a3149545be0fd | [
"MIT"
] | null | null | null | tmt/steps/report/junit.py | KwisatzHaderach/tmt | 75ff90a543240d39c45baa849e6a3149545be0fd | [
"MIT"
] | null | null | null | tmt/steps/report/junit.py | KwisatzHaderach/tmt | 75ff90a543240d39c45baa849e6a3149545be0fd | [
"MIT"
] | null | null | null | import os
import click
import tmt
import tmt.steps.report
DEFAULT_NAME = "junit.xml"
def import_junit_xml():
"""
Import junit_xml module only when needed
Until we have a separate package for each plugin.
"""
global junit_xml
try:
import junit_xml
except ImportError:
raise tmt.utils.ReportError(
"Missing 'junit-xml', fixable by 'pip install tmt[report-junit]'.")
def duration_to_seconds(duration):
""" Convert valid duration string in to seconds """
if duration is None:
return None
try:
h, m, s = duration.split(':')
return int(h) * 3600 + int(m) * 60 + int(s)
except Exception as error:
raise tmt.utils.ReportError(
f"Malformed duration '{duration}' ({error}).")
class ReportJUnit(tmt.steps.report.ReportPlugin):
"""
Write test results in JUnit format
When FILE is not specified output is written to the 'junit.xml'
located in the current workdir.
"""
# Supported methods
_methods = [tmt.steps.Method(name='junit', doc=__doc__, order=50)]
# Supported keys
_keys = ["file"]
@classmethod
def options(cls, how=None):
""" Prepare command line options for connect """
return [
click.option(
'--file', metavar='FILE',
help='Path to the file to store junit to'),
] + super().options(how)
def go(self):
""" Read executed tests and write junit """
super().go()
import_junit_xml()
suite = junit_xml.TestSuite(self.step.plan.name)
for result in self.step.plan.execute.results():
try:
main_log = self.step.plan.execute.read(result.log[0])
except (IndexError, AttributeError):
main_log = None
case = junit_xml.TestCase(
result.name,
classname=None,
elapsed_sec=duration_to_seconds(result.duration),
stdout=main_log
)
# Map tmt OUTCOME to JUnit states
if result.result == "error":
case.add_error_info(result.result)
elif result.result == "fail":
case.add_failure_info(result.result)
elif result.result == "info":
case.add_skipped_info(result.result)
elif result.result == "warn":
case.add_error_info(result.result)
# Passed state is the default
suite.test_cases.append(case)
f_path = self.opt("file", os.path.join(self.workdir, DEFAULT_NAME))
try:
with open(f_path, 'w') as fw:
if hasattr(junit_xml, 'to_xml_report_file'):
junit_xml.to_xml_report_file(fw, [suite])
else:
# For older junit-xml
junit_xml.TestSuite.to_file(fw, [suite])
self.info("output", f_path, 'yellow')
except Exception as error:
raise tmt.utils.ReportError(
f"Failed to write the output '{f_path}' ({error}).")
| 30.960396 | 79 | 0.568276 | import os
import click
import tmt
import tmt.steps.report
DEFAULT_NAME = "junit.xml"
def import_junit_xml():
global junit_xml
try:
import junit_xml
except ImportError:
raise tmt.utils.ReportError(
"Missing 'junit-xml', fixable by 'pip install tmt[report-junit]'.")
def duration_to_seconds(duration):
if duration is None:
return None
try:
h, m, s = duration.split(':')
return int(h) * 3600 + int(m) * 60 + int(s)
except Exception as error:
raise tmt.utils.ReportError(
f"Malformed duration '{duration}' ({error}).")
class ReportJUnit(tmt.steps.report.ReportPlugin):
_methods = [tmt.steps.Method(name='junit', doc=__doc__, order=50)]
_keys = ["file"]
@classmethod
def options(cls, how=None):
return [
click.option(
'--file', metavar='FILE',
help='Path to the file to store junit to'),
] + super().options(how)
def go(self):
super().go()
import_junit_xml()
suite = junit_xml.TestSuite(self.step.plan.name)
for result in self.step.plan.execute.results():
try:
main_log = self.step.plan.execute.read(result.log[0])
except (IndexError, AttributeError):
main_log = None
case = junit_xml.TestCase(
result.name,
classname=None,
elapsed_sec=duration_to_seconds(result.duration),
stdout=main_log
)
if result.result == "error":
case.add_error_info(result.result)
elif result.result == "fail":
case.add_failure_info(result.result)
elif result.result == "info":
case.add_skipped_info(result.result)
elif result.result == "warn":
case.add_error_info(result.result)
suite.test_cases.append(case)
f_path = self.opt("file", os.path.join(self.workdir, DEFAULT_NAME))
try:
with open(f_path, 'w') as fw:
if hasattr(junit_xml, 'to_xml_report_file'):
junit_xml.to_xml_report_file(fw, [suite])
else:
junit_xml.TestSuite.to_file(fw, [suite])
self.info("output", f_path, 'yellow')
except Exception as error:
raise tmt.utils.ReportError(
f"Failed to write the output '{f_path}' ({error}).")
| true | true |
1c45d09ac800551f95112d696ee3ba6ef9d53511 | 5,786 | py | Python | packaging/dicarlo/sanghavi/sanghavimurty2020things1.py | dmayo/brain-score | 3ab4258152c9e3f8c7d29afb10158b184dbcebbe | [
"MIT"
] | 52 | 2019-12-13T06:43:44.000Z | 2022-02-21T07:47:39.000Z | packaging/dicarlo/sanghavi/sanghavimurty2020things1.py | dmayo/brain-score | 3ab4258152c9e3f8c7d29afb10158b184dbcebbe | [
"MIT"
] | 104 | 2019-12-06T18:08:54.000Z | 2022-03-31T23:57:51.000Z | packaging/dicarlo/sanghavi/sanghavimurty2020things1.py | dmayo/brain-score | 3ab4258152c9e3f8c7d29afb10158b184dbcebbe | [
"MIT"
] | 32 | 2019-12-05T14:31:14.000Z | 2022-03-10T02:04:45.000Z | import os
from pathlib import Path
import json
import numpy as np
import xarray as xr
import pandas as pd
from brainio_base.assemblies import NeuronRecordingAssembly
from brainio_base.stimuli import StimulusSet
from brainio_collection.packaging import package_data_assembly, package_stimulus_set
from mkgu_packaging.dicarlo.sanghavi import filter_neuroids
def collect_stimuli(data_dir):
image_dir = data_dir / 'images' / 'things-1'
assert os.path.isdir(image_dir)
files = sorted(os.listdir(image_dir), key=lambda x: int(os.path.splitext(x)[0]))
files = files[:-130] # Discard last 130 images (5 grey and 25x5 normalizer images)
assert os.path.isdir(data_dir / 'image-metadata')
stimuli = pd.read_csv(data_dir / 'image-metadata' / 'things_1_metadata.csv')
stimuli = stimuli.rename(columns={'id': 'image_id'})
stimuli['image_current_local_file_path'] = stimuli.apply(
lambda row: os.path.join(image_dir, str(row.image_id) + '.jpg'), axis=1)
assert len(np.unique(stimuli['image_id'])) == len(stimuli)
stimuli = StimulusSet(stimuli)
stimuli.image_paths = \
{stimuli.at[idx, 'image_id']: stimuli.at[idx, 'image_current_local_file_path'] for idx in range(len(stimuli))}
return stimuli
def load_responses(data_dir, stimuli):
data_dir = data_dir / 'database'
assert os.path.isdir(data_dir)
psth = np.load(data_dir / 'solo.rsvp.things-1.experiment_psth.npy') # Shaped images x repetitions x time_bins x channels
# Compute firing rate for given time bins
timebins = [[70, 170], [170, 270], [50, 100], [100, 150], [150, 200], [200, 250], [70, 270]]
photodiode_delay = 30 # Delay recorded on photodiode is ~30ms
timebase = np.arange(-100, 381, 10) # PSTH from -100ms to 380ms relative to stimulus onset
assert len(timebase) == psth.shape[2]
rate = np.empty((len(timebins), psth.shape[0], psth.shape[1], psth.shape[3]))
for idx, tb in enumerate(timebins):
t_cols = np.where((timebase >= (tb[0] + photodiode_delay)) & (timebase < (tb[1] + photodiode_delay)))[0]
rate[idx] = np.mean(psth[:, :, t_cols, :], axis=2) # Shaped time bins x images x repetitions x channels
assembly = xr.DataArray(rate,
coords={'repetition': ('repetition', list(range(rate.shape[2]))),
'time_bin_id': ('time_bin', list(range(rate.shape[0]))),
'time_bin_start': ('time_bin', [x[0] for x in timebins]),
'time_bin_stop': ('time_bin', [x[1] for x in timebins])},
dims=['time_bin', 'image', 'repetition', 'neuroid'])
# Add neuroid related meta data
neuroid_meta = pd.DataFrame(json.load(open(data_dir.parent / 'array-metadata' / 'mapping.json')))
for column_name, column_data in neuroid_meta.iteritems():
assembly = assembly.assign_coords(**{f'{column_name}': ('neuroid', list(column_data.values))})
# Add stimulus related meta data
for column_name, column_data in stimuli.iteritems():
assembly = assembly.assign_coords(**{f'{column_name}': ('image', list(column_data.values))})
# Collapse dimensions 'image' and 'repetitions' into a single 'presentation' dimension
assembly = assembly.stack(presentation=('image', 'repetition')).reset_index('presentation')
assembly = assembly.drop('image')
assembly = NeuronRecordingAssembly(assembly)
# Filter noisy electrodes
psth = np.load(data_dir / 'solo.rsvp.things-1.normalizer_psth.npy')
t_cols = np.where((timebase >= (70 + photodiode_delay)) & (timebase < (170 + photodiode_delay)))[0]
rate = np.mean(psth[:, :, t_cols, :], axis=2)
normalizer_assembly = xr.DataArray(rate,
coords={'repetition': ('repetition', list(range(rate.shape[1]))),
'image_id': ('image', list(range(rate.shape[0]))),
'id': ('image', list(range(rate.shape[0])))},
dims=['image', 'repetition', 'neuroid'])
for column_name, column_data in neuroid_meta.iteritems():
normalizer_assembly = normalizer_assembly.assign_coords(
**{f'{column_name}': ('neuroid', list(column_data.values))})
normalizer_assembly = normalizer_assembly.stack(presentation=('image', 'repetition')).reset_index('presentation')
normalizer_assembly = normalizer_assembly.drop('image')
normalizer_assembly = normalizer_assembly.transpose('presentation', 'neuroid')
normalizer_assembly = NeuronRecordingAssembly(normalizer_assembly)
filtered_assembly = filter_neuroids(normalizer_assembly, 0.7)
assembly = assembly.sel(neuroid=np.isin(assembly.neuroid_id, filtered_assembly.neuroid_id))
assembly = assembly.transpose('presentation', 'neuroid', 'time_bin')
# Add other experiment and data processing related info
assembly.attrs['image_size_degree'] = 8
assembly.attrs['stim_on_time_ms'] = 100
return assembly
def main():
data_dir = Path(__file__).parents[6] / 'data2' / 'active' / 'users' / 'sachis'
assert os.path.isdir(data_dir)
stimuli = collect_stimuli(data_dir)
stimuli.identifier = 'dicarlo.THINGS1'
assembly = load_responses(data_dir, stimuli)
assembly.name = 'dicarlo.SanghaviMurty2020THINGS1'
print('Packaging stimuli')
package_stimulus_set(stimuli, stimulus_set_identifier=stimuli.identifier, bucket_name='brainio.dicarlo')
print('Packaging assembly')
package_data_assembly(assembly, assembly_identifier=assembly.name, stimulus_set_identifier=stimuli.identifier,
bucket_name='brainio.dicarlo')
return
if __name__ == '__main__':
main()
| 48.621849 | 125 | 0.667128 | import os
from pathlib import Path
import json
import numpy as np
import xarray as xr
import pandas as pd
from brainio_base.assemblies import NeuronRecordingAssembly
from brainio_base.stimuli import StimulusSet
from brainio_collection.packaging import package_data_assembly, package_stimulus_set
from mkgu_packaging.dicarlo.sanghavi import filter_neuroids
def collect_stimuli(data_dir):
image_dir = data_dir / 'images' / 'things-1'
assert os.path.isdir(image_dir)
files = sorted(os.listdir(image_dir), key=lambda x: int(os.path.splitext(x)[0]))
files = files[:-130]
assert os.path.isdir(data_dir / 'image-metadata')
stimuli = pd.read_csv(data_dir / 'image-metadata' / 'things_1_metadata.csv')
stimuli = stimuli.rename(columns={'id': 'image_id'})
stimuli['image_current_local_file_path'] = stimuli.apply(
lambda row: os.path.join(image_dir, str(row.image_id) + '.jpg'), axis=1)
assert len(np.unique(stimuli['image_id'])) == len(stimuli)
stimuli = StimulusSet(stimuli)
stimuli.image_paths = \
{stimuli.at[idx, 'image_id']: stimuli.at[idx, 'image_current_local_file_path'] for idx in range(len(stimuli))}
return stimuli
def load_responses(data_dir, stimuli):
data_dir = data_dir / 'database'
assert os.path.isdir(data_dir)
psth = np.load(data_dir / 'solo.rsvp.things-1.experiment_psth.npy')
timebins = [[70, 170], [170, 270], [50, 100], [100, 150], [150, 200], [200, 250], [70, 270]]
photodiode_delay = 30
timebase = np.arange(-100, 381, 10)
assert len(timebase) == psth.shape[2]
rate = np.empty((len(timebins), psth.shape[0], psth.shape[1], psth.shape[3]))
for idx, tb in enumerate(timebins):
t_cols = np.where((timebase >= (tb[0] + photodiode_delay)) & (timebase < (tb[1] + photodiode_delay)))[0]
rate[idx] = np.mean(psth[:, :, t_cols, :], axis=2)
assembly = xr.DataArray(rate,
coords={'repetition': ('repetition', list(range(rate.shape[2]))),
'time_bin_id': ('time_bin', list(range(rate.shape[0]))),
'time_bin_start': ('time_bin', [x[0] for x in timebins]),
'time_bin_stop': ('time_bin', [x[1] for x in timebins])},
dims=['time_bin', 'image', 'repetition', 'neuroid'])
neuroid_meta = pd.DataFrame(json.load(open(data_dir.parent / 'array-metadata' / 'mapping.json')))
for column_name, column_data in neuroid_meta.iteritems():
assembly = assembly.assign_coords(**{f'{column_name}': ('neuroid', list(column_data.values))})
for column_name, column_data in stimuli.iteritems():
assembly = assembly.assign_coords(**{f'{column_name}': ('image', list(column_data.values))})
assembly = assembly.stack(presentation=('image', 'repetition')).reset_index('presentation')
assembly = assembly.drop('image')
assembly = NeuronRecordingAssembly(assembly)
psth = np.load(data_dir / 'solo.rsvp.things-1.normalizer_psth.npy')
t_cols = np.where((timebase >= (70 + photodiode_delay)) & (timebase < (170 + photodiode_delay)))[0]
rate = np.mean(psth[:, :, t_cols, :], axis=2)
normalizer_assembly = xr.DataArray(rate,
coords={'repetition': ('repetition', list(range(rate.shape[1]))),
'image_id': ('image', list(range(rate.shape[0]))),
'id': ('image', list(range(rate.shape[0])))},
dims=['image', 'repetition', 'neuroid'])
for column_name, column_data in neuroid_meta.iteritems():
normalizer_assembly = normalizer_assembly.assign_coords(
**{f'{column_name}': ('neuroid', list(column_data.values))})
normalizer_assembly = normalizer_assembly.stack(presentation=('image', 'repetition')).reset_index('presentation')
normalizer_assembly = normalizer_assembly.drop('image')
normalizer_assembly = normalizer_assembly.transpose('presentation', 'neuroid')
normalizer_assembly = NeuronRecordingAssembly(normalizer_assembly)
filtered_assembly = filter_neuroids(normalizer_assembly, 0.7)
assembly = assembly.sel(neuroid=np.isin(assembly.neuroid_id, filtered_assembly.neuroid_id))
assembly = assembly.transpose('presentation', 'neuroid', 'time_bin')
assembly.attrs['image_size_degree'] = 8
assembly.attrs['stim_on_time_ms'] = 100
return assembly
def main():
data_dir = Path(__file__).parents[6] / 'data2' / 'active' / 'users' / 'sachis'
assert os.path.isdir(data_dir)
stimuli = collect_stimuli(data_dir)
stimuli.identifier = 'dicarlo.THINGS1'
assembly = load_responses(data_dir, stimuli)
assembly.name = 'dicarlo.SanghaviMurty2020THINGS1'
print('Packaging stimuli')
package_stimulus_set(stimuli, stimulus_set_identifier=stimuli.identifier, bucket_name='brainio.dicarlo')
print('Packaging assembly')
package_data_assembly(assembly, assembly_identifier=assembly.name, stimulus_set_identifier=stimuli.identifier,
bucket_name='brainio.dicarlo')
return
if __name__ == '__main__':
main()
| true | true |
1c45d15d423872579297a22d537eec56230d8c1c | 197 | py | Python | Aprendendo Python/cursopythonudamy/aula16while_contador_acumulador.py | JlucasS777/Aprendendo-Python | a3a960260070f0d604c27fbbc41578a6ab11edb5 | [
"MIT"
] | null | null | null | Aprendendo Python/cursopythonudamy/aula16while_contador_acumulador.py | JlucasS777/Aprendendo-Python | a3a960260070f0d604c27fbbc41578a6ab11edb5 | [
"MIT"
] | null | null | null | Aprendendo Python/cursopythonudamy/aula16while_contador_acumulador.py | JlucasS777/Aprendendo-Python | a3a960260070f0d604c27fbbc41578a6ab11edb5 | [
"MIT"
] | null | null | null | contador = 1
acumulador = 1
while contador <= 10:
print(contador,acumulador)
if contador>5:
break
contador+=1
else :
print("Cheguei ao final do programa e usei o laço else") | 21.888889 | 60 | 0.659898 | contador = 1
acumulador = 1
while contador <= 10:
print(contador,acumulador)
if contador>5:
break
contador+=1
else :
print("Cheguei ao final do programa e usei o laço else") | true | true |
1c45d36bee4e9d54b5cbf2aeacd682f4ac03ae3c | 1,228 | py | Python | scipy/special/_precompute/utils.py | smola/scipy | ff8b9d9e87a585a820846d7f459d6156ba621c4d | [
"BSD-3-Clause"
] | 1 | 2020-02-26T12:15:51.000Z | 2020-02-26T12:15:51.000Z | scipy/special/_precompute/utils.py | smola/scipy | ff8b9d9e87a585a820846d7f459d6156ba621c4d | [
"BSD-3-Clause"
] | null | null | null | scipy/special/_precompute/utils.py | smola/scipy | ff8b9d9e87a585a820846d7f459d6156ba621c4d | [
"BSD-3-Clause"
] | null | null | null | from __future__ import division, print_function, absolute_import
from numpy.testing import suppress_warnings
try:
import mpmath as mp
except ImportError:
pass
try:
# Can remove when sympy #11255 is resolved; see
# https://github.com/sympy/sympy/issues/11255
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, "inspect.getargspec.. is deprecated")
from sympy.abc import x
except ImportError:
pass
def lagrange_inversion(a):
"""Given a series
f(x) = a[1]*x + a[2]*x**2 + ... + a[n-1]*x**(n - 1),
use the Lagrange inversion formula to compute a series
g(x) = b[1]*x + b[2]*x**2 + ... + b[n-1]*x**(n - 1)
so that f(g(x)) = g(f(x)) = x mod x**n. We must have a[0] = 0, so
necessarily b[0] = 0 too.
The algorithm is naive and could be improved, but speed isn't an
issue here and it's easy to read.
"""
n = len(a)
f = sum(a[i]*x**i for i in range(len(a)))
h = (x/f).series(x, 0, n).removeO()
hpower = [h**0]
for k in range(n):
hpower.append((hpower[-1]*h).expand())
b = [mp.mpf(0)]
for k in range(1, n):
b.append(hpower[k].coeff(x, k - 1)/k)
b = map(lambda x: mp.mpf(x), b)
return b
| 26.12766 | 76 | 0.593648 | from __future__ import division, print_function, absolute_import
from numpy.testing import suppress_warnings
try:
import mpmath as mp
except ImportError:
pass
try:
_warnings() as sup:
sup.filter(DeprecationWarning, "inspect.getargspec.. is deprecated")
from sympy.abc import x
except ImportError:
pass
def lagrange_inversion(a):
n = len(a)
f = sum(a[i]*x**i for i in range(len(a)))
h = (x/f).series(x, 0, n).removeO()
hpower = [h**0]
for k in range(n):
hpower.append((hpower[-1]*h).expand())
b = [mp.mpf(0)]
for k in range(1, n):
b.append(hpower[k].coeff(x, k - 1)/k)
b = map(lambda x: mp.mpf(x), b)
return b
| true | true |
1c45d3b6ea710c0f740d2889df7c9d12df1dfe29 | 405 | py | Python | minesweeperapi/minesweeperapi/wsgi.py | Olaussen/minesweeper-online-api | 36ba250b65b19cc4f0d8be36b3f84faf3f692035 | [
"MIT"
] | 4 | 2020-04-15T18:21:36.000Z | 2020-04-24T12:24:03.000Z | minesweeperapi/minesweeperapi/wsgi.py | Olaussen/minesweeper-online-api | 36ba250b65b19cc4f0d8be36b3f84faf3f692035 | [
"MIT"
] | 4 | 2021-03-29T23:56:40.000Z | 2021-09-22T19:00:36.000Z | minesweeperapi/minesweeperapi/wsgi.py | Angstboksen/minesweeper-online-api | 36ba250b65b19cc4f0d8be36b3f84faf3f692035 | [
"MIT"
] | null | null | null | """
WSGI config for minesweeperapi project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'minesweeperapi.settings')
application = get_wsgi_application()
| 23.823529 | 78 | 0.792593 |
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'minesweeperapi.settings')
application = get_wsgi_application()
| true | true |
1c45d4943574fa4c951c551453898c091153b659 | 1,917 | py | Python | trax/rl/__init__.py | koz4k2/trax | 548f671fa3804cb86154ac504fb0c6c4269b42c7 | [
"Apache-2.0"
] | 2 | 2020-02-05T09:27:29.000Z | 2020-02-05T09:27:49.000Z | trax/rl/__init__.py | koz4k2/trax | 548f671fa3804cb86154ac504fb0c6c4269b42c7 | [
"Apache-2.0"
] | null | null | null | trax/rl/__init__.py | koz4k2/trax | 548f671fa3804cb86154ac504fb0c6c4269b42c7 | [
"Apache-2.0"
] | 1 | 2021-07-08T16:35:30.000Z | 2021-07-08T16:35:30.000Z | # coding=utf-8
# Copyright 2019 The Trax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Trax RL library."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gin
from trax.rl import simulated_env_problem
def configure_rl(*args, **kwargs):
kwargs['module'] = 'trax.rl'
return gin.external_configurable(*args, **kwargs)
def configure_simulated_env_problem(*args, **kwargs):
kwargs['blacklist'] = [
'batch_size', 'observation_space', 'action_space', 'reward_range',
'discrete_rewards', 'history_stream', 'output_dir']
return configure_rl(*args, **kwargs)
# pylint: disable=invalid-name
RawSimulatedEnvProblem = configure_simulated_env_problem(
simulated_env_problem.RawSimulatedEnvProblem)
SerializedSequenceSimulatedEnvProblem = configure_simulated_env_problem(
simulated_env_problem.SerializedSequenceSimulatedEnvProblem)
# pylint: disable=invalid-name
cartpole_done_fn = configure_rl(simulated_env_problem.cartpole_done_fn)
cartpole_reward_fn = configure_rl(simulated_env_problem.cartpole_reward_fn)
acrobot_done_fn = configure_rl(simulated_env_problem.acrobot_done_fn)
acrobot_reward_fn = configure_rl(simulated_env_problem.acrobot_reward_fn)
onlinetune_done_fn = configure_rl(simulated_env_problem.onlinetune_done_fn)
onlinetune_reward_fn = configure_rl(simulated_env_problem.onlinetune_reward_fn)
| 36.169811 | 79 | 0.806468 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gin
from trax.rl import simulated_env_problem
def configure_rl(*args, **kwargs):
kwargs['module'] = 'trax.rl'
return gin.external_configurable(*args, **kwargs)
def configure_simulated_env_problem(*args, **kwargs):
kwargs['blacklist'] = [
'batch_size', 'observation_space', 'action_space', 'reward_range',
'discrete_rewards', 'history_stream', 'output_dir']
return configure_rl(*args, **kwargs)
RawSimulatedEnvProblem = configure_simulated_env_problem(
simulated_env_problem.RawSimulatedEnvProblem)
SerializedSequenceSimulatedEnvProblem = configure_simulated_env_problem(
simulated_env_problem.SerializedSequenceSimulatedEnvProblem)
cartpole_done_fn = configure_rl(simulated_env_problem.cartpole_done_fn)
cartpole_reward_fn = configure_rl(simulated_env_problem.cartpole_reward_fn)
acrobot_done_fn = configure_rl(simulated_env_problem.acrobot_done_fn)
acrobot_reward_fn = configure_rl(simulated_env_problem.acrobot_reward_fn)
onlinetune_done_fn = configure_rl(simulated_env_problem.onlinetune_done_fn)
onlinetune_reward_fn = configure_rl(simulated_env_problem.onlinetune_reward_fn)
| true | true |
1c45d52d487074570c308737c78dd22714356d93 | 4,508 | py | Python | play_with_mpv.py | davehorner/play-with-mpv | 89ad8de0faf10a175fbd8cf0792706e39ed87fae | [
"Unlicense"
] | null | null | null | play_with_mpv.py | davehorner/play-with-mpv | 89ad8de0faf10a175fbd8cf0792706e39ed87fae | [
"Unlicense"
] | null | null | null | play_with_mpv.py | davehorner/play-with-mpv | 89ad8de0faf10a175fbd8cf0792706e39ed87fae | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python
# Plays MPV when instructed to by a chrome extension =]
import sys
import argparse
from subprocess import Popen
FileNotFoundError = IOError
if sys.version_info[0] < 3: # python 2
import BaseHTTPServer
import urlparse
class CompatibilityMixin:
def send_body(self, msg):
self.wfile.write(msg+'\n')
self.wfile.close()
else: # python 3
import http.server as BaseHTTPServer
import urllib.parse as urlparse
class CompatibilityMixin:
def send_body(self, msg):
self.wfile.write(bytes(msg+'\n', 'utf-8'))
class Handler(BaseHTTPServer.BaseHTTPRequestHandler, CompatibilityMixin):
def respond(self, code, body=None):
self.send_response(code)
self.send_header("Content-type", "text/plain")
self.end_headers()
if body:
self.send_body(body)
def do_GET(self):
try:
url = urlparse.urlparse(self.path)
query = urlparse.parse_qs(url.query)
except:
query = {}
if query.get('mpv_args'):
print("MPV ARGS:", query.get('mpv_args'))
if "play_url" in query:
urls = str(query["play_url"][0])
if urls.startswith('magnet:') or urls.endswith('.torrent'):
try:
pipe = Popen(['peerflix', '-k', urls, '--', '--force-window'] +
query.get("mpv_args", []))
except FileNotFoundError as e:
missing_bin('peerflix')
else:
try:
pipe = Popen(['mpv', urls, '--force-window'] +
query.get("mpv_args", []))
except FileNotFoundError as e:
missing_bin('mpv')
self.respond(200, "playing...")
elif "cast_url" in query:
urls = str(query["cast_url"][0])
if urls.startswith('magnet:') or urls.endswith('.torrent'):
print(" === WARNING: Casting torrents not yet fully supported!")
try:
with Popen(['mkchromecast', '--video',
'--source-url', 'http://localhost:8888']):
pass
except FileNotFoundError as e:
missing_bin('mkchromecast')
pipe.terminate()
else:
try:
pipe = Popen(['mkchromecast', '--video', '-y', urls])
except FileNotFoundError as e:
missing_bin('mkchromecast')
self.respond(200, "casting...")
elif "fairuse_url" in query:
urls = str(query["fairuse_url"][0])
location = query.get("location", ['~/Downloads/'])[0]
if "%" not in location:
location += "%(title)s.%(ext)s"
print("downloading ", urls, "to", location)
if urls.startswith('magnet:') or urls.endswith('.torrent'):
msg = " === ERROR: Downloading torrents not yet supported!"
print(msg)
self.respond(400, msg)
else:
try:
pipe = Popen(['youtube-dl', urls, '-o', location] +
query.get('ytdl_args', []))
except FileNotFoundError as e:
missing_bin('youtube-dl')
self.respond(200, "downloading...")
else:
self.respond(400)
def missing_bin(bin):
print("======================")
print("ERROR: "+bin.upper()+" does not appear to be installed correctly!")
print("please ensure you can launch '"+bin+"' in the terminal.")
print("======================")
def start():
parser = argparse.ArgumentParser(description='Plays MPV when instructed to by a browser extension.', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--port', type=int, default=7531, help='The port to listen on.')
parser.add_argument('--public', action='store_true', help='Accept traffic from other comuters.')
args = parser.parse_args()
hostname = '0.0.0.0' if args.public else 'localhost'
httpd = BaseHTTPServer.HTTPServer((hostname, args.port), Handler)
print("serving on {}:{}".format(hostname, args.port))
try:
httpd.serve_forever()
except KeyboardInterrupt:
print(" shutting down...")
httpd.shutdown()
if __name__ == '__main__':
start()
| 37.566667 | 160 | 0.533718 |
import sys
import argparse
from subprocess import Popen
FileNotFoundError = IOError
if sys.version_info[0] < 3:
import BaseHTTPServer
import urlparse
class CompatibilityMixin:
def send_body(self, msg):
self.wfile.write(msg+'\n')
self.wfile.close()
else:
import http.server as BaseHTTPServer
import urllib.parse as urlparse
class CompatibilityMixin:
def send_body(self, msg):
self.wfile.write(bytes(msg+'\n', 'utf-8'))
class Handler(BaseHTTPServer.BaseHTTPRequestHandler, CompatibilityMixin):
def respond(self, code, body=None):
self.send_response(code)
self.send_header("Content-type", "text/plain")
self.end_headers()
if body:
self.send_body(body)
def do_GET(self):
try:
url = urlparse.urlparse(self.path)
query = urlparse.parse_qs(url.query)
except:
query = {}
if query.get('mpv_args'):
print("MPV ARGS:", query.get('mpv_args'))
if "play_url" in query:
urls = str(query["play_url"][0])
if urls.startswith('magnet:') or urls.endswith('.torrent'):
try:
pipe = Popen(['peerflix', '-k', urls, '--', '--force-window'] +
query.get("mpv_args", []))
except FileNotFoundError as e:
missing_bin('peerflix')
else:
try:
pipe = Popen(['mpv', urls, '--force-window'] +
query.get("mpv_args", []))
except FileNotFoundError as e:
missing_bin('mpv')
self.respond(200, "playing...")
elif "cast_url" in query:
urls = str(query["cast_url"][0])
if urls.startswith('magnet:') or urls.endswith('.torrent'):
print(" === WARNING: Casting torrents not yet fully supported!")
try:
with Popen(['mkchromecast', '--video',
'--source-url', 'http://localhost:8888']):
pass
except FileNotFoundError as e:
missing_bin('mkchromecast')
pipe.terminate()
else:
try:
pipe = Popen(['mkchromecast', '--video', '-y', urls])
except FileNotFoundError as e:
missing_bin('mkchromecast')
self.respond(200, "casting...")
elif "fairuse_url" in query:
urls = str(query["fairuse_url"][0])
location = query.get("location", ['~/Downloads/'])[0]
if "%" not in location:
location += "%(title)s.%(ext)s"
print("downloading ", urls, "to", location)
if urls.startswith('magnet:') or urls.endswith('.torrent'):
msg = " === ERROR: Downloading torrents not yet supported!"
print(msg)
self.respond(400, msg)
else:
try:
pipe = Popen(['youtube-dl', urls, '-o', location] +
query.get('ytdl_args', []))
except FileNotFoundError as e:
missing_bin('youtube-dl')
self.respond(200, "downloading...")
else:
self.respond(400)
def missing_bin(bin):
print("======================")
print("ERROR: "+bin.upper()+" does not appear to be installed correctly!")
print("please ensure you can launch '"+bin+"' in the terminal.")
print("======================")
def start():
parser = argparse.ArgumentParser(description='Plays MPV when instructed to by a browser extension.', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--port', type=int, default=7531, help='The port to listen on.')
parser.add_argument('--public', action='store_true', help='Accept traffic from other comuters.')
args = parser.parse_args()
hostname = '0.0.0.0' if args.public else 'localhost'
httpd = BaseHTTPServer.HTTPServer((hostname, args.port), Handler)
print("serving on {}:{}".format(hostname, args.port))
try:
httpd.serve_forever()
except KeyboardInterrupt:
print(" shutting down...")
httpd.shutdown()
if __name__ == '__main__':
start()
| true | true |
1c45d5350b388a12e7757dc666f9344c210a8547 | 42,275 | py | Python | pypy/module/_winreg/interp_winreg.py | olliemath/pypy | 8b873bd0b8bf76075aba3d915c260789f26f5788 | [
"Apache-2.0",
"OpenSSL"
] | null | null | null | pypy/module/_winreg/interp_winreg.py | olliemath/pypy | 8b873bd0b8bf76075aba3d915c260789f26f5788 | [
"Apache-2.0",
"OpenSSL"
] | null | null | null | pypy/module/_winreg/interp_winreg.py | olliemath/pypy | 8b873bd0b8bf76075aba3d915c260789f26f5788 | [
"Apache-2.0",
"OpenSSL"
] | null | null | null | from rpython.rtyper.lltypesystem import rffi, lltype
from rpython.rlib import rwinreg, rwin32, rstring
from rpython.rlib.rarithmetic import r_uint, r_ulonglong, intmask
from rpython.rlib.buffer import ByteBuffer
from rpython.rlib.rutf8 import check_utf8
from pypy.interpreter.baseobjspace import W_Root, BufferInterfaceNotFound
from pypy.interpreter.gateway import interp2app, unwrap_spec
from pypy.interpreter.typedef import TypeDef, GetSetProperty
from pypy.interpreter.error import OperationError, oefmt, wrap_oserror
from pypy.interpreter.unicodehelper import (
str_decode_utf_16, utf8_encode_utf_16)
from pypy.module._codecs.interp_codecs import CodecState
from rpython.translator.tool.cbuild import ExternalCompilationInfo
# wrappers needed to call the reflection functions loaded at runtime
# using WINAPI convention
eci = ExternalCompilationInfo(
includes=['windows.h'],
post_include_bits=[
"RPY_EXTERN LONG\n"
"pypy_RegChangeReflectionKey(FARPROC address, HKEY key);\n"
"RPY_EXTERN LONG\n"
"pypy_RegQueryReflectionKey(FARPROC address, HKEY key, LPBOOL isDisabled);\n"
"RPY_EXTERN LONG\n"
"pypy_RegDeleteKeyExW(FARPROC address, HKEY key, LPCWSTR subkey,\n"
" REGSAM sam, DWORD reserved);\n"
],
separate_module_sources=['''
LONG
pypy_RegChangeReflectionKey(FARPROC address, HKEY key) {
LONG (WINAPI *func)(HKEY);
*(FARPROC*)&func = address;
return func(key);
}
LONG
pypy_RegQueryReflectionKey(FARPROC address, HKEY key, LPBOOL isDisabled) {
LONG (WINAPI *func)(HKEY, LPBOOL);
*(FARPROC*)&func = address;
return func(key, isDisabled);
}
LONG
pypy_RegDeleteKeyExW(FARPROC address, HKEY key, LPCWSTR subkey,
REGSAM sam, DWORD reserved) {
LONG (WINAPI *func)(HKEY, LPCWSTR, REGSAM, DWORD);
*(FARPROC*)&func = address;
return func(key, subkey, sam, reserved);
}
'''],
)
pypy_RegChangeReflectionKey = rffi.llexternal(
'pypy_RegChangeReflectionKey',
[rffi.VOIDP, rwinreg.HKEY],
rffi.LONG, compilation_info=eci)
pypy_RegQueryReflectionKey = rffi.llexternal(
'pypy_RegQueryReflectionKey',
[rffi.VOIDP, rwinreg.HKEY, rwin32.LPBOOL],
rffi.LONG, compilation_info=eci)
pypy_RegDeleteKeyExW = rffi.llexternal(
'pypy_RegDeleteKeyExW',
[rffi.VOIDP, rwinreg.HKEY, rffi.CWCHARP, rwinreg.REGSAM, rwin32.DWORD],
rffi.LONG, compilation_info=eci)
def raiseWindowsError(space, errcode, context):
message = rwin32.FormatErrorW(errcode)
w_errcode = space.newint(errcode)
w_t = space.newtuple([w_errcode, space.newtext(*message),
space.w_None, w_errcode])
raise OperationError(space.w_WindowsError, w_t)
class W_HKEY(W_Root):
def __init__(self, space, hkey):
self.hkey = hkey
self.space = space
self.register_finalizer(space)
def _finalize_(self):
# ignore errors
try:
self.Close(self.space)
except:
pass
def as_int(self):
return rffi.cast(rffi.SIZE_T, self.hkey)
def descr_bool(self, space):
return space.newbool(self.as_int() != 0)
def descr_handle_get(self, space):
return space.newint(self.as_int())
def descr_repr(self, space):
return space.newtext("<PyHKEY:0x%x>" % (self.as_int(),))
def descr_int(self, space):
return space.newint(self.as_int())
def descr__enter__(self, space):
return self
def descr__exit__(self, space, __args__):
CloseKey(space, self)
def Close(self, space):
"""key.Close() - Closes the underlying Windows handle.
If the handle is already closed, no error is raised."""
CloseKey(space, self)
def Detach(self, space):
"""int = key.Detach() - Detaches the Windows handle from the handle object.
The result is the value of the handle before it is detached. If the
handle is already detached, this will return zero.
After calling this function, the handle is effectively invalidated,
but the handle is not closed. You would call this function when you
need the underlying win32 handle to exist beyond the lifetime of the
handle object."""
key = self.as_int()
self.hkey = rwin32.NULL_HANDLE
return space.newint(key)
@unwrap_spec(key=int)
def new_HKEY(space, w_subtype, key):
hkey = rffi.cast(rwinreg.HKEY, key)
return W_HKEY(space, hkey)
descr_HKEY_new = interp2app(new_HKEY)
W_HKEY.typedef = TypeDef(
"winreg.HKEYType",
__doc__="""\
PyHKEY Object - A Python object, representing a win32 registry key.
This object wraps a Windows HKEY object, automatically closing it when
the object is destroyed. To guarantee cleanup, you can call either
the Close() method on the PyHKEY, or the CloseKey() method.
All functions which accept a handle object also accept an integer -
however, use of the handle object is encouraged.
Functions:
Close() - Closes the underlying handle.
Detach() - Returns the integer Win32 handle, detaching it from the object
Properties:
handle - The integer Win32 handle.
Operations:
__bool__ - Handles with an open object return true, otherwise false.
__int__ - Converting a handle to an integer returns the Win32 handle.
__cmp__ - Handle objects are compared using the handle value.""",
__new__=descr_HKEY_new,
__repr__=interp2app(W_HKEY.descr_repr),
__int__=interp2app(W_HKEY.descr_int),
__bool__=interp2app(W_HKEY.descr_bool),
__enter__=interp2app(W_HKEY.descr__enter__),
__exit__=interp2app(W_HKEY.descr__exit__),
handle=GetSetProperty(W_HKEY.descr_handle_get),
Close=interp2app(W_HKEY.Close),
Detach=interp2app(W_HKEY.Detach),
)
def hkey_w(w_hkey, space):
if space.is_w(w_hkey, space.w_None):
raise oefmt(space.w_TypeError,
"None is not a valid HKEY in this context")
elif isinstance(w_hkey, W_HKEY):
return w_hkey.hkey
elif space.isinstance_w(w_hkey, space.w_int):
if space.is_true(space.lt(w_hkey, space.newint(0))):
return rffi.cast(rwinreg.HKEY, space.int_w(w_hkey))
return rffi.cast(rwinreg.HKEY, space.uint_w(w_hkey))
else:
raise oefmt(space.w_TypeError, "The object is not a PyHKEY object")
def CloseKey(space, w_hkey):
"""CloseKey(hkey) - Closes a previously opened registry key.
The hkey argument specifies a previously opened key.
Note that if the key is not closed using this method, it will be
closed when the hkey object is destroyed by Python."""
hkey = hkey_w(w_hkey, space)
if hkey:
ret = rwinreg.RegCloseKey(hkey)
if ret != 0:
raiseWindowsError(space, ret, 'RegCloseKey')
if isinstance(w_hkey, W_HKEY):
space.interp_w(W_HKEY, w_hkey).hkey = rwin32.NULL_HANDLE
def FlushKey(space, w_hkey):
"""FlushKey(key) - Writes all the attributes of a key to the registry.
key is an already open key, or any one of the predefined HKEY_* constants.
It is not necessary to call RegFlushKey to change a key.
Registry changes are flushed to disk by the registry using its lazy flusher.
Registry changes are also flushed to disk at system shutdown.
Unlike CloseKey(), the FlushKey() method returns only when all the data has
been written to the registry.
An application should only call FlushKey() if it requires absolute certainty
that registry changes are on disk.
If you don't know whether a FlushKey() call is required, it probably isn't."""
hkey = hkey_w(w_hkey, space)
if hkey:
ret = rwinreg.RegFlushKey(hkey)
if ret != 0:
raiseWindowsError(space, ret, 'RegFlushKey')
@unwrap_spec(subkey="unicode", filename="unicode")
def LoadKey(space, w_hkey, subkey, filename):
    """LoadKey(key, sub_key, file_name) - Creates a subkey under the specified key
    and stores registration information from a specified file into that subkey.
    key is an already open key, or any one of the predefined HKEY_* constants.
    sub_key is a string that identifies the sub_key to load
    file_name is the name of the file to load registry data from.
    This file must have been created with the SaveKey() function.
    Under the file allocation table (FAT) file system, the filename may not
    have an extension.
    A call to LoadKey() fails if the calling process does not have the
    SE_RESTORE_PRIVILEGE privilege.
    If key is a handle returned by ConnectRegistry(), then the path specified
    in fileName is relative to the remote computer.
    The docs imply key must be in the HKEY_USER or HKEY_LOCAL_MACHINE tree"""
    # XXX should filename use space.fsencode_w?
    hkey = hkey_w(w_hkey, space)
    with rffi.scoped_unicode2wcharp(subkey) as subkey_buf:
        with rffi.scoped_unicode2wcharp(filename) as filename_buf:
            c_subkey = rffi.cast(rffi.CWCHARP, subkey_buf)
            c_filename = rffi.cast(rffi.CWCHARP, filename_buf)
            err = rwinreg.RegLoadKeyW(hkey, c_subkey, c_filename)
    if err != 0:
        raiseWindowsError(space, err, 'RegLoadKey')
@unwrap_spec(filename="unicode")
def SaveKey(space, w_hkey, filename):
    """
    SaveKey(key, file_name) - Saves the specified key, and all its subkeys to the
    specified file.
    key is an already open key, or any one of the predefined HKEY_* constants.
    file_name is the name of the file to save registry data to.
    This file cannot already exist. If this filename includes an extension,
    it cannot be used on file allocation table (FAT) file systems by the
    LoadKey(), ReplaceKey() or RestoreKey() methods.
    If key represents a key on a remote computer, the path described by
    file_name is relative to the remote computer.
    The caller of this method must possess the SeBackupPrivilege security
    privilege. This function passes NULL for security_attributes to the API."""
    hkey = hkey_w(w_hkey, space)
    with rffi.scoped_unicode2wcharp(filename) as filename_buf:
        c_filename = rffi.cast(rffi.CWCHARP, filename_buf)
        # security_attributes is always NULL, as documented above.
        err = rwinreg.RegSaveKeyW(hkey, c_filename, None)
    if err != 0:
        raiseWindowsError(space, err, 'RegSaveKey')
@unwrap_spec(typ=int)
def SetValue(space, w_hkey, w_subkey, typ, w_value):
    """
    SetValue(key, sub_key, type, value) - Associates a value with a specified key.
    key is an already open key, or any one of the predefined HKEY_* constants.
    sub_key is a string that names the subkey with which the value is associated.
    type is an integer that specifies the type of the data. Currently this
    must be REG_SZ, meaning only strings are supported.
    value is a string that specifies the new value.
    If the key specified by the sub_key parameter does not exist, the SetValue
    function creates it.
    Value lengths are limited by available memory. Long values (more than
    2048 bytes) should be stored as files with the filenames stored in
    the configuration registry. This helps the registry perform efficiently.
    The key identified by the key parameter must have been opened with
    KEY_SET_VALUE access."""
    if typ != rwinreg.REG_SZ:
        raise oefmt(space.w_ValueError, "Type must be winreg.REG_SZ")
    hkey = hkey_w(w_hkey, space)
    state = space.fromcache(CodecState)
    errh = state.encode_error_handler
    # Append an explicit NUL before encoding so the registry string is
    # NUL-terminated; the UTF-16 encoder prefixes a 2-byte BOM.
    utf8 = space.utf8_w(w_subkey)
    subkeyW = utf8_encode_utf_16(utf8 + '\x00', 'strict', errh, allow_surrogates=True)
    utf8 = space.utf8_w(w_value)
    valueW = utf8_encode_utf_16(utf8 + '\x00', 'strict', errh, allow_surrogates=True)
    # NOTE(review): length in code points, not bytes; RegSetValue's cbData
    # handling is lenient, but confirm this matches what CPython passes.
    valueL = space.len_w(w_value)
    # Add an offset to remove the BOM from the native utf16 wstr
    with rffi.scoped_nonmovingbuffer(subkeyW) as subkeyP0:
        subkeyP = rffi.cast(rffi.CWCHARP, rffi.ptradd(subkeyP0, 2))
        with rffi.scoped_nonmovingbuffer(valueW) as valueP0:
            valueP = rffi.cast(rffi.CWCHARP, rffi.ptradd(valueP0, 2))
            ret = rwinreg.RegSetValueW(hkey, subkeyP, rwinreg.REG_SZ,
                                       valueP, valueL)
            if ret != 0:
                raiseWindowsError(space, ret, 'RegSetValue')
def QueryValue(space, w_hkey, w_subkey):
    """
    string = QueryValue(key, sub_key) - retrieves the unnamed value for a key.
    key is an already open key, or any one of the predefined HKEY_* constants.
    sub_key is a string that holds the name of the subkey with which the value
    is associated. If this parameter is None or empty, the function retrieves
    the value set by the SetValue() method for the key identified by key.
    Values in the registry have name, type, and data components. This method
    retrieves the data for a key's first value that has a NULL name.
    But the underlying API call doesn't return the type: Lame, DONT USE THIS!!!"""
    hkey = hkey_w(w_hkey, space)
    if space.is_w(w_subkey, space.w_None):
        subkey = None
    else:
        subkey = space.utf8_w(w_subkey).decode('utf8')
    with rffi.scoped_unicode2wcharp(subkey) as wide_subkey:
        c_subkey = rffi.cast(rffi.CWCHARP, wide_subkey)
        with lltype.scoped_alloc(rwin32.PLONG.TO, 1) as bufsize_p:
            bufsize_p[0] = rffi.cast(rwin32.LONG, 0)
            # First call with a NULL buffer only probes the required size.
            ret = rwinreg.RegQueryValueW(hkey, c_subkey, None, bufsize_p)
            bufSize = intmask(bufsize_p[0])
            if ret == rwinreg.ERROR_MORE_DATA:
                # Size probe unreliable (e.g. dynamically-sized values):
                # start from an arbitrary size and grow in the loop below.
                bufSize = 256
            elif ret != 0:
                raiseWindowsError(space, ret, 'RegQueryValue')
            while True:
                buf = ByteBuffer(bufSize)
                bufP = rffi.cast(rffi.CWCHARP, buf.get_raw_address())
                ret = rwinreg.RegQueryValueW(hkey, c_subkey, bufP, bufsize_p)
                if ret == rwinreg.ERROR_MORE_DATA:
                    # Resize and retry
                    bufSize *= 2
                    bufsize_p[0] = rffi.cast(rwin32.LONG, bufSize)
                    continue
                if ret != 0:
                    raiseWindowsError(space, ret, 'RegQueryValue')
                # bufsize_p[0] now holds the actual data size in bytes
                # (UTF-16, including the terminating NUL).
                utf8, lgt = wbuf_to_utf8(space, buf[0:intmask(bufsize_p[0])])
                return space.newtext(utf8, lgt)
def convert_to_regdata(space, w_value, typ):
    '''Convert an app-level value into raw registry data.

    Returns (buf, buflen): buf is a raw buffer cast to rffi.CWCHARP, owned
    by the caller (free with lltype.free unless it is the null pointer),
    and buflen is the data size in bytes.

    Raises app-level ValueError if w_value cannot be converted to typ.
    '''
    buf = None
    if typ == rwinreg.REG_DWORD:
        if space.is_none(w_value) or space.isinstance_w(w_value, space.w_int):
            # None is stored as 0, matching CPython's winreg.
            if space.is_none(w_value):
                value = r_uint(0)
            else:
                value = space.c_uint_w(w_value)
            buflen = rffi.sizeof(rwin32.DWORD)
            buf1 = lltype.malloc(rffi.CArray(rwin32.DWORD), 1, flavor='raw')
            buf1[0] = rffi.cast(rffi.UINT, value)
            buf = rffi.cast(rffi.CCHARP, buf1)
    elif typ == rwinreg.REG_QWORD:
        if space.is_none(w_value) or space.isinstance_w(w_value, space.w_int):
            if space.is_none(w_value):
                value = r_ulonglong(0)
            else:
                value = space.r_ulonglong_w(w_value)
            buflen = rffi.sizeof(rffi.ULONGLONG)
            buf1 = lltype.malloc(rffi.CArray(rffi.ULONGLONG), 1, flavor='raw')
            buf1[0] = rffi.cast(rffi.ULONGLONG, value)
            buf = rffi.cast(rffi.CCHARP, buf1)
    elif typ == rwinreg.REG_SZ or typ == rwinreg.REG_EXPAND_SZ:
        if space.is_w(w_value, space.w_None):
            # NOTE(review): a single NUL byte for a wide-string type looks
            # short (CPython stores a full NUL wchar) -- confirm intent.
            buflen = 1
            buf = lltype.malloc(rffi.CCHARP.TO, buflen, flavor='raw')
            buf[0] = '\0'
        else:
            buf = rffi.unicode2wcharp(space.utf8_w(w_value).decode('utf8'))
            buf = rffi.cast(rffi.CCHARP, buf)
            # Size in bytes including the terminating NUL wchar, like
            # CPython's (wcslen(str) + 1) * sizeof(wchar_t).  The previous
            # "(len * 2) + 1" was an odd byte count that chopped the
            # terminator in half.
            # NOTE(review): len_w counts code points; assumes none of them
            # need a UTF-16 surrogate pair -- TODO confirm.
            buflen = (space.len_w(w_value) + 1) * 2
    elif typ == rwinreg.REG_MULTI_SZ:
        if space.is_w(w_value, space.w_None):
            buflen = 1
            buf = lltype.malloc(rffi.CCHARP.TO, buflen, flavor='raw')
            buf[0] = '\0'
        elif space.isinstance_w(w_value, space.w_list):
            strings = []
            buflen = 0
            # unwrap strings and compute total size: each string is stored
            # NUL-terminated, and the whole list ends with an extra NUL wchar.
            w_iter = space.iter(w_value)
            while True:
                try:
                    w_item = space.next(w_iter)
                    item = space.utf8_w(w_item).decode('utf8')
                    strings.append(item)
                    buflen += 2 * (len(item) + 1)
                except OperationError as e:
                    if not e.match(space, space.w_StopIteration):
                        raise       # re-raise other app-level exceptions
                    break
            buflen += 2
            buf = lltype.malloc(rffi.CCHARP.TO, buflen, flavor='raw')
            # Now copy data
            buflen = 0
            for string in strings:
                with rffi.scoped_unicode2wcharp(string) as wchr:
                    c_str = rffi.cast(rffi.CCHARP, wchr)
                    for i in range(len(string) * 2):
                        buf[buflen + i] = c_str[i]
                    buflen += (len(string) + 1) * 2
                    # NUL-terminate this entry (both bytes of the wchar).
                    buf[buflen - 1] = '\0'
                    buf[buflen - 2] = '\0'
            # Final list-terminating NUL wchar.
            buflen += 2
            buf[buflen - 1] = '\0'
            buf[buflen - 2] = '\0'
    else: # REG_BINARY and ALL unknown data types.
        if space.is_w(w_value, space.w_None):
            buflen = 0
            buf = lltype.nullptr(rffi.CCHARP.TO)
        else:
            try:
                value = w_value.buffer_w(space, space.BUF_SIMPLE)
            except BufferInterfaceNotFound:
                raise oefmt(space.w_TypeError,
                            "Objects of type '%T' can not be used as binary "
                            "registry values", w_value)
            else:
                value = value.as_str()
            buflen = len(value)
            buf = rffi.str2charp(value)
    if buf is not None:
        return rffi.cast(rffi.CWCHARP, buf), buflen
    raise oefmt(space.w_ValueError,
                "Could not convert the data to the specified type")
def wbuf_to_utf8(space, wbuf):
    """Decode a raw UTF-16 byte string into (utf8, codepoint_length),
    stripping at most one trailing NUL character."""
    state = space.fromcache(CodecState)
    errh = state.decode_error_handler
    utf8, lgt, pos = str_decode_utf_16(wbuf, 'surrogatepass', final=True,
                                       errorhandler=errh)
    n = len(utf8)
    if n > 1 and utf8[n - 1] == '\x00':
        end = n - 1
        assert end >= 0      # annotator hint for the slice below
        utf8 = utf8[0:end]
        lgt -= 1
    return utf8, lgt
def convert_from_regdata(space, buf, buflen, typ):
    """Convert raw registry data (a ByteBuffer of buflen bytes) into an
    app-level object according to the registry type ``typ``."""
    if typ == rwinreg.REG_DWORD:
        if not buflen:
            return space.newint(0)
        d = rffi.cast(rwin32.LPDWORD, buf.get_raw_address())[0]
        return space.newint(d)
    elif typ == rwinreg.REG_QWORD:
        if not buflen:
            return space.newint(0)
        d = rffi.cast(rffi.ULONGLONGP, buf.get_raw_address())[0]
        return space.newint(d)
    elif typ == rwinreg.REG_SZ or typ == rwinreg.REG_EXPAND_SZ:
        if not buflen:
            return space.newtext('', 0)
        # Drop a trailing odd byte: data must be whole UTF-16 units.
        even = (buflen // 2) * 2
        utf8, lgt = wbuf_to_utf8(space, buf[0:even])
        # bpo-25778, truncate at first NULL to match reg.exe behaviour.
        i = 0
        utf8len = len(utf8)
        while i < utf8len:
            if utf8[i] == '\x00':
                utf8 = utf8[0:i]
                lgt = check_utf8(utf8, True)
                break
            i += 1
        w_s = space.newtext(utf8, lgt)
        return w_s
    elif typ == rwinreg.REG_MULTI_SZ:
        if not buflen:
            return space.newlist([])
        even = (buflen // 2) * 2
        utf8, lgt = wbuf_to_utf8(space, buf[0:even])
        # Entries are NUL-separated and the list is NUL-terminated, so the
        # final separator produces an empty trailing part: drop it.
        parts = rstring.split(utf8, '\0')
        partslen = len(parts)
        if partslen > 0 and parts[partslen-1] == '':
            partslen -= 1
        ret = []
        i = 0
        while i < partslen:
            lgt = check_utf8(parts[i], True)
            ret.append(space.newtext(parts[i], lgt))
            i += 1
        return space.newlist(ret)
    else: # REG_BINARY and all other types
        if buflen == 0:
            return space.w_None
        else:
            return space.newbytes(buf[0:buflen])
@unwrap_spec(value_name="unicode", typ=int)
def SetValueEx(space, w_hkey, value_name, w_reserved, typ, w_value):
    """
    SetValueEx(key, value_name, reserved, type, value) - Stores data in the value
    field of an open registry key.
    key is an already open key, or any one of the predefined HKEY_* constants.
    value_name is a string containing the name of the value to set, or None
    type is an integer that specifies the type of the data. This should be one of:
      REG_BINARY -- Binary data in any form.
      REG_DWORD -- A 32-bit number.
      REG_DWORD_LITTLE_ENDIAN -- A 32-bit number in little-endian format.
      REG_DWORD_BIG_ENDIAN -- A 32-bit number in big-endian format.
      REG_QWORD -- A 64-bit number.
      REG_QWORD_LITTLE_ENDIAN -- A 64-bit number in little-endian format.
      REG_EXPAND_SZ -- A null-terminated string that contains unexpanded references
                       to environment variables (for example, %PATH%).
      REG_LINK -- A Unicode symbolic link.
      REG_MULTI_SZ -- An sequence of null-terminated strings, terminated by
                      two null characters.  Note that Python handles this
                      termination automatically.
      REG_NONE -- No defined value type.
      REG_RESOURCE_LIST -- A device-driver resource list.
      REG_SZ -- A null-terminated string.
    reserved can be anything - zero is always passed to the API.
    value is a string that specifies the new value.
    This method can also set additional value and type information for the
    specified key.  The key identified by the key parameter must have been
    opened with KEY_SET_VALUE access.
    To open the key, use the CreateKeyEx() or OpenKeyEx() methods.
    Value lengths are limited by available memory. Long values (more than
    2048 bytes) should be stored as files with the filenames stored in
    the configuration registry.  This helps the registry perform efficiently."""
    hkey = hkey_w(w_hkey, space)
    # convert_to_regdata hands us a raw buffer we own; the try/finally
    # guarantees it is freed no matter what RegSetValueExW does.
    buf, buflen = convert_to_regdata(space, w_value, typ)
    try:
        with rffi.scoped_unicode2wcharp(value_name) as wide_vn:
            c_vn = rffi.cast(rffi.CWCHARP, wide_vn)
            ret = rwinreg.RegSetValueExW(hkey, c_vn, 0, typ, buf, buflen)
    finally:
        # buf may legitimately be the null pointer (None + REG_BINARY).
        if buf != lltype.nullptr(rffi.CWCHARP.TO):
            lltype.free(buf, flavor='raw')
    if ret != 0:
        raiseWindowsError(space, ret, 'RegSetValueEx')
def QueryValueEx(space, w_hkey, w_subkey):
    """
    value,type_id = QueryValueEx(key, value_name) - Retrieves the type and data for
    a specified value name associated with an open registry key.
    key is an already open key, or any one of the predefined HKEY_* constants.
    value_name is a string indicating the value to query"""
    hkey = hkey_w(w_hkey, space)
    if space.is_w(w_subkey, space.w_None):
        subkey = None
    else:
        subkey = space.utf8_w(w_subkey).decode('utf8')
    null_dword = lltype.nullptr(rwin32.LPDWORD.TO)
    with rffi.scoped_unicode2wcharp(subkey) as wide_subkey:
        c_subkey = rffi.cast(rffi.CWCHARP, wide_subkey)
        with lltype.scoped_alloc(rwin32.LPDWORD.TO, 1) as dataSize:
            # First call with a NULL data buffer only probes the size.
            ret = rwinreg.RegQueryValueExW(hkey, c_subkey, null_dword,
                                           null_dword, None, dataSize)
            bufSize = intmask(dataSize[0])
            if ret == rwinreg.ERROR_MORE_DATA:
                # Copy CPython behaviour, otherwise bufSize can be 0
                bufSize = 256
            elif ret != 0:
                raiseWindowsError(space, ret, 'RegQueryValue')
            while True:
                dataBuf = ByteBuffer(bufSize)
                dataBufP = rffi.cast(rffi.CWCHARP, dataBuf.get_raw_address())
                with lltype.scoped_alloc(rwin32.LPDWORD.TO, 1) as retType:
                    ret = rwinreg.RegQueryValueExW(hkey, c_subkey, null_dword,
                                                   retType, dataBufP, dataSize)
                    if ret == rwinreg.ERROR_MORE_DATA:
                        # Resize and retry
                        bufSize *= 2
                        dataSize[0] = rffi.cast(rwin32.DWORD, bufSize)
                        continue
                    if ret != 0:
                        raiseWindowsError(space, ret, 'RegQueryValueEx')
                    # dataSize/retType now hold the final byte length and
                    # the registry type of the value.
                    length = intmask(dataSize[0])
                    ret_type = intmask(retType[0])
                    return space.newtuple([
                        convert_from_regdata(space, dataBuf,
                                             length, ret_type),
                        space.newint(intmask(ret_type)),
                    ])
@unwrap_spec(subkey="unicode")
def CreateKey(space, w_hkey, subkey):
    """key = CreateKey(key, sub_key) - Creates or opens the specified key.
    key is an already open key, or one of the predefined HKEY_* constants
    sub_key is a string that names the key this method opens or creates.
    If key is one of the predefined keys, sub_key may be None. In that case,
    the handle returned is the same key handle passed in to the function.
    If the key already exists, this function opens the existing key
    The return value is the handle of the opened key.
    If the function fails, an exception is raised."""
    hkey = hkey_w(w_hkey, space)
    with lltype.scoped_alloc(rwinreg.PHKEY.TO, 1) as new_key_p:
        with rffi.scoped_unicode2wcharp(subkey) as subkey_buf:
            c_subkey = rffi.cast(rffi.CWCHARP, subkey_buf)
            err = rwinreg.RegCreateKeyW(hkey, c_subkey, new_key_p)
            if err != 0:
                raiseWindowsError(space, err, 'CreateKey')
        return W_HKEY(space, new_key_p[0])
@unwrap_spec(sub_key="unicode", reserved=int, access=r_uint)
def CreateKeyEx(space, w_key, sub_key, reserved=0, access=rwinreg.KEY_WRITE):
    """key = CreateKey(key, sub_key) - Creates or opens the specified key.
    key is an already open key, or one of the predefined HKEY_* constants
    sub_key is a string that names the key this method opens or creates.
    If key is one of the predefined keys, sub_key may be None. In that case,
    the handle returned is the same key handle passed in to the function.
    If the key already exists, this function opens the existing key
    The return value is the handle of the opened key.
    If the function fails, an exception is raised."""
    hkey = hkey_w(w_key, space)
    with rffi.scoped_unicode2wcharp(sub_key) as subkey_buf:
        c_subkey = rffi.cast(rffi.CWCHARP, subkey_buf)
        with lltype.scoped_alloc(rwinreg.PHKEY.TO, 1) as new_key_p:
            # lpClass, dwOptions and the security attributes are not
            # exposed; the lpdwDisposition out-parameter is discarded.
            err = rwinreg.RegCreateKeyExW(hkey, c_subkey, reserved, None, 0,
                                          access, None, new_key_p,
                                          lltype.nullptr(rwin32.LPDWORD.TO))
            if err != 0:
                raiseWindowsError(space, err, 'CreateKeyEx')
            return W_HKEY(space, new_key_p[0])
@unwrap_spec(subkey="unicode")
def DeleteKey(space, w_hkey, subkey):
    """
    DeleteKey(key, subkey) - Deletes the specified key.
    key is an already open key, or any one of the predefined HKEY_* constants.
    sub_key is a string that must be a subkey of the key identified by the key
    parameter. This value must not be None, and the key may not have subkeys.
    This method can not delete keys with subkeys.
    If the method succeeds, the entire key, including all of its values,
    is removed. If the method fails, an EnvironmentError exception is raised."""
    hkey = hkey_w(w_hkey, space)
    with rffi.scoped_unicode2wcharp(subkey) as subkey_buf:
        err = rwinreg.RegDeleteKeyW(hkey, rffi.cast(rffi.CWCHARP, subkey_buf))
    if err != 0:
        raiseWindowsError(space, err, 'RegDeleteKey')
@unwrap_spec(subkey="unicode")
def DeleteValue(space, w_hkey, subkey):
    """DeleteValue(key, value) - Removes a named value from a registry key.
    key is an already open key, or any one of the predefined HKEY_* constants.
    value is a string that identifies the value to remove."""
    hkey = hkey_w(w_hkey, space)
    with rffi.scoped_unicode2wcharp(subkey) as name_buf:
        err = rwinreg.RegDeleteValueW(hkey, rffi.cast(rffi.CWCHARP, name_buf))
    if err != 0:
        raiseWindowsError(space, err, 'RegDeleteValue')
@unwrap_spec(reserved=int, access=r_uint)
def OpenKey(space, w_key, w_sub_key, reserved=0, access=rwinreg.KEY_READ):
    """
    key = OpenKey(key, sub_key, res = 0, sam = KEY_READ) - Opens the specified key.
    key is an already open key, or any one of the predefined HKEY_* constants.
    sub_key is a string that identifies the sub_key to open
    res is a reserved integer, and must be zero.  Default is zero.
    sam is an integer that specifies an access mask that describes the desired
    security access for the key.  Default is KEY_READ
    The result is a new handle to the specified key
    If the function fails, an EnvironmentError exception is raised."""
    hkey = hkey_w(w_key, space)
    utf8 = space.utf8_w(w_sub_key)
    state = space.fromcache(CodecState)
    errh = state.encode_error_handler
    # Encode with an explicit trailing NUL; the encoder emits a 2-byte BOM
    # which is skipped via ptradd below.
    subkeyW = utf8_encode_utf_16(utf8 + '\x00', 'strict', errh, allow_surrogates=True)
    with rffi.scoped_nonmovingbuffer(subkeyW) as subkeyP0:
        subkeyP = rffi.cast(rffi.CWCHARP, rffi.ptradd(subkeyP0, 2))
        with lltype.scoped_alloc(rwinreg.PHKEY.TO, 1) as rethkey:
            ret = rwinreg.RegOpenKeyExW(hkey, subkeyP, reserved, access,
                                        rethkey)
            if ret != 0:
                raiseWindowsError(space, ret, 'RegOpenKeyEx')
            return W_HKEY(space, rethkey[0])
@unwrap_spec(index=int)
def EnumValue(space, w_hkey, index):
    """tuple = EnumValue(key, index) - Enumerates values of an open registry key.
    key is an already open key, or any one of the predefined HKEY_* constants.
    index is an integer that identifies the index of the value to retrieve.
    The function retrieves the name of one subkey each time it is called.
    It is typically called repeatedly, until an EnvironmentError exception
    is raised, indicating no more values.
    The result is a tuple of 3 items:
    value_name is a string that identifies the value.
    value_data is an object that holds the value data, and whose type depends
    on the underlying registry type.
    data_type is an integer that identifies the type of the value data."""
    hkey = hkey_w(w_hkey, space)
    null_dword = lltype.nullptr(rwin32.LPDWORD.TO)
    with lltype.scoped_alloc(rwin32.LPDWORD.TO, 1) as valueSize:
        with lltype.scoped_alloc(rwin32.LPDWORD.TO, 1) as dataSize:
            # Ask the key for its longest value-name and data lengths so
            # the buffers below start out big enough.
            ret = rwinreg.RegQueryInfoKeyW(
                hkey, None, null_dword, null_dword,
                null_dword, null_dword, null_dword,
                null_dword, valueSize, dataSize,
                null_dword, lltype.nullptr(rwin32.PFILETIME.TO))
            if ret != 0:
                raiseWindowsError(space, ret, 'RegQueryInfoKey')
            # include null terminators
            valueSize[0] = rffi.cast(rwin32.DWORD, intmask(valueSize[0]) + 1)
            dataSize[0] = rffi.cast(rwin32.DWORD, intmask(dataSize[0]) + 1)
            bufDataSize = intmask(dataSize[0])
            # Name length is in characters; the byte buffer needs 2x.
            bufValueSize = intmask(valueSize[0]) * 2
            valueBuf = ByteBuffer(bufValueSize)
            valueBufP = rffi.cast(rffi.CWCHARP, valueBuf.get_raw_address())
            while True:
                dataBuf = ByteBuffer(bufDataSize)
                dataBufP = rffi.cast(rffi.CCHARP, dataBuf.get_raw_address())
                with lltype.scoped_alloc(rwin32.LPDWORD.TO,
                                         1) as retType:
                    ret = rwinreg.RegEnumValueW(
                        hkey, index, valueBufP, valueSize,
                        null_dword, retType, dataBufP, dataSize)
                    if ret == rwinreg.ERROR_MORE_DATA:
                        # Resize and retry. For dynamic keys, the value of
                        # dataSize[0] is useless (always 1) so do what CPython
                        # does, except they use 2 instead of 4
                        bufDataSize *= 4
                        dataSize[0] = rffi.cast(rwin32.DWORD,
                                                bufDataSize)
                        valueSize[0] = rffi.cast(rwin32.DWORD,
                                                 bufValueSize)
                        continue
                    if ret != 0:
                        raiseWindowsError(space, ret, 'RegEnumValue')
                    # dataSize now holds the data length in bytes; valueSize
                    # the name length in characters (sans NUL).
                    length = intmask(dataSize[0])
                    vlen = (intmask(valueSize[0]) + 1) * 2
                    utf8v, lenv = wbuf_to_utf8(space, valueBuf[0:vlen])
                    ret_type = intmask(retType[0])
                    return space.newtuple([
                        space.newtext(utf8v, lenv),
                        convert_from_regdata(space, dataBuf,
                                             length, ret_type),
                        space.newint(ret_type),
                    ])
@unwrap_spec(index=int)
def EnumKey(space, w_hkey, index):
    """string = EnumKey(key, index) - Enumerates subkeys of an open registry key.
    key is an already open key, or any one of the predefined HKEY_* constants.
    index is an integer that identifies the index of the key to retrieve.
    The function retrieves the name of one subkey each time it is called.
    It is typically called repeatedly until an EnvironmentError exception is
    raised, indicating no more values are available."""
    hkey = hkey_w(w_hkey, space)
    null_dword = lltype.nullptr(rwin32.LPDWORD.TO)
    # The Windows docs claim that the max key name length is 255
    # characters, plus a terminating nul character.  However,
    # empirical testing demonstrates that it is possible to
    # create a 256 character key that is missing the terminating
    # nul.  RegEnumKeyEx requires a 257 character buffer to
    # retrieve such a key name.
    buf = ByteBuffer(257 * 2)
    bufP = rffi.cast(rwin32.LPWSTR, buf.get_raw_address())
    with lltype.scoped_alloc(rwin32.LPDWORD.TO, 1) as valueSize:
        valueSize[0] = rffi.cast(rwin32.DWORD, 257) # includes NULL terminator
        ret = rwinreg.RegEnumKeyExW(hkey, index, bufP, valueSize,
                                    null_dword, None, null_dword,
                                    lltype.nullptr(rwin32.PFILETIME.TO))
        if ret != 0:
            raiseWindowsError(space, ret, 'RegEnumKeyEx')
        # valueSize[0] is the name length in characters (sans NUL);
        # convert to bytes for the UTF-16 decode.
        vlen = intmask(valueSize[0]) * 2
        utf8, lgt = wbuf_to_utf8(space, buf[0:vlen])
        return space.newtext(utf8, lgt)
def QueryInfoKey(space, w_hkey):
    """tuple = QueryInfoKey(key) - Returns information about a key.
    key is an already open key, or any one of the predefined HKEY_* constants.
    The result is a tuple of 3 items:
    An integer that identifies the number of sub keys this key has.
    An integer that identifies the number of values this key has.
    A long integer that identifies when the key was last modified (if available)
    as 100's of nanoseconds since Jan 1, 1600."""
    hkey = hkey_w(w_hkey, space)
    with lltype.scoped_alloc(rwin32.LPDWORD.TO, 1) as nSubKeys:
        with lltype.scoped_alloc(rwin32.LPDWORD.TO, 1) as nValues:
            with lltype.scoped_alloc(rwin32.PFILETIME.TO, 1) as ft:
                null_dword = lltype.nullptr(rwin32.LPDWORD.TO)
                ret = rwinreg.RegQueryInfoKeyW(
                    hkey, None, null_dword, null_dword,
                    nSubKeys, null_dword, null_dword,
                    nValues, null_dword, null_dword,
                    null_dword, ft)
                if ret != 0:
                    raiseWindowsError(space, ret, 'RegQueryInfoKey')
                # Reassemble the FILETIME halves into one 64-bit tick count
                # (100ns units since Jan 1, 1600).
                lgt = ((lltype.r_longlong(ft[0].c_dwHighDateTime) << 32) +
                       lltype.r_longlong(ft[0].c_dwLowDateTime))
                return space.newtuple([space.newint(nSubKeys[0]),
                                       space.newint(nValues[0]),
                                       space.newint(lgt)])
def ConnectRegistry(space, w_machine, w_hkey):
    """
    key = ConnectRegistry(computer_name, key)
    Establishes a connection to a predefined registry handle on another computer.
    computer_name is the name of the remote computer, of the form \\\\computername.
     If None, the local computer is used.
    key is the predefined handle to connect to.
    The return value is the handle of the opened key.
    If the function fails, an EnvironmentError exception is raised."""
    hkey = hkey_w(w_hkey, space)
    if space.is_none(w_machine):
        # Local machine: pass NULL for the machine name.
        with lltype.scoped_alloc(rwinreg.PHKEY.TO, 1) as rethkey:
            ret = rwinreg.RegConnectRegistryW(None, hkey, rethkey)
            if ret != 0:
                raiseWindowsError(space, ret, 'RegConnectRegistry')
            return W_HKEY(space, rethkey[0])
    else:
        utf8 = space.utf8_w(w_machine)
        state = space.fromcache(CodecState)
        errh = state.encode_error_handler
        # NUL-terminate before encoding; the encoder emits a 2-byte BOM.
        machineW = utf8_encode_utf_16(utf8 + '\x00', 'strict', errh, allow_surrogates=True)
        with rffi.scoped_nonmovingbuffer(machineW) as machineP0:
            # Skip the BOM to get a native UTF-16 wide string.
            machineP = rffi.cast(rwin32.LPWSTR, rffi.ptradd(machineP0, 2))
            with lltype.scoped_alloc(rwinreg.PHKEY.TO, 1) as rethkey:
                ret = rwinreg.RegConnectRegistryW(machineP, hkey, rethkey)
                if ret != 0:
                    raiseWindowsError(space, ret, 'RegConnectRegistry')
                return W_HKEY(space, rethkey[0])
def ExpandEnvironmentStrings(space, w_source):
    "string = ExpandEnvironmentStrings(string) - Expand environment vars."
    source, source_ulen = space.utf8_len_w(w_source)
    # Keep the try narrow: only the Win32 call can raise WindowsError.
    try:
        expanded, expanded_ulen = rwinreg.ExpandEnvironmentStrings(
            source, source_ulen)
    except WindowsError as e:
        raise wrap_oserror(space, e)
    return space.newutf8(expanded, expanded_ulen)
class ReflectionFunction(object):
    """A lazily-resolved advapi32 entry point.

    Some Reg* functions are only present on 64-bit Windows, so they are
    looked up at runtime with dlsym; check() reports availability and
    call() invokes the function through the matching C shim wrapper.
    """
    def __init__(self, name, stdcall_wrapper):
        self.name = name
        self.handle = lltype.nullptr(rffi.VOIDP.TO)
        self.wrapper = stdcall_wrapper

    def check(self):
        """Resolve the symbol on first use; return True if available."""
        if self.handle != lltype.nullptr(rffi.VOIDP.TO):
            return True
        from rpython.rlib.rdynload import GetModuleHandle, dlsym
        lib = GetModuleHandle("advapi32.dll")
        try:
            func = dlsym(lib, self.name)
        except KeyError:
            return False
        self.handle = func
        return True

    def call(self, *args):
        """Invoke the resolved function; check() must have succeeded."""
        assert self.handle != lltype.nullptr(rffi.VOIDP.TO)
        return self.wrapper(self.handle, *args)
# These advapi32 entry points exist only on 64-bit Windows, so they are
# resolved lazily at runtime instead of being linked directly.
_RegDisableReflectionKey = ReflectionFunction(
    "RegDisableReflectionKey", pypy_RegChangeReflectionKey)
_RegEnableReflectionKey = ReflectionFunction(
    "RegEnableReflectionKey", pypy_RegChangeReflectionKey)
_RegQueryReflectionKey = ReflectionFunction(
    "RegQueryReflectionKey", pypy_RegQueryReflectionKey)
_RegDeleteKeyExW = ReflectionFunction("RegDeleteKeyExW", pypy_RegDeleteKeyExW)
def DisableReflectionKey(space, w_key):
    """Disables registry reflection for 32-bit processes running on a 64-bit
    Operating System.  Will generally raise NotImplemented if executed on
    a 32-bit Operating System.
    If the key is not on the reflection list, the function succeeds but has no
    effect.  Disabling reflection for a key does not affect reflection of any
    subkeys."""
    # Guard clause: the API is absent on 32-bit Windows.
    if not _RegDisableReflectionKey.check():
        raise oefmt(space.w_NotImplementedError,
                    "not implemented on this platform")
    hkey = hkey_w(w_key, space)
    err = _RegDisableReflectionKey.call(hkey)
    if err != 0:
        raiseWindowsError(space, err, 'RegDisableReflectionKey')
def EnableReflectionKey(space, w_key):
    """Restores registry reflection for the specified disabled key.
    Will generally raise NotImplemented if executed on a 32-bit Operating
    System.  Restoring reflection for a key does not affect reflection of any
    subkeys."""
    # Guard clause: the API is absent on 32-bit Windows.
    if not _RegEnableReflectionKey.check():
        raise oefmt(space.w_NotImplementedError,
                    "not implemented on this platform")
    hkey = hkey_w(w_key, space)
    err = _RegEnableReflectionKey.call(hkey)
    if err != 0:
        raiseWindowsError(space, err, 'RegEnableReflectionKey')
def QueryReflectionKey(space, w_key):
    """bool = QueryReflectionKey(hkey) - Determines the reflection state for
    the specified key.  Will generally raise NotImplemented if executed on a
    32-bit Operating System."""
    # Guard clause: the API is absent on 32-bit Windows.
    if not _RegQueryReflectionKey.check():
        raise oefmt(space.w_NotImplementedError,
                    "not implemented on this platform")
    hkey = hkey_w(w_key, space)
    with lltype.scoped_alloc(rwin32.LPBOOL.TO, 1) as disabled_p:
        err = _RegQueryReflectionKey.call(hkey, disabled_p)
        if err != 0:
            raiseWindowsError(space, err, 'RegQueryReflectionKey')
        return space.newbool(intmask(disabled_p[0]) != 0)
@unwrap_spec(sub_key="unicode", access=r_uint, reserved=int)
def DeleteKeyEx(space, w_key, sub_key, access=rwinreg.KEY_WOW64_64KEY, reserved=0):
    """DeleteKeyEx(key, sub_key, sam, res) - Deletes the specified key.
    key is an already open key, or any one of the predefined HKEY_* constants.
    sub_key is a string that must be a subkey of the key identified by the key
    parameter.
    res is a reserved integer, and must be zero.  Default is zero.
    sam is an integer that specifies an access mask that describes the desired
    This value must not be None, and the key may not have subkeys.
    This method can not delete keys with subkeys.
    If the method succeeds, the entire key, including all of its values,
    is removed.  If the method fails, a WindowsError exception is raised.
    On unsupported Windows versions, NotImplementedError is raised."""
    # Guard clause: RegDeleteKeyExW only exists on 64-bit Windows.
    if not _RegDeleteKeyExW.check():
        raise oefmt(space.w_NotImplementedError,
                    "not implemented on this platform")
    hkey = hkey_w(w_key, space)
    with rffi.scoped_unicode2wcharp(sub_key) as subkey_buf:
        c_subkey = rffi.cast(rffi.CWCHARP, subkey_buf)
        err = _RegDeleteKeyExW.call(hkey, c_subkey, access, reserved)
    if err != 0:
        raiseWindowsError(space, err, 'RegDeleteKeyEx')
| 41.527505 | 91 | 0.649415 | from rpython.rtyper.lltypesystem import rffi, lltype
from rpython.rlib import rwinreg, rwin32, rstring
from rpython.rlib.rarithmetic import r_uint, r_ulonglong, intmask
from rpython.rlib.buffer import ByteBuffer
from rpython.rlib.rutf8 import check_utf8
from pypy.interpreter.baseobjspace import W_Root, BufferInterfaceNotFound
from pypy.interpreter.gateway import interp2app, unwrap_spec
from pypy.interpreter.typedef import TypeDef, GetSetProperty
from pypy.interpreter.error import OperationError, oefmt, wrap_oserror
from pypy.interpreter.unicodehelper import (
str_decode_utf_16, utf8_encode_utf_16)
from pypy.module._codecs.interp_codecs import CodecState
from rpython.translator.tool.cbuild import ExternalCompilationInfo
eci = ExternalCompilationInfo(
includes=['windows.h'],
post_include_bits=[
"RPY_EXTERN LONG\n"
"pypy_RegChangeReflectionKey(FARPROC address, HKEY key);\n"
"RPY_EXTERN LONG\n"
"pypy_RegQueryReflectionKey(FARPROC address, HKEY key, LPBOOL isDisabled);\n"
"RPY_EXTERN LONG\n"
"pypy_RegDeleteKeyExW(FARPROC address, HKEY key, LPCWSTR subkey,\n"
" REGSAM sam, DWORD reserved);\n"
],
separate_module_sources=['''
LONG
pypy_RegChangeReflectionKey(FARPROC address, HKEY key) {
LONG (WINAPI *func)(HKEY);
*(FARPROC*)&func = address;
return func(key);
}
LONG
pypy_RegQueryReflectionKey(FARPROC address, HKEY key, LPBOOL isDisabled) {
LONG (WINAPI *func)(HKEY, LPBOOL);
*(FARPROC*)&func = address;
return func(key, isDisabled);
}
LONG
pypy_RegDeleteKeyExW(FARPROC address, HKEY key, LPCWSTR subkey,
REGSAM sam, DWORD reserved) {
LONG (WINAPI *func)(HKEY, LPCWSTR, REGSAM, DWORD);
*(FARPROC*)&func = address;
return func(key, subkey, sam, reserved);
}
'''],
)
# RPython-level declarations for the C shims defined in `eci` above.
pypy_RegChangeReflectionKey = rffi.llexternal(
    'pypy_RegChangeReflectionKey',
    [rffi.VOIDP, rwinreg.HKEY],
    rffi.LONG, compilation_info=eci)
pypy_RegQueryReflectionKey = rffi.llexternal(
    'pypy_RegQueryReflectionKey',
    [rffi.VOIDP, rwinreg.HKEY, rwin32.LPBOOL],
    rffi.LONG, compilation_info=eci)
pypy_RegDeleteKeyExW = rffi.llexternal(
    'pypy_RegDeleteKeyExW',
    [rffi.VOIDP, rwinreg.HKEY, rffi.CWCHARP, rwinreg.REGSAM, rwin32.DWORD],
    rffi.LONG, compilation_info=eci)
def raiseWindowsError(space, errcode, context):
    """Raise an app-level WindowsError built from a Win32 error code.

    ``context`` names the failing API for the caller's benefit; it is not
    used in the message here.
    """
    message = rwin32.FormatErrorW(errcode)
    w_code = space.newint(errcode)
    # (errno, strerror, filename, winerror) - the shape CPython builds.
    w_args = space.newtuple([w_code, space.newtext(*message),
                             space.w_None, w_code])
    raise OperationError(space.w_WindowsError, w_args)
class W_HKEY(W_Root):
    """App-level PyHKEY object wrapping a raw Win32 HKEY handle.

    The handle is closed automatically when the wrapper is finalized,
    or explicitly via Close()/CloseKey(); Detach() releases ownership.
    """
    def __init__(self, space, hkey):
        self.hkey = hkey
        self.space = space
        self.register_finalizer(space)
    def _finalize_(self):
        # Best-effort close during finalization; errors are deliberately
        # ignored (the interpreter is likely shutting the object down).
        try:
            self.Close(self.space)
        except:
            pass
    def as_int(self):
        # The handle as an unsigned machine word.
        return rffi.cast(rffi.SIZE_T, self.hkey)
    def descr_bool(self, space):
        # True while the handle is open (non-NULL).
        return space.newbool(self.as_int() != 0)
    def descr_handle_get(self, space):
        return space.newint(self.as_int())
    def descr_repr(self, space):
        return space.newtext("<PyHKEY:0x%x>" % (self.as_int(),))
    def descr_int(self, space):
        return space.newint(self.as_int())
    def descr__enter__(self, space):
        return self
    def descr__exit__(self, space, __args__):
        # Context-manager exit closes the key regardless of exceptions.
        CloseKey(space, self)
    def Close(self, space):
        # Closes the underlying Win32 handle and neutralizes the wrapper.
        CloseKey(space, self)
    def Detach(self, space):
        # Return the integer handle and give up ownership: the finalizer
        # will no longer close it.
        key = self.as_int()
        self.hkey = rwin32.NULL_HANDLE
        return space.newint(key)
@unwrap_spec(key=int)
def new_HKEY(space, w_subtype, key):
    """Construct a W_HKEY wrapper around a raw integer handle value."""
    return W_HKEY(space, rffi.cast(rwinreg.HKEY, key))
descr_HKEY_new = interp2app(new_HKEY)
W_HKEY.typedef = TypeDef(
"winreg.HKEYType",
__doc__="""\
PyHKEY Object - A Python object, representing a win32 registry key.
This object wraps a Windows HKEY object, automatically closing it when
the object is destroyed. To guarantee cleanup, you can call either
the Close() method on the PyHKEY, or the CloseKey() method.
All functions which accept a handle object also accept an integer -
however, use of the handle object is encouraged.
Functions:
Close() - Closes the underlying handle.
Detach() - Returns the integer Win32 handle, detaching it from the object
Properties:
handle - The integer Win32 handle.
Operations:
__bool__ - Handles with an open object return true, otherwise false.
__int__ - Converting a handle to an integer returns the Win32 handle.
__cmp__ - Handle objects are compared using the handle value.""",
__new__=descr_HKEY_new,
__repr__=interp2app(W_HKEY.descr_repr),
__int__=interp2app(W_HKEY.descr_int),
__bool__=interp2app(W_HKEY.descr_bool),
__enter__=interp2app(W_HKEY.descr__enter__),
__exit__=interp2app(W_HKEY.descr__exit__),
handle=GetSetProperty(W_HKEY.descr_handle_get),
Close=interp2app(W_HKEY.Close),
Detach=interp2app(W_HKEY.Detach),
)
def hkey_w(w_hkey, space):
    """Unwrap an app-level object (W_HKEY or int) into a raw HKEY.

    Raises app-level TypeError for None and any other unsupported type.
    """
    if space.is_w(w_hkey, space.w_None):
        raise oefmt(space.w_TypeError,
                    "None is not a valid HKEY in this context")
    elif isinstance(w_hkey, W_HKEY):
        return w_hkey.hkey
    elif space.isinstance_w(w_hkey, space.w_int):
        # Negative ints go through int_w (sign-extended); non-negative
        # ones through uint_w so full-width handle values are accepted.
        if space.is_true(space.lt(w_hkey, space.newint(0))):
            return rffi.cast(rwinreg.HKEY, space.int_w(w_hkey))
        return rffi.cast(rwinreg.HKEY, space.uint_w(w_hkey))
    else:
        raise oefmt(space.w_TypeError, "The object is not a PyHKEY object")
def CloseKey(space, w_hkey):
    """CloseKey(hkey) - close a previously opened registry key.

    Safe to call on an already-closed/detached W_HKEY (NULL handle).
    """
    hkey = hkey_w(w_hkey, space)
    if hkey:
        ret = rwinreg.RegCloseKey(hkey)
        if ret != 0:
            raiseWindowsError(space, ret, 'RegCloseKey')
    # Null the wrapped handle so the finalizer does not close it again.
    if isinstance(w_hkey, W_HKEY):
        space.interp_w(W_HKEY, w_hkey).hkey = rwin32.NULL_HANDLE
def FlushKey(space, w_hkey):
    """FlushKey(key) - write all attributes of a key to the registry."""
    hkey = hkey_w(w_hkey, space)
    if hkey:
        ret = rwinreg.RegFlushKey(hkey)
        if ret != 0:
            raiseWindowsError(space, ret, 'RegFlushKey')
@unwrap_spec(subkey="unicode", filename="unicode")
def LoadKey(space, w_hkey, subkey, filename):
    """LoadKey(key, sub_key, file_name) - load registry data from a file
    into the given subkey."""
    hkey = hkey_w(w_hkey, space)
    with rffi.scoped_unicode2wcharp(subkey) as wide_subkey:
        c_subkey = rffi.cast(rffi.CWCHARP, wide_subkey)
        with rffi.scoped_unicode2wcharp(filename) as wide_filename:
            c_filename = rffi.cast(rffi.CWCHARP, wide_filename)
            ret = rwinreg.RegLoadKeyW(hkey, c_subkey, c_filename)
            if ret != 0:
                raiseWindowsError(space, ret, 'RegLoadKey')
@unwrap_spec(filename="unicode")
def SaveKey(space, w_hkey, filename):
    """SaveKey(key, file_name) - save the key and its subkeys to a file.

    The third RegSaveKeyW argument (security attributes) is always NULL.
    """
    hkey = hkey_w(w_hkey, space)
    with rffi.scoped_unicode2wcharp(filename) as wide_filename:
        c_filename = rffi.cast(rffi.CWCHARP, wide_filename)
        ret = rwinreg.RegSaveKeyW(hkey, c_filename, None)
        if ret != 0:
            raiseWindowsError(space, ret, 'RegSaveKey')
@unwrap_spec(typ=int)
def SetValue(space, w_hkey, w_subkey, typ, w_value):
    """SetValue(key, sub_key, type, value) - set the default (unnamed)
    value of a key.  Only REG_SZ is supported, as in CPython."""
    if typ != rwinreg.REG_SZ:
        raise oefmt(space.w_ValueError, "Type must be winreg.REG_SZ")
    hkey = hkey_w(w_hkey, space)
    state = space.fromcache(CodecState)
    errh = state.encode_error_handler
    # Encode subkey and value as NUL-terminated UTF-16.
    utf8 = space.utf8_w(w_subkey)
    subkeyW = utf8_encode_utf_16(utf8 + '\x00', 'strict', errh, allow_surrogates=True)
    utf8 = space.utf8_w(w_value)
    valueW = utf8_encode_utf_16(utf8 + '\x00', 'strict', errh, allow_surrogates=True)
    valueL = space.len_w(w_value)
    with rffi.scoped_nonmovingbuffer(subkeyW) as subkeyP0:
        # ptradd(..., 2) skips the 2-byte BOM produced by the UTF-16
        # encoder (same trick is used in OpenKey/ConnectRegistry below).
        subkeyP = rffi.cast(rffi.CWCHARP, rffi.ptradd(subkeyP0, 2))
        with rffi.scoped_nonmovingbuffer(valueW) as valueP0:
            valueP = rffi.cast(rffi.CWCHARP, rffi.ptradd(valueP0, 2))
            ret = rwinreg.RegSetValueW(hkey, subkeyP, rwinreg.REG_SZ,
                                       valueP, valueL)
            if ret != 0:
                raiseWindowsError(space, ret, 'RegSetValue')
def QueryValue(space, w_hkey, w_subkey):
    """QueryValue(key, sub_key) - retrieve the default (unnamed) value
    of a key as a string."""
    hkey = hkey_w(w_hkey, space)
    if space.is_w(w_subkey, space.w_None):
        subkey = None
    else:
        subkey = space.utf8_w(w_subkey).decode('utf8')
    with rffi.scoped_unicode2wcharp(subkey) as wide_subkey:
        c_subkey = rffi.cast(rffi.CWCHARP, wide_subkey)
        with lltype.scoped_alloc(rwin32.PLONG.TO, 1) as bufsize_p:
            bufsize_p[0] = rffi.cast(rwin32.LONG, 0)
            # First call with a NULL buffer only queries the needed size.
            ret = rwinreg.RegQueryValueW(hkey, c_subkey, None, bufsize_p)
            bufSize = intmask(bufsize_p[0])
            if ret == rwinreg.ERROR_MORE_DATA:
                bufSize = 256
            elif ret != 0:
                raiseWindowsError(space, ret, 'RegQueryValue')
            while True:
                buf = ByteBuffer(bufSize)
                bufP = rffi.cast(rffi.CWCHARP, buf.get_raw_address())
                ret = rwinreg.RegQueryValueW(hkey, c_subkey, bufP, bufsize_p)
                if ret == rwinreg.ERROR_MORE_DATA:
                    # The value grew between the two calls: double the
                    # buffer and retry.
                    bufSize *= 2
                    bufsize_p[0] = rffi.cast(rwin32.LONG, bufSize)
                    continue
                if ret != 0:
                    raiseWindowsError(space, ret, 'RegQueryValue')
                utf8, lgt = wbuf_to_utf8(space, buf[0:intmask(bufsize_p[0])])
                return space.newtext(utf8, lgt)
def convert_to_regdata(space, w_value, typ):
    """Convert an app-level value into raw registry data.

    Returns (buf, buflen) where buf is a freshly malloc'ed raw buffer
    (cast to CWCHARP) that the CALLER must free, and buflen is its size
    in bytes.  Raises ValueError if the value does not fit the requested
    registry type 'typ'.
    """
    buf = None
    if typ == rwinreg.REG_DWORD:
        # None is treated as 0, like CPython.
        if space.is_none(w_value) or space.isinstance_w(w_value, space.w_int):
            if space.is_none(w_value):
                value = r_uint(0)
            else:
                value = space.c_uint_w(w_value)
            buflen = rffi.sizeof(rwin32.DWORD)
            buf1 = lltype.malloc(rffi.CArray(rwin32.DWORD), 1, flavor='raw')
            buf1[0] = rffi.cast(rffi.UINT, value)
            buf = rffi.cast(rffi.CCHARP, buf1)
    elif typ == rwinreg.REG_QWORD:
        if space.is_none(w_value) or space.isinstance_w(w_value, space.w_int):
            if space.is_none(w_value):
                value = r_ulonglong(0)
            else:
                value = space.r_ulonglong_w(w_value)
            buflen = rffi.sizeof(rffi.ULONGLONG)
            buf1 = lltype.malloc(rffi.CArray(rffi.ULONGLONG), 1, flavor='raw')
            buf1[0] = rffi.cast(rffi.ULONGLONG, value)
            buf = rffi.cast(rffi.CCHARP, buf1)
    elif typ == rwinreg.REG_SZ or typ == rwinreg.REG_EXPAND_SZ:
        if space.is_w(w_value, space.w_None):
            # NULL string: a single zero byte.
            buflen = 1
            buf = lltype.malloc(rffi.CCHARP.TO, buflen, flavor='raw')
            buf[0] = '\0'
        else:
            # unicode2wcharp allocates a NUL-terminated wide string.
            buf = rffi.unicode2wcharp(space.utf8_w(w_value).decode('utf8'))
            buf = rffi.cast(rffi.CCHARP, buf)
            # length in code points * 2 bytes, +1 kept from the
            # historical implementation.
            buflen = (space.len_w(w_value) * 2) + 1
    elif typ == rwinreg.REG_MULTI_SZ:
        if space.is_w(w_value, space.w_None):
            buflen = 1
            buf = lltype.malloc(rffi.CCHARP.TO, buflen, flavor='raw')
            buf[0] = '\0'
        elif space.isinstance_w(w_value, space.w_list):
            # First pass: collect the strings and compute the total size
            # (each string NUL-terminated, plus a final extra wide NUL).
            strings = []
            buflen = 0
            w_iter = space.iter(w_value)
            while True:
                try:
                    w_item = space.next(w_iter)
                    item = space.utf8_w(w_item).decode('utf8')
                    strings.append(item)
                    buflen += 2 * (len(item) + 1)
                except OperationError as e:
                    if not e.match(space, space.w_StopIteration):
                        raise
                    break
            buflen += 2
            buf = lltype.malloc(rffi.CCHARP.TO, buflen, flavor='raw')
            # Second pass: copy each wide string followed by its
            # 2-byte terminator, then append the final 2-byte NUL.
            buflen = 0
            for string in strings:
                with rffi.scoped_unicode2wcharp(string) as wchr:
                    c_str = rffi.cast(rffi.CCHARP, wchr)
                    for i in range(len(string) * 2):
                        buf[buflen + i] = c_str[i]
                buflen += (len(string) + 1) * 2
                buf[buflen - 1] = '\0'
                buf[buflen - 2] = '\0'
            buflen += 2
            buf[buflen - 1] = '\0'
            buf[buflen - 2] = '\0'
    else:
        # All other types (REG_BINARY etc.): raw bytes via the buffer
        # protocol; None maps to an empty (NULL) buffer.
        if space.is_w(w_value, space.w_None):
            buflen = 0
            buf = lltype.nullptr(rffi.CCHARP.TO)
        else:
            try:
                value = w_value.buffer_w(space, space.BUF_SIMPLE)
            except BufferInterfaceNotFound:
                raise oefmt(space.w_TypeError,
                            "Objects of type '%T' can not be used as binary "
                            "registry values", w_value)
            else:
                value = value.as_str()
            buflen = len(value)
            buf = rffi.str2charp(value)
    if buf is not None:
        return rffi.cast(rffi.CWCHARP, buf), buflen
    raise oefmt(space.w_ValueError,
                "Could not convert the data to the specified type")
def wbuf_to_utf8(space, wbuf):
    """Decode a UTF-16 byte buffer coming from the registry.

    Returns (utf8, length-in-codepoints).  Registry strings normally
    carry a terminating NUL wide char; exactly one trailing NUL is
    stripped from the decoded result.
    """
    state = space.fromcache(CodecState)
    errh = state.decode_error_handler
    utf8, lgt, pos = str_decode_utf_16(wbuf, 'surrogatepass', final=True,
                                       errorhandler=errh)
    # Strip one trailing NUL if present.  The test must be len > 0, not
    # len > 1: a buffer containing only the terminator must decode to
    # the empty string (CPython returns '' for empty REG_SZ values).
    if len(utf8) > 0 and utf8[len(utf8) - 1] == '\x00':
        newlen = len(utf8) - 1
        assert newlen >= 0
        utf8 = utf8[0:newlen]
        lgt -= 1
    return utf8, lgt
def convert_from_regdata(space, buf, buflen, typ):
    """Convert raw registry data (ByteBuffer 'buf' of 'buflen' bytes)
    into the corresponding app-level object for registry type 'typ'."""
    if typ == rwinreg.REG_DWORD:
        if not buflen:
            return space.newint(0)
        d = rffi.cast(rwin32.LPDWORD, buf.get_raw_address())[0]
        return space.newint(d)
    elif typ == rwinreg.REG_QWORD:
        if not buflen:
            return space.newint(0)
        d = rffi.cast(rffi.ULONGLONGP, buf.get_raw_address())[0]
        return space.newint(d)
    elif typ == rwinreg.REG_SZ or typ == rwinreg.REG_EXPAND_SZ:
        if not buflen:
            return space.newtext('', 0)
        # Round the byte length down to a whole number of wide chars.
        even = (buflen // 2) * 2
        utf8, lgt = wbuf_to_utf8(space, buf[0:even])
        # Truncate at the first embedded NUL, as CPython does.
        i = 0
        utf8len = len(utf8)
        while i < utf8len:
            if utf8[i] == '\x00':
                utf8 = utf8[0:i]
                lgt = check_utf8(utf8, True)
                break
            i += 1
        w_s = space.newtext(utf8, lgt)
        return w_s
    elif typ == rwinreg.REG_MULTI_SZ:
        if not buflen:
            return space.newlist([])
        even = (buflen // 2) * 2
        utf8, lgt = wbuf_to_utf8(space, buf[0:even])
        # Split on NULs; a trailing empty piece comes from the final
        # double-NUL terminator and is dropped.
        parts = rstring.split(utf8, '\0')
        partslen = len(parts)
        if partslen > 0 and parts[partslen-1] == '':
            partslen -= 1
        ret = []
        i = 0
        while i < partslen:
            lgt = check_utf8(parts[i], True)
            ret.append(space.newtext(parts[i], lgt))
            i += 1
        return space.newlist(ret)
    else:
        # REG_BINARY and unknown types: raw bytes, or None when empty.
        if buflen == 0:
            return space.w_None
        else:
            return space.newbytes(buf[0:buflen])
@unwrap_spec(value_name="unicode", typ=int)
def SetValueEx(space, w_hkey, value_name, w_reserved, typ, w_value):
    """SetValueEx(key, value_name, reserved, type, value) - store data
    of any registry type under a named value.  'reserved' is ignored."""
    hkey = hkey_w(w_hkey, space)
    # convert_to_regdata mallocs the buffer; we must free it below.
    buf, buflen = convert_to_regdata(space, w_value, typ)
    try:
        with rffi.scoped_unicode2wcharp(value_name) as wide_vn:
            c_vn = rffi.cast(rffi.CWCHARP, wide_vn)
            ret = rwinreg.RegSetValueExW(hkey, c_vn, 0, typ, buf, buflen)
    finally:
        if buf != lltype.nullptr(rffi.CWCHARP.TO):
            lltype.free(buf, flavor='raw')
    if ret != 0:
        raiseWindowsError(space, ret, 'RegSetValueEx')
def QueryValueEx(space, w_hkey, w_subkey):
    """QueryValueEx(key, value_name) - return (value, type) for a named
    value, converting the raw data according to its registry type."""
    hkey = hkey_w(w_hkey, space)
    if space.is_w(w_subkey, space.w_None):
        subkey = None
    else:
        subkey = space.utf8_w(w_subkey).decode('utf8')
    null_dword = lltype.nullptr(rwin32.LPDWORD.TO)
    with rffi.scoped_unicode2wcharp(subkey) as wide_subkey:
        c_subkey = rffi.cast(rffi.CWCHARP, wide_subkey)
        with lltype.scoped_alloc(rwin32.LPDWORD.TO, 1) as dataSize:
            # First call with NULL data pointer to learn the size.
            ret = rwinreg.RegQueryValueExW(hkey, c_subkey, null_dword,
                                           null_dword, None, dataSize)
            bufSize = intmask(dataSize[0])
            if ret == rwinreg.ERROR_MORE_DATA:
                bufSize = 256
            elif ret != 0:
                raiseWindowsError(space, ret, 'RegQueryValue')
            while True:
                dataBuf = ByteBuffer(bufSize)
                dataBufP = rffi.cast(rffi.CWCHARP, dataBuf.get_raw_address())
                with lltype.scoped_alloc(rwin32.LPDWORD.TO, 1) as retType:
                    ret = rwinreg.RegQueryValueExW(hkey, c_subkey, null_dword,
                                                   retType, dataBufP, dataSize)
                    if ret == rwinreg.ERROR_MORE_DATA:
                        # Value grew meanwhile: double and retry.
                        bufSize *= 2
                        dataSize[0] = rffi.cast(rwin32.DWORD, bufSize)
                        continue
                    if ret != 0:
                        raiseWindowsError(space, ret, 'RegQueryValueEx')
                    length = intmask(dataSize[0])
                    ret_type = intmask(retType[0])
                    return space.newtuple([
                        convert_from_regdata(space, dataBuf,
                                             length, ret_type),
                        space.newint(intmask(ret_type)),
                        ])
@unwrap_spec(subkey="unicode")
def CreateKey(space, w_hkey, subkey):
    """CreateKey(key, sub_key) - create (or open) a key, returning a
    new W_HKEY handle to it."""
    hkey = hkey_w(w_hkey, space)
    with rffi.scoped_unicode2wcharp(subkey) as wide_subkey:
        c_subkey = rffi.cast(rffi.CWCHARP, wide_subkey)
        with lltype.scoped_alloc(rwinreg.PHKEY.TO, 1) as rethkey:
            ret = rwinreg.RegCreateKeyW(hkey, c_subkey, rethkey)
            if ret != 0:
                raiseWindowsError(space, ret, 'CreateKey')
            return W_HKEY(space, rethkey[0])
@unwrap_spec(sub_key="unicode", reserved=int, access=r_uint)
def CreateKeyEx(space, w_key, sub_key, reserved=0, access=rwinreg.KEY_WRITE):
    """CreateKeyEx(key, sub_key, reserved=0, access=KEY_WRITE) - create
    (or open) a key with an explicit access mask."""
    hkey = hkey_w(w_key, space)
    with rffi.scoped_unicode2wcharp(sub_key) as wide_sub_key:
        c_subkey = rffi.cast(rffi.CWCHARP, wide_sub_key)
        with lltype.scoped_alloc(rwinreg.PHKEY.TO, 1) as rethkey:
            # class=NULL, options=0, security attributes=NULL,
            # disposition output pointer=NULL.
            ret = rwinreg.RegCreateKeyExW(hkey, c_subkey, reserved, None, 0,
                                          access, None, rethkey,
                                          lltype.nullptr(rwin32.LPDWORD.TO))
            if ret != 0:
                raiseWindowsError(space, ret, 'CreateKeyEx')
            return W_HKEY(space, rethkey[0])
@unwrap_spec(subkey="unicode")
def DeleteKey(space, w_hkey, subkey):
    """DeleteKey(key, sub_key) - delete a subkey (which must itself have
    no subkeys)."""
    hkey = hkey_w(w_hkey, space)
    with rffi.scoped_unicode2wcharp(subkey) as wide_subkey:
        c_subkey = rffi.cast(rffi.CWCHARP, wide_subkey)
        ret = rwinreg.RegDeleteKeyW(hkey, c_subkey)
        if ret != 0:
            raiseWindowsError(space, ret, 'RegDeleteKey')
@unwrap_spec(subkey="unicode")
def DeleteValue(space, w_hkey, subkey):
    """DeleteValue(key, value_name) - remove a named value from a key."""
    hkey = hkey_w(w_hkey, space)
    with rffi.scoped_unicode2wcharp(subkey) as wide_subkey:
        c_subkey = rffi.cast(rffi.CWCHARP, wide_subkey)
        ret = rwinreg.RegDeleteValueW(hkey, c_subkey)
        if ret != 0:
            raiseWindowsError(space, ret, 'RegDeleteValue')
@unwrap_spec(reserved=int, access=r_uint)
def OpenKey(space, w_key, w_sub_key, reserved=0, access=rwinreg.KEY_READ):
    """OpenKey(key, sub_key, reserved=0, access=KEY_READ) - open an
    existing key and return a new W_HKEY handle."""
    hkey = hkey_w(w_key, space)
    # Encode the subkey as NUL-terminated UTF-16.
    utf8 = space.utf8_w(w_sub_key)
    state = space.fromcache(CodecState)
    errh = state.encode_error_handler
    subkeyW = utf8_encode_utf_16(utf8 + '\x00', 'strict', errh, allow_surrogates=True)
    with rffi.scoped_nonmovingbuffer(subkeyW) as subkeyP0:
        # ptradd(..., 2) skips the encoder's 2-byte BOM.
        subkeyP = rffi.cast(rffi.CWCHARP, rffi.ptradd(subkeyP0, 2))
        with lltype.scoped_alloc(rwinreg.PHKEY.TO, 1) as rethkey:
            ret = rwinreg.RegOpenKeyExW(hkey, subkeyP, reserved, access,
                                        rethkey)
            if ret != 0:
                raiseWindowsError(space, ret, 'RegOpenKeyEx')
            return W_HKEY(space, rethkey[0])
@unwrap_spec(index=int)
def EnumValue(space, w_hkey, index):
    """EnumValue(key, index) - return (value_name, value, type) for the
    index-th value of the key; raises WindowsError past the last one."""
    hkey = hkey_w(w_hkey, space)
    null_dword = lltype.nullptr(rwin32.LPDWORD.TO)
    with lltype.scoped_alloc(rwin32.LPDWORD.TO, 1) as valueSize:
        with lltype.scoped_alloc(rwin32.LPDWORD.TO, 1) as dataSize:
            # Ask the key for the longest value-name and data sizes so
            # the buffers below are usually big enough on the first try.
            ret = rwinreg.RegQueryInfoKeyW(
                hkey, None, null_dword, null_dword,
                null_dword, null_dword, null_dword,
                null_dword, valueSize, dataSize,
                null_dword, lltype.nullptr(rwin32.PFILETIME.TO))
            if ret != 0:
                raiseWindowsError(space, ret, 'RegQueryInfoKey')
            # +1 for the terminating NUL.
            valueSize[0] = rffi.cast(rwin32.DWORD, intmask(valueSize[0]) + 1)
            dataSize[0] = rffi.cast(rwin32.DWORD, intmask(dataSize[0]) + 1)
            bufDataSize = intmask(dataSize[0])
            bufValueSize = intmask(valueSize[0]) * 2
            valueBuf = ByteBuffer(bufValueSize)
            valueBufP = rffi.cast(rffi.CWCHARP, valueBuf.get_raw_address())
            while True:
                dataBuf = ByteBuffer(bufDataSize)
                dataBufP = rffi.cast(rffi.CCHARP, dataBuf.get_raw_address())
                with lltype.scoped_alloc(rwin32.LPDWORD.TO,
                                         1) as retType:
                    ret = rwinreg.RegEnumValueW(
                        hkey, index, valueBufP, valueSize,
                        null_dword, retType, dataBufP, dataSize)
                    if ret == rwinreg.ERROR_MORE_DATA:
                        # Data grew meanwhile: enlarge and retry; the
                        # in/out size parameters must be reset as well.
                        bufDataSize *= 4
                        dataSize[0] = rffi.cast(rwin32.DWORD,
                                                bufDataSize)
                        valueSize[0] = rffi.cast(rwin32.DWORD,
                                                 bufValueSize)
                        continue
                    if ret != 0:
                        raiseWindowsError(space, ret, 'RegEnumValue')
                    length = intmask(dataSize[0])
                    # valueSize is returned in characters, without NUL.
                    vlen = (intmask(valueSize[0]) + 1) * 2
                    utf8v, lenv = wbuf_to_utf8(space, valueBuf[0:vlen])
                    ret_type = intmask(retType[0])
                    return space.newtuple([
                        space.newtext(utf8v, lenv),
                        convert_from_regdata(space, dataBuf,
                                             length, ret_type),
                        space.newint(ret_type),
                        ])
@unwrap_spec(index=int)
def EnumKey(space, w_hkey, index):
    """EnumKey(key, index) - return the name of the index-th subkey;
    raises WindowsError past the last one."""
    hkey = hkey_w(w_hkey, space)
    null_dword = lltype.nullptr(rwin32.LPDWORD.TO)
    # Registry key names are at most 255 chars; 257 wide chars leaves
    # room for the terminating NUL with margin.
    buf = ByteBuffer(257 * 2)
    bufP = rffi.cast(rwin32.LPWSTR, buf.get_raw_address())
    with lltype.scoped_alloc(rwin32.LPDWORD.TO, 1) as valueSize:
        valueSize[0] = rffi.cast(rwin32.DWORD, 257)
        ret = rwinreg.RegEnumKeyExW(hkey, index, bufP, valueSize,
                                    null_dword, None, null_dword,
                                    lltype.nullptr(rwin32.PFILETIME.TO))
        if ret != 0:
            raiseWindowsError(space, ret, 'RegEnumKeyEx')
        # valueSize comes back in characters (without NUL).
        vlen = intmask(valueSize[0]) * 2
        utf8, lgt = wbuf_to_utf8(space, buf[0:vlen])
        return space.newtext(utf8, lgt)
def QueryInfoKey(space, w_hkey):
    """QueryInfoKey(key) - return (number of subkeys, number of values,
    last-modified time as a 100ns FILETIME integer)."""
    hkey = hkey_w(w_hkey, space)
    with lltype.scoped_alloc(rwin32.LPDWORD.TO, 1) as nSubKeys:
        with lltype.scoped_alloc(rwin32.LPDWORD.TO, 1) as nValues:
            with lltype.scoped_alloc(rwin32.PFILETIME.TO, 1) as ft:
                null_dword = lltype.nullptr(rwin32.LPDWORD.TO)
                ret = rwinreg.RegQueryInfoKeyW(
                    hkey, None, null_dword, null_dword,
                    nSubKeys, null_dword, null_dword,
                    nValues, null_dword, null_dword,
                    null_dword, ft)
                if ret != 0:
                    raiseWindowsError(space, ret, 'RegQueryInfoKey')
                # Recombine the FILETIME halves into a 64-bit value.
                lgt = ((lltype.r_longlong(ft[0].c_dwHighDateTime) << 32) +
                       lltype.r_longlong(ft[0].c_dwLowDateTime))
                return space.newtuple([space.newint(nSubKeys[0]),
                                       space.newint(nValues[0]),
                                       space.newint(lgt)])
def ConnectRegistry(space, w_machine, w_hkey):
    """ConnectRegistry(computer_name, key) - connect to a predefined key
    on another computer (or the local one if computer_name is None)."""
    hkey = hkey_w(w_hkey, space)
    if space.is_none(w_machine):
        # NULL machine name means the local registry.
        with lltype.scoped_alloc(rwinreg.PHKEY.TO, 1) as rethkey:
            ret = rwinreg.RegConnectRegistryW(None, hkey, rethkey)
            if ret != 0:
                raiseWindowsError(space, ret, 'RegConnectRegistry')
            return W_HKEY(space, rethkey[0])
    else:
        # Encode the machine name as NUL-terminated UTF-16.
        utf8 = space.utf8_w(w_machine)
        state = space.fromcache(CodecState)
        errh = state.encode_error_handler
        machineW = utf8_encode_utf_16(utf8 + '\x00', 'strict', errh, allow_surrogates=True)
        with rffi.scoped_nonmovingbuffer(machineW) as machineP0:
            # ptradd(..., 2) skips the encoder's 2-byte BOM.
            machineP = rffi.cast(rwin32.LPWSTR, rffi.ptradd(machineP0, 2))
            with lltype.scoped_alloc(rwinreg.PHKEY.TO, 1) as rethkey:
                ret = rwinreg.RegConnectRegistryW(machineP, hkey, rethkey)
                if ret != 0:
                    raiseWindowsError(space, ret, 'RegConnectRegistry')
                return W_HKEY(space, rethkey[0])
def ExpandEnvironmentStrings(space, w_source):
    """ExpandEnvironmentStrings(string) - expand %NAME% environment
    variable references in the string."""
    try:
        source, source_ulen = space.utf8_len_w(w_source)
        res, res_ulen = rwinreg.ExpandEnvironmentStrings(source, source_ulen)
        return space.newutf8(res, res_ulen)
    except WindowsError as e:
        raise wrap_oserror(space, e)
class ReflectionFunction(object):
    """Lazy binding to an advapi32.dll export that may be missing.

    The registry-reflection APIs only exist on 64-bit Windows, so they
    are resolved with dlsym at first use instead of being linked.
    """

    def __init__(self, name, stdcall_wrapper):
        self.name = name                              # exported symbol name
        self.handle = lltype.nullptr(rffi.VOIDP.TO)   # cached FARPROC
        self.wrapper = stdcall_wrapper                # C trampoline to call it

    def check(self):
        """Resolve the symbol if needed; return False if unavailable."""
        if self.handle != lltype.nullptr(rffi.VOIDP.TO):
            return True
        from rpython.rlib.rdynload import GetModuleHandle, dlsym
        lib = GetModuleHandle("advapi32.dll")
        try:
            handle = dlsym(lib, self.name)
        except KeyError:
            # Symbol not exported on this Windows version.
            return False
        self.handle = handle
        return True

    def call(self, *args):
        """Invoke the resolved function; check() must have succeeded."""
        assert self.handle != lltype.nullptr(rffi.VOIDP.TO)
        return self.wrapper(self.handle, *args)
# Lazily-resolved advapi32 entry points used by the *ReflectionKey and
# DeleteKeyEx wrappers below.
_RegDisableReflectionKey = ReflectionFunction(
    "RegDisableReflectionKey", pypy_RegChangeReflectionKey)
_RegEnableReflectionKey = ReflectionFunction(
    "RegEnableReflectionKey", pypy_RegChangeReflectionKey)
_RegQueryReflectionKey = ReflectionFunction(
    "RegQueryReflectionKey", pypy_RegQueryReflectionKey)
_RegDeleteKeyExW = ReflectionFunction("RegDeleteKeyExW", pypy_RegDeleteKeyExW)
def DisableReflectionKey(space, w_key):
    """DisableReflectionKey(key) - disable registry reflection for the
    key; NotImplementedError where the API is unavailable."""
    if not _RegDisableReflectionKey.check():
        raise oefmt(space.w_NotImplementedError,
                    "not implemented on this platform")
    else:
        hkey = hkey_w(w_key, space)
        ret = _RegDisableReflectionKey.call(hkey)
        if ret != 0:
            raiseWindowsError(space, ret, 'RegDisableReflectionKey')
def EnableReflectionKey(space, w_key):
    """EnableReflectionKey(key) - re-enable registry reflection for the
    key; NotImplementedError where the API is unavailable."""
    if not _RegEnableReflectionKey.check():
        raise oefmt(space.w_NotImplementedError,
                    "not implemented on this platform")
    else:
        hkey = hkey_w(w_key, space)
        ret = _RegEnableReflectionKey.call(hkey)
        if ret != 0:
            raiseWindowsError(space, ret, 'RegEnableReflectionKey')
def QueryReflectionKey(space, w_key):
    """QueryReflectionKey(key) - return True if reflection is disabled
    for the key; NotImplementedError where the API is unavailable."""
    if not _RegQueryReflectionKey.check():
        raise oefmt(space.w_NotImplementedError,
                    "not implemented on this platform")
    else:
        hkey = hkey_w(w_key, space)
        with lltype.scoped_alloc(rwin32.LPBOOL.TO, 1) as isDisabled:
            ret = _RegQueryReflectionKey.call(hkey, isDisabled)
            if ret != 0:
                raiseWindowsError(space, ret, 'RegQueryReflectionKey')
            return space.newbool(intmask(isDisabled[0]) != 0)
@unwrap_spec(sub_key="unicode", access=r_uint, reserved=int)
def DeleteKeyEx(space, w_key, sub_key, access=rwinreg.KEY_WOW64_64KEY, reserved=0):
    """DeleteKeyEx(key, sub_key, access=KEY_WOW64_64KEY, reserved=0) -
    delete a subkey from a specific registry view (32/64-bit);
    NotImplementedError where RegDeleteKeyExW is unavailable."""
    if not _RegDeleteKeyExW.check():
        raise oefmt(space.w_NotImplementedError,
                    "not implemented on this platform")
    else:
        hkey = hkey_w(w_key, space)
        with rffi.scoped_unicode2wcharp(sub_key) as wide_subkey:
            c_subkey = rffi.cast(rffi.CWCHARP, wide_subkey)
            ret = _RegDeleteKeyExW.call(hkey, c_subkey, access, reserved)
            if ret != 0:
                raiseWindowsError(space, ret, 'RegDeleteKeyEx')
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.