repo_name stringlengths 7 65 | path stringlengths 5 185 | copies stringlengths 1 4 | size stringlengths 4 6 | content stringlengths 977 990k | license stringclasses 14 values | hash stringlengths 32 32 | line_mean float64 7.18 99.4 | line_max int64 31 999 | alpha_frac float64 0.25 0.95 | ratio float64 1.5 7.84 | autogenerated bool 1 class | config_or_test bool 2 classes | has_no_keywords bool 2 classes | has_few_assignments bool 1 class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
plotly/dash | dash/_validate.py | 1 | 18551 | from collections.abc import MutableSequence
import re
from textwrap import dedent
from keyword import iskeyword
import flask
from ._grouping import grouping_len, map_grouping
from .development.base_component import Component
from . import exceptions
from ._utils import patch_collections_abc, stringify_id, to_json, coerce_to_list
from .exceptions import PageError
def validate_callback(outputs, inputs, state, extra_args, types):
    """Validate the full set of dependency arguments of a callback definition.

    ``extra_args`` holds anything left over after the declaration was split
    into outputs/inputs/state; any leftover means the declaration was
    malformed, so an ``IncorrectTypeException`` is raised. Otherwise every
    dependency object is validated individually.
    """
    Input, Output, State = types
    if extra_args:
        first_extra = extra_args[0]
        # Leftover argument of a completely wrong type.
        if not isinstance(first_extra, (Output, Input, State)):
            raise exceptions.IncorrectTypeException(
                dedent(
                    f"""
                    Callback arguments must be `Output`, `Input`, or `State` objects,
                    optionally wrapped in a list or tuple. We found (possibly after
                    unwrapping a list or tuple):
                    {first_extra!r}
                    """
                )
            )
        # A dependency object that appears out of Output/Input/State order.
        last_valid = (outputs + inputs + state)[-1]
        raise exceptions.IncorrectTypeException(
            dedent(
                f"""
                In a callback definition, you must provide all Outputs first,
                then all Inputs, then all States. After this item:
                {last_valid!r}
                we found this item next:
                {first_extra!r}
                """
            )
        )
    # Outputs first, then inputs, then state -- same order as the original
    # nested loops.
    for dependency in outputs + inputs + state:
        validate_callback_arg(dependency)
def validate_callback_arg(arg):
    """Validate a single Output/Input/State dependency object.

    Checks that ``component_property`` is a string, that the removed
    "events" API is not being used, and that ``component_id`` is either a
    string or a wildcard dict (delegating to the dedicated validators).
    """
    prop = getattr(arg, "component_property", None)
    if not isinstance(prop, str):
        raise exceptions.IncorrectTypeException(
            dedent(
                f"""
                component_property must be a string, found {arg.component_property!r}
                """
            )
        )
    if hasattr(arg, "component_event"):
        # Events were removed from Dash; give a targeted migration error.
        raise exceptions.NonExistentEventException(
            """
            Events have been removed.
            Use the associated property instead.
            """
        )
    component_id = arg.component_id
    if isinstance(component_id, dict):
        validate_id_dict(arg)
    elif isinstance(component_id, str):
        validate_id_string(arg)
    else:
        raise exceptions.IncorrectTypeException(
            dedent(
                f"""
                component_id must be a string or dict, found {arg.component_id!r}
                """
            )
        )
def validate_id_dict(arg):
    """Check that every key of a wildcard (dict) component id is a string."""
    arg_id = arg.component_id
    # Key-type validation must stay on the Python side: json.dumps would
    # silently coerce non-string keys to strings and could create unwanted
    # id collisions on the renderer side.
    for key in arg_id:
        if isinstance(key, str):
            continue
        raise exceptions.IncorrectTypeException(
            dedent(
                f"""
                Wildcard ID keys must be non-empty strings,
                found {key!r} in id {arg_id!r}
                """
            )
        )
def validate_id_string(arg):
    """Reject string component ids containing characters Dash cannot handle."""
    arg_id = arg.component_id
    # "." and "{" are reserved (property separator / wildcard-dict marker).
    invalid_chars = ".{"
    invalid_found = [char for char in invalid_chars if char in arg_id]
    if invalid_found:
        raise exceptions.InvalidComponentIdError(
            f"""
            The element `{arg_id}` contains `{"`, `".join(invalid_found)}` in its ID.
            Characters `{"`, `".join(invalid_chars)}` are not allowed in IDs.
            """
        )
def validate_output_spec(output, output_spec, Output):
    """Cross-check the request's output spec against the callback definition.

    This validation is for security and internal debugging, not for users,
    so the messages are deliberately terse. `output` comes from the callback
    definition, `output_spec` from the request.
    """
    if not isinstance(output, (list, tuple)):
        # Single-output callback: normalize both sides to one-element lists.
        output, output_spec = [output], [output_spec]
    elif len(output) != len(output_spec):
        raise exceptions.CallbackException("Wrong length output_spec")
    for defined, requested in zip(output, output_spec):
        # A wildcard output arrives as a list of concrete specs.
        requested_items = (
            requested if isinstance(requested, (list, tuple)) else [requested]
        )
        for item in requested_items:
            if Output(item["id"], item["property"]) != defined:
                raise exceptions.CallbackException(
                    "Output does not match callback definition"
                )
def validate_and_group_input_args(flat_args, arg_index_grouping):
    """Regroup a flat list of input values into (args, kwargs) for the callback.

    ``arg_index_grouping`` maps each grouping slot to an index into
    ``flat_args``; its container type decides how the callback is invoked.
    """
    if grouping_len(arg_index_grouping) != len(flat_args):
        raise exceptions.CallbackException("Inputs do not match callback definition")
    args_grouping = map_grouping(lambda ind: flat_args[ind], arg_index_grouping)
    if isinstance(arg_index_grouping, dict):
        # Named (dict) grouping -> everything is passed as keyword arguments.
        for key in args_grouping:
            if not key.isidentifier():
                raise exceptions.CallbackException(
                    f"{key} is not a valid Python variable name"
                )
        return [], args_grouping
    if isinstance(arg_index_grouping, (tuple, list)):
        # Positional grouping.
        return list(args_grouping), {}
    # Scalar grouping: a single positional argument.
    return [args_grouping], {}
def validate_multi_return(outputs_list, output_value, callback_id):
    """Validate the return value of a multi-output callback.

    :param outputs_list: the declared outputs; an entry that is itself a list
        represents a wildcard (pattern-matching) output.
    :param output_value: the value the callback returned.
    :param callback_id: identifier used in error messages.
    :raises exceptions.InvalidCallbackReturnValue: if the returned value is not
        a list/tuple, or its (nested) lengths do not match the declaration.
    """
    if not isinstance(output_value, (list, tuple)):
        raise exceptions.InvalidCallbackReturnValue(
            dedent(
                f"""
                The callback {callback_id} is a multi-output.
                Expected the output type to be a list or tuple but got:
                {output_value!r}.
                """
            )
        )
    if len(output_value) != len(outputs_list):
        raise exceptions.InvalidCallbackReturnValue(
            f"""
            Invalid number of output values for {callback_id}.
            Expected {len(outputs_list)}, got {len(output_value)}
            """
        )
    for i, outi in enumerate(outputs_list):
        if isinstance(outi, list):
            # Wildcard output: the corresponding return item must itself be a
            # list/tuple of matching length.
            vi = output_value[i]
            if not isinstance(vi, (list, tuple)):
                raise exceptions.InvalidCallbackReturnValue(
                    dedent(
                        f"""
                        The callback {callback_id} output {i} is a wildcard multi-output.
                        Expected the output type to be a list or tuple but got:
                        {vi!r}.
                        output spec: {outi!r}
                        """
                    )
                )
            if len(vi) != len(outi):
                # Bug fix: the original message had "Expected" and "got"
                # swapped (it reported the actual count as the expected one).
                raise exceptions.InvalidCallbackReturnValue(
                    dedent(
                        f"""
                        Invalid number of output values for {callback_id} item {i}.
                        Expected {len(outi)}, got {len(vi)}
                        output spec: {outi!r}
                        output value: {vi!r}
                        """
                    )
                )
def fail_callback_output(output_value, output):
    """Raise a descriptive InvalidCallbackReturnValue for a non-serializable return.

    Called only after serialization has already failed: walks the returned
    component tree to locate the deepest offending value and reports it.
    Always raises; never returns normally.
    """
    valid_children = (str, int, float, type(None), Component)
    valid_props = (str, int, float, type(None), tuple, MutableSequence)
    def _raise_invalid(bad_val, outer_val, path, index=None, toplevel=False):
        # Build and raise the user-facing error, locating bad_val inside
        # outer_val via the traversal path.
        bad_type = type(bad_val).__name__
        outer_id = f"(id={outer_val.id:s})" if getattr(outer_val, "id", False) else ""
        outer_type = type(outer_val).__name__
        if toplevel:
            location = dedent(
                """
                The value in question is either the only value returned,
                or is in the top level of the returned list,
                """
            )
        else:
            index_string = "[*]" if index is None else f"[{index:d}]"
            location = dedent(
                f"""
                The value in question is located at
                {index_string} {outer_type} {outer_id}
                {path},
                """
            )
        obj = "tree with one value" if not toplevel else "value"
        raise exceptions.InvalidCallbackReturnValue(
            dedent(
                f"""
                The callback for `{output!r}`
                returned a {obj:s} having type `{bad_type}`
                which is not JSON serializable.
                {location}
                and has string representation
                `{bad_val}`
                In general, Dash properties can only be
                dash components, strings, dictionaries, numbers, None,
                or lists of those.
                """
            )
        )
    def _valid_child(val):
        # True if val may appear as a component child.
        return isinstance(val, valid_children)
    def _valid_prop(val):
        # True if val may appear as a component property value.
        return isinstance(val, valid_props)
    def _can_serialize(val):
        # True if val both has an allowed type and survives to_json.
        if not (_valid_child(val) or _valid_prop(val)):
            return False
        try:
            to_json(val)
        except TypeError:
            return False
        return True
    def _validate_value(val, index=None):
        # val is a Component
        if isinstance(val, Component):
            unserializable_items = []
            # pylint: disable=protected-access
            for p, j in val._traverse_with_paths():
                # check each component value in the tree
                if not _valid_child(j):
                    _raise_invalid(bad_val=j, outer_val=val, path=p, index=index)
                if not _can_serialize(j):
                    # collect unserializable items separately, so we can report
                    # only the deepest level, not all the parent components that
                    # are just unserializable because of their children.
                    unserializable_items = [
                        i for i in unserializable_items if not p.startswith(i[0])
                    ]
                    if unserializable_items:
                        # we already have something unserializable in a different
                        # branch - time to stop and fail
                        break
                    if all(not i[0].startswith(p) for i in unserializable_items):
                        unserializable_items.append((p, j))
                # Children that are not of type Component or
                # list/tuple not returned by traverse
                child = getattr(j, "children", None)
                if not isinstance(child, (tuple, MutableSequence)):
                    if child and not _can_serialize(child):
                        _raise_invalid(
                            bad_val=child,
                            outer_val=val,
                            path=p + "\n" + "[*] " + type(child).__name__,
                            index=index,
                        )
            if unserializable_items:
                p, j = unserializable_items[0]
                # just report the first one, even if there are multiple,
                # as that's how all the other errors work
                _raise_invalid(bad_val=j, outer_val=val, path=p, index=index)
            # Also check the child of val, as it will not be returned
            child = getattr(val, "children", None)
            if not isinstance(child, (tuple, MutableSequence)):
                # NOTE(review): this checks _can_serialize(val) while the
                # analogous branch above checks _can_serialize(child) --
                # verify whether `val` here is intentional or a typo.
                if child and not _can_serialize(val):
                    _raise_invalid(
                        bad_val=child,
                        outer_val=val,
                        path=type(child).__name__,
                        index=index,
                    )
        if not _can_serialize(val):
            _raise_invalid(
                bad_val=val,
                outer_val=type(val).__name__,
                path="",
                index=index,
                toplevel=True,
            )
    if isinstance(output_value, list):
        for i, val in enumerate(output_value):
            _validate_value(val, index=i)
    else:
        _validate_value(output_value)
    # if we got this far, raise a generic JSON error
    raise exceptions.InvalidCallbackReturnValue(
        f"""
        The callback for output `{output!r}`
        returned a value which is not JSON serializable.
        In general, Dash properties can only be dash components, strings,
        dictionaries, numbers, None, or lists of those.
        """
    )
def check_obsolete(kwargs):
    """Reject Dash() keyword arguments that were removed in v1.0.

    Obsolete names get a dedicated error; anything else mimics the built-in
    "unexpected keyword argument" TypeError.
    """
    obsolete_kwargs = ("components_cache_max_age", "static_folder")
    for key in kwargs:
        if key in obsolete_kwargs:
            raise exceptions.ObsoleteKwargException(
                f"""
                {key} is no longer a valid keyword argument in Dash since v1.0.
                See https://dash.plotly.com for details.
                """
            )
        # any other kwarg mimic the built-in exception
        raise TypeError(f"Dash() got an unexpected keyword argument '{key}'")
def validate_js_path(registered_paths, package_name, path_in_package_dist):
    """Ensure a requested script belongs to a registered component package.

    :param registered_paths: mapping of package name -> collection of
        distributable paths that may legitimately be served.
    :param package_name: the package the request claims the file is from.
    :param path_in_package_dist: the requested path within that package.
    :raises exceptions.DependencyException: if the package is unknown or the
        path was not registered for it (prevents serving arbitrary files).
    """
    if package_name not in registered_paths:
        raise exceptions.DependencyException(
            f"""
            Error loading dependency. "{package_name}" is not a registered library.
            Registered libraries are:
            {list(registered_paths.keys())}
            """
        )
    if path_in_package_dist not in registered_paths[package_name]:
        raise exceptions.DependencyException(
            f"""
            "{package_name}" is registered but the path requested is not valid.
            The path requested: "{path_in_package_dist}"
            List of registered paths: {registered_paths}
            """
        )
def validate_index(name, checks, index):
    """Verify that every required pattern appears in an index template.

    ``checks`` is a sequence of (regex, item-name) pairs; any pattern that
    does not match ``index`` is reported as missing.
    """
    missing = [item for pattern, item in checks if not re.search(pattern, index)]
    if missing:
        plural = "s" if len(missing) > 1 else ""
        raise exceptions.InvalidIndexException(
            f"Missing item{plural} {', '.join(missing)} in {name}."
        )
def validate_layout_type(value):
    """Raise unless the layout is a component or a callable producing one."""
    allowed = (Component, patch_collections_abc("Callable"))
    if isinstance(value, allowed):
        return
    raise exceptions.NoLayoutException(
        "Layout must be a dash component "
        "or a function that returns a dash component."
    )
def validate_layout(layout, layout_value):
    """Check that a layout exists and contains no duplicate component ids."""
    if layout is None:
        raise exceptions.NoLayoutException(
            """
            The layout was `None` at the time that `run_server` was called.
            Make sure to set the `layout` attribute of your application
            before running the server.
            """
        )
    # Seed the seen-set with the root component's id (if it has one), then
    # walk the tree rejecting the first repeated id.
    root_id = stringify_id(getattr(layout_value, "id", None))
    seen_ids = set()
    if root_id:
        seen_ids.add(root_id)
    for component in layout_value._traverse():  # pylint: disable=protected-access
        component_id = stringify_id(getattr(component, "id", None))
        if component_id and component_id in seen_ids:
            raise exceptions.DuplicateIdError(
                f"""
                Duplicate component id found in the initial layout: `{component_id}`
                """
            )
        seen_ids.add(component_id)
def validate_template(template):
    """Validate that each `<variable>` in a path template is a usable Python name."""
    for name in re.findall("<(.*?)>", template):
        # Keywords pass isidentifier(), so both checks are needed.
        if iskeyword(name) or not name.isidentifier():
            raise Exception(
                f'`{name}` is not a valid Python variable name in `path_template`: "{template}".'
            )
def check_for_duplicate_pathnames(registry):
    """Raise if two registered pages share the same ``path``.

    :param registry: mapping of page records, each with "path" and "module"
        keys (as stored in ``dash.page_registry``).
    :raises Exception: listing all modules that collide on one path.
    """
    path_to_module = {}
    for page in registry.values():
        # setdefault keeps one module list per path (idiomatic grouping,
        # replaces the original if/else membership check).
        path_to_module.setdefault(page["path"], []).append(page["module"])
    for modules in path_to_module.values():
        if len(modules) > 1:
            raise Exception(f"modules {modules} have duplicate paths")
def validate_registry(registry):
    """Sanity-check every page record registered in ``dash.page_registry``."""
    for page in registry.values():
        module = page["module"]
        # every page must carry a layout (explicit or generated)
        if "layout" not in page:
            raise exceptions.NoLayoutException(
                f"No layout in module `{module}` in dash.page_registry"
            )
        # "__main__" cannot be re-imported later, so it is not a usable name
        if module == "__main__":
            raise Exception(
                """
                When registering pages from app.py, `__name__` is not a valid module name. Use a string instead.
                For example, `dash.register_page("my_module_name")`, rather than `dash.register_page(__name__)`
                """
            )
def validate_pages_layout(module, page):
    """Ensure an imported page module defines a ``layout`` attribute.

    :param module: dotted name of the page module (used in the error message).
    :param page: the imported module object itself.
    :raises exceptions.NoLayoutException: if no ``layout`` variable or
        function is present on the module.
    """
    if not hasattr(page, "layout"):
        raise exceptions.NoLayoutException(
            f"""
            No layout found in module {module}
            A variable or a function named "layout" is required.
            """
        )
def validate_use_pages(config):
    """Guard ``dash.register_page()`` against being called too early or mid-request.

    :param config: the app's config mapping; an unset ``assets_folder`` means
        no Dash app has been instantiated yet.
    :raises PageError: if called before app instantiation or from within a
        request/callback context (page registration mutates global state).
    """
    if not config.get("assets_folder", None):
        raise PageError("`dash.register_page()` must be called after app instantiation")
    if flask.has_request_context():
        raise PageError(
            """
            dash.register_page() can’t be called within a callback as it updates dash.page_registry, which is a global variable.
            For more details, see https://dash.plotly.com/sharing-data-between-callbacks#why-global-variables-will-break-your-app
            """
        )
def validate_module_name(module):
    """Validate and return the module name given to ``dash.register_page``."""
    if isinstance(module, str):
        return module
    raise Exception(
        "The first attribute of dash.register_page() must be a string or '__name__'"
    )
def validate_long_callbacks(callback_map):
    # Validate that long callback side output & inputs are not circular
    # If circular, triggering a long callback would result in a fatal server/computer crash.
    all_outputs = set()
    # input_indexed: for each output, the set of raw inputs of the callback
    # that produces it -- used below to detect cycles through progress/running.
    input_indexed = {}
    for callback in callback_map.values():
        out = coerce_to_list(callback["output"])
        all_outputs.update(out)
        for o in out:
            input_indexed.setdefault(o, set())
            input_indexed[o].update(coerce_to_list(callback["raw_inputs"]))
    for callback in (x for x in callback_map.values() if x.get("long")):
        long_info = callback["long"]
        progress = long_info.get("progress", [])
        running = long_info.get("running", [])
        long_inputs = coerce_to_list(callback["raw_inputs"])
        # outputs updated as side effects of the long callback (running uses
        # (output, start_value, end_value) tuples, hence x[0]).
        outputs = set([x[0] for x in running] + progress)
        circular = [
            x
            for x in set(k for k, v in input_indexed.items() if v.intersection(outputs))
            if x in long_inputs
        ]
        if circular:
            raise exceptions.LongCallbackError(
                f"Long callback circular error!\n{circular} is used as input for a long callback"
                f" but also used as output from an input that is updated with progress or running argument."
            )
| mit | bccb2c57650194347e6f607d566b5b12 | 35.087549 | 130 | 0.547091 | 4.503277 | false | false | false | false |
plotly/dash | components/dash-table/dash_table_base/__init__.py | 1 | 2609 | import os as _os
import sys as _sys
import json
import dash as _dash
# Guard against a local "dash.py" shadowing the real dash package: a genuine
# dash module exposes these attributes.
if not hasattr(_dash, "__plotly_dash") and not hasattr(_dash, "development"):
    print(
        "Dash was not successfully imported. "
        "Make sure you don't have a file "
        'named \n"dash.py" in your current directory.',
        file=_sys.stderr,
    )
    _sys.exit(1)
from ._imports_ import *  # noqa: E402, F401, F403
from ._imports_ import __all__ as _components
from . import Format  # noqa: F401, E402
from . import FormatTemplate  # noqa: F401, E402
__all__ = _components + ["Format", "FormatTemplate"]
# Read package name/version from the generated package-info.json next to
# this file.
_basepath = _os.path.dirname(__file__)
_filepath = _os.path.abspath(_os.path.join(_basepath, "package-info.json"))
with open(_filepath) as f:
    package = json.load(f)
package_name = package["name"].replace(" ", "_").replace("-", "_")
__version__ = package["version"]
_current_path = _os.path.dirname(_os.path.abspath(__file__))
_this_module = _sys.modules[__name__]
# JS bundles that the renderer may load lazily (on demand).
async_resources = ["export", "table", "highlight"]
_js_dist = []
_js_dist.extend(
    [
        {
            "relative_package_path": "dash_table/async-{}.js".format(async_resource),
            "external_url": (
                "https://unpkg.com/dash-table@{}" "/dash_table/async-{}.js"
            ).format(__version__, async_resource),
            "namespace": "dash",
            "async": True,
        }
        for async_resource in async_resources
    ]
)
# Source maps for the async bundles; "dynamic" means served only on request.
_js_dist.extend(
    [
        {
            "relative_package_path": "dash_table/async-{}.js.map".format(
                async_resource
            ),
            "external_url": (
                "https://unpkg.com/dash-table@{}" "/dash_table/async-{}.js.map"
            ).format(__version__, async_resource),
            "namespace": "dash",
            "dynamic": True,
        }
        for async_resource in async_resources
    ]
)
# Main bundle plus its source map.
_js_dist.extend(
    [
        {
            "relative_package_path": "dash_table/bundle.js",
            "external_url": (
                "https://unpkg.com/dash-table@{}/dash_table/bundle.js"
            ).format(__version__),
            "namespace": "dash",
        },
        {
            "relative_package_path": "dash_table/bundle.js.map",
            "external_url": (
                "https://unpkg.com/dash-table@{}/dash_table/bundle.js.map"
            ).format(__version__),
            "namespace": "dash",
            "dynamic": True,
        },
    ]
)
_css_dist = []
# Attach the resource manifests to every exported component class so Dash
# knows which assets each component requires.
for _component in __all__:
    setattr(locals()[_component], "_js_dist", _js_dist)
    setattr(locals()[_component], "_css_dist", _css_dist)
| mit | a1c68f15d492074a68de587552afac9d | 26.463158 | 85 | 0.537754 | 3.474035 | false | false | false | false |
b-ryan/powerline-shell | powerline_shell/segments/hg.py | 3 | 2097 | import subprocess
from ..utils import RepoStats, ThreadedSegment, get_subprocess_env
def _get_hg_branch():
    """Return the current Mercurial branch name via `hg branch`."""
    proc = subprocess.Popen(["hg", "branch"],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            env=get_subprocess_env())
    stdout, _ = proc.communicate()
    return stdout.decode("utf-8").rstrip("\n")
def parse_hg_stats(status):
    """Aggregate `hg status` output lines into a RepoStats summary."""
    stats = RepoStats()
    for statusline in status:
        flag = statusline[0]
        if flag == "A":
            stats.staged += 1
        elif flag == "?":
            stats.new += 1
        else:
            # [M]odified, [R]emoved, (!)missing
            stats.changed += 1
    return stats
def _get_hg_status(output):
"""This function exists to enable mocking the `hg status` output in tests.
"""
return output[0].decode("utf-8").splitlines()
def build_stats():
    """Run `hg status` and return (RepoStats, branch), or (None, None) on failure."""
    try:
        proc = subprocess.Popen(["hg", "status"],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                env=get_subprocess_env())
    except OSError:
        # Will be thrown if hg cannot be found
        return None, None
    pdata = proc.communicate()
    if proc.returncode != 0:
        return None, None
    stats = parse_hg_stats(_get_hg_status(pdata))
    return stats, _get_hg_branch()
class Segment(ThreadedSegment):
    """Powerline segment showing the Mercurial branch and working-tree state."""
    def run(self):
        # Executed on the worker thread started by ThreadedSegment.
        self.stats, self.branch = build_stats()
    def add_to_powerline(self):
        # Wait for run() to finish; skip the segment entirely outside a repo.
        self.join()
        if not self.stats:
            return
        bg = self.powerline.theme.REPO_CLEAN_BG
        fg = self.powerline.theme.REPO_CLEAN_FG
        if self.stats.dirty:
            bg = self.powerline.theme.REPO_DIRTY_BG
            fg = self.powerline.theme.REPO_DIRTY_FG
        if self.powerline.segment_conf("vcs", "show_symbol"):
            symbol = RepoStats().symbols["hg"] + " "
        else:
            symbol = ""
        self.powerline.append(" " + symbol + self.branch + " ", fg, bg)
        self.stats.add_to_powerline(self.powerline)
| mit | 476b75bf1ab55a5d6a2a4cb33691d567 | 29.838235 | 78 | 0.555079 | 3.854779 | false | false | false | false |
tomerfiliba/plumbum | plumbum/path/local.py | 1 | 10683 | import errno
import glob
import logging
import os
import shutil
import urllib.parse as urlparse
import urllib.request as urllib
from contextlib import contextmanager
from plumbum.lib import IS_WIN32
from plumbum.path.base import FSUser, Path
from plumbum.path.remote import RemotePath
try:
    from grp import getgrgid, getgrnam
    from pwd import getpwnam, getpwuid
except ImportError:
    # pwd/grp are POSIX-only; on Windows provide stubs so uid/gid lookups
    # degrade gracefully instead of failing at import time.
    def getpwuid(_x):  # type: ignore[misc]
        # mimics pwd.getpwuid just enough for `[0]` (user name) access
        return (None,)
    def getgrgid(_x):  # type: ignore[misc]
        # mimics grp.getgrgid just enough for `[0]` (group name) access
        return (None,)
    def getpwnam(_x):  # type: ignore[misc]
        raise OSError("`getpwnam` not supported")
    def getgrnam(_x):  # type: ignore[misc]
        raise OSError("`getgrnam` not supported")
logger = logging.getLogger("plumbum.local")
_EMPTY = object()
# ===================================================================================================
# Local Paths
# ===================================================================================================
class LocalPath(Path):
    """The class implementing local-machine paths"""
    CASE_SENSITIVE = not IS_WIN32
    def __new__(cls, *parts):
        # A single LocalPath argument is reused as-is (str subclasses are
        # immutable) -- except LocalWorkdir, which must be snapshotted.
        if (
            len(parts) == 1
            and isinstance(parts[0], cls)
            and not isinstance(parts[0], LocalWorkdir)
        ):
            return parts[0]
        if not parts:
            raise TypeError("At least one path part is required (none given)")
        if any(isinstance(path, RemotePath) for path in parts):
            raise TypeError(f"LocalPath cannot be constructed from {parts!r}")
        self = super().__new__(
            cls, os.path.normpath(os.path.join(*(str(p) for p in parts)))
        )
        return self
    @property
    def _path(self):
        return str(self)
    def _get_info(self):
        return self._path
    def _form(self, *parts):
        return LocalPath(*parts)
    @property
    def name(self):
        """The basename component of this path"""
        return os.path.basename(str(self))
    @property
    def dirname(self):
        """The parent directory, as a LocalPath"""
        return LocalPath(os.path.dirname(str(self)))
    @property
    def suffix(self):
        """The last file extension (including the dot), or '' if none"""
        return os.path.splitext(str(self))[1]
    @property
    def suffixes(self):
        """All file extensions, outermost first (e.g. ['.tar', '.gz'])"""
        exts = []
        base = str(self)
        while True:
            base, ext = os.path.splitext(base)
            if ext:
                exts.append(ext)
            else:
                return list(reversed(exts))
    @property
    def uid(self):
        """The file owner as an FSUser (uid plus user name)"""
        uid = self.stat().st_uid
        name = getpwuid(uid)[0]
        return FSUser(uid, name)
    @property
    def gid(self):
        """The file group as an FSUser (gid plus group name)"""
        gid = self.stat().st_gid
        name = getgrgid(gid)[0]
        return FSUser(gid, name)
    def join(self, *others):
        """Join this path with additional parts, returning a new LocalPath"""
        return LocalPath(self, *others)
    def list(self):
        """Return the directory entries as a list of LocalPath objects"""
        return [self / fn for fn in os.listdir(str(self))]
    def iterdir(self):
        """Lazily iterate the directory entries as LocalPath objects"""
        # os.scandir is faster; fall back to listdir where unavailable
        try:
            return (self / fn.name for fn in os.scandir(str(self)))
        except AttributeError:
            return (self / fn for fn in os.listdir(str(self)))
    def is_dir(self):
        return os.path.isdir(str(self))
    def is_file(self):
        return os.path.isfile(str(self))
    def is_symlink(self):
        return os.path.islink(str(self))
    def exists(self):
        return os.path.exists(str(self))
    def stat(self):
        return os.stat(str(self))
    def with_name(self, name):
        """Return a sibling path with a different basename"""
        return LocalPath(self.dirname) / name
    @property
    def stem(self):
        # basename up to the first extension separator
        return self.name.rsplit(os.path.extsep)[0]
    def with_suffix(self, suffix, depth=1):
        """Return this path with its suffix(es) replaced.

        ``depth`` controls how many existing suffixes are stripped first;
        ``None`` strips them all.
        """
        if suffix and not suffix.startswith(os.path.extsep) or suffix == os.path.extsep:
            raise ValueError(f"Invalid suffix {suffix!r}")
        name = self.name
        depth = len(self.suffixes) if depth is None else min(depth, len(self.suffixes))
        for _ in range(depth):
            name, _ = os.path.splitext(name)
        return LocalPath(self.dirname) / (name + suffix)
    def glob(self, pattern):
        """Expand a glob pattern relative to this path"""
        # glob.escape protects special characters in this path itself
        return self._glob(
            pattern,
            lambda pat: [
                LocalPath(m)
                for m in glob.glob(os.path.join(glob.escape(str(self)), pat))
            ],
        )
    def delete(self):
        """Remove the file or directory tree; no-op if it does not exist"""
        if not self.exists():
            return
        if self.is_dir():
            shutil.rmtree(str(self))
        else:
            try:
                os.remove(str(self))
            except OSError as ex:  # pragma: no cover
                # file might already been removed (a race with other threads/processes)
                if ex.errno != errno.ENOENT:
                    raise
    def move(self, dst):
        """Move this path to ``dst`` and return the destination LocalPath"""
        if isinstance(dst, RemotePath):
            raise TypeError(f"Cannot move local path {self} to {dst!r}")
        shutil.move(str(self), str(dst))
        return LocalPath(dst)
    def copy(self, dst, override=None):
        """Copy this path to ``dst``; ``override`` controls clobbering:
        False raises if dst exists, True deletes dst first, None neither."""
        if isinstance(dst, RemotePath):
            raise TypeError(f"Cannot copy local path {self} to {dst!r}")
        dst = LocalPath(dst)
        if override is False and dst.exists():
            raise TypeError("File exists and override was not specified")
        if override:
            dst.delete()
        if self.is_dir():
            shutil.copytree(str(self), str(dst))
        else:
            dst_dir = LocalPath(dst).dirname
            if not dst_dir.exists():
                dst_dir.mkdir()
            shutil.copy2(str(self), str(dst))
        return dst
    def mkdir(self, mode=0o777, parents=True, exist_ok=True):
        """Create this directory (and parents, if requested)"""
        if not self.exists() or not exist_ok:
            try:
                if parents:
                    os.makedirs(str(self), mode)
                else:
                    os.mkdir(str(self), mode)
            except OSError as ex:  # pragma: no cover
                # directory might already exist (a race with other threads/processes)
                if ex.errno != errno.EEXIST or not exist_ok:
                    raise
    def open(self, mode="r", encoding=None):
        """Open the file and return the file object"""
        return open(
            str(self),
            mode,
            encoding=encoding,
        )
    def read(self, encoding=None, mode="r"):
        """Read and return the file's contents, decoding if an encoding is given"""
        # when decoding manually, read raw bytes first
        if encoding and "b" not in mode:
            mode = mode + "b"
        with self.open(mode) as f:
            data = f.read()
            if encoding:
                data = data.decode(encoding)
            return data
    def write(self, data, encoding=None, mode=None):
        """Write ``data`` to the file, encoding first if an encoding is given"""
        if encoding:
            data = data.encode(encoding)
        if mode is None:
            # pick text/binary mode from the payload type
            if isinstance(data, str):
                mode = "w"
            else:
                mode = "wb"
        with self.open(mode) as f:
            f.write(data)
    def touch(self):
        """Create the file if missing and update its modification time"""
        with open(str(self), "a", encoding="utf-8"):
            os.utime(str(self), None)
    def chown(self, owner=None, group=None, recursive=None):
        """Change ownership; owner/group accept names or numeric ids.
        ``recursive=None`` means: recurse automatically for directories."""
        if not hasattr(os, "chown"):
            raise OSError("os.chown() not supported")
        uid = (
            self.uid
            if owner is None
            else (owner if isinstance(owner, int) else getpwnam(owner)[2])
        )
        gid = (
            self.gid
            if group is None
            else (group if isinstance(group, int) else getgrnam(group)[2])
        )
        os.chown(str(self), uid, gid)
        if recursive or (recursive is None and self.is_dir()):
            for subpath in self.walk():
                os.chown(str(subpath), uid, gid)
    def chmod(self, mode):
        """Change the file mode bits"""
        if not hasattr(os, "chmod"):
            raise OSError("os.chmod() not supported")
        os.chmod(str(self), mode)
    def access(self, mode=0):
        """Test accessibility (see Path._access_mode_to_flags for mode values)"""
        return os.access(str(self), self._access_mode_to_flags(mode))
    def link(self, dst):
        """Create a hard link at ``dst`` pointing to this path"""
        if isinstance(dst, RemotePath):
            raise TypeError(
                f"Cannot create a hardlink from local path {self} to {dst!r}"
            )
        if hasattr(os, "link"):
            os.link(str(self), str(dst))
        else:
            from plumbum.machines.local import local
            # windows: use mklink
            if self.is_dir():
                local["cmd"]("/C", "mklink", "/D", "/H", str(dst), str(self))
            else:
                local["cmd"]("/C", "mklink", "/H", str(dst), str(self))
    def symlink(self, dst):
        """Create a symbolic link at ``dst`` pointing to this path"""
        if isinstance(dst, RemotePath):
            raise TypeError(
                f"Cannot create a symlink from local path {self} to {dst!r}"
            )
        if hasattr(os, "symlink"):
            os.symlink(str(self), str(dst))
        else:
            from plumbum.machines.local import local
            # windows: use mklink
            if self.is_dir():
                local["cmd"]("/C", "mklink", "/D", str(dst), str(self))
            else:
                local["cmd"]("/C", "mklink", str(dst), str(self))
    def unlink(self):
        """Remove the file or symlink; tolerates it being already gone"""
        try:
            if hasattr(os, "symlink") or not self.is_dir():
                os.unlink(str(self))
            else:
                # windows: use rmdir for directories and directory symlinks
                os.rmdir(str(self))
        except OSError as ex:  # pragma: no cover
            # file might already been removed (a race with other threads/processes)
            if ex.errno != errno.ENOENT:
                raise
    def as_uri(self, scheme="file"):
        """Return this path as a URI with the given scheme"""
        return urlparse.urljoin(str(scheme) + ":", urllib.pathname2url(str(self)))
    @property
    def drive(self):
        """The Windows drive letter (empty string on POSIX)"""
        return os.path.splitdrive(str(self))[0]
    @property
    def root(self):
        """The filesystem root separator for this platform"""
        return os.path.sep
class LocalWorkdir(LocalPath):
    """Working directory manipulator"""
    def __hash__(self):
        # the cwd is mutable process state, so instances must not be hashable
        raise TypeError("unhashable type")
    def __new__(cls):
        # always snapshot the *current* working directory
        return super().__new__(cls, os.getcwd())
    def chdir(self, newdir):
        """Changes the current working directory to the given one
        :param newdir: The destination director (a string or a ``LocalPath``)
        """
        if isinstance(newdir, RemotePath):
            raise TypeError(f"newdir cannot be {newdir!r}")
        logger.debug("Chdir to %s", newdir)
        os.chdir(str(newdir))
        # return a fresh snapshot of the (new) cwd
        return self.__class__()
    def getpath(self):
        """Returns the current working directory as a ``LocalPath`` object"""
        return LocalPath(self._path)
    @contextmanager
    def __call__(self, newdir):
        """A context manager used to ``chdir`` into a directory and then ``chdir`` back to
        the previous location; much like ``pushd``/``popd``.
        :param newdir: The destination directory (a string or a ``LocalPath``)
        """
        prev = self._path
        newdir = self.chdir(newdir)
        try:
            yield newdir
        finally:
            self.chdir(prev)
| mit | 82eb3360c71749bf5bf74bd9f811d88e | 29.092958 | 101 | 0.535898 | 4.028281 | false | false | false | false |
radish-bdd/radish | radish/hookregistry.py | 1 | 4063 | # -*- coding: utf-8 -*-
"""
This module provides a registry for all hooks
"""
from singleton import singleton
from . import utils
from .exceptions import HookError
import tagexpressions
@singleton()
class HookRegistry(object):
    """
    Represents an object with all registered hooks
    """
    # default priority used when a hook does not specify an order
    DEFAULT_HOOK_ORDER = 100
    def __init__(self):
        self._hooks = {}
        self.reset()
        self.build_hooks()
    @property
    def hooks(self):
        """
        Returns all registered hooks
        """
        return self._hooks
    class Hook(object):
        """
        Represents a hook object
        This object is needed to provide decorators like:
        * @before.all
        * @before.each_feature
        """
        def __init__(self, when):
            self._when = when
        @classmethod
        def build_decorator(cls, what):
            """
            Builds the hook decorator
            """
            def _decorator(self, *args, **kwargs):
                """
                Actual hook decorator
                """
                # single positional callable -> used as a bare decorator
                if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):
                    func = args[0]
                    # hook was called without argument -> legacy!
                    HookRegistry().register(
                        self._when, what, func
                    )  # pylint: disable=protected-access
                else:
                    # hook was called with argument
                    on_tags = kwargs.get("on_tags")
                    order = kwargs.get("order")
                    if on_tags:
                        # compile the tag expression once; the predicate
                        # closes over it
                        expr = tagexpressions.parse(on_tags)
                        on_tags = lambda tags: expr.evaluate(tags)
                    def func(f):
                        HookRegistry().register(
                            self._when, what, f, order, on_tags)
                        return f
                return func
            _decorator.__name__ = _decorator.fn_name = what
            # expose the decorator as an attribute, e.g. Hook.each_feature
            setattr(cls, what, _decorator)
    def build_hooks(self):
        """
        Builds all hooks
        """
        # one decorator per hook point key created by reset()
        for hook in self._hooks.keys():
            self.Hook.build_decorator(hook)
    def register(self, when, what, func, order=None, on_tags=None):
        """
        Registers a function as a hook
        """
        if order is None:
            order = self.DEFAULT_HOOK_ORDER
        if on_tags is None:
            on_tags = lambda _: True  # if no tags are specified we always return True
        self._hooks[what][when].append((order, on_tags, func))
    def reset(self):
        """
        Resets all registerd hooks
        """
        self._hooks = {
            "all": {"before": [], "after": []},
            "each_feature": {"before": [], "after": []},
            "each_scenario": {"before": [], "after": []},
            "each_step": {"before": [], "after": []},
        }
    def __has_to_run(self, model, on_tags):
        """
        Return if the given hook has to run or not
        depending on it's tags
        """
        # a list model (e.g. all features) runs if any member's tags match
        if isinstance(model, list):
            return any(on_tags([t.name for t in m.all_tags]) for m in model)
        return on_tags([t.name for t in model.all_tags])
    def call(self, when, what, ascending, model, *args, **kwargs):
        """
        Calls a registered hook
        """
        # hooks run sorted by order; "after" hooks run in reverse order
        for _, on_tags, func in sorted(self._hooks[what][when], key=lambda h: h[0], reverse=not ascending):
            if not self.__has_to_run(model, on_tags):
                # # this hook does not have to run because
                # # it was excluded due to the tags for this model
                continue
            try:
                func(model, *args, **kwargs)
            except Exception as e:
                raise HookError(func, utils.Failure(e))
        return None
# Instantiate the singleton and expose the module-level decorator entry
# points, e.g. @before.each_scenario / @after.all
HookRegistry()
before = HookRegistry.Hook("before")  # pylint: disable=invalid-name
after = HookRegistry.Hook("after")  # pylint: disable=invalid-name
| mit | b2cfefef37a9aa3ea58e2693eecaaf63 | 28.021429 | 107 | 0.492001 | 4.504435 | false | false | false | false |
radish-bdd/radish | tests/unit/test_model.py | 1 | 2730 | # -*- coding: utf-8 -*-
"""
radish
~~~~~~
Behavior Driven Development tool for Python - the root from red to green
Copyright: MIT, Timo Furrer <tuxtimo@gmail.com>
"""
import time
import pytest
from radish.model import Model, Tag
from radish.exceptions import RadishError
def test_creating_simple_model():
    """
    Test creating a simple Model
    """
    # given & when
    model = Model(1, "Model", "I am a Model", "foo.feature", 1, parent=None, tags=None)
    # then - every constructor argument is reflected on the instance
    expected = {
        "id": 1,
        "keyword": "Model",
        "sentence": "I am a Model",
        "path": "foo.feature",
        "line": 1,
    }
    for attr, value in expected.items():
        assert getattr(model, attr) == value
    assert model.parent is None
    assert model.tags == []
def test_creating_a_tag():
    """
    Test creating a Tag
    """
    # given & when
    tag = Tag("foo", arg="bar")
    # then
    assert (tag.name, tag.arg) == ("foo", "bar")
def test_getting_tags_from_model():
    """
    Test getting all Tags from a Model
    """
    # given - a child model whose parent carries its own tag
    parent = Model(
        1,
        "Model",
        "I am a Model",
        "foo.feature",
        1,
        parent=None,
        tags=[Tag("some_tag")],
    )
    child = Model(
        1,
        "Model",
        "I am a Model",
        "foo.feature",
        1,
        parent=parent,
        tags=[Tag("foo"), Tag("bar")],
    )
    # when
    tags = child.all_tags
    # then - parent tags come first, followed by the model's own tags
    assert [t.name for t in tags] == ["some_tag", "foo", "bar"]
def test_getting_model_duration():
    """
    Test getting duration of a Model
    """
    # given - a model whose endtime is exactly 10 seconds after its starttime
    model = Model(1, "Model", "I am a Model", "foo.feature", 1, parent=None, tags=None)
    start = time.time()
    model.starttime = start
    model.endtime = start + 10
    # when & then
    assert model.duration == 10
def test_getting_model_duration_with_missing_time():
    """
    Test getting duration of a Model with missing start- or endtime
    """
    # given & when
    model = Model(1, "Model", "I am a Model", "foo.feature", 1, parent=None, tags=None)
    # when - missing starttime
    model.starttime = None
    with pytest.raises(RadishError) as exc:
        model.duration
    # then
    assert (
        str(exc.value)
        == "Cannot get duration of Model 'I am a Model' because either starttime or endtime is not set"
    )
    # when - missing endtime (starttime is now set; endtime was never assigned)
    model.starttime = time.time()
    with pytest.raises(RadishError) as exc:
        model.duration
    # then
    assert (
        str(exc.value)
        == "Cannot get duration of Model 'I am a Model' because either starttime or endtime is not set"
    )
radish-bdd/radish | tests/radish/steps.py | 1 | 5348 | # -*- coding: utf-8 -*-
"""
radish
~~~~~~
Behavior Driven Development tool for Python - the root from red to green
Copyright: MIT, Timo Furrer <tuxtimo@gmail.com>
"""
import os
import json
from radish import given, when, then, world
from radish.extensions.cucumber_json_writer import CucumberJSONWriter
@given("I have a step")
def have_a_step(step):
"Given I have a step"
pass
@when("I do something")
def do_something(step):
"When I do something"
pass
@then("I expect something")
def expect_something(step):
"Then I expect something"
pass
@given("I have the number {number:d}")
def have_number(step, number):
"Given I have the number <n>"
if not hasattr(step.context, "numbers"):
step.context.numbers = []
step.context.numbers.append(number)
@when("I add them up")
def sum_numbers(step):
"When I add them up"
step.context.sum = sum(step.context.numbers)
@when("I add them up with failure")
def sum_numbers(step):
"When I add them up with failure"
assert False, "Unable to add numbers: {0}".format(step.context.numbers)
@when("I subtract them")
def subtract_numbers(step):
"When I subtract them up"
difference = step.context.numbers[0]
for n in step.context.numbers[1:]:
difference -= n
step.context.difference = difference
@then("I expect the sum to be {expected_sum:d}")
def expect_sum(step, expected_sum):
"Then I expect the sum to be <n>"
assert (
step.context.sum == expected_sum
), "The expected sum {0} does not match actual sum {1}".format(
expected_sum, step.context.sum
)
@then("I expect the difference to be {expected_diff:d}")
def expect_sum(step, expected_diff):
"Then I expect the difference to be <n>"
assert (
step.context.difference == expected_diff
), "The expected difference {0} does not match actual difference {1}".format(
expected_diff.step.context.difference
)
@given("I have an instable function")
def have_instable_function(step):
"Given I have an instable function"
pass
@when("I execute it")
def execute_instable_function(step):
"When I execute it"
pass
@then("I expect it to pass")
def expect_instable_function_pass(step):
"Then I expect it to pass"
pass
@given("I have the following heros")
def have_heros(step):
"Given I have the following heros"
step.context.heros = step.table
@when("I capitalize their first name")
def cap_first_name(step):
"When I capitalize their first name"
for hero in step.context.heros:
hero["firstname"] = hero["firstname"].upper()
@then("I have the following names")
def have_names(step):
"Then I have the following names"
assert list(x["firstname"] for x in step.context.heros) == list(
x["cap_heroname"] for x in step.table
)
@given("I have the following quote")
def have_quote(step):
"Given I have the following quote"
step.context.quote = step.text
@when("I look for it's author")
def lookup_author(step):
"When I look for it's author"
step.context.author = "Shakespeare"
@then("I will find {:S}")
def expect_author(step, author):
"Then I will find <author>"
assert step.context.author == author
@when("I embed a text {test_text:QuotedString}")
def embed_a_text(step, test_text):
'When I embed a text "<test_text>"'
step.embed(test_text)
step.context.step_with_embedded_data = step
@then("step with embedded text should have following embedded data")
def embed_a_text(step):
"Then step with embedded text should have following embedded data"
assert hasattr(
step.context, "step_with_embedded_data"
), "step_embeddings is missing in context - please check if step with text embedding has been executed"
test_step_embeddings = step.context.step_with_embedded_data.embeddings
for embeddings in step.table:
assert embeddings in test_step_embeddings, "{0} not found in {1}".format(
embeddings, test_step_embeddings
)
@when("generate cucumber report")
def generate_cucumber_report(step):
cjw = CucumberJSONWriter()
cjw.generate_ccjson([step.parent.parent], None)
@then("genreated cucumber json equals to {expected_json_file:QuotedString}")
def proper_cucumber_json_is_generated(step, expected_json_file):
def remove_changing(d):
return {k: v for k, v in d.items() if k not in ["duration", "uri"]}
with open(world.config.cucumber_json, "r") as f_cucumber_json:
cucumber_json = json.load(f_cucumber_json, object_hook=remove_changing)
json_file_path = os.path.join(
os.path.dirname(step.path), "..", "output", expected_json_file
)
with open(json_file_path, "r") as f_expected_cucumber_json:
expected_cucumber_json = json.load(
f_expected_cucumber_json, object_hook=remove_changing
)
assert cucumber_json == expected_cucumber_json
@when("YAML specification is set to")
def yaml_specification_is_set_to(step):
step.context.doc_text = step.text
@then("YAML specification contains proper data")
def yaml_specification_contains_correct_data(step):
expected_data = """version: '3'
services:
webapp:
build: ./dir"""
assert step.context.doc_text == expected_data, '"{}" != "{}"'.format(step.context.doc_text, expected_data) | mit | 19052295c553fb011fdbfc102190eb34 | 26.152284 | 110 | 0.675019 | 3.410714 | false | false | false | false |
radish-bdd/radish | radish/extensions/endreport_writer.py | 1 | 7389 | # -*- coding: utf-8 -*-
"""
This radish extension module provide the functionality to write the end report
"""
# disable no-member lint error because of dynamic method from colorful
# pylint: disable=no-member
from datetime import timedelta
import colorful
import humanize
from radish.hookregistry import after
from radish.stepmodel import Step
from radish.utils import console_write as write, make_unique_obj_list, get_func_code
from radish.scenariooutline import ScenarioOutline
from radish.scenarioloop import ScenarioLoop
from radish.extensionregistry import extension
from radish.terrain import world
from radish.stepregistry import StepRegistry
@extension
class EndreportWriter(object):
    """
    Endreport writer radish extension

    After all features ran, writes a colored summary to the console:
    feature/scenario/step counters per state, a list of pending step
    implementations, an optional --wip section and the total duration.
    """

    # extension is skipped when config.show is set
    LOAD_IF = staticmethod(lambda config: not config.show)
    LOAD_PRIORITY = 50

    def __init__(self):
        # register the report writer to run after all features finished
        after.all(self.console_write)

    def console_write(self, features, marker):
        """
        Writes the endreport for all features

        :param list features: all features
        """
        # per-model counters, keyed by the model's state name
        stats = {
            "features": {
                "amount": 0,
                "passed": 0,
                "failed": 0,
                "skipped": 0,
                "untested": 0,
                "pending": 0,
            },
            "scenarios": {
                "amount": 0,
                "passed": 0,
                "failed": 0,
                "skipped": 0,
                "untested": 0,
                "pending": 0,
            },
            "steps": {
                "amount": 0,
                "passed": 0,
                "failed": 0,
                "skipped": 0,
                "untested": 0,
                "pending": 0,
            },
        }
        pending_steps = []
        duration = timedelta()
        # First pass: accumulate the counters and the total duration.
        for feature in features:
            if not feature.has_to_run(world.config.scenarios):
                continue
            stats["features"]["amount"] += 1
            stats["features"][feature.state] += 1
            # only finished features contribute to the overall duration
            if feature.state in [Step.State.PASSED, Step.State.FAILED]:
                duration += feature.duration
            for scenario in feature.all_scenarios:
                if not scenario.has_to_run(world.config.scenarios):
                    continue
                if isinstance(scenario, ScenarioOutline):  # skip ScenarioOutlines
                    continue
                if isinstance(scenario, ScenarioLoop):  # skip ScenarioLoop
                    continue
                stats["scenarios"]["amount"] += 1
                stats["scenarios"][scenario.state] += 1
                for step in scenario.steps:
                    stats["steps"]["amount"] += 1
                    stats["steps"][step.state] += 1
                    if step.state == Step.State.PENDING:
                        pending_steps.append(step)
        # Pre-build the colored text fragments reused for all three lines.
        colored_closing_paren = colorful.bold_white(")")
        colored_comma = colorful.bold_white(", ")
        passed_word = colorful.bold_green("{0} passed")
        failed_word = colorful.bold_red("{0} failed")
        skipped_word = colorful.cyan("{0} skipped")
        pending_word = colorful.bold_yellow("{0} pending")
        # Feature summary line; failed/skipped/pending parts only when non-zero.
        output = colorful.bold_white(
            "{0} features (".format(stats["features"]["amount"])
        )
        output += passed_word.format(stats["features"]["passed"])
        if stats["features"]["failed"]:
            output += colored_comma + failed_word.format(stats["features"]["failed"])
        if stats["features"]["skipped"]:
            output += colored_comma + skipped_word.format(stats["features"]["skipped"])
        if stats["features"]["pending"]:
            output += colored_comma + pending_word.format(stats["features"]["pending"])
        output += colored_closing_paren
        output += "\n"
        # Scenario summary line.
        output += colorful.bold_white(
            "{} scenarios (".format(stats["scenarios"]["amount"])
        )
        output += passed_word.format(stats["scenarios"]["passed"])
        if stats["scenarios"]["failed"]:
            output += colored_comma + failed_word.format(stats["scenarios"]["failed"])
        if stats["scenarios"]["skipped"]:
            output += colored_comma + skipped_word.format(stats["scenarios"]["skipped"])
        if stats["scenarios"]["pending"]:
            output += colored_comma + pending_word.format(stats["scenarios"]["pending"])
        output += colored_closing_paren
        output += "\n"
        # Step summary line.
        output += colorful.bold_white("{} steps (".format(stats["steps"]["amount"]))
        output += passed_word.format(stats["steps"]["passed"])
        if stats["steps"]["failed"]:
            output += colored_comma + failed_word.format(stats["steps"]["failed"])
        if stats["steps"]["skipped"]:
            output += colored_comma + skipped_word.format(stats["steps"]["skipped"])
        if stats["steps"]["pending"]:
            output += colored_comma + pending_word.format(stats["steps"]["pending"])
        output += colored_closing_paren
        if pending_steps:
            # List each distinct pending step implementation once, with the
            # pattern it was registered under and its source file.
            sr = StepRegistry()
            pending_step_implementations = make_unique_obj_list(
                pending_steps, lambda x: x.definition_func
            )
            output += colorful.white(
                "\nYou have {0} pending step implementation{1} affecting {2} step{3}:\n {4}\n\nNote: this could be the reason for some failing subsequent steps".format(
                    len(pending_step_implementations),
                    "s" if len(pending_step_implementations) != 1 else "",
                    len(pending_steps),
                    "s" if len(pending_steps) != 1 else "",
                    "\n ".join(
                        [
                            "- '{0}' @ {1}".format(
                                sr.get_pattern(s.definition_func),
                                get_func_code(s.definition_func).co_filename,
                            )
                            for s in pending_step_implementations
                        ]
                    ),
                )
            )
        output += "\n"
        if world.config.wip:
            # In --wip mode failures are expected; passing scenarios are the anomaly.
            if stats["scenarios"]["passed"] > 0:
                output += colorful.red(
                    "\nThe --wip switch was used, so I didn't expect anything to pass. These scenarios passed:\n"
                )
                has_passed_scenarios = False
                for feature in features:
                    passed_scenarios = list(
                        filter(
                            lambda s: s.state == Step.State.PASSED,
                            feature.all_scenarios,
                        )
                    )
                    for scenario in passed_scenarios:
                        output += colorful.red(
                            "\n - {}: {}".format(feature.path, scenario.sentence)
                        )
                        has_passed_scenarios = True
                if has_passed_scenarios:
                    output += "\n"
            else:
                output += colorful.green(
                    "\nThe --wip switch was used, so the failures were expected. All is good.\n"
                )
        output += colorful.cyan(
            "Run {0} finished within {1}".format(
                marker, humanize.naturaldelta(duration)
            )
        )
        write(output)
| mit | 8bb1e6b6fc1e8a6dd25758c5c77d6ada | 36.130653 | 169 | 0.508459 | 4.739577 | false | false | false | false |
radish-bdd/radish | radish/core.py | 1 | 3214 | # -*- coding: utf-8 -*-
"""
Providing radish core functionality.
"""
from threading import Lock
from collections import OrderedDict
from .parser import FeatureParser
class Configuration(object):
    """
    Expose command line arguments as object attributes.

    Attribute names are derived from the option names by removing or
    replacing the characters that are not valid in Python identifiers:

    * "--" is removed
    * "-" is replaced with "_"
    * "<" and ">" (positional argument markers) are removed

    :param arguments: command line arguments and their values
    :type arguments: dict-like object (i.e. docopt.Dict)
    """

    def __init__(self, arguments):
        for option, value in arguments.items():
            # strip the long-option prefix first, then sanitize the rest
            attribute = option.replace("--", "").replace("-", "_")
            attribute = attribute.replace("<", "").replace(">", "")
            setattr(self, attribute, value)
# FIXME: rename
class Core(object):
    """
    Parse feature files and keep track of the resulting model objects.

    Also hands out process-wide unique, thread-safe feature and scenario ids.
    """

    def __init__(self):
        self.features = []
        self._features_to_run = OrderedDict()
        self._feature_id_lock = Lock()
        self._feature_id = 0
        self._scenario_id_lock = Lock()
        self._scenario_id = 0

    @property
    def features_to_run(self):
        """
        Return all parsed features which are to run
        """
        return list(self._features_to_run.values())

    @property
    def next_feature_id(self):
        """
        Returns the next feature id
        """
        with self._feature_id_lock:
            self._feature_id += 1
            return self._feature_id

    @property
    def next_scenario_id(self):
        """
        Returns the next scenario id
        """
        with self._scenario_id_lock:
            self._scenario_id += 1
            return self._scenario_id

    def parse_features(self, feature_files, tag_expr):
        """
        Parses the given feature files
        """
        for path in feature_files:
            parsed = self.parse_feature(path, tag_expr, featureid=self.next_feature_id)
            if parsed is None:
                continue
            # assign a globally unique id to every scenario of this feature
            for scenario in parsed.scenarios:
                scenario.absolute_id = self.next_scenario_id
            self._features_to_run[path] = parsed

    def parse_feature(self, featurefile, tag_expr, inherited_tags=None, featureid=0):
        """
        Parses the given feature file

        :returns: the parsed feature, or None if the parser yields nothing
        :rtype: Feature
        """
        parser = FeatureParser(
            self, featurefile, featureid, tag_expr, inherited_tags=inherited_tags
        )
        feature = parser.parse()
        if feature is None:
            return None
        self.features.append(feature)
        return feature
| mit | 9776f00d3a0d87dbb2410f1458970b36 | 27.442478 | 87 | 0.561294 | 4.644509 | false | false | false | false |
onecodex/onecodex | onecodex/viz/_bargraph.py | 1 | 12197 | from onecodex.exceptions import OneCodexException, PlottingException
from onecodex.lib.enums import Rank, Metric, Link
from onecodex.viz._primitives import (
interleave_palette,
prepare_props,
sort_helper,
get_ncbi_taxonomy_browser_url,
get_classification_url,
open_links_in_new_tab,
)
class VizBargraphMixin(object):
    # Mixin providing the stacked-bargraph plot; expects the host class to
    # supply `_results`, `metadata`, `taxonomy`, `to_df`, `_metadata_fetch`,
    # `_guess_normalized`, `_metric` and `metric`.
    def plot_bargraph(
        self,
        rank=Rank.Auto,
        normalize="auto",
        top_n="auto",
        threshold="auto",
        title=None,
        xlabel=None,
        ylabel=None,
        tooltip=None,
        return_chart=False,
        haxis=None,
        legend="auto",
        label=None,
        sort_x=None,
        include_taxa_missing_rank=None,
        include_other=True,
        width=None,
        height=None,
        group_by=None,
        link=Link.Ocx,
    ):
        """Plot a bargraph of relative abundance of taxa for multiple samples.

        Parameters
        ----------
        rank : {'auto', 'kingdom', 'phylum', 'class', 'order', 'family', 'genus', 'species'}, optional
            Analysis will be restricted to abundances of taxa at the specified level.
        normalize : 'auto' or `bool`, optional
            Convert read counts to relative abundances such that each sample sums to 1.0. Setting
            'auto' will choose automatically based on the data.
        return_chart : `bool`, optional
            When True, return an `altair.Chart` object instead of displaying the resulting plot in
            the current notebook.
        top_n : `int`, optional
            Display the top N most abundant taxa in the entire cohort of samples.
        threshold : `float`
            Display only taxa that are more abundant that this threshold in one or more samples.
        title : `string`, optional
            Text label at the top of the plot.
        xlabel : `string`, optional
            Text label along the horizontal axis.
        ylabel : `string`, optional
            Text label along the vertical axis.
        tooltip : `string` or `list`, optional
            A string or list containing strings representing metadata fields. When a point in the
            plot is hovered over, the value of the metadata associated with that sample will be
            displayed in a modal.
        haxis : `string`, optional
            The metadata field (or tuple containing multiple categorical fields) used to facet
            samples.
        legend: `string` or `altair.Legend`, optional
            If a string is provided, it will be used as the legend title. Defaults to the metric
            used to generate the plot, e.g. readcount_w_children or abundance. Alternatively, an
            `altair.Legend` instance may be provided for legend customization.
        label : `string` or `callable`, optional
            A metadata field (or function) used to label each analysis. If passing a function, a
            dict containing the metadata for each analysis is passed as the first and only
            positional argument. The callable function must return a string.
        sort_x : `list` or `callable`, optional
            Either a list of sorted labels or a function that will be called with a list of x-axis labels
            as the only argument, and must return the same list in a user-specified order.
        include_taxa_missing_rank : `bool`, optional
            Whether or not a row should be plotted for taxa that do not have a designated parent at `rank`.
        group_by : `string`, optional
            The metadata field used to group samples together. Readcounts or abundances will be
            averaged within each group.
        link: {'ocx', 'ncbi'}, optional
            If `link` is 'ocx', clicking a sample will open its classification results in the One
            Codex app in a new tab. If `link` is 'ncbi', clicking a taxon will open the NCBI
            taxonomy browser in a new tab.

        Examples
        --------
        Plot a bargraph of the top 10 most abundant genera

        >>> plot_bargraph(rank='genus', top_n=10)
        """
        # Deferred imports
        import altair as alt

        # --- argument validation -------------------------------------------
        if rank is None:
            raise OneCodexException("Please specify a rank or 'auto' to choose automatically")
        if not (threshold or top_n):
            raise OneCodexException("Please specify at least one of: threshold, top_n")
        if len(self._results) < 1:
            raise PlottingException(
                "There are too few samples for bargraph plots after filtering. Please select 1 or "
                "more samples to plot."
            )
        if not normalize and self._guess_normalized():
            raise OneCodexException("Data has already been normalized and this cannot be undone.")
        if group_by:
            # grouping is incompatible with per-sample display options
            if not all(kwarg is None for kwarg in (tooltip, haxis, label, sort_x)):
                raise OneCodexException(
                    "`tooltip`, `haxis`, `label`, and `sort_x` are not supported with `group_by`."
                )
            if group_by not in self.metadata:
                raise OneCodexException(
                    f"Metadata field {group_by} not found. Choose from: {', '.join(self.metadata.keys())}"
                )
            if (
                self._metric in {Metric.Readcount, Metric.ReadcountWChildren}
                and self._guess_normalized()
            ):
                raise OneCodexException(
                    "`group_by` may not be used with readcounts that have already been normalized."
                )
        if include_taxa_missing_rank is None:
            # abundances with children already account for the missing-rank bucket
            if self._metric == Metric.AbundanceWChildren:
                include_taxa_missing_rank = True
            else:
                include_taxa_missing_rank = False
        # We're intentionally *not* normalizing or filtering by top_n/threshold in to_df() in case
        # group_by was passed. Grouping samples needs to happen *before* normalization.
        df = self.to_df(
            rank=rank,
            top_n=None,
            threshold=None,
            normalize=None,
            include_taxa_missing_rank=include_taxa_missing_rank,
        )
        pretty_metric_name = self.metric
        if group_by:
            # average the metric within each metadata group
            df = df.fillna(0.0).join(self.metadata[group_by]).groupby(group_by, dropna=False).mean()
            # Nicer display for missing metadata values than `null`
            df.index = df.index.fillna("N/A")
            pretty_metric_name = f"Mean {pretty_metric_name}"
        if normalize and (not self._guess_normalized() or group_by):
            # Replace nans with zeros for samples that have a total abundance of zero.
            df = df.div(df.sum(axis=1), axis=0).fillna(0.0)
        # Keep track of empty rows *before* filtering taxa by threshold/top_n. We'll use this below
        # to calculate "Other".
        empty_rows = df[df.sum(axis=1) == 0.0].index
        # resolve the 'auto' defaults: top_n wins when both are 'auto'
        if top_n == "auto" and threshold == "auto":
            top_n = 10
            threshold = None
        elif top_n == "auto" and threshold != "auto":
            top_n = None
        elif top_n != "auto" and threshold == "auto":
            threshold = None
        if threshold:
            df = df.loc[:, df.max() >= threshold]
        if top_n:
            df = df.loc[:, df.mean().sort_values(ascending=False).iloc[:top_n].index]
        if include_other and normalize:
            # "Other" absorbs whatever mass the filtered-out taxa held
            df["Other"] = df.apply(
                lambda row: 0.0 if row.name in empty_rows else 1 - row.sum(), axis=1
            )
        if isinstance(legend, str):
            if legend == "auto":
                legend = pretty_metric_name
            legend = alt.Legend(title=legend, symbolLimit=40, labelLimit=0)
        if not isinstance(legend, alt.Legend):
            raise TypeError(f"`legend` must be of type str or altair.Legend, not {type(legend)}")
        if tooltip:
            if isinstance(tooltip, list):
                tooltip = tooltip.copy()
            else:
                tooltip = [tooltip]
        else:
            tooltip = []
        if group_by:
            tooltip.append(group_by)
            metadata_columns = []
        else:
            tooltip.insert(0, "Label")
            if haxis:
                tooltip.append(haxis)
            # takes metadata columns and returns a dataframe with just those columns
            # renames columns in the case where columns are taxids
            magic_metadata, magic_fields = self._metadata_fetch(tooltip, label=label)
            df = df.join(magic_metadata)
            metadata_columns = magic_metadata.columns.tolist()
            tooltip = [magic_fields[f] for f in tooltip]
        # should ultimately be Label/`group_by`, tax_name, metric name, then custom fields
        tooltip.insert(1, "tax_name")
        tooltip.insert(2, "{}:Q".format(pretty_metric_name))
        # long-format frame: one row per (sample, taxon) pair
        df = df.reset_index().melt(
            id_vars=[df.index.name] + metadata_columns,
            var_name="tax_id",
            value_name=pretty_metric_name,
        )
        # add taxa names
        df["tax_name"] = df["tax_id"].apply(
            lambda t: "{} ({})".format(self.taxonomy["name"][t], t)
            if t in self.taxonomy["name"]
            else t
        )
        #
        # TODO: how to sort bars in bargraph
        # - abundance (mean across all samples)
        # - parent taxon (this will require that we make a few assumptions
        # about taxonomic ranks but as all taxonomic data will be coming from
        # OCX this should be okay)
        #
        ylabel = ylabel or pretty_metric_name
        if xlabel is None:
            xlabel = group_by if group_by else ""
        encode_kwargs = {}
        if haxis:
            encode_kwargs["column"] = alt.Column(
                haxis, header=alt.Header(titleOrient="bottom", labelOrient="bottom")
            )
        # Stable color assignment: "Other" and the no-rank bucket get fixed
        # neutral colors and are moved to the front of the stacking order.
        domain = sorted(df["tax_name"].unique())
        no_level_name = "No {}".format(rank)
        color_range = interleave_palette(set(domain) - {"Other", no_level_name})
        other_color = ["#DCE0E5"]
        no_level_color = ["#eeefe1"]
        if include_taxa_missing_rank and no_level_name in domain:
            domain.remove(no_level_name)
            domain = [no_level_name] + domain
            color_range = no_level_color + color_range
        if include_other and "Other" in domain:
            domain.remove("Other")
            domain = ["Other"] + domain
            color_range = other_color + color_range
        sort_order = None
        if not group_by:
            sort_order = sort_helper(sort_x, df["Label"].tolist())
        df["order"] = df["tax_name"].apply(domain.index)
        if link == Link.Ocx and not group_by:
            df["url"] = df["classification_id"].apply(get_classification_url)
            encode_kwargs["href"] = "url:N"
        elif link == Link.Ncbi:
            df["url"] = df["tax_id"].apply(get_ncbi_taxonomy_browser_url)
            encode_kwargs["href"] = "url:N"
        y_scale_kwargs = {"zero": True, "nice": False}
        if normalize:
            y_scale_kwargs["domain"] = [0, 1]
        chart = (
            alt.Chart(df)
            .mark_bar()
            .encode(
                x=alt.X(
                    group_by if group_by else "Label", axis=alt.Axis(title=xlabel), sort=sort_order
                ),
                y=alt.Y(
                    pretty_metric_name,
                    axis=alt.Axis(title=ylabel),
                    scale=alt.Scale(**y_scale_kwargs),
                ),
                color=alt.Color(
                    "tax_name",
                    legend=legend,
                    sort=domain,
                    scale=alt.Scale(domain=domain, range=color_range),
                ),
                tooltip=tooltip,
                order=alt.Order("order", sort="descending"),
                **encode_kwargs,
            )
        )
        if haxis:
            chart = chart.resolve_scale(x="independent")
        chart = chart.properties(**prepare_props(title=title, width=width, height=height))
        open_links_in_new_tab(chart)
        return chart if return_chart else chart.display()
| mit | 7ec2211f07f714fd3fb7b73f192f1680 | 38.859477 | 107 | 0.567025 | 4.179918 | false | false | false | false |
python-visualization/folium | folium/plugins/marker_cluster.py | 1 | 3791 | from jinja2 import Template
from folium.elements import JSCSSMixin
from folium.map import Layer, Marker
from folium.utilities import parse_options, validate_locations
class MarkerCluster(JSCSSMixin, Layer):
    """
    Provides Beautiful Animated Marker Clustering functionality for maps.

    Parameters
    ----------
    locations: list of list or array of shape (n, 2).
        Data points of the form [[lat, lng]].
    popups: list of length n, default None
        Popup for each marker, either a Popup object or a string or None.
    icons: list of length n, default None
        Icon for each marker, either an Icon object or a string or None.
    name : string, default None
        The name of the Layer, as it will appear in LayerControls
    overlay : bool, default True
        Adds the layer as an optional overlay (True) or the base layer (False).
    control : bool, default True
        Whether the Layer will be included in LayerControls.
    show: bool, default True
        Whether the layer will be shown on opening (only for overlays).
    icon_create_function : string, default None
        Override the default behaviour, making possible to customize
        markers colors and sizes.
    options : dict, default None
        A dictionary with options for Leaflet.markercluster. See
        https://github.com/Leaflet/Leaflet.markercluster for options.

    Example
    -------
    >>> icon_create_function = '''
    ...     function(cluster) {
    ...     return L.divIcon({html: '<b>' + cluster.getChildCount() + '</b>',
    ...                       className: 'marker-cluster marker-cluster-small',
    ...                       iconSize: new L.Point(20, 20)});
    ...     }
    ... '''
    """

    _template = Template(
        """
        {% macro script(this, kwargs) %}
            var {{ this.get_name() }} = L.markerClusterGroup(
                {{ this.options|tojson }}
            );
            {%- if this.icon_create_function is not none %}
            {{ this.get_name() }}.options.iconCreateFunction =
                {{ this.icon_create_function.strip() }};
            {%- endif %}
            {{ this._parent.get_name() }}.addLayer({{ this.get_name() }});
        {% endmacro %}
        """
    )

    default_js = [
        (
            "markerclusterjs",
            "https://cdnjs.cloudflare.com/ajax/libs/leaflet.markercluster/1.1.0/leaflet.markercluster.js",
        )
    ]
    default_css = [
        (
            "markerclustercss",
            "https://cdnjs.cloudflare.com/ajax/libs/leaflet.markercluster/1.1.0/MarkerCluster.css",
        ),
        (
            "markerclusterdefaultcss",
            "https://cdnjs.cloudflare.com/ajax/libs/leaflet.markercluster/1.1.0/MarkerCluster.Default.css",
        ),
    ]

    def __init__(
        self,
        locations=None,
        popups=None,
        icons=None,
        name=None,
        overlay=True,
        control=True,
        show=True,
        icon_create_function=None,
        options=None,
        **kwargs
    ):
        if options is not None:
            kwargs.update(options)  # options argument is legacy
        super().__init__(name=name, overlay=overlay, control=control, show=show)
        self._name = "MarkerCluster"
        if locations is not None:
            locations = validate_locations(locations)
            # one Marker child per location; `popups and popups[i]` /
            # `icons and icons[i]` fall back to None when no list was given
            for i, location in enumerate(locations):
                self.add_child(
                    Marker(
                        location, popup=popups and popups[i], icon=icons and icons[i]
                    )
                )
        self.options = parse_options(**kwargs)
        if icon_create_function is not None:
            # must be raw JavaScript source, injected verbatim into the template
            assert isinstance(icon_create_function, str)
        self.icon_create_function = icon_create_function
python-visualization/folium | folium/plugins/heat_map.py | 1 | 3672 | import warnings
import numpy as np
from jinja2 import Template
from folium.elements import JSCSSMixin
from folium.map import Layer
from folium.utilities import (
if_pandas_df_convert_to_numpy,
none_max,
none_min,
parse_options,
validate_location,
)
class HeatMap(JSCSSMixin, Layer):
    """
    Create a Heatmap layer

    Parameters
    ----------
    data : list of points of the form [lat, lng] or [lat, lng, weight]
        The points you want to plot.
        You can also provide a numpy.array of shape (n,2) or (n,3).
    name : string, default None
        The name of the Layer, as it will appear in LayerControls.
    min_opacity : default 1.
        The minimum opacity the heat will start at.
    max_zoom : default 18
        Zoom level where the points reach maximum intensity (as intensity
        scales with zoom), equals maxZoom of the map by default
    radius : int, default 25
        Radius of each "point" of the heatmap
    blur : int, default 15
        Amount of blur
    gradient : dict, default None
        Color gradient config. e.g. {0.4: 'blue', 0.65: 'lime', 1: 'red'}
    overlay : bool, default True
        Adds the layer as an optional overlay (True) or the base layer (False).
    control : bool, default True
        Whether the Layer will be included in LayerControls.
    show: bool, default True
        Whether the layer will be shown on opening (only for overlays).
    """

    _template = Template(
        """
        {% macro script(this, kwargs) %}
            var {{ this.get_name() }} = L.heatLayer(
                {{ this.data|tojson }},
                {{ this.options|tojson }}
            ).addTo({{ this._parent.get_name() }});
        {% endmacro %}
        """
    )

    default_js = [
        (
            "leaflet-heat.js",
            "https://cdn.jsdelivr.net/gh/python-visualization/folium@main/folium/templates/leaflet_heat.min.js",
        ),
    ]

    def __init__(
        self,
        data,
        name=None,
        min_opacity=0.5,
        max_zoom=18,
        radius=25,
        blur=15,
        gradient=None,
        overlay=True,
        control=True,
        show=True,
        **kwargs
    ):
        super().__init__(name=name, overlay=overlay, control=control, show=show)
        self._name = "HeatMap"
        data = if_pandas_df_convert_to_numpy(data)
        # validate the lat/lng pair and keep any trailing weight column
        self.data = [
            [*validate_location(line[:2]), *line[2:]] for line in data  # noqa: E999
        ]
        # NOTE(review): np.isnan over self.data presumably requires every row
        # to have the same length (all [lat, lng] or all [lat, lng, weight]);
        # ragged input would fail here -- confirm against callers.
        if np.any(np.isnan(self.data)):
            raise ValueError("data may not contain NaNs.")
        if kwargs.pop("max_val", None):
            # max_val used to scale intensities; it is now computed by Leaflet.heat
            warnings.warn(
                "The `max_val` parameter is no longer necessary. "
                "The largest intensity is calculated automatically.",
                stacklevel=2,
            )
        self.options = parse_options(
            min_opacity=min_opacity,
            max_zoom=max_zoom,
            radius=radius,
            blur=blur,
            gradient=gradient,
            **kwargs
        )

    def _get_self_bounds(self):
        """
        Computes the bounds of the object itself (not including it's children)
        in the form [[lat_min, lon_min], [lat_max, lon_max]].
        """
        bounds = [[None, None], [None, None]]
        # fold every point into the running min/max; none_min/none_max treat
        # the initial None entries as "no value yet"
        for point in self.data:
            bounds = [
                [
                    none_min(bounds[0][0], point[0]),
                    none_min(bounds[0][1], point[1]),
                ],
                [
                    none_max(bounds[1][0], point[0]),
                    none_max(bounds[1][1], point[1]),
                ],
            ]
        return bounds
| mit | 4ff52db3cd3833367c9db9cc58e0d0d5 | 29.347107 | 112 | 0.535948 | 3.948387 | false | false | false | false |
python-visualization/folium | tests/plugins/test_tag_filter_button.py | 1 | 2470 | """
Test TagFilterButton
------------
"""
import random
import numpy as np
from jinja2 import Template
import folium
from folium import plugins
from folium.utilities import normalize
def test_tag_filter_button():
    np.random.seed(3141592)
    # Generate base data
    initial_data = np.random.normal(size=(100, 2)) * np.array([[1, 1]]) + np.array(
        [[48, 5]]
    )
    # Generate the data to segment by (levels of another categorical pandas column in practical usage)
    n = 5
    categories = [f"category{i + 1}" for i in range(n)]
    category_column = [random.choice(categories) for i in range(len(initial_data))]
    # Create map and add the data with additional parameter tags as the segmentation
    m = folium.Map([48.0, 5.0], tiles="stamentoner", zoom_start=6)
    for i, latlng in enumerate(initial_data):
        category = category_column[i]
        folium.Marker(tuple(latlng), tags=[category]).add_to(m)
    hm = plugins.TagFilterButton(categories).add_to(m)
    out = normalize(m._parent.render())
    # We verify that the script imports are present.
    script = '<script src="https://cdn.jsdelivr.net/npm/leaflet-tag-filter-button/src/leaflet-tag-filter-button.js"></script>'  # noqa
    assert script in out
    script = '<script src="https://cdn.jsdelivr.net/npm/leaflet-easybutton@2/src/easy-button.js"></script>'  # noqa
    assert script in out
    script = '<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/leaflet-tag-filter-button/src/leaflet-tag-filter-button.css"/>'  # noqa
    assert script in out
    script = '<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/leaflet-easybutton@2/src/easy-button.css"/>'  # noqa
    assert script in out
    script = '<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/css-ripple-effect@1.0.5/dist/ripple.min.css"/>'  # noqa
    assert script in out
    # We verify that the script part is correct.
    # The rendered output must contain the tagFilterButton initialization
    # produced from the plugin's options.
    tmpl = Template(
        """
        var {{this.get_name()}} = L.control.tagFilterButton(
            {
                data: {{this.options.data}},
                icon: "{{this.options.icon}}",
                clearText: {{this.options.clear_text}},
                filterOnEveryClick: {{this.options.filter_on_every_click}},
                openPopupOnHover: {{this.options.open_popup_on_hover}}
            })
            .addTo({{this._parent.get_name()}});
        """
    )
    assert normalize(tmpl.render(this=hm))
| mit | c2c458e07ca161ed04e8eec3bced41dd | 38.83871 | 143 | 0.631984 | 3.459384 | false | false | false | false |
python-visualization/folium | folium/plugins/polyline_text_path.py | 1 | 2292 | from jinja2 import Template
from folium.elements import JSCSSMixin
from folium.features import MacroElement
from folium.utilities import parse_options
class PolyLineTextPath(JSCSSMixin, MacroElement):
    """
    Shows a text along a PolyLine.

    Parameters
    ----------
    polyline: folium.features.PolyLine object
        The folium.features.PolyLine object to attach the text to.
    text: string
        The string to be attached to the polyline.
    repeat: bool, default False
        Specifies if the text should be repeated along the polyline.
    center: bool, default False
        Centers the text according to the polyline's bounding box
    below: bool, default False
        Show text below the path
    offset: int, default 0
        Set an offset to position text relative to the polyline.
    orientation: int, default 0
        Rotate text to a specified angle.
    attributes: dict
        Object containing the attributes applied to the text tag.
        Check valid attributes here:
        https://developer.mozilla.org/en-US/docs/Web/SVG/Element/text#attributes
        Example: {'fill': '#007DEF', 'font-weight': 'bold', 'font-size': '24'}

    See https://github.com/makinacorpus/Leaflet.TextPath for more information.
    """

    _template = Template(
        """
        {% macro script(this, kwargs) %}
            {{ this.polyline.get_name() }}.setText(
                {{ this.text|tojson }},
                {{ this.options|tojson }}
            );
        {% endmacro %}
        """
    )

    default_js = [
        (
            "polylinetextpath",
            "https://cdn.jsdelivr.net/npm/leaflet-textpath@1.2.3/leaflet.textpath.min.js",
        )
    ]

    def __init__(
        self,
        polyline,
        text,
        repeat=False,
        center=False,
        below=False,
        offset=0,
        orientation=0,
        attributes=None,
        **kwargs
    ):
        super().__init__()
        self._name = "PolyLineTextPath"
        # the target polyline is referenced by name in the rendered template
        self.polyline = polyline
        self.text = text
        self.options = parse_options(
            repeat=repeat,
            center=center,
            below=below,
            offset=offset,
            orientation=orientation,
            attributes=attributes,
            **kwargs
        )
| mit | b721a98a3abf949725b90522bfd9ae6a | 27.65 | 90 | 0.585515 | 4.190128 | false | false | false | false |
python-visualization/folium | folium/folium.py | 1 | 16746 | """
Make beautiful, interactive maps with Python and Leaflet.js
"""
import time
import warnings
import webbrowser
from branca.element import Element, Figure, MacroElement
from jinja2 import Environment, PackageLoader, Template
from folium.elements import JSCSSMixin
from folium.map import FitBounds
from folium.raster_layers import TileLayer
from folium.utilities import (
_parse_size,
parse_options,
temp_html_filepath,
validate_location,
)
# Jinja2 environment used to load folium's bundled templates.
ENV = Environment(loader=PackageLoader("folium", "templates"))

# JavaScript resources linked into every rendered figure (name, URL pairs).
_default_js = [
    ("leaflet", "https://cdn.jsdelivr.net/npm/leaflet@1.9.3/dist/leaflet.js"),
    ("jquery", "https://code.jquery.com/jquery-1.12.4.min.js"),
    (
        "bootstrap",
        "https://cdn.jsdelivr.net/npm/bootstrap@5.2.2/dist/js/bootstrap.bundle.min.js",
    ),
    (
        "awesome_markers",
        "https://cdnjs.cloudflare.com/ajax/libs/Leaflet.awesome-markers/2.0.2/leaflet.awesome-markers.js",
    ),  # noqa
]

# CSS resources linked into every rendered figure (name, URL pairs).
_default_css = [
    ("leaflet_css", "https://cdn.jsdelivr.net/npm/leaflet@1.9.3/dist/leaflet.css"),
    (
        "bootstrap_css",
        "https://cdn.jsdelivr.net/npm/bootstrap@5.2.2/dist/css/bootstrap.min.css",
    ),
    # glyphicons came from Bootstrap 3 and are used for Awesome Markers
    (
        "glyphicons_css",
        "https://netdna.bootstrapcdn.com/bootstrap/3.0.0/css/bootstrap.min.css",
    ),
    (
        "awesome_markers_font_css",
        "https://cdn.jsdelivr.net/npm/@fortawesome/fontawesome-free@6.2.0/css/all.min.css",
    ),  # noqa
    (
        "awesome_markers_css",
        "https://cdnjs.cloudflare.com/ajax/libs/Leaflet.awesome-markers/2.0.2/leaflet.awesome-markers.css",
    ),  # noqa
    (
        "awesome_rotate_css",
        "https://cdn.jsdelivr.net/gh/python-visualization/folium/folium/templates/leaflet.awesome.rotate.min.css",
    ),  # noqa
]
class GlobalSwitches(Element):
    """Inject global Leaflet feature switches into the page.

    Renders a ``<script>`` tag assigning ``L_NO_TOUCH`` and
    ``L_DISABLE_3D`` so that Leaflet reads them before any map is created.
    """

    _template = Template(
        """
            <script>
                L_NO_TOUCH = {{ this.no_touch |tojson}};
                L_DISABLE_3D = {{ this.disable_3d|tojson }};
            </script>
        """
    )

    def __init__(self, no_touch=False, disable_3d=False):
        # no_touch: forbid Leaflet touch-event handling even if supported.
        # disable_3d: forbid hardware-accelerated CSS 3D transforms.
        super().__init__()
        self._name = "GlobalSwitches"
        self.no_touch = no_touch
        self.disable_3d = disable_3d
class Map(JSCSSMixin, MacroElement):
    """Create a Map with Folium and Leaflet.js

    Generate a base map of given width and height with either default
    tilesets or a custom tileset URL. The following tilesets are built-in
    to Folium. Pass any of the following to the "tiles" keyword:

    - "OpenStreetMap"
    - "Mapbox Bright" (Limited levels of zoom for free tiles)
    - "Mapbox Control Room" (Limited levels of zoom for free tiles)
    - "Stamen" (Terrain, Toner, and Watercolor)
    - "Cloudmade" (Must pass API key)
    - "Mapbox" (Must pass API key)
    - "CartoDB" (positron and dark_matter)

    You can pass a custom tileset to Folium by passing a
    :class:`xyzservices.TileProvider` or a Leaflet-style
    URL to the tiles parameter: ``http://{s}.yourtiles.com/{z}/{x}/{y}.png``.

    You can find a list of free tile providers here:
    ``http://leaflet-extras.github.io/leaflet-providers/preview/``.
    Be sure to check their terms and conditions and to provide attribution
    with the `attr` keyword.

    Parameters
    ----------
    location: tuple or list, default None
        Latitude and Longitude of Map (Northing, Easting).
    width: pixel int or percentage string (default: '100%')
        Width of the map.
    height: pixel int or percentage string (default: '100%')
        Height of the map.
    tiles: str or TileLayer or :class:`xyzservices.TileProvider`, default 'OpenStreetMap'
        Map tileset to use. Can choose from a list of built-in tiles,
        pass a :class:`xyzservices.TileProvider`,
        pass a custom URL, pass a TileLayer object,
        or pass `None` to create a map without tiles.
        For more advanced tile layer options, use the `TileLayer` class.
    min_zoom: int, default 0
        Minimum allowed zoom level for the tile layer that is created.
    max_zoom: int, default 18
        Maximum allowed zoom level for the tile layer that is created.
    zoom_start: int, default 10
        Initial zoom level for the map.
    attr: string, default None
        Map tile attribution; only required if passing custom tile URL.
    crs : str, default 'EPSG3857'
        Defines coordinate reference systems for projecting geographical points
        into pixel (screen) coordinates and back.
        You can use Leaflet's values :
        * EPSG3857 : The most common CRS for online maps, used by almost all
        free and commercial tile providers. Uses Spherical Mercator projection.
        Set in by default in Map's crs option.
        * EPSG4326 : A common CRS among GIS enthusiasts.
        Uses simple Equirectangular projection.
        * EPSG3395 : Rarely used by some commercial tile providers.
        Uses Elliptical Mercator projection.
        * Simple : A simple CRS that maps longitude and latitude into
        x and y directly. May be used for maps of flat surfaces
        (e.g. game maps). Note that the y axis should still be inverted
        (going from bottom to top).
    control_scale : bool, default False
        Whether to add a control scale on the map.
    prefer_canvas : bool, default False
        Forces Leaflet to use the Canvas back-end (if available) for
        vector layers instead of SVG. This can increase performance
        considerably in some cases (e.g. many thousands of circle
        markers on the map).
    no_touch : bool, default False
        Forces Leaflet to not use touch events even if it detects them.
    disable_3d : bool, default False
        Forces Leaflet to not use hardware-accelerated CSS 3D
        transforms for positioning (which may cause glitches in some
        rare environments) even if they're supported.
    zoom_control : bool, default True
        Display zoom controls on the map.
    **kwargs
        Additional keyword arguments are passed to Leaflets Map class:
        https://leafletjs.com/reference.html#map

    Returns
    -------
    Folium Map Object

    Examples
    --------
    >>> m = folium.Map(location=[45.523, -122.675], width=750, height=500)
    >>> m = folium.Map(location=[45.523, -122.675], tiles="cartodb positron")
    >>> m = folium.Map(
    ...     location=[45.523, -122.675],
    ...     zoom_start=2,
    ...     tiles="https://api.mapbox.com/v4/mapbox.streets/{z}/{x}/{y}.png?access_token=mytoken",
    ...     attr="Mapbox attribution",
    ... )
    """  # noqa

    # Three macros: header (CSS sizing the map div), html (the div itself)
    # and script (the L.map() construction with the collected options).
    _template = Template(
        """
        {% macro header(this, kwargs) %}
            <meta name="viewport" content="width=device-width,
                initial-scale=1.0, maximum-scale=1.0, user-scalable=no" />
            <style>
                #{{ this.get_name() }} {
                    position: {{this.position}};
                    width: {{this.width[0]}}{{this.width[1]}};
                    height: {{this.height[0]}}{{this.height[1]}};
                    left: {{this.left[0]}}{{this.left[1]}};
                    top: {{this.top[0]}}{{this.top[1]}};
                }
                .leaflet-container { font-size: 1rem; }
            </style>
        {% endmacro %}

        {% macro html(this, kwargs) %}
            <div class="folium-map" id={{ this.get_name()|tojson }} ></div>
        {% endmacro %}

        {% macro script(this, kwargs) %}
            var {{ this.get_name() }} = L.map(
                {{ this.get_name()|tojson }},
                {
                    center: {{ this.location|tojson }},
                    crs: L.CRS.{{ this.crs }},
                    {%- for key, value in this.options.items() %}
                    {{ key }}: {{ value|tojson }},
                    {%- endfor %}
                }
            );

            {%- if this.control_scale %}
            L.control.scale().addTo({{ this.get_name() }});
            {%- endif %}

            {% if this.objects_to_stay_in_front %}
            function objects_in_front() {
                {%- for obj in this.objects_to_stay_in_front %}
                    {{ obj.get_name() }}.bringToFront();
                {%- endfor %}
            };
            {{ this.get_name() }}.on("overlayadd", objects_in_front);
            $(document).ready(objects_in_front);
            {%- endif %}
        {% endmacro %}
        """
    )

    # use the module variables for backwards compatibility
    default_js = _default_js
    default_css = _default_css

    def __init__(
        self,
        location=None,
        width="100%",
        height="100%",
        left="0%",
        top="0%",
        position="relative",
        tiles="OpenStreetMap",
        attr=None,
        min_zoom=0,
        max_zoom=18,
        zoom_start=10,
        min_lat=-90,
        max_lat=90,
        min_lon=-180,
        max_lon=180,
        max_bounds=False,
        crs="EPSG3857",
        control_scale=False,
        prefer_canvas=False,
        no_touch=False,
        disable_3d=False,
        png_enabled=False,
        zoom_control=True,
        **kwargs,
    ):
        super().__init__()
        self._name = "Map"
        self._env = ENV
        # Undocumented for now b/c this will be subject to a re-factor soon.
        self._png_image = None
        self.png_enabled = png_enabled

        if location is None:
            # If location is not passed we center and zoom out.
            self.location = [0, 0]
            zoom_start = 1
        else:
            self.location = validate_location(location)

        # Wrap the map in a Figure immediately so it is renderable on its own.
        Figure().add_child(self)

        # Map Size Parameters.
        self.width = _parse_size(width)
        self.height = _parse_size(height)
        self.left = _parse_size(left)
        self.top = _parse_size(top)
        self.position = position

        # Leaflet expects maxBounds as [[southwest], [northeast]], or null.
        max_bounds_array = (
            [[min_lat, min_lon], [max_lat, max_lon]] if max_bounds else None
        )

        self.crs = crs
        self.control_scale = control_scale

        self.options = parse_options(
            max_bounds=max_bounds_array,
            zoom=zoom_start,
            zoom_control=zoom_control,
            prefer_canvas=prefer_canvas,
            **kwargs,
        )

        self.global_switches = GlobalSwitches(no_touch, disable_3d)

        # Layers registered here are re-raised on every "overlayadd" event.
        self.objects_to_stay_in_front = []

        if isinstance(tiles, TileLayer):
            self.add_child(tiles)
        elif tiles:
            # A string / provider is promoted to a TileLayer child.
            tile_layer = TileLayer(
                tiles=tiles, attr=attr, min_zoom=min_zoom, max_zoom=max_zoom
            )
            self.add_child(tile_layer, name=tile_layer.tile_name)

    def _repr_html_(self, **kwargs):
        """Displays the HTML Map in a Jupyter notebook."""
        if self._parent is None:
            # Render inside a throwaway Figure, then detach again so the
            # map can later be added to a different parent.
            self.add_to(Figure())
            out = self._parent._repr_html_(**kwargs)
            self._parent = None
        else:
            out = self._parent._repr_html_(**kwargs)
        return out

    def _to_png(self, delay=3, driver=None):
        """Export the HTML to byte representation of a PNG image.

        Uses selenium to render the HTML and record a PNG. You may need to
        adjust the `delay` time keyword argument if maps render without data or tiles.

        Uses a headless Firefox webdriver by default, though you can provide your own.

        Examples
        --------
        >>> m._to_png()
        >>> m._to_png(delay=10)  # Wait 10 seconds between render and snapshot.
        """
        if self._png_image is None:
            if driver is None:
                from selenium import webdriver

                options = webdriver.firefox.options.Options()
                options.add_argument("--headless")
                driver = webdriver.Firefox(options=options)

            html = self.get_root().render()
            with temp_html_filepath(html) as fname:
                # We need the tempfile to avoid JS security issues.
                driver.get(f"file:///{fname}")
                driver.fullscreen_window()
                # Give tiles/data time to load before the screenshot.
                time.sleep(delay)
                div = driver.find_element("class name", "folium-map")
                png = div.screenshot_as_png
                driver.quit()
            # Cache so repeated calls don't re-render.
            self._png_image = png
        return self._png_image

    def _repr_png_(self):
        """Displays the PNG Map in a Jupyter notebook."""
        # The notebook calls all _repr_*_ by default.
        # We don't want that here b/c this one is quite slow.
        if not self.png_enabled:
            return None
        return self._to_png()

    def render(self, **kwargs):
        """Renders the HTML representation of the element."""
        figure = self.get_root()
        assert isinstance(
            figure, Figure
        ), "You cannot render this Element if it is not in a Figure."

        # Set global switches
        figure.header.add_child(self.global_switches, name="global_switches")

        # Full-page reset so the map can occupy the whole viewport.
        figure.header.add_child(
            Element(
                "<style>html, body {"
                "width: 100%;"
                "height: 100%;"
                "margin: 0;"
                "padding: 0;"
                "}"
                "</style>"
            ),
            name="css_style",
        )

        figure.header.add_child(
            Element(
                "<style>#map {"
                "position:absolute;"
                "top:0;"
                "bottom:0;"
                "right:0;"
                "left:0;"
                "}"
                "</style>"
            ),
            name="map_style",
        )

        super().render(**kwargs)

    def show_in_browser(self):
        """Display the Map in the default web browser."""
        with temp_html_filepath(self.get_root().render()) as fname:
            webbrowser.open(fname)
            print(
                "Your map should have been opened in your browser automatically."
                "\nPress ctrl+c to return."
            )
            # Block until stopped by user, afterwards remove the temporary file
            try:
                while True:
                    time.sleep(100)
            except KeyboardInterrupt:
                pass

    def fit_bounds(
        self,
        bounds,
        padding_top_left=None,
        padding_bottom_right=None,
        padding=None,
        max_zoom=None,
    ):
        """Fit the map to contain a bounding box with the
        maximum zoom level possible.

        Parameters
        ----------
        bounds: list of (latitude, longitude) points
            Bounding box specified as two points [southwest, northeast]
        padding_top_left: (x, y) point, default None
            Padding in the top left corner. Useful if some elements in
            the corner, such as controls, might obscure objects you're zooming
            to.
        padding_bottom_right: (x, y) point, default None
            Padding in the bottom right corner.
        padding: (x, y) point, default None
            Equivalent to setting both top left and bottom right padding to
            the same value.
        max_zoom: int, default None
            Maximum zoom to be used.

        Examples
        --------
        >>> m.fit_bounds([[52.193636, -2.221575], [52.636878, -1.139759]])
        """
        self.add_child(
            FitBounds(
                bounds,
                padding_top_left=padding_top_left,
                padding_bottom_right=padding_bottom_right,
                padding=padding,
                max_zoom=max_zoom,
            )
        )

    def choropleth(self, *args, **kwargs):
        """Call the Choropleth class with the same arguments.

        This method may be deleted after a year from now (Nov 2018).
        """
        warnings.warn(
            "The choropleth method has been deprecated. Instead use the new "
            "Choropleth class, which has the same arguments. See the example "
            "notebook 'GeoJSON_and_choropleth' for how to do this.",
            FutureWarning,
        )
        from folium.features import Choropleth

        self.add_child(Choropleth(*args, **kwargs))

    def keep_in_front(self, *args):
        """Pass one or multiple layers that must stay in front.

        The ordering matters, the last one is put on top.

        Parameters
        ----------
        *args :
            Variable length argument list. Any folium object that counts as an
            overlay. For example FeatureGroup or TileLayer.
            Does not work with markers, for those use z_index_offset.
        """
        for obj in args:
            self.objects_to_stay_in_front.append(obj)
| mit | a560a47c2897dc5c64be0e4cf8546028 | 33.315574 | 114 | 0.562702 | 3.923618 | false | false | false | false |
python-visualization/folium | tests/plugins/test_time_slider_choropleth.py | 1 | 2895 | """
tests TimeSliderChoropleth
--------------------------
"""
import json
import numpy as np
import pandas as pd
from branca.colormap import linear
import folium
from folium.plugins import TimeSliderChoropleth
from folium.utilities import normalize
def test_timedynamic_geo_json():
    """
    tests folium.plugins.TimeSliderChoropleth

    Builds a random per-country style history, renders the map, and
    verifies that the timestamps and the style dictionary are embedded
    verbatim in the generated JavaScript.
    """
    import geopandas as gpd

    assert "naturalearth_lowres" in gpd.datasets.available
    datapath = gpd.datasets.get_path("naturalearth_lowres")
    gdf = gpd.read_file(datapath)

    # Timestamps: the start date is carefully chosen to be earlier than
    # 2001-09-09 (9-digit timestamp) and the end date later (10 digits).
    # This ensures an integer sort is used (and not a string sort where
    # '2' > '10').  datetime.strftime('%s') on Windows generates a date
    # rather than a timestamp, so it is avoided here.
    n_periods = 3
    dt_range = pd.Series(pd.date_range("2001-08-1", periods=n_periods, freq="M"))
    dt_index = [f"{dt.timestamp():.0f}" for dt in dt_range]

    # One random cumulative (color, opacity) series per country.
    styledata = {}
    for country in gdf.index:
        pdf = pd.DataFrame(
            {
                "color": np.random.normal(size=n_periods),
                "opacity": np.random.normal(size=n_periods),
            },
            index=dt_index,
        )
        styledata[country] = pdf.cumsum()

    # Track the overall color range so the colormap covers all countries.
    max_color, min_color = 0, 0
    for country, data in styledata.items():
        max_color = max(max_color, data["color"].max())
        # Bug fix: the running minimum must fold in min_color, not
        # max_color (the original mistakenly used max_color here).
        min_color = min(min_color, data["color"].min())
    cmap = linear.PuRd_09.scale(min_color, max_color)

    # Normalize a column into the range [0, 1].
    def norm(col):
        return (col - col.min()) / (col.max() - col.min())

    for country, data in styledata.items():
        data["color"] = data["color"].apply(cmap)
        data["opacity"] = norm(data["opacity"])

    styledict = {
        str(country): data.to_dict(orient="index")
        for country, data in styledata.items()
    }

    m = folium.Map((0, 0), tiles="Stamen Watercolor", zoom_start=2)
    time_slider_choropleth = TimeSliderChoropleth(gdf.to_json(), styledict)
    time_slider_choropleth.add_to(m)
    rendered = time_slider_choropleth._template.module.script(time_slider_choropleth)

    m._repr_html_()
    out = normalize(m._parent.render())

    # The D3 dependency must be injected into the page.
    assert '<script src="https://d3js.org/d3.v4.min.js"></script>' in out

    # We verify that data has been inserted correctly.
    expected_timestamps = sorted(dt_index, key=int)  # numeric sort
    expected_timestamps = f"var timestamps = {expected_timestamps};"
    expected_timestamps = expected_timestamps.split(";")[0].strip().replace("'", '"')
    rendered_timestamps = rendered.split(";")[0].strip()
    assert expected_timestamps == rendered_timestamps

    expected_styledict = normalize(json.dumps(styledict, sort_keys=True))
    assert expected_styledict in normalize(rendered)
| mit | a830755df966a478a69fbf031fe9765c | 31.166667 | 86 | 0.643523 | 3.509091 | false | false | false | false |
python-visualization/folium | folium/plugins/feature_group_sub_group.py | 1 | 2789 | from jinja2 import Template
from folium.elements import JSCSSMixin
from folium.map import Layer
class FeatureGroupSubGroup(JSCSSMixin, Layer):
    """A feature group whose child layers are merged into a parent group.

    When added to a map (e.g. through LayerControl), the sub-group's
    children are inserted into the given parent group instead of directly
    into the map.  This makes nested groups possible, and lets several
    overlays share a single marker cluster.  Wraps the Leaflet plugin
    from https://github.com/ghybs/Leaflet.FeatureGroup.SubGroup

    Parameters
    ----------
    group : Layer
        The MarkerCluster or FeatureGroup this subgroup feeds into.
    name : string, default None
        The name shown for this layer in LayerControls.
    overlay : bool, default True
        Add as an optional overlay (True) or as the base layer (False).
    control : bool, default True
        Include the layer in LayerControls.
    show: bool, default True
        Show the layer on opening (only relevant for overlays).

    Examples
    --------
    Nested groups:

    >>> fg = folium.FeatureGroup()  # Main group
    >>> g1 = folium.plugins.FeatureGroupSubGroup(fg, "g1")  # First subgroup of fg
    >>> g2 = folium.plugins.FeatureGroupSubGroup(fg, "g2")  # Second subgroup of fg
    >>> m.add_child(fg)
    >>> m.add_child(g1)
    >>> m.add_child(g2)
    >>> g1.add_child(folium.Marker([0, 0]))
    >>> g2.add_child(folium.Marker([0, 1]))
    >>> folium.LayerControl().add_to(m)

    Multiple overlays sharing a single cluster group:

    >>> mcg = folium.plugins.MarkerCluster(
    ...     control=False
    ... )  # Marker Cluster, hidden in controls
    >>> g1 = folium.plugins.FeatureGroupSubGroup(mcg, "g1")  # First group, in mcg
    >>> g2 = folium.plugins.FeatureGroupSubGroup(mcg, "g2")  # Second group, in mcg
    >>> m.add_child(mcg)
    >>> m.add_child(g1)
    >>> m.add_child(g2)
    >>> g1.add_child(folium.Marker([0, 0]))
    >>> g2.add_child(folium.Marker([0, 1]))
    >>> folium.LayerControl().add_to(m)
    """

    _template = Template(
        """
        {% macro script(this, kwargs) %}
            var {{ this.get_name() }} = L.featureGroup.subGroup(
                {{ this._group.get_name() }}
            );
            {{ this.get_name() }}.addTo({{ this._parent.get_name() }});
        {% endmacro %}
        """
    )

    default_js = [
        (
            "featuregroupsubgroupjs",
            "https://unpkg.com/leaflet.featuregroup.subgroup@1.0.2/dist/leaflet.featuregroup.subgroup.js",
        ),
    ]

    def __init__(self, group, name=None, overlay=True, control=True, show=True):
        super().__init__(name=name, overlay=overlay, control=control, show=show)
        self._name = "FeatureGroupSubGroup"
        # Parent group whose JS variable the template references.
        self._group = group
| mit | 3db420c84b5cfbd27f2624cb76fb3989 | 33.8625 | 106 | 0.595195 | 3.548346 | false | false | false | false |
python-visualization/folium | folium/plugins/minimap.py | 1 | 4858 | from branca.element import MacroElement
from jinja2 import Template
from folium.elements import JSCSSMixin
from folium.raster_layers import TileLayer
from folium.utilities import parse_options
class MiniMap(JSCSSMixin, MacroElement):
    """Embed a small overview (locator) map inside an existing map.

    Wraps the Leaflet plugin by Norkart, distributed under the
    BSD 2-Clause "Simplified" License:
    https://github.com/Norkart/Leaflet-MiniMap

    Parameters
    ----------
    tile_layer : folium TileLayer object or str, default None
        Tiles for the minimap.  A string is interpreted as a tileset name,
        a TileLayer instance is used as-is, and None falls back to the
        default ``TileLayer()`` (currently OpenStreetMap).
    position : str, default 'bottomright'
        The standard Control position parameter for the widget.
    width : int, default 150
        The width of the minimap in pixels.
    height : int, default 150
        The height of the minimap in pixels.
    collapsed_width : int, default 25
        The width of the toggle marker and the minimap when collapsed, in pixels.
    collapsed_height : int, default 25
        The height of the toggle marker and the minimap when collapsed.
    zoom_level_offset : int, default -5
        The offset applied to the minimap zoom relative to the main map's
        zoom.  Can be positive or negative.
    zoom_level_fixed : int, default None
        Pins the minimap to a fixed zoom level regardless of the main map,
        overriding the offset.  Leave unset to use zoom_level_offset.
    center_fixed : bool, default False
        Keeps the minimap centered on a fixed position regardless of the
        main map's view.  Panning the minimap is prevented, but zooming
        (in both maps) still works; zooming happens around the fixed
        center.  A LatLng-equivalent object may be passed.
    zoom_animation : bool, default False
        Animate the minimap's zoom (it will lag slightly behind the main map).
    toggle_display : bool, default False
        Adds a button allowing the user to minimise the minimap.
    auto_toggle_display : bool, default False
        Hide the minimap automatically when the main map's bounds do not
        fit within the minimap bounds; especially useful together with
        'zoomLevelFixed'.
    minimized : bool, default False
        Start with the minimap in the minimised position.

    Examples
    --------
    >>> MiniMap(tile_layer="Stamen WaterColor", position="bottomleft")
    """

    _template = Template(
        """
        {% macro script(this, kwargs) %}
            var {{ this.tile_layer.get_name() }} = L.tileLayer(
                {{ this.tile_layer.tiles|tojson }},
                {{ this.tile_layer.options|tojson }}
            );
            var {{ this.get_name() }} = new L.Control.MiniMap(
                {{ this.tile_layer.get_name() }},
                {{ this.options|tojson }}
            );
            {{ this._parent.get_name() }}.addControl({{ this.get_name() }});
        {% endmacro %}
    """
    )  # noqa

    default_js = [
        (
            "Control_MiniMap_js",
            "https://cdnjs.cloudflare.com/ajax/libs/leaflet-minimap/3.6.1/Control.MiniMap.js",
        )
    ]
    default_css = [
        (
            "Control_MiniMap_css",
            "https://cdnjs.cloudflare.com/ajax/libs/leaflet-minimap/3.6.1/Control.MiniMap.css",
        ),
    ]

    def __init__(
        self,
        tile_layer=None,
        position="bottomright",
        width=150,
        height=150,
        collapsed_width=25,
        collapsed_height=25,
        zoom_level_offset=-5,
        zoom_level_fixed=None,
        center_fixed=False,
        zoom_animation=False,
        toggle_display=False,
        auto_toggle_display=False,
        minimized=False,
        **kwargs
    ):
        super().__init__()
        self._name = "MiniMap"

        # Normalise the tile_layer argument to a TileLayer instance:
        # an existing layer is reused, None means the default tileset,
        # anything else (e.g. a tileset name) is promoted to a TileLayer.
        if isinstance(tile_layer, TileLayer):
            layer = tile_layer
        else:
            layer = TileLayer() if tile_layer is None else TileLayer(tile_layer)
        self.tile_layer = layer

        self.options = parse_options(
            position=position,
            width=width,
            height=height,
            collapsed_width=collapsed_width,
            collapsed_height=collapsed_height,
            zoom_level_offset=zoom_level_offset,
            zoom_level_fixed=zoom_level_fixed,
            center_fixed=center_fixed,
            zoom_animation=zoom_animation,
            toggle_display=toggle_display,
            auto_toggle_display=auto_toggle_display,
            minimized=minimized,
            **kwargs
        )
| mit | 3fae8bac2ae869f5645daaa32b102ae0 | 35.526316 | 95 | 0.615068 | 4.096121 | false | false | false | false |
python-visualization/folium | setup.py | 1 | 2623 | import os
import sys
from setuptools import setup
rootpath = os.path.abspath(os.path.dirname(__file__))
# Abort early with an actionable message on unsupported interpreters.
if sys.version_info < (3, 5):
    error = """
folium 0.9+ supports Python 3.5 and above.
When using Python 2.7, please install folium 0.8.*.
See folium `README.rst` file for more information:
https://github.com/python-visualization/folium/blob/main/README.rst
Python {py} detected.
Try upgrading pip and retry.
""".format(
        py=".".join([str(v) for v in sys.version_info[:3]])
    )
    print(error, file=sys.stderr)
    sys.exit(1)
def read(*parts):
    """Return the text content of the file at ``rootpath``/*parts*.

    Uses a context manager so the file handle is closed deterministically;
    the original ``open(...).read()`` left closing to the garbage
    collector, which leaks the handle on some interpreters.
    """
    with open(os.path.join(rootpath, *parts)) as f:
        return f.read()
def walk_subpkg(name, package_dir="folium"):
    """Collect all file paths under ``<package_dir>/<name>``.

    Paths are returned relative to ``package_dir`` so they can be used
    directly in ``package_data``.

    Parameters
    ----------
    name : str
        Sub-directory (relative to ``package_dir``) to walk.
    package_dir : str, default "folium"
        Top-level package directory.  Generalized from the previously
        hard-coded ``"folium"``; must be a single path component, since
        only the first component is stripped from each walked path.
    """
    data_files = []
    for parent, _dirs, files in os.walk(os.path.join(package_dir, name)):
        # Remove the leading package_dir component from the path.
        sub_dir = os.sep.join(parent.split(os.sep)[1:])
        data_files.extend(os.path.join(sub_dir, f) for f in files)
    return data_files
# Non-Python assets (JS/CSS/HTML templates and tiles) shipped in the wheel.
package_data = {
    "": [
        "*.js",
        "plugins/*.js",
        "plugins/*.html",
        "plugins/*.css",
        "plugins/*.tpl",
        "templates/*.html",
        "templates/*.js",
        "templates/*.txt",
    ]
    + walk_subpkg("templates/tiles")
}

packages = ["folium", "folium.plugins"]

# Dependencies.
with open("requirements.txt") as f:
    # NOTE(review): despite the name, these are runtime requirements,
    # not test requirements — they feed install_requires below.
    tests_require = f.readlines()
install_requires = [t.strip() for t in tests_require]
# Package metadata; the version is derived from git tags via setuptools_scm
# (see use_scm_version below).
setup(
    name="folium",
    description="Make beautiful maps with Leaflet.js & Python",
    license="MIT",
    long_description="{}".format(read("README.rst")),
    long_description_content_type="text/x-rst",
    author="Rob Story",
    author_email="wrobstory@gmail.com",
    url="https://github.com/python-visualization/folium",
    keywords="data visualization",
    classifiers=[
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Topic :: Scientific/Engineering :: GIS",
        "Topic :: Scientific/Engineering :: Visualization",
        "License :: OSI Approved :: MIT License",
        "Development Status :: 5 - Production/Stable",
    ],
    platforms="any",
    packages=packages,
    package_data=package_data,
    python_requires=">=3.5",
    extras_require={"testing": ["pytest"]},
    install_requires=install_requires,
    zip_safe=False,
    use_scm_version={
        "write_to": "folium/_version.py",
        "write_to_template": '__version__ = "{version}"',
        "tag_regex": r"^(?P<prefix>v)?(?P<version>[^\+]+)(?P<suffix>.*)?$",
    },
)
| mit | ddda6aeefc4cd01b0fb33a957f021ae6 | 26.904255 | 75 | 0.600076 | 3.419817 | false | false | false | false |
pypa/setuptools | setuptools/_vendor/importlib_metadata/_text.py | 11 | 2166 | import re
from ._functools import method_cache
# from jaraco.text 3.5
class FoldedCase(str):
    """
    A str subclass whose comparisons, hashing and lookups ignore case.

    >>> s = FoldedCase('hello world')
    >>> s == 'Hello World'
    True
    >>> 'Hello World' == s
    True
    >>> s != 'Hello World'
    False
    >>> s.index('O')
    4
    >>> s.split('O')
    ['hell', ' w', 'rld']
    >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta']))
    ['alpha', 'Beta', 'GAMMA']
    >>> "Hello World" in [s]
    True
    >>> s in ["Hello World"]
    True
    >>> FoldedCase("Hello World") in {s}
    True
    >>> s in {FoldedCase("Hello World")}
    True

    Substring membership works when the FoldedCase object is on the
    right of ``in``; for the reverse direction use ``in_``:

    >>> "hello" in FoldedCase("Hello World")
    True
    >>> FoldedCase('hello') in 'Hello World'
    False
    >>> FoldedCase('hello').in_('Hello World')
    True
    >>> FoldedCase('hello') > FoldedCase('Hello')
    False
    """

    def __lt__(self, other):
        return self.lower() < other.lower()

    def __gt__(self, other):
        return self.lower() > other.lower()

    def __eq__(self, other):
        return self.lower() == other.lower()

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        # Hash must agree with the case-insensitive __eq__.
        return hash(self.lower())

    def __contains__(self, other):
        return other.lower() in super().lower()

    def in_(self, other):
        "Does self appear in other?"
        return FoldedCase(other).__contains__(self)

    # lower() is hot, so memoize it per instance.
    @method_cache
    def lower(self):
        return super().lower()

    def index(self, sub):
        folded = self.lower()
        return folded.index(sub.lower())

    def split(self, splitter=' ', maxsplit=0):
        # Case-insensitive split on the literal splitter text.
        return re.compile(re.escape(splitter), re.I).split(self, maxsplit)
| mit | 089e44128f88bd18127e700897934efd | 20.878788 | 62 | 0.576639 | 3.671186 | false | false | false | false |
beerfactory/hbmqtt | samples/client_subscribe_acl.py | 1 | 1653 | import logging
import asyncio
from hbmqtt.client import MQTTClient, ClientException
from hbmqtt.mqtt.constants import QOS_1
#
# This sample shows how to subscribe to topics and receive data from incoming messages.
# It connects with credentials and subscribes to a mix of ACL-allowed and
# ACL-forbidden topics, printing each message the broker delivers.
#
logger = logging.getLogger(__name__)
@asyncio.coroutine
def uptime_coro():
    """Connect with credentials and exercise the broker's topic ACLs.

    Subscribes (QOS=1) to a mix of topics the ACL allows and forbids,
    prints the messages that are actually delivered, then unsubscribes
    from the subscribed topics and disconnects.
    """
    C = MQTTClient()
    yield from C.connect('mqtt://test:test@0.0.0.0:1883')
    # yield from C.connect('mqtt://0.0.0.0:1883')
    # Subscribe with QOS=1; the broker's ACL denies some of these topics.
    topics = [
        ('data/memes', QOS_1),  # Topic allowed
        ('data/classified', QOS_1),  # Topic forbidden
        ('repositories/hbmqtt/master', QOS_1),  # Topic allowed
        ('repositories/hbmqtt/devel', QOS_1),  # Topic forbidden
        ('calendar/hbmqtt/releases', QOS_1),  # Topic allowed
    ]
    yield from C.subscribe(topics)
    logger.info("Subscribed")
    try:
        for i in range(1, 100):
            message = yield from C.deliver_message()
            packet = message.publish_packet
            print("%d: %s => %s" % (i, packet.variable_header.topic_name, str(packet.payload.data)))
        # Bug fix: unsubscribe from the topics actually subscribed above;
        # the original unsubscribed '$SYS/broker/uptime' and
        # '$SYS/broker/load/#', which were never subscribed here.
        yield from C.unsubscribe([topic for topic, _qos in topics])
        logger.info("UnSubscribed")
        yield from C.disconnect()
    except ClientException as ce:
        logger.error("Client exception: %s" % ce)
if __name__ == '__main__':
    # Verbose log format including source file/line, useful when tracing MQTT flows.
    formatter = "[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s"
    logging.basicConfig(level=logging.INFO, format=formatter)
    asyncio.get_event_loop().run_until_complete(uptime_coro())
| mit | ae18a01e4c82445bbadbf921b4b83d59 | 34.934783 | 100 | 0.647913 | 3.42236 | false | false | false | false |
pypa/setuptools | setuptools/_vendor/importlib_resources/simple.py | 12 | 2836 | """
Interface adapters for low-level readers.
"""
import abc
import io
import itertools
from typing import BinaryIO, List
from .abc import Traversable, TraversableResources
class SimpleReader(abc.ABC):
    """
    The minimum, low-level interface required from a resource
    provider.
    """

    @abc.abstractproperty
    def package(self):
        # type: () -> str
        """
        The name of the package for which this reader loads resources.
        """

    @abc.abstractmethod
    def children(self):
        # type: () -> List['SimpleReader']
        """
        Obtain an iterable of SimpleReader for available
        child containers (e.g. directories).
        """

    @abc.abstractmethod
    def resources(self):
        # type: () -> List[str]
        """
        Obtain available named resources for this virtual package.
        """

    @abc.abstractmethod
    def open_binary(self, resource):
        # type: (str) -> BinaryIO
        """
        Obtain a File-like for a named resource.
        """

    @property
    def name(self):
        # Last dotted component of the package name, e.g. 'b' for 'a.b'.
        return self.package.split('.')[-1]
class ResourceHandle(Traversable):
    """
    Handle to a named resource in a ResourceReader.
    """

    def __init__(self, parent, name):
        # type: (ResourceContainer, str) -> None
        self.parent = parent
        self.name = name  # type: ignore

    def is_file(self):
        # A resource handle is always file-like, never a directory.
        return True

    def is_dir(self):
        return False

    def open(self, mode='r', *args, **kwargs):
        """Open the resource; wrap in a TextIOWrapper unless binary mode."""
        stream = self.parent.reader.open_binary(self.name)
        if 'b' not in mode:
            # Bug fix: the binary stream must be passed as TextIOWrapper's
            # buffer argument.  The original dropped it, which raised
            # TypeError on every text-mode open and leaked the stream.
            stream = io.TextIOWrapper(stream, *args, **kwargs)
        return stream

    def joinpath(self, name):
        raise RuntimeError("Cannot traverse into a resource")
class ResourceContainer(Traversable):
    """
    Traversable container for a package's resources via its reader.
    """

    def __init__(self, reader):
        # type: (SimpleReader) -> None
        self.reader = reader

    def is_dir(self):
        # A container always behaves like a directory.
        return True

    def is_file(self):
        return False

    def iterdir(self):
        # Lazily chain file handles first, then child containers.
        files = (ResourceHandle(self, name) for name in self.reader.resources)
        dirs = map(ResourceContainer, self.reader.children())
        return itertools.chain(files, dirs)

    def open(self, *args, **kwargs):
        # Containers are directory-like; opening one is an error.
        raise IsADirectoryError()

    def joinpath(self, name):
        # NOTE(review): raises StopIteration (not FileNotFoundError) when
        # no child matches *name* — confirm callers expect this behavior.
        return next(
            traversable for traversable in self.iterdir() if traversable.name == name
        )
class TraversableReader(TraversableResources, SimpleReader):
    """
    A TraversableResources based on SimpleReader. Resource providers
    may derive from this class to provide the TraversableResources
    interface by supplying the SimpleReader interface.
    """

    def files(self):
        # Expose this reader as a traversable, directory-like object.
        return ResourceContainer(self)
| mit | f8488bcb5fe470a64051115f6d2dace6 | 23.448276 | 85 | 0.615656 | 4.245509 | false | false | false | false |
pypa/setuptools | setuptools/extension.py | 1 | 5591 | import re
import functools
import distutils.core
import distutils.errors
import distutils.extension
from .monkey import get_unpatched
def _have_cython():
"""
Return True if Cython can be imported.
"""
cython_impl = 'Cython.Distutils.build_ext'
try:
# from (cython_impl) import build_ext
__import__(cython_impl, fromlist=['build_ext']).build_ext
return True
except Exception:
pass
return False
# for compatibility
# Legacy alias: older code probed for Pyrex support under this name.
have_pyrex = _have_cython
# Base class: the original (un-monkeypatched) distutils Extension.
_Extension = get_unpatched(distutils.core.Extension)
class Extension(_Extension):
    """
    Describes a single extension module.

    This means that all source files will be compiled into a single binary file
    ``<module path>.<suffix>`` (with ``<module path>`` derived from ``name`` and
    ``<suffix>`` defined by one of the values in
    ``importlib.machinery.EXTENSION_SUFFIXES``).

    In the case ``.pyx`` files are passed as ``sources and`` ``Cython`` is **not**
    installed in the build environment, ``setuptools`` may also try to look for the
    equivalent ``.cpp`` or ``.c`` files.

    :arg str name:
      the full name of the extension, including any packages -- ie.
      *not* a filename or pathname, but Python dotted name

    :arg list[str] sources:
      list of source filenames, relative to the distribution root
      (where the setup script lives), in Unix form (slash-separated)
      for portability. Source files may be C, C++, SWIG (.i),
      platform-specific resource files, or whatever else is recognized
      by the "build_ext" command as source for a Python extension.

    :keyword list[str] include_dirs:
      list of directories to search for C/C++ header files (in Unix
      form for portability)

    :keyword list[tuple[str, str|None]] define_macros:
      list of macros to define; each macro is defined using a 2-tuple:
      the first item corresponding to the name of the macro and the second
      item either a string with its value or None to
      define it without a particular value (equivalent of "#define
      FOO" in source or -DFOO on Unix C compiler command line)

    :keyword list[str] undef_macros:
      list of macros to undefine explicitly

    :keyword list[str] library_dirs:
      list of directories to search for C/C++ libraries at link time

    :keyword list[str] libraries:
      list of library names (not filenames or paths) to link against

    :keyword list[str] runtime_library_dirs:
      list of directories to search for C/C++ libraries at run time
      (for shared extensions, this is when the extension is loaded).
      Setting this will cause an exception during build on Windows
      platforms.

    :keyword list[str] extra_objects:
      list of extra files to link with (eg. object files not implied
      by 'sources', static library that must be explicitly specified,
      binary resource files, etc.)

    :keyword list[str] extra_compile_args:
      any extra platform- and compiler-specific information to use
      when compiling the source files in 'sources'. For platforms and
      compilers where "command line" makes sense, this is typically a
      list of command-line arguments, but for other platforms it could
      be anything.

    :keyword list[str] extra_link_args:
      any extra platform- and compiler-specific information to use
      when linking object files together to create the extension (or
      to create a new static Python interpreter). Similar
      interpretation as for 'extra_compile_args'.

    :keyword list[str] export_symbols:
      list of symbols to be exported from a shared extension. Not
      used on all platforms, and not generally necessary for Python
      extensions, which typically export exactly one symbol: "init" +
      extension_name.

    :keyword list[str] swig_opts:
      any extra options to pass to SWIG if a source file has the .i
      extension.

    :keyword list[str] depends:
      list of files that the extension depends on

    :keyword str language:
      extension language (i.e. "c", "c++", "objc"). Will be detected
      from the source extensions if not provided.

    :keyword bool optional:
      specifies that a build failure in the extension should not abort the
      build process, but simply not install the failing extension.

    :keyword bool py_limited_api:
      opt-in flag for the usage of :doc:`Python's limited API <python:c-api/stable>`.

    :raises setuptools.errors.PlatformError: if 'runtime_library_dirs' is
      specified on Windows. (since v63)
    """

    def __init__(self, name, sources, *args, **kw):
        # The *args is needed for compatibility as calls may use positional
        # arguments. py_limited_api may be set only via keyword.
        self.py_limited_api = kw.pop("py_limited_api", False)
        super().__init__(name, sources, *args, **kw)

    def _convert_pyx_sources_to_lang(self):
        """
        Replace sources with .pyx extensions to sources with the target
        language extension. This mechanism allows language authors to supply
        pre-converted sources but to prefer the .pyx sources.
        """
        if _have_cython():
            # the build has Cython, so allow it to compile the .pyx files
            return
        lang = self.language or ''
        target_ext = '.cpp' if lang.lower() == 'c++' else '.c'
        # Bug fix: escape the dot. The previous pattern '.pyx$' let '.'
        # match any character, so a name like 'foopyx' was rewritten to
        # 'fo.c'; only a literal '.pyx' suffix should be converted.
        sub = functools.partial(re.sub, r'\.pyx$', target_ext)
        self.sources = list(map(sub, self.sources))
class Library(Extension):
    """An Extension that is linked as a (shared) library rather than a module."""
| mit | a6a2ea854bba08190a058f361acb6fb3 | 36.777027 | 85 | 0.676623 | 4.327399 | false | false | false | false |
pypa/setuptools | setuptools/command/setopt.py | 7 | 5086 | from distutils.util import convert_path
from distutils import log
from distutils.errors import DistutilsOptionError
import distutils
import os
import configparser
from setuptools import Command
__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
def config_file(kind="local"):
    """Return the filename of a distutils configuration file.

    `kind` selects which one: "local" (setup.cfg in the current
    directory), "global" (distutils.cfg alongside the distutils
    package), or "user" (the per-user pydistutils.cfg).
    """
    if kind == 'local':
        return 'setup.cfg'
    if kind == 'global':
        return os.path.join(os.path.dirname(distutils.__file__), 'distutils.cfg')
    if kind == 'user':
        # On POSIX the per-user file is a dotfile.
        dot = '.' if os.name == 'posix' else ''
        return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
    raise ValueError(
        "config_file() type must be 'local', 'global', or 'user'", kind
    )
def edit_config(filename, settings, dry_run=False):
    """Edit a configuration file to include `settings`

    `settings` maps section names to either ``None`` (delete the whole
    section) or a dict of options for that section; within such a dict,
    a value of ``None`` deletes that single option.
    """
    log.debug("Reading configuration from %s", filename)
    opts = configparser.RawConfigParser()
    # Preserve option-name case exactly as written.
    opts.optionxform = lambda x: x
    opts.read([filename])

    for section, options in settings.items():
        if options is None:
            log.info("Deleting section [%s] from %s", section, filename)
            opts.remove_section(section)
            continue
        if not opts.has_section(section):
            log.debug("Adding new section [%s] to %s", section, filename)
            opts.add_section(section)
        for option, value in options.items():
            if value is not None:
                log.debug(
                    "Setting %s.%s to %r in %s",
                    section, option, value, filename
                )
                opts.set(section, option, value)
            else:
                log.debug(
                    "Deleting %s.%s from %s",
                    section, option, filename
                )
                opts.remove_option(section, option)
                # Drop the section entirely once its last option is gone.
                if not opts.options(section):
                    log.info("Deleting empty [%s] section from %s",
                             section, filename)
                    opts.remove_section(section)

    log.info("Writing %s", filename)
    if not dry_run:
        with open(filename, 'w') as f:
            opts.write(f)
class option_base(Command):
    """Abstract base class for commands that mess with config files"""

    user_options = [
        ('global-config', 'g',
         "save options to the site-wide distutils.cfg file"),
        ('user-config', 'u',
         "save options to the current user's pydistutils.cfg file"),
        ('filename=', 'f',
         "configuration file to use (default=setup.cfg)"),
    ]

    boolean_options = [
        'global-config', 'user-config',
    ]

    def initialize_options(self):
        self.global_config = None
        self.user_config = None
        self.filename = None

    def finalize_options(self):
        # Collect every config-file target the user selected.
        targets = []
        if self.global_config:
            targets.append(config_file('global'))
        if self.user_config:
            targets.append(config_file('user'))
        if self.filename is not None:
            targets.append(self.filename)
        if not targets:
            targets.append(config_file('local'))
        if len(targets) > 1:
            raise DistutilsOptionError(
                "Must specify only one configuration file option",
                targets
            )
        self.filename, = targets
class setopt(option_base):
    """Save command-line options to a file"""

    description = "set an option in setup.cfg or another config file"

    user_options = [
        ('command=', 'c', 'command to set an option for'),
        ('option=', 'o', 'option to set'),
        ('set-value=', 's', 'value of the option'),
        ('remove', 'r', 'remove (unset) the value'),
    ] + option_base.user_options

    boolean_options = option_base.boolean_options + ['remove']

    def initialize_options(self):
        option_base.initialize_options(self)
        self.command = None
        self.option = None
        self.set_value = None
        self.remove = None

    def finalize_options(self):
        option_base.finalize_options(self)
        if self.command is None or self.option is None:
            raise DistutilsOptionError("Must specify --command *and* --option")
        if self.set_value is None and not self.remove:
            raise DistutilsOptionError("Must specify --set-value or --remove")

    def run(self):
        # Option names use underscores in config files.
        option_key = self.option.replace('-', '_')
        settings = {self.command: {option_key: self.set_value}}
        edit_config(self.filename, settings, self.dry_run)
| mit | c05d89f0c353f6fef3ae3abf93987b55 | 33.134228 | 79 | 0.567833 | 4.354452 | false | true | false | false |
pypa/setuptools | pkg_resources/_vendor/jaraco/context.py | 8 | 5420 | import os
import subprocess
import contextlib
import functools
import tempfile
import shutil
import operator
@contextlib.contextmanager
def pushd(dir):
    """Temporarily change the working directory to *dir* (shell pushd/popd)."""
    saved = os.getcwd()
    os.chdir(dir)
    try:
        yield dir
    finally:
        # Always restore the original directory, even on error.
        os.chdir(saved)
@contextlib.contextmanager
def tarball_context(url, target_dir=None, runner=None, pushd=pushd):
    """
    Get a tarball, extract it, change to that directory, yield, then
    clean up.

    `runner` is the function to invoke commands.
    `pushd` is a context manager for changing the directory.
    """
    if target_dir is None:
        target_dir = os.path.basename(url).replace('.tar.gz', '').replace('.tgz', '')
    if runner is None:
        runner = functools.partial(subprocess.check_call, shell=True)
    # Extract with --strip-components=1 into -C {target_dir} so the
    # files always land in a directory whose location we know.
    runner(f'mkdir {target_dir}')
    try:
        compression = infer_compression(url)
        download = f'wget {url} -O -'
        extract = f'tar x{compression} --strip-components=1 -C {target_dir}'
        runner(f'{download} | {extract}')
        with pushd(target_dir):
            yield target_dir
    finally:
        runner(f'rm -Rf {target_dir}')
def infer_compression(url):
    """
    Given a URL or filename, infer the tar compression flag
    ('z', 'j', or 'J').
    """
    # Cheat: the last two characters identify the scheme;
    # assume gzip when nothing matches.
    flags = {'gz': 'z', 'bz': 'j', 'xz': 'J'}
    return flags.get(url[-2:], 'z')
@contextlib.contextmanager
def temp_dir(remover=shutil.rmtree):
    """
    Context manager yielding a fresh temporary directory; *remover*
    (default ``shutil.rmtree``) disposes of it on exit.
    """
    dirname = tempfile.mkdtemp()
    try:
        yield dirname
    finally:
        remover(dirname)
@contextlib.contextmanager
def repo_context(url, branch=None, quiet=True, dest_ctx=temp_dir):
    """
    Check out the repo indicated by url.

    If dest_ctx is supplied, it should be a context manager
    to yield the target directory for the check out.
    """
    # Guess the VCS from the URL.
    exe = 'git' if 'git' in url else 'hg'
    with dest_ctx() as repo_dir:
        cmd = [exe, 'clone', url, repo_dir]
        if branch:
            cmd.extend(['--branch', branch])
        null_out = open(os.path.devnull, 'w')
        stdout = null_out if quiet else None
        subprocess.check_call(cmd, stdout=stdout)
        yield repo_dir
@contextlib.contextmanager
def null():
    """A no-op context manager; yields ``None``."""
    yield
class ExceptionTrap:
    """
    A context manager that will catch certain exceptions and provide an
    indication they occurred.

    >>> with ExceptionTrap() as trap:
    ...     raise Exception()
    >>> bool(trap)
    True

    >>> with ExceptionTrap() as trap:
    ...     pass
    >>> bool(trap)
    False

    >>> with ExceptionTrap(ValueError) as trap:
    ...     raise ValueError("1 + 1 is not 3")
    >>> bool(trap)
    True

    >>> with ExceptionTrap(ValueError) as trap:
    ...     raise Exception()
    Traceback (most recent call last):
    ...
    Exception

    >>> bool(trap)
    False
    """

    # (type, value, traceback) of the caught exception, if any.
    exc_info = None, None, None

    def __init__(self, exceptions=(Exception,)):
        self.exceptions = exceptions

    def __enter__(self):
        return self

    @property
    def type(self):
        return self.exc_info[0]

    @property
    def value(self):
        return self.exc_info[1]

    @property
    def tb(self):
        return self.exc_info[2]

    def __exit__(self, *exc_details):
        exc_type = exc_details[0]
        caught = exc_type is not None and issubclass(exc_type, self.exceptions)
        if caught:
            self.exc_info = exc_details
        # A truthy return suppresses the exception.
        return caught

    def __bool__(self):
        return bool(self.type)

    def raises(self, func, *, _test=bool):
        """
        Wrap func and replace the result with the truth
        value of the trap (True if an exception occurred).

        First, give the decorator an alias to support Python 3.8
        Syntax.

        >>> raises = ExceptionTrap(ValueError).raises

        Now decorate a function that always fails.

        >>> @raises
        ... def fail():
        ...     raise ValueError('failed')
        >>> fail()
        True
        """

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            with ExceptionTrap(self.exceptions) as trap:
                func(*args, **kwargs)
            return _test(trap)

        return wrapper

    def passes(self, func):
        """
        Wrap func and replace the result with the truth
        value of the trap (True if no exception).

        First, give the decorator an alias to support Python 3.8
        Syntax.

        >>> passes = ExceptionTrap(ValueError).passes

        Now decorate a function that always fails.

        >>> @passes
        ... def fail():
        ...     raise ValueError('failed')
        >>> fail()
        False
        """
        return self.raises(func, _test=operator.not_)
class suppress(contextlib.suppress, contextlib.ContextDecorator):
    """
    contextlib.suppress that can also be used as a decorator.

    >>> @suppress(KeyError)
    ... def key_error():
    ...     {}['']
    >>> key_error()
    """
| mit | d6895bb6479fea18a123146bf0fea3b9 | 24.446009 | 85 | 0.595941 | 4.084401 | false | false | false | false |
pypa/setuptools | setuptools/_distutils/command/bdist.py | 1 | 5409 | """distutils.command.bdist
Implements the Distutils 'bdist' command (create a built [binary]
distribution)."""
import os
import warnings
from ..core import Command
from ..errors import DistutilsPlatformError, DistutilsOptionError
from ..util import get_platform
def show_formats():
    """Print list of available formats (arguments to "--format" option)."""
    from ..fancy_getopt import FancyGetopt

    entries = [
        ("formats=" + name, None, bdist.format_commands[name][1])
        for name in bdist.format_commands
    ]
    FancyGetopt(entries).print_help("List of available distribution formats:")
class ListCompat(dict):
    # Adapter so legacy code that treated format_commands as a list
    # (calling .append) keeps working during the dict transition.
    def append(self, item):
        warnings.warn(
            """format_commands is now a dict. append is deprecated.""",
            DeprecationWarning,
            stacklevel=2,
        )
class bdist(Command):
    """Create a built (binary) distribution.

    This command only dispatches: for each requested format it runs the
    matching ``bdist_*`` sub-command listed in ``format_commands``.
    """

    description = "create a built (binary) distribution"

    user_options = [
        ('bdist-base=', 'b', "temporary directory for creating built distributions"),
        (
            'plat-name=',
            'p',
            "platform name to embed in generated filenames "
            "(default: %s)" % get_platform(),
        ),
        ('formats=', None, "formats for distribution (comma-separated list)"),
        (
            'dist-dir=',
            'd',
            "directory to put final built distributions in " "[default: dist]",
        ),
        ('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
        (
            'owner=',
            'u',
            "Owner name used when creating a tar file" " [default: current user]",
        ),
        (
            'group=',
            'g',
            "Group name used when creating a tar file" " [default: current group]",
        ),
    ]

    boolean_options = ['skip-build']

    help_options = [
        ('help-formats', None, "lists available distribution formats", show_formats),
    ]

    # The following commands do not take a format option from bdist
    no_format_option = ('bdist_rpm',)

    # This won't do in reality: will need to distinguish RPM-ish Linux,
    # Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS.
    default_format = {'posix': 'gztar', 'nt': 'zip'}

    # Define commands in preferred order for the --help-formats option
    format_commands = ListCompat(
        {
            'rpm': ('bdist_rpm', "RPM distribution"),
            'gztar': ('bdist_dumb', "gzip'ed tar file"),
            'bztar': ('bdist_dumb', "bzip2'ed tar file"),
            'xztar': ('bdist_dumb', "xz'ed tar file"),
            'ztar': ('bdist_dumb', "compressed tar file"),
            'tar': ('bdist_dumb', "tar file"),
            'zip': ('bdist_dumb', "ZIP file"),
        }
    )

    # for compatibility until consumers only reference format_commands
    format_command = format_commands

    def initialize_options(self):
        self.bdist_base = None
        self.plat_name = None
        self.formats = None
        self.dist_dir = None
        self.skip_build = 0
        self.group = None
        self.owner = None

    def finalize_options(self):
        # have to finalize 'plat_name' before 'bdist_base'
        if self.plat_name is None:
            if self.skip_build:
                self.plat_name = get_platform()
            else:
                self.plat_name = self.get_finalized_command('build').plat_name
        # 'bdist_base' -- parent of per-built-distribution-format
        # temporary directories (eg. we'll probably have
        # "build/bdist.<plat>/dumb", "build/bdist.<plat>/rpm", etc.)
        if self.bdist_base is None:
            build_base = self.get_finalized_command('build').build_base
            self.bdist_base = os.path.join(build_base, 'bdist.' + self.plat_name)
        self.ensure_string_list('formats')
        if self.formats is None:
            try:
                # Fall back to the platform's conventional default format.
                self.formats = [self.default_format[os.name]]
            except KeyError:
                raise DistutilsPlatformError(
                    "don't know how to create built distributions "
                    "on platform %s" % os.name
                )
        if self.dist_dir is None:
            self.dist_dir = "dist"

    def run(self):
        # Figure out which sub-commands we need to run.
        commands = []
        for format in self.formats:
            try:
                commands.append(self.format_commands[format][0])
            except KeyError:
                raise DistutilsOptionError("invalid format '%s'" % format)
        # Reinitialize and run each command.
        for i in range(len(self.formats)):
            cmd_name = commands[i]
            sub_cmd = self.reinitialize_command(cmd_name)
            if cmd_name not in self.no_format_option:
                sub_cmd.format = self.formats[i]
            # passing the owner and group names for tar archiving
            if cmd_name == 'bdist_dumb':
                sub_cmd.owner = self.owner
                sub_cmd.group = self.group
            # If we're going to need to run this command again, tell it to
            # keep its temporary files around so subsequent runs go faster.
            if cmd_name in commands[i + 1 :]:
                sub_cmd.keep_temp = 1
            self.run_command(cmd_name)
| mit | 59db9a68b3a11e2cf005b188a05b5199 | 33.452229 | 85 | 0.57164 | 4.265773 | false | false | false | false |
pypa/setuptools | setuptools/command/install_egg_info.py | 2 | 2226 | from distutils import log, dir_util
import os
from setuptools import Command
from setuptools import namespaces
from setuptools.archive_util import unpack_archive
from .._path import ensure_directory
import pkg_resources
class install_egg_info(namespaces.Installer, Command):
    """Install an .egg-info directory for the package"""

    description = "Install an .egg-info directory for the package"

    user_options = [
        ('install-dir=', 'd', "directory to install to"),
    ]

    def initialize_options(self):
        self.install_dir = None

    def finalize_options(self):
        self.set_undefined_options('install_lib',
                                   ('install_dir', 'install_dir'))
        ei_cmd = self.get_finalized_command("egg_info")
        # Use a Distribution only to compute the canonical egg name.
        dist = pkg_resources.Distribution(
            None, None, ei_cmd.egg_name, ei_cmd.egg_version
        )
        basename = dist.egg_name() + '.egg-info'
        self.source = ei_cmd.egg_info
        self.target = os.path.join(self.install_dir, basename)
        self.outputs = []

    def run(self):
        self.run_command('egg_info')
        # Remove any pre-existing target before copying.
        if os.path.isdir(self.target) and not os.path.islink(self.target):
            dir_util.remove_tree(self.target, dry_run=self.dry_run)
        elif os.path.exists(self.target):
            self.execute(os.unlink, (self.target,), "Removing " + self.target)
        if not self.dry_run:
            ensure_directory(self.target)
        self.execute(
            self.copytree, (), "Copying %s to %s" % (self.source, self.target)
        )
        self.install_namespaces()

    def get_outputs(self):
        return self.outputs

    def copytree(self):
        # Copy the .egg-info tree to site-packages
        def skimmer(src, dst):
            # filter out source-control directories; note that 'src' is always
            # a '/'-separated path, regardless of platform. 'dst' is a
            # platform-specific path.
            for skip in '.svn/', 'CVS/':
                if src.startswith(skip) or '/' + skip in src:
                    return None
            self.outputs.append(dst)
            log.debug("Copying %s to %s", src, dst)
            return dst

        unpack_archive(self.source, self.target, skimmer)
| mit | e56af98371c35052bd5103e993ee226f | 34.333333 | 78 | 0.601078 | 3.912127 | false | false | false | false |
pypa/setuptools | setuptools/_distutils/tests/test_archive_util.py | 1 | 12919 | """Tests for distutils.archive_util."""
import os
import sys
import tarfile
from os.path import splitdrive
import warnings
import functools
import operator
import pathlib
import pytest
import path
from distutils import archive_util
from distutils.archive_util import (
check_archive_formats,
make_tarball,
make_zipfile,
make_archive,
ARCHIVE_FORMATS,
)
from distutils.spawn import spawn
from distutils.tests import support
from test.support import patch
from .unix_compat import require_unix_id, require_uid_0, grp, pwd, UID_0_SUPPORT
from .py38compat import check_warnings
def can_fs_encode(filename):
    """
    Return True if the filename can be saved in the file system.
    """
    if os.path.supports_unicode_filenames:
        return True
    try:
        filename.encode(sys.getfilesystemencoding())
        return True
    except UnicodeEncodeError:
        return False
def all_equal(values):
    """
    Return True if every item in *values* compares equal.

    Bug fix: the previous implementation,
    ``functools.reduce(operator.eq, values)``, folded each comparison's
    boolean result back into the next comparison (``(a == b) == c``),
    giving wrong answers for three or more values and raising TypeError
    on an empty iterable. An empty or single-item input is now trivially
    all-equal (True).
    """
    values = list(values)
    return all(v == values[0] for v in values[1:])
def same_drive(*paths):
    """Return True when all *paths* share the same drive component."""
    # Renamed the loop variable so it no longer shadows the `path` module.
    drives = (pathlib.Path(p).drive for p in paths)
    return all_equal(drives)
class ArchiveUtilTestCase(support.TempdirManager):
    """End-to-end checks for distutils.archive_util (tar/zip creation)."""

    @pytest.mark.usefixtures('needs_zlib')
    def test_make_tarball(self, name='archive'):
        # creating something to tar
        tmpdir = self._create_files()
        self._make_tarball(tmpdir, name, '.tar.gz')
        # trying an uncompressed one
        self._make_tarball(tmpdir, name, '.tar', compress=None)

    @pytest.mark.usefixtures('needs_zlib')
    def test_make_tarball_gzip(self):
        tmpdir = self._create_files()
        self._make_tarball(tmpdir, 'archive', '.tar.gz', compress='gzip')

    def test_make_tarball_bzip2(self):
        pytest.importorskip('bz2')
        tmpdir = self._create_files()
        self._make_tarball(tmpdir, 'archive', '.tar.bz2', compress='bzip2')

    def test_make_tarball_xz(self):
        pytest.importorskip('lzma')
        tmpdir = self._create_files()
        self._make_tarball(tmpdir, 'archive', '.tar.xz', compress='xz')

    @pytest.mark.skipif("not can_fs_encode('årchiv')")
    def test_make_tarball_latin1(self):
        """
        Mirror test_make_tarball, except filename contains latin characters.
        """
        self.test_make_tarball('årchiv')  # note this isn't a real word

    @pytest.mark.skipif("not can_fs_encode('のアーカイブ')")
    def test_make_tarball_extended(self):
        """
        Mirror test_make_tarball, except filename contains extended
        characters outside the latin charset.
        """
        self.test_make_tarball('のアーカイブ')  # japanese for archive

    def _make_tarball(self, tmpdir, target_name, suffix, **kwargs):
        """Build <target_name><suffix> from tmpdir/dist and verify contents."""
        tmpdir2 = self.mkdtemp()
        # Bug fix: the condition was inverted. It skipped when the drives
        # WERE the same; since both temp dirs normally share a drive, that
        # skipped the test on every ordinary run. The test requires both
        # dirs on the same drive (mirrors CPython's skipUnless condition).
        if not same_drive(tmpdir, tmpdir2):
            pytest.skip("source and target should be on same drive")

        base_name = os.path.join(tmpdir2, target_name)

        # working with relative paths to avoid tar warnings
        with path.Path(tmpdir):
            make_tarball(splitdrive(base_name)[1], 'dist', **kwargs)

        # check if the compressed tarball was created
        tarball = base_name + suffix
        assert os.path.exists(tarball)
        assert self._tarinfo(tarball) == self._created_files

    def _tarinfo(self, path):
        """Return the sorted member names of the archive at *path*."""
        tar = tarfile.open(path)
        try:
            names = tar.getnames()
            names.sort()
            return names
        finally:
            tar.close()

    _zip_created_files = [
        'dist/',
        'dist/file1',
        'dist/file2',
        'dist/sub/',
        'dist/sub/file3',
        'dist/sub2/',
    ]
    _created_files = [p.rstrip('/') for p in _zip_created_files]

    def _create_files(self):
        """Create a small dist/ tree to archive; return its parent dir."""
        # creating something to tar
        tmpdir = self.mkdtemp()
        dist = os.path.join(tmpdir, 'dist')
        os.mkdir(dist)
        self.write_file([dist, 'file1'], 'xxx')
        self.write_file([dist, 'file2'], 'xxx')
        os.mkdir(os.path.join(dist, 'sub'))
        self.write_file([dist, 'sub', 'file3'], 'xxx')
        os.mkdir(os.path.join(dist, 'sub2'))
        return tmpdir

    @pytest.mark.usefixtures('needs_zlib')
    @pytest.mark.skipif("not (find_executable('tar') and find_executable('gzip'))")
    def test_tarfile_vs_tar(self):
        tmpdir = self._create_files()
        tmpdir2 = self.mkdtemp()
        base_name = os.path.join(tmpdir2, 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            make_tarball(base_name, 'dist')
        finally:
            os.chdir(old_dir)

        # check if the compressed tarball was created
        tarball = base_name + '.tar.gz'
        assert os.path.exists(tarball)

        # now create another tarball using `tar`
        tarball2 = os.path.join(tmpdir, 'archive2.tar.gz')
        tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist']
        gzip_cmd = ['gzip', '-f', '-9', 'archive2.tar']
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            spawn(tar_cmd)
            spawn(gzip_cmd)
        finally:
            os.chdir(old_dir)

        assert os.path.exists(tarball2)
        # let's compare both tarballs
        assert self._tarinfo(tarball) == self._created_files
        assert self._tarinfo(tarball2) == self._created_files

        # trying an uncompressed one
        base_name = os.path.join(tmpdir2, 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            make_tarball(base_name, 'dist', compress=None)
        finally:
            os.chdir(old_dir)
        tarball = base_name + '.tar'
        assert os.path.exists(tarball)

        # now for a dry_run
        base_name = os.path.join(tmpdir2, 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            make_tarball(base_name, 'dist', compress=None, dry_run=True)
        finally:
            os.chdir(old_dir)
        tarball = base_name + '.tar'
        assert os.path.exists(tarball)

    @pytest.mark.skipif("not find_executable('compress')")
    def test_compress_deprecated(self):
        tmpdir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')

        # using compress and testing the DeprecationWarning
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            with check_warnings() as w:
                warnings.simplefilter("always")
                make_tarball(base_name, 'dist', compress='compress')
        finally:
            os.chdir(old_dir)
        tarball = base_name + '.tar.Z'
        assert os.path.exists(tarball)
        assert len(w.warnings) == 1

        # same test with dry_run
        os.remove(tarball)
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            with check_warnings() as w:
                warnings.simplefilter("always")
                make_tarball(base_name, 'dist', compress='compress', dry_run=True)
        finally:
            os.chdir(old_dir)
        assert not os.path.exists(tarball)
        assert len(w.warnings) == 1

    @pytest.mark.usefixtures('needs_zlib')
    def test_make_zipfile(self):
        zipfile = pytest.importorskip('zipfile')
        # creating something to tar
        tmpdir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        with path.Path(tmpdir):
            make_zipfile(base_name, 'dist')

        # check if the compressed tarball was created
        tarball = base_name + '.zip'
        assert os.path.exists(tarball)
        with zipfile.ZipFile(tarball) as zf:
            assert sorted(zf.namelist()) == self._zip_created_files

    def test_make_zipfile_no_zlib(self):
        zipfile = pytest.importorskip('zipfile')
        patch(self, archive_util.zipfile, 'zlib', None)  # force zlib ImportError

        called = []
        zipfile_class = zipfile.ZipFile

        def fake_zipfile(*a, **kw):
            if kw.get('compression', None) == zipfile.ZIP_STORED:
                called.append((a, kw))
            return zipfile_class(*a, **kw)

        patch(self, archive_util.zipfile, 'ZipFile', fake_zipfile)

        # create something to tar and compress
        tmpdir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        with path.Path(tmpdir):
            make_zipfile(base_name, 'dist')

        tarball = base_name + '.zip'
        assert called == [((tarball, "w"), {'compression': zipfile.ZIP_STORED})]
        assert os.path.exists(tarball)
        with zipfile.ZipFile(tarball) as zf:
            assert sorted(zf.namelist()) == self._zip_created_files

    def test_check_archive_formats(self):
        assert check_archive_formats(['gztar', 'xxx', 'zip']) == 'xxx'
        assert (
            check_archive_formats(['gztar', 'bztar', 'xztar', 'ztar', 'tar', 'zip'])
            is None
        )

    def test_make_archive(self):
        tmpdir = self.mkdtemp()
        base_name = os.path.join(tmpdir, 'archive')
        with pytest.raises(ValueError):
            make_archive(base_name, 'xxx')

    def test_make_archive_cwd(self):
        # make_archive must restore the cwd even when the archiver fails.
        current_dir = os.getcwd()

        def _breaks(*args, **kw):
            raise RuntimeError()

        ARCHIVE_FORMATS['xxx'] = (_breaks, [], 'xxx file')
        try:
            try:
                make_archive('xxx', 'xxx', root_dir=self.mkdtemp())
            except Exception:
                pass
            assert os.getcwd() == current_dir
        finally:
            del ARCHIVE_FORMATS['xxx']

    def test_make_archive_tar(self):
        base_dir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        res = make_archive(base_name, 'tar', base_dir, 'dist')
        assert os.path.exists(res)
        assert os.path.basename(res) == 'archive.tar'
        assert self._tarinfo(res) == self._created_files

    @pytest.mark.usefixtures('needs_zlib')
    def test_make_archive_gztar(self):
        base_dir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        res = make_archive(base_name, 'gztar', base_dir, 'dist')
        assert os.path.exists(res)
        assert os.path.basename(res) == 'archive.tar.gz'
        assert self._tarinfo(res) == self._created_files

    def test_make_archive_bztar(self):
        pytest.importorskip('bz2')
        base_dir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        res = make_archive(base_name, 'bztar', base_dir, 'dist')
        assert os.path.exists(res)
        assert os.path.basename(res) == 'archive.tar.bz2'
        assert self._tarinfo(res) == self._created_files

    def test_make_archive_xztar(self):
        pytest.importorskip('lzma')
        base_dir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        res = make_archive(base_name, 'xztar', base_dir, 'dist')
        assert os.path.exists(res)
        assert os.path.basename(res) == 'archive.tar.xz'
        assert self._tarinfo(res) == self._created_files

    def test_make_archive_owner_group(self):
        # testing make_archive with owner and group, with various combinations
        # this works even if there's not gid/uid support
        if UID_0_SUPPORT:
            group = grp.getgrgid(0)[0]
            owner = pwd.getpwuid(0)[0]
        else:
            group = owner = 'root'

        base_dir = self._create_files()
        root_dir = self.mkdtemp()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        res = make_archive(
            base_name, 'zip', root_dir, base_dir, owner=owner, group=group
        )
        assert os.path.exists(res)

        res = make_archive(base_name, 'zip', root_dir, base_dir)
        assert os.path.exists(res)

        res = make_archive(
            base_name, 'tar', root_dir, base_dir, owner=owner, group=group
        )
        assert os.path.exists(res)

        res = make_archive(
            base_name, 'tar', root_dir, base_dir, owner='kjhkjhkjg', group='oihohoh'
        )
        assert os.path.exists(res)

    @pytest.mark.usefixtures('needs_zlib')
    @require_unix_id
    @require_uid_0
    def test_tarfile_root_owner(self):
        tmpdir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        group = grp.getgrgid(0)[0]
        owner = pwd.getpwuid(0)[0]
        try:
            archive_name = make_tarball(
                base_name, 'dist', compress=None, owner=owner, group=group
            )
        finally:
            os.chdir(old_dir)

        # check if the compressed tarball was created
        assert os.path.exists(archive_name)

        # now checks the rights
        archive = tarfile.open(archive_name)
        try:
            for member in archive.getmembers():
                assert member.uid == 0
                assert member.gid == 0
        finally:
            archive.close()
| mit | 71c7dd75d6751de56df83020f45fd016 | 32.401554 | 84 | 0.585046 | 3.744699 | false | true | false | false |
pypa/setuptools | pkg_resources/_vendor/pyparsing/actions.py | 12 | 6426 | # actions.py
from .exceptions import ParseException
from .util import col
class OnlyOnce:
    """
    Wrapper for parse actions, to ensure they are only called once.
    """

    def __init__(self, method_call):
        from .core import _trim_arity

        self.callable = _trim_arity(method_call)
        self.called = False

    def __call__(self, s, l, t):
        # Guard clause: a second call (without reset) is an error.
        if self.called:
            raise ParseException(s, l, "OnlyOnce obj called multiple times w/out reset")
        # Mark as called only after a successful invocation, so a parse
        # action that raised can be retried.
        results = self.callable(s, l, t)
        self.called = True
        return results

    def reset(self):
        """
        Allow the associated parse action to be called once more.
        """
        self.called = False
def match_only_at_col(n):
    """
    Helper method for defining parse actions that require matching at
    a specific column in the input text.
    """

    def verify_col(strg, locn, toks):
        # Reject the match unless it begins at column n.
        if col(locn, strg) != n:
            raise ParseException(strg, locn, "matched token not at column {}".format(n))

    return verify_col
def replace_with(repl_str):
    """
    Helper method for common parse actions that simply return
    a literal value. Especially useful when used with
    :class:`transform_string<ParserElement.transform_string>` ().

    Example::

        num = Word(nums).set_parse_action(lambda toks: int(toks[0]))
        na = one_of("N/A NA").set_parse_action(replace_with(math.nan))
        term = na | num

        term[1, ...].parse_string("324 234 N/A 234") # -> [324, 234, nan, 234]
    """

    def _replacement(s, l, t):
        return [repl_str]

    return _replacement
def remove_quotes(s, l, t):
    """Parse action that strips the surrounding quotation marks from a
    matched quoted string.

    Example::

        quoted_string.set_parse_action(remove_quotes)
    """
    quoted = t[0]
    # Drop exactly one character from each end (the opening/closing quote).
    return quoted[1:-1]
def with_attribute(*args, **attr_dict):
    """Create a validating parse action for start tags produced by
    :class:`make_xml_tags` or :class:`make_html_tags`.

    Use it to qualify a start tag with required attribute values and avoid
    false matches on common tags such as ``<TD>`` or ``<DIV>``.

    Requirements may be supplied three ways:

    - keyword arguments, as in ``with_attribute(align="right")``
    - an explicit dict unpacked with ``**`` when an attribute name is a
      Python reserved word, as in ``**{"class": "Customer"}``
    - name/value tuples, required for namespaced attribute names, as in
      ``(("ns1:class", "Customer"), ("ns2:align", "right"))``

    Attribute names are matched insensitive to upper/lower case.  To require
    only that an attribute is present, regardless of value, pass
    ``with_attribute.ANY_VALUE`` as the value.  For matching on ``class``
    specifically, see :class:`with_class`.
    """
    # Normalize all three calling conventions into a list of (name, value)
    # pairs; the positional form wins when both are given.
    pairs = args[:] if args else attr_dict.items()
    attrs = [(attr_name, attr_value) for attr_name, attr_value in pairs]

    def pa(s, l, tokens):
        for attr_name, expected in attrs:
            if attr_name not in tokens:
                raise ParseException(s, l, "no matching attribute " + attr_name)
            if expected != with_attribute.ANY_VALUE and tokens[attr_name] != expected:
                raise ParseException(
                    s,
                    l,
                    "attribute {!r} has value {!r}, must be {!r}".format(
                        attr_name, tokens[attr_name], expected
                    ),
                )

    return pa


# Sentinel meaning "attribute must exist, any value accepted".
with_attribute.ANY_VALUE = object()
def with_class(classname, namespace=""):
    """Shortcut for :func:`with_attribute` that matches on a tag's ``class``
    attribute -- awkward to spell directly because ``class`` is a Python
    reserved word.

    *namespace*, when given, prefixes the attribute name (``"ns"`` matches
    ``ns:class``).  ``with_attribute.ANY_VALUE`` may be passed as
    *classname* to require only that a class attribute be present.
    """
    if namespace:
        classattr = "{}:class".format(namespace)
    else:
        classattr = "class"
    return with_attribute(**{classattr: classname})
# pre-PEP8 compatibility symbols
# Deprecated camelCase aliases kept so code written against pyparsing 2.x
# keeps importing; new code should use the snake_case names defined above.
replaceWith = replace_with
removeQuotes = remove_quotes
withAttribute = with_attribute
withClass = with_class
matchOnlyAtCol = match_only_at_col
| mit | 10a5e3093fe01fffa4a8eb32a685506c | 30.043478 | 122 | 0.594304 | 3.831843 | false | false | false | false |
pypa/setuptools | setuptools/_distutils/archive_util.py | 1 | 8572 | """distutils.archive_util
Utility functions for creating archive files (tarballs, zip files,
that sort of thing)."""
import os
from warnings import warn
import sys
try:
import zipfile
except ImportError:
zipfile = None
from .errors import DistutilsExecError
from .spawn import spawn
from .dir_util import mkpath
from ._log import log
try:
from pwd import getpwnam
except ImportError:
getpwnam = None
try:
from grp import getgrnam
except ImportError:
getgrnam = None
def _get_gid(name):
"""Returns a gid, given a group name."""
if getgrnam is None or name is None:
return None
try:
result = getgrnam(name)
except KeyError:
result = None
if result is not None:
return result[2]
return None
def _get_uid(name):
"""Returns an uid, given a user name."""
if getpwnam is None or name is None:
return None
try:
result = getpwnam(name)
except KeyError:
result = None
if result is not None:
return result[2]
return None
def make_tarball(
    base_name, base_dir, compress="gzip", verbose=0, dry_run=0, owner=None, group=None
):
    """Create a (possibly compressed) tar file from all the files under
    'base_dir'.

    'compress' must be "gzip" (the default), "bzip2", "xz", "compress", or
    None.  ("compress" is deprecated and spawns the external utility.)

    'owner' and 'group' can be used to define an owner and a group for the
    archive that is being built. If not provided, the current owner and group
    will be used.

    The output tar file will be named 'base_name' + ".tar", possibly plus
    the appropriate compression extension (".gz", ".bz2", ".xz" or ".Z").

    Returns the output filename.  Raises ValueError for an unknown
    'compress' value.
    """
    # Map 'compress' option to the mode suffix tarfile.open() expects.
    tar_compression = {
        'gzip': 'gz',
        'bzip2': 'bz2',
        'xz': 'xz',
        None: '',
        'compress': '',
    }
    compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'xz': '.xz', 'compress': '.Z'}
    # flags for compression program, each element of list will be an argument
    if compress is not None and compress not in compress_ext.keys():
        raise ValueError(
            "bad value for 'compress': must be None, 'gzip', 'bzip2', "
            "'xz' or 'compress'"
        )
    archive_name = base_name + '.tar'
    # 'compress' appends its own .Z extension later, after spawning the tool.
    if compress != 'compress':
        archive_name += compress_ext.get(compress, '')
    mkpath(os.path.dirname(archive_name), dry_run=dry_run)
    # creating the tarball
    import tarfile  # late import so Python build itself doesn't break

    log.info('Creating tar archive')
    uid = _get_uid(owner)
    gid = _get_gid(group)

    def _set_uid_gid(tarinfo):
        # tarfile filter: force requested ownership onto every member.
        if gid is not None:
            tarinfo.gid = gid
            tarinfo.gname = group
        if uid is not None:
            tarinfo.uid = uid
            tarinfo.uname = owner
        return tarinfo

    if not dry_run:
        # 'w|gz' etc. -- stream mode; archive is written in a single pass.
        tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
        try:
            tar.add(base_dir, filter=_set_uid_gid)
        finally:
            tar.close()
    # compression using `compress`
    if compress == 'compress':
        warn("'compress' is deprecated.", DeprecationWarning)
        # the option varies depending on the platform
        compressed_name = archive_name + compress_ext[compress]
        if sys.platform == 'win32':
            cmd = [compress, archive_name, compressed_name]
        else:
            cmd = [compress, '-f', archive_name]
        spawn(cmd, dry_run=dry_run)
        return compressed_name
    return archive_name
def make_zipfile(base_name, base_dir, verbose=0, dry_run=0):  # noqa: C901
    """Create a zip file from all the files under 'base_dir'.

    The output zip file will be named 'base_name' + ".zip".  Uses either the
    "zipfile" Python module (if available) or the InfoZIP "zip" utility
    (if installed and found on the default search path).  If neither tool is
    available, raises DistutilsExecError.  Returns the name of the output zip
    file.
    """
    zip_filename = base_name + ".zip"
    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
    # If zipfile module is not available, try spawning an external
    # 'zip' command.
    if zipfile is None:
        if verbose:
            zipoptions = "-r"
        else:
            zipoptions = "-rq"
        try:
            spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
        except DistutilsExecError:
            # XXX really should distinguish between "couldn't find
            # external 'zip' command" and "zip failed".
            raise DistutilsExecError(
                (
                    "unable to create zip file '%s': "
                    "could neither import the 'zipfile' module nor "
                    "find a standalone zip utility"
                )
                % zip_filename
            )
    else:
        log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
        if not dry_run:
            try:
                zip = zipfile.ZipFile(
                    zip_filename, "w", compression=zipfile.ZIP_DEFLATED
                )
            except RuntimeError:
                # zlib missing from this build -- fall back to stored (no
                # compression) rather than failing outright.
                zip = zipfile.ZipFile(zip_filename, "w", compression=zipfile.ZIP_STORED)
            with zip:
                # Record base_dir itself as a directory entry (unless it is
                # just the current directory).
                if base_dir != os.curdir:
                    path = os.path.normpath(os.path.join(base_dir, ''))
                    zip.write(path, path)
                    log.info("adding '%s'", path)
                for dirpath, dirnames, filenames in os.walk(base_dir):
                    for name in dirnames:
                        # Trailing '' keeps the path in directory-entry form.
                        path = os.path.normpath(os.path.join(dirpath, name, ''))
                        zip.write(path, path)
                        log.info("adding '%s'", path)
                    for name in filenames:
                        path = os.path.normpath(os.path.join(dirpath, name))
                        if os.path.isfile(path):
                            zip.write(path, path)
                            log.info("adding '%s'", path)
    return zip_filename
# Mapping: format name -> (callable, [(kwarg, value), ...], description).
# make_archive() invokes the callable as func(base_name, base_dir, **kwargs),
# merging the listed kwarg/value pairs into kwargs first.
ARCHIVE_FORMATS = {
    'gztar': (make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
    'bztar': (make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"),
    'xztar': (make_tarball, [('compress', 'xz')], "xz'ed tar-file"),
    'ztar': (make_tarball, [('compress', 'compress')], "compressed tar file"),
    'tar': (make_tarball, [('compress', None)], "uncompressed tar file"),
    'zip': (make_zipfile, [], "ZIP file"),
}
def check_archive_formats(formats):
    """Return the first entry of *formats* that is not a known archive
    format, or None when every entry is recognized."""
    unknown = (fmt for fmt in formats if fmt not in ARCHIVE_FORMATS)
    return next(unknown, None)
def make_archive(
    base_name,
    format,
    root_dir=None,
    base_dir=None,
    verbose=0,
    dry_run=0,
    owner=None,
    group=None,
):
    """Create an archive file (eg. zip or tar).

    'base_name' is the name of the file to create, minus any format-specific
    extension; 'format' is the archive format: one of "zip", "tar", "gztar",
    "bztar", "xztar", or "ztar".

    'root_dir' is a directory that will be the root directory of the
    archive; ie. we typically chdir into 'root_dir' before creating the
    archive.  'base_dir' is the directory where we start archiving from;
    ie. 'base_dir' will be the common prefix of all files and
    directories in the archive.  'root_dir' and 'base_dir' both default
    to the current directory.  Returns the name of the archive file.

    'owner' and 'group' are used when creating a tar archive.  By default,
    uses the current owner and group.  Raises ValueError for an unknown
    'format'.
    """
    # Remember where we are so the chdir below can always be undone.
    save_cwd = os.getcwd()
    if root_dir is not None:
        log.debug("changing into '%s'", root_dir)
        # base_name must survive the chdir, so absolutize it first.
        base_name = os.path.abspath(base_name)
        if not dry_run:
            os.chdir(root_dir)
    if base_dir is None:
        base_dir = os.curdir
    kwargs = {'dry_run': dry_run}
    try:
        format_info = ARCHIVE_FORMATS[format]
    except KeyError:
        raise ValueError("unknown archive format '%s'" % format)
    func = format_info[0]
    # Merge the format's fixed keyword arguments (e.g. compress='gzip').
    for arg, val in format_info[1]:
        kwargs[arg] = val
    # Ownership options only make sense for tar-based formats.
    if format != 'zip':
        kwargs['owner'] = owner
        kwargs['group'] = group
    try:
        filename = func(base_name, base_dir, **kwargs)
    finally:
        # Always restore the original working directory, even on failure.
        if root_dir is not None:
            log.debug("changing back to '%s'", save_cwd)
            os.chdir(save_cwd)
    return filename
| mit | 115873bd3bdb1a00b5c779370f786269 | 29.614286 | 88 | 0.581545 | 3.903461 | false | false | false | false |
pypa/setuptools | setuptools/_distutils/tests/test_upload.py | 1 | 6634 | """Tests for distutils.command.upload."""
import os
import unittest.mock as mock
from urllib.request import HTTPError
from distutils.command import upload as upload_mod
from distutils.command.upload import upload
from distutils.core import Distribution
from distutils.errors import DistutilsError
from distutils.tests.test_config import PYPIRC, BasePyPIRCCommandTestCase
import pytest
PYPIRC_LONG_PASSWORD = """\
[distutils]
index-servers =
server1
server2
[server1]
username:me
password:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
[server2]
username:meagain
password: secret
realm:acme
repository:http://another.pypi/
"""
PYPIRC_NOPASSWORD = """\
[distutils]
index-servers =
server1
[server1]
username:me
"""
class FakeOpen:
    """Minimal stand-in for the response object returned by ``urlopen``.

    Records the request (when given one instead of a URL string) and serves
    a canned status, message, headers, and body.
    """

    _HEADERS = {'content-type': 'text/plain; charset=utf-8'}

    def __init__(self, url, msg=None, code=None):
        self.url = url
        # A non-string "url" is actually a Request object; keep it for
        # later inspection by the tests.
        self.req = None if isinstance(url, str) else url
        self.msg = msg or 'OK'
        self.code = code or 200

    def getheader(self, name, default=None):
        # Header lookup is case-insensitive, like the real response object.
        return self._HEADERS.get(name.lower(), default)

    def read(self):
        return b'xyzzy'

    def getcode(self):
        return self.code
@pytest.fixture(autouse=True)
def urlopen(request, monkeypatch):
    """Route the upload command's urlopen through the test's fake opener.

    Applied automatically to every test in this module; also resets the
    canned response message/code so each test starts from the defaults
    ('OK', 200).
    """
    self = request.instance
    monkeypatch.setattr(upload_mod, 'urlopen', self._urlopen)
    self.next_msg = self.next_code = None
class TestUpload(BasePyPIRCCommandTestCase):
    """Tests for the 'upload' command, with all network traffic faked."""

    def _urlopen(self, url):
        # Stand-in installed by the autouse `urlopen` fixture; captures the
        # request so tests can inspect what would have been sent.
        self.last_open = FakeOpen(url, msg=self.next_msg, code=self.next_code)
        return self.last_open

    def test_finalize_options(self):
        """Credentials and repository URL are read from .pypirc."""
        # new format
        self.write_file(self.rc, PYPIRC)
        dist = Distribution()
        cmd = upload(dist)
        cmd.finalize_options()
        for attr, waited in (
            ('username', 'me'),
            ('password', 'secret'),
            ('realm', 'pypi'),
            ('repository', 'https://upload.pypi.org/legacy/'),
        ):
            assert getattr(cmd, attr) == waited

    def test_saved_password(self):
        """A password already stored on the Distribution is reused."""
        # file with no password
        self.write_file(self.rc, PYPIRC_NOPASSWORD)
        # make sure it passes
        dist = Distribution()
        cmd = upload(dist)
        cmd.finalize_options()
        assert cmd.password is None
        # make sure we get it as well, if another command
        # initialized it at the dist level
        dist.password = 'xxx'
        cmd = upload(dist)
        cmd.finalize_options()
        assert cmd.password == 'xxx'

    def test_upload(self, caplog):
        """A run produces a well-formed multipart POST with all digests."""
        tmp = self.mkdtemp()
        path = os.path.join(tmp, 'xxx')
        self.write_file(path)
        command, pyversion, filename = 'xxx', '2.6', path
        dist_files = [(command, pyversion, filename)]
        self.write_file(self.rc, PYPIRC_LONG_PASSWORD)
        # lets run it
        pkg_dir, dist = self.create_dist(dist_files=dist_files)
        cmd = upload(dist)
        cmd.show_response = 1
        cmd.ensure_finalized()
        cmd.run()
        # what did we send ?
        headers = dict(self.last_open.req.headers)
        assert int(headers['Content-length']) >= 2162
        content_type = headers['Content-type']
        assert content_type.startswith('multipart/form-data')
        assert self.last_open.req.get_method() == 'POST'
        expected_url = 'https://upload.pypi.org/legacy/'
        assert self.last_open.req.get_full_url() == expected_url
        data = self.last_open.req.data
        assert b'xxx' in data
        assert b'protocol_version' in data
        # sha256 of the empty payload is always present ...
        assert b'sha256_digest' in data
        assert (
            b'cd2eb0837c9b4c962c22d2ff8b5441b7b45805887f051d39bf133b583baf'
            b'6860' in data
        )
        # ... md5/blake2 only when the interpreter's hashlib provides them
        # (e.g. md5 is absent in FIPS builds).
        if b'md5_digest' in data:
            assert b'f561aaf6ef0bf14d4208bb46a4ccb3ad' in data
        if b'blake2_256_digest' in data:
            assert (
                b'b6f289a27d4fe90da63c503bfe0a9b761a8f76bb86148565065f040be'
                b'6d1c3044cf7ded78ef800509bccb4b648e507d88dc6383d67642aadcc'
                b'ce443f1534330a' in data
            )
        # The PyPI response body was echoed
        results = caplog.messages
        assert results[-1] == 75 * '-' + '\nxyzzy\n' + 75 * '-'

    # bpo-32304: archives whose last byte was b'\r' were corrupted due to
    # normalization intended for Mac OS 9.
    def test_upload_correct_cr(self):
        """Payloads and fields ending in CR are transmitted unmodified."""
        # content that ends with \r should not be modified.
        tmp = self.mkdtemp()
        path = os.path.join(tmp, 'xxx')
        self.write_file(path, content='yy\r')
        command, pyversion, filename = 'xxx', '2.6', path
        dist_files = [(command, pyversion, filename)]
        self.write_file(self.rc, PYPIRC_LONG_PASSWORD)
        # other fields that ended with \r used to be modified, now are
        # preserved.
        pkg_dir, dist = self.create_dist(
            dist_files=dist_files, description='long description\r'
        )
        cmd = upload(dist)
        cmd.show_response = 1
        cmd.ensure_finalized()
        cmd.run()
        headers = dict(self.last_open.req.headers)
        assert int(headers['Content-length']) >= 2172
        assert b'long description\r' in self.last_open.req.data

    def test_upload_fails(self, caplog):
        """A non-200 server response raises DistutilsError."""
        self.next_msg = "Not Found"
        self.next_code = 404
        with pytest.raises(DistutilsError):
            self.test_upload(caplog)

    @pytest.mark.parametrize(
        'exception,expected,raised_exception',
        [
            (OSError('oserror'), 'oserror', OSError),
            pytest.param(
                HTTPError('url', 400, 'httperror', {}, None),
                'Upload failed (400): httperror',
                DistutilsError,
                id="HTTP 400",
            ),
        ],
    )
    def test_wrong_exception_order(self, exception, expected, raised_exception, caplog):
        """HTTPError (an OSError subclass) must be caught before OSError."""
        tmp = self.mkdtemp()
        path = os.path.join(tmp, 'xxx')
        self.write_file(path)
        dist_files = [('xxx', '2.6', path)]  # command, pyversion, filename
        self.write_file(self.rc, PYPIRC_LONG_PASSWORD)
        pkg_dir, dist = self.create_dist(dist_files=dist_files)
        with mock.patch(
            'distutils.command.upload.urlopen',
            new=mock.Mock(side_effect=exception),
        ):
            with pytest.raises(raised_exception):
                cmd = upload(dist)
                cmd.ensure_finalized()
                cmd.run()
            results = caplog.messages
            assert expected in results[-1]
            caplog.clear()
| mit | 5a4c42418325cf39476522e14d805f91 | 29.571429 | 88 | 0.59994 | 3.691708 | false | true | false | false |
pypa/setuptools | setuptools/_distutils/command/bdist_rpm.py | 1 | 22016 | """distutils.command.bdist_rpm
Implements the Distutils 'bdist_rpm' command (create RPM source and binary
distributions)."""
import subprocess
import sys
import os
from ..core import Command
from ..debug import DEBUG
from ..file_util import write_file
from ..errors import (
DistutilsOptionError,
DistutilsPlatformError,
DistutilsFileError,
DistutilsExecError,
)
from ..sysconfig import get_python_version
from distutils._log import log
class bdist_rpm(Command):
description = "create an RPM distribution"
user_options = [
('bdist-base=', None, "base directory for creating built distributions"),
(
'rpm-base=',
None,
"base directory for creating RPMs (defaults to \"rpm\" under "
"--bdist-base; must be specified for RPM 2)",
),
(
'dist-dir=',
'd',
"directory to put final RPM files in " "(and .spec files if --spec-only)",
),
(
'python=',
None,
"path to Python interpreter to hard-code in the .spec file "
"(default: \"python\")",
),
(
'fix-python',
None,
"hard-code the exact path to the current Python interpreter in "
"the .spec file",
),
('spec-only', None, "only regenerate spec file"),
('source-only', None, "only generate source RPM"),
('binary-only', None, "only generate binary RPM"),
('use-bzip2', None, "use bzip2 instead of gzip to create source distribution"),
# More meta-data: too RPM-specific to put in the setup script,
# but needs to go in the .spec file -- so we make these options
# to "bdist_rpm". The idea is that packagers would put this
# info in setup.cfg, although they are of course free to
# supply it on the command line.
(
'distribution-name=',
None,
"name of the (Linux) distribution to which this "
"RPM applies (*not* the name of the module distribution!)",
),
('group=', None, "package classification [default: \"Development/Libraries\"]"),
('release=', None, "RPM release number"),
('serial=', None, "RPM serial number"),
(
'vendor=',
None,
"RPM \"vendor\" (eg. \"Joe Blow <joe@example.com>\") "
"[default: maintainer or author from setup script]",
),
(
'packager=',
None,
"RPM packager (eg. \"Jane Doe <jane@example.net>\") " "[default: vendor]",
),
('doc-files=', None, "list of documentation files (space or comma-separated)"),
('changelog=', None, "RPM changelog"),
('icon=', None, "name of icon file"),
('provides=', None, "capabilities provided by this package"),
('requires=', None, "capabilities required by this package"),
('conflicts=', None, "capabilities which conflict with this package"),
('build-requires=', None, "capabilities required to build this package"),
('obsoletes=', None, "capabilities made obsolete by this package"),
('no-autoreq', None, "do not automatically calculate dependencies"),
# Actions to take when building RPM
('keep-temp', 'k', "don't clean up RPM build directory"),
('no-keep-temp', None, "clean up RPM build directory [default]"),
(
'use-rpm-opt-flags',
None,
"compile with RPM_OPT_FLAGS when building from source RPM",
),
('no-rpm-opt-flags', None, "do not pass any RPM CFLAGS to compiler"),
('rpm3-mode', None, "RPM 3 compatibility mode (default)"),
('rpm2-mode', None, "RPM 2 compatibility mode"),
# Add the hooks necessary for specifying custom scripts
('prep-script=', None, "Specify a script for the PREP phase of RPM building"),
('build-script=', None, "Specify a script for the BUILD phase of RPM building"),
(
'pre-install=',
None,
"Specify a script for the pre-INSTALL phase of RPM building",
),
(
'install-script=',
None,
"Specify a script for the INSTALL phase of RPM building",
),
(
'post-install=',
None,
"Specify a script for the post-INSTALL phase of RPM building",
),
(
'pre-uninstall=',
None,
"Specify a script for the pre-UNINSTALL phase of RPM building",
),
(
'post-uninstall=',
None,
"Specify a script for the post-UNINSTALL phase of RPM building",
),
('clean-script=', None, "Specify a script for the CLEAN phase of RPM building"),
(
'verify-script=',
None,
"Specify a script for the VERIFY phase of the RPM build",
),
# Allow a packager to explicitly force an architecture
('force-arch=', None, "Force an architecture onto the RPM build process"),
('quiet', 'q', "Run the INSTALL phase of RPM building in quiet mode"),
]
boolean_options = [
'keep-temp',
'use-rpm-opt-flags',
'rpm3-mode',
'no-autoreq',
'quiet',
]
negative_opt = {
'no-keep-temp': 'keep-temp',
'no-rpm-opt-flags': 'use-rpm-opt-flags',
'rpm2-mode': 'rpm3-mode',
}
    def initialize_options(self):
        """Set every command option to its 'undefined' marker.

        distutils convention: options start out as None (or 0/1 for plain
        flags with defaults) here, and finalize_options() assigns real
        values after setup.cfg and the command line have been processed.
        """
        self.bdist_base = None
        self.rpm_base = None
        self.dist_dir = None
        self.python = None
        self.fix_python = None
        self.spec_only = None
        self.binary_only = None
        self.source_only = None
        self.use_bzip2 = None
        self.distribution_name = None
        self.group = None
        self.release = None
        self.serial = None
        self.vendor = None
        self.packager = None
        self.doc_files = None
        self.changelog = None
        self.icon = None
        self.prep_script = None
        self.build_script = None
        self.install_script = None
        self.clean_script = None
        self.verify_script = None
        self.pre_install = None
        self.post_install = None
        self.pre_uninstall = None
        self.post_uninstall = None
        self.prep = None
        self.provides = None
        self.requires = None
        self.conflicts = None
        self.build_requires = None
        self.obsoletes = None
        # Flags with real defaults (not resolved later):
        self.keep_temp = 0
        self.use_rpm_opt_flags = 1
        self.rpm3_mode = 1
        self.no_autoreq = 0
        self.force_arch = None
        self.quiet = 0
    def finalize_options(self):
        """Resolve option defaults and validate mutually exclusive choices.

        Raises DistutilsOptionError for conflicting options and
        DistutilsPlatformError when not running on a POSIX system.
        """
        self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))
        if self.rpm_base is None:
            # RPM 3 lays out SOURCES/SPECS/etc. under --bdist-base; RPM 2
            # has no per-build topdir, so the user must supply one.
            if not self.rpm3_mode:
                raise DistutilsOptionError("you must specify --rpm-base in RPM 2 mode")
            self.rpm_base = os.path.join(self.bdist_base, "rpm")
        if self.python is None:
            if self.fix_python:
                # Hard-code the exact interpreter running this build.
                self.python = sys.executable
            else:
                self.python = "python3"
        elif self.fix_python:
            raise DistutilsOptionError(
                "--python and --fix-python are mutually exclusive options"
            )
        if os.name != 'posix':
            raise DistutilsPlatformError(
                "don't know how to create RPM " "distributions on platform %s" % os.name
            )
        if self.binary_only and self.source_only:
            raise DistutilsOptionError(
                "cannot supply both '--source-only' and '--binary-only'"
            )
        # don't pass CFLAGS to pure python distributions
        if not self.distribution.has_ext_modules():
            self.use_rpm_opt_flags = 0
        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
        self.finalize_package_data()
    def finalize_package_data(self):
        """Normalize the RPM meta-data options (strings, lists, filenames).

        The ensure_* helpers inherited from Command validate each option's
        type and, where given, install a default value.
        """
        self.ensure_string('group', "Development/Libraries")
        # Default vendor: "Name <email>" from the setup script's contact.
        self.ensure_string(
            'vendor',
            "%s <%s>"
            % (self.distribution.get_contact(), self.distribution.get_contact_email()),
        )
        self.ensure_string('packager')
        self.ensure_string_list('doc_files')
        if isinstance(self.doc_files, list):
            # Auto-include a README if one exists and wasn't listed.
            for readme in ('README', 'README.txt'):
                if os.path.exists(readme) and readme not in self.doc_files:
                    self.doc_files.append(readme)
        self.ensure_string('release', "1")
        self.ensure_string('serial')  # should it be an int?
        self.ensure_string('distribution_name')
        self.ensure_string('changelog')
        # Format changelog correctly
        self.changelog = self._format_changelog(self.changelog)
        self.ensure_filename('icon')
        self.ensure_filename('prep_script')
        self.ensure_filename('build_script')
        self.ensure_filename('install_script')
        self.ensure_filename('clean_script')
        self.ensure_filename('verify_script')
        self.ensure_filename('pre_install')
        self.ensure_filename('post_install')
        self.ensure_filename('pre_uninstall')
        self.ensure_filename('post_uninstall')
        # XXX don't forget we punted on summaries and descriptions -- they
        # should be handled here eventually!
        # Now *this* is some meta-data that belongs in the setup script...
        self.ensure_string_list('provides')
        self.ensure_string_list('requires')
        self.ensure_string_list('conflicts')
        self.ensure_string_list('build_requires')
        self.ensure_string_list('obsoletes')
        self.ensure_string('force_arch')
def run(self): # noqa: C901
if DEBUG:
print("before _get_package_data():")
print("vendor =", self.vendor)
print("packager =", self.packager)
print("doc_files =", self.doc_files)
print("changelog =", self.changelog)
# make directories
if self.spec_only:
spec_dir = self.dist_dir
self.mkpath(spec_dir)
else:
rpm_dir = {}
for d in ('SOURCES', 'SPECS', 'BUILD', 'RPMS', 'SRPMS'):
rpm_dir[d] = os.path.join(self.rpm_base, d)
self.mkpath(rpm_dir[d])
spec_dir = rpm_dir['SPECS']
# Spec file goes into 'dist_dir' if '--spec-only specified',
# build/rpm.<plat> otherwise.
spec_path = os.path.join(spec_dir, "%s.spec" % self.distribution.get_name())
self.execute(
write_file, (spec_path, self._make_spec_file()), "writing '%s'" % spec_path
)
if self.spec_only: # stop if requested
return
# Make a source distribution and copy to SOURCES directory with
# optional icon.
saved_dist_files = self.distribution.dist_files[:]
sdist = self.reinitialize_command('sdist')
if self.use_bzip2:
sdist.formats = ['bztar']
else:
sdist.formats = ['gztar']
self.run_command('sdist')
self.distribution.dist_files = saved_dist_files
source = sdist.get_archive_files()[0]
source_dir = rpm_dir['SOURCES']
self.copy_file(source, source_dir)
if self.icon:
if os.path.exists(self.icon):
self.copy_file(self.icon, source_dir)
else:
raise DistutilsFileError("icon file '%s' does not exist" % self.icon)
# build package
log.info("building RPMs")
rpm_cmd = ['rpmbuild']
if self.source_only: # what kind of RPMs?
rpm_cmd.append('-bs')
elif self.binary_only:
rpm_cmd.append('-bb')
else:
rpm_cmd.append('-ba')
rpm_cmd.extend(['--define', '__python %s' % self.python])
if self.rpm3_mode:
rpm_cmd.extend(['--define', '_topdir %s' % os.path.abspath(self.rpm_base)])
if not self.keep_temp:
rpm_cmd.append('--clean')
if self.quiet:
rpm_cmd.append('--quiet')
rpm_cmd.append(spec_path)
# Determine the binary rpm names that should be built out of this spec
# file
# Note that some of these may not be really built (if the file
# list is empty)
nvr_string = "%{name}-%{version}-%{release}"
src_rpm = nvr_string + ".src.rpm"
non_src_rpm = "%{arch}/" + nvr_string + ".%{arch}.rpm"
q_cmd = r"rpm -q --qf '{} {}\n' --specfile '{}'".format(
src_rpm,
non_src_rpm,
spec_path,
)
out = os.popen(q_cmd)
try:
binary_rpms = []
source_rpm = None
while True:
line = out.readline()
if not line:
break
ell = line.strip().split()
assert len(ell) == 2
binary_rpms.append(ell[1])
# The source rpm is named after the first entry in the spec file
if source_rpm is None:
source_rpm = ell[0]
status = out.close()
if status:
raise DistutilsExecError("Failed to execute: %s" % repr(q_cmd))
finally:
out.close()
self.spawn(rpm_cmd)
if not self.dry_run:
if self.distribution.has_ext_modules():
pyversion = get_python_version()
else:
pyversion = 'any'
if not self.binary_only:
srpm = os.path.join(rpm_dir['SRPMS'], source_rpm)
assert os.path.exists(srpm)
self.move_file(srpm, self.dist_dir)
filename = os.path.join(self.dist_dir, source_rpm)
self.distribution.dist_files.append(('bdist_rpm', pyversion, filename))
if not self.source_only:
for rpm in binary_rpms:
rpm = os.path.join(rpm_dir['RPMS'], rpm)
if os.path.exists(rpm):
self.move_file(rpm, self.dist_dir)
filename = os.path.join(self.dist_dir, os.path.basename(rpm))
self.distribution.dist_files.append(
('bdist_rpm', pyversion, filename)
)
    def _dist_path(self, path):
        """Return *path*'s basename relocated into the distribution dir."""
        return os.path.join(self.dist_dir, os.path.basename(path))
def _make_spec_file(self): # noqa: C901
"""Generate the text of an RPM spec file and return it as a
list of strings (one per line).
"""
# definitions and headers
spec_file = [
'%define name ' + self.distribution.get_name(),
'%define version ' + self.distribution.get_version().replace('-', '_'),
'%define unmangled_version ' + self.distribution.get_version(),
'%define release ' + self.release.replace('-', '_'),
'',
'Summary: ' + (self.distribution.get_description() or "UNKNOWN"),
]
# Workaround for #14443 which affects some RPM based systems such as
# RHEL6 (and probably derivatives)
vendor_hook = subprocess.getoutput('rpm --eval %{__os_install_post}')
# Generate a potential replacement value for __os_install_post (whilst
# normalizing the whitespace to simplify the test for whether the
# invocation of brp-python-bytecompile passes in __python):
vendor_hook = '\n'.join(
[' %s \\' % line.strip() for line in vendor_hook.splitlines()]
)
problem = "brp-python-bytecompile \\\n"
fixed = "brp-python-bytecompile %{__python} \\\n"
fixed_hook = vendor_hook.replace(problem, fixed)
if fixed_hook != vendor_hook:
spec_file.append('# Workaround for http://bugs.python.org/issue14443')
spec_file.append('%define __os_install_post ' + fixed_hook + '\n')
# put locale summaries into spec file
# XXX not supported for now (hard to put a dictionary
# in a config file -- arg!)
# for locale in self.summaries.keys():
# spec_file.append('Summary(%s): %s' % (locale,
# self.summaries[locale]))
spec_file.extend(
[
'Name: %{name}',
'Version: %{version}',
'Release: %{release}',
]
)
# XXX yuck! this filename is available from the "sdist" command,
# but only after it has run: and we create the spec file before
# running "sdist", in case of --spec-only.
if self.use_bzip2:
spec_file.append('Source0: %{name}-%{unmangled_version}.tar.bz2')
else:
spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz')
spec_file.extend(
[
'License: ' + (self.distribution.get_license() or "UNKNOWN"),
'Group: ' + self.group,
'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot',
'Prefix: %{_prefix}',
]
)
if not self.force_arch:
# noarch if no extension modules
if not self.distribution.has_ext_modules():
spec_file.append('BuildArch: noarch')
else:
spec_file.append('BuildArch: %s' % self.force_arch)
for field in (
'Vendor',
'Packager',
'Provides',
'Requires',
'Conflicts',
'Obsoletes',
):
val = getattr(self, field.lower())
if isinstance(val, list):
spec_file.append('{}: {}'.format(field, ' '.join(val)))
elif val is not None:
spec_file.append('{}: {}'.format(field, val))
if self.distribution.get_url():
spec_file.append('Url: ' + self.distribution.get_url())
if self.distribution_name:
spec_file.append('Distribution: ' + self.distribution_name)
if self.build_requires:
spec_file.append('BuildRequires: ' + ' '.join(self.build_requires))
if self.icon:
spec_file.append('Icon: ' + os.path.basename(self.icon))
if self.no_autoreq:
spec_file.append('AutoReq: 0')
spec_file.extend(
[
'',
'%description',
self.distribution.get_long_description() or "",
]
)
# put locale descriptions into spec file
# XXX again, suppressed because config file syntax doesn't
# easily support this ;-(
# for locale in self.descriptions.keys():
# spec_file.extend([
# '',
# '%description -l ' + locale,
# self.descriptions[locale],
# ])
# rpm scripts
# figure out default build script
def_setup_call = "{} {}".format(self.python, os.path.basename(sys.argv[0]))
def_build = "%s build" % def_setup_call
if self.use_rpm_opt_flags:
def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build
# insert contents of files
# XXX this is kind of misleading: user-supplied options are files
# that we open and interpolate into the spec file, but the defaults
# are just text that we drop in as-is. Hmmm.
install_cmd = (
'%s install -O1 --root=$RPM_BUILD_ROOT ' '--record=INSTALLED_FILES'
) % def_setup_call
script_options = [
('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"),
('build', 'build_script', def_build),
('install', 'install_script', install_cmd),
('clean', 'clean_script', "rm -rf $RPM_BUILD_ROOT"),
('verifyscript', 'verify_script', None),
('pre', 'pre_install', None),
('post', 'post_install', None),
('preun', 'pre_uninstall', None),
('postun', 'post_uninstall', None),
]
for (rpm_opt, attr, default) in script_options:
# Insert contents of file referred to, if no file is referred to
# use 'default' as contents of script
val = getattr(self, attr)
if val or default:
spec_file.extend(
[
'',
'%' + rpm_opt,
]
)
if val:
with open(val) as f:
spec_file.extend(f.read().split('\n'))
else:
spec_file.append(default)
# files section
spec_file.extend(
[
'',
'%files -f INSTALLED_FILES',
'%defattr(-,root,root)',
]
)
if self.doc_files:
spec_file.append('%doc ' + ' '.join(self.doc_files))
if self.changelog:
spec_file.extend(
[
'',
'%changelog',
]
)
spec_file.extend(self.changelog)
return spec_file
def _format_changelog(self, changelog):
"""Format the changelog correctly and convert it to a list of strings"""
if not changelog:
return changelog
new_changelog = []
for line in changelog.strip().split('\n'):
line = line.strip()
if line[0] == '*':
new_changelog.extend(['', line])
elif line[0] == '-':
new_changelog.append(line)
else:
new_changelog.append(' ' + line)
# strip trailing newline inserted by first changelog entry
if not new_changelog[0]:
del new_changelog[0]
return new_changelog
| mit | f9c0e6dc360063e264ea0b53dd662936 | 34.798374 | 88 | 0.529342 | 4.164964 | false | false | false | false |
pypa/setuptools | setuptools/config/_validate_pyproject/formats.py | 1 | 8736 | import logging
import os
import re
import string
import typing
from itertools import chain as _chain
_logger = logging.getLogger(__name__)
# -------------------------------------------------------------------------------------
# PEP 440
# Verbose regular expression accepting PEP 440 version identifiers.
# The pattern text is kept verbatim (taken from PEP 440's appendix on
# parsing version strings) to stay in sync with ``packaging.version``.
VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""
# Anchored, case-insensitive, verbose-mode compilation of the pattern;
# surrounding whitespace in the candidate string is tolerated.
VERSION_REGEX = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.X | re.I)


def pep440(version: str) -> bool:
    """Return ``True`` if *version* is a valid PEP 440 version identifier."""
    return VERSION_REGEX.match(version) is not None
# -------------------------------------------------------------------------------------
# PEP 508
# PEP 508 names: ASCII letters/digits, optionally separated by ``.``,
# ``_`` or ``-``; must both start and end with a letter or digit.
PEP508_IDENTIFIER_PATTERN = r"([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])"
PEP508_IDENTIFIER_REGEX = re.compile(f"^{PEP508_IDENTIFIER_PATTERN}$", re.I)


def pep508_identifier(name: str) -> bool:
    """Return ``True`` if *name* is a valid PEP 508 project/extra name."""
    return bool(PEP508_IDENTIFIER_REGEX.match(name))
# Define ``pep508`` using ``packaging`` when available (preferring the
# installed distribution, then the setuptools-vendored copy); otherwise
# fall back to a permissive no-op validator.
try:
    try:
        from packaging import requirements as _req
    except ImportError:  # pragma: no cover
        # let's try setuptools vendored version
        from setuptools._vendor.packaging import requirements as _req  # type: ignore

    def pep508(value: str) -> bool:
        """Return ``True`` if *value* is a valid PEP 508 dependency specifier."""
        try:
            _req.Requirement(value)
            return True
        except _req.InvalidRequirement:
            return False

except ImportError:  # pragma: no cover
    _logger.warning(
        "Could not find an installation of `packaging`. Requirements, dependencies and "
        "versions might not be validated. "
        "To enforce validation, please install `packaging`."
    )

    def pep508(value: str) -> bool:
        """Fallback used when ``packaging`` is unavailable: accept anything."""
        return True
def pep508_versionspec(value: str) -> bool:
    """Expression that can be used to specify/lock versions (including ranges)"""
    # Markers (";"), extras ("]") and URL specs ("@") are not part of a
    # bare version specifier under PEP 508.
    if {";", "]", "@"} & set(value):
        return False
    # Attach the spec to a dummy requirement name so the full PEP 508
    # validator can be re-used for the version portion.
    return pep508(f"requirement{value}")
# -------------------------------------------------------------------------------------
# PEP 517
def pep517_backend_reference(value: str) -> bool:
    """Return ``True`` if *value* is a valid ``module:object`` build-backend
    reference as defined by PEP 517 (dotted module, optional dotted object).
    """
    module, _, obj = value.partition(":")
    dotted_parts = [part.strip() for part in (*module.split("."), *obj.split("."))]
    # Empty segments (e.g. from a missing ":object" suffix) are skipped.
    return all(python_identifier(part) for part in dotted_parts if part)
# -------------------------------------------------------------------------------------
# Classifiers - PEP 301
def _download_classifiers() -> str:
    """Fetch the list of valid trove classifiers from PyPI.

    Returns the response body decoded with the charset advertised in the
    ``Content-Type`` header (defaulting to UTF-8).  Performs a network
    request; callers are expected to handle any exception raised here.
    """
    import ssl
    from email.message import Message
    from urllib.request import urlopen

    url = "https://pypi.org/pypi?:action=list_classifiers"
    context = ssl.create_default_context()
    with urlopen(url, context=context) as response:
        # Re-use the email header machinery to parse the ``charset``
        # parameter out of the Content-Type header.
        headers = Message()
        headers["content_type"] = response.getheader("content-type", "text/plain")
        return response.read().decode(headers.get_param("charset", "utf-8"))
class _TroveClassifier:
    """The ``trove_classifiers`` package is the official way of validating classifiers,
    however this package might not be always available.
    As a workaround we can still download a list from PyPI.
    We also don't want to be over strict about it, so simply skipping silently is an
    option (classifiers will be validated anyway during the upload to PyPI).
    """

    def __init__(self):
        # ``downloaded`` is a tri-state cache (see comments below).
        self.downloaded: typing.Union[None, False, typing.Set[str]] = None
        self._skip_download = False
        # None => not cached yet
        # False => cache not available
        self.__name__ = "trove_classifier"  # Emulate a public function

    def _disable_download(self):
        # This is a private API. Only setuptools has the consent of using it.
        self._skip_download = True

    def __call__(self, value: str) -> bool:
        """Return ``True`` if *value* is a known classifier, or when
        validation is being skipped (no cache, download disabled)."""
        if self.downloaded is False or self._skip_download is True:
            return True

        if os.getenv("NO_NETWORK") or os.getenv("VALIDATE_PYPROJECT_NO_NETWORK"):
            # Network explicitly disabled via environment: accept everything.
            self.downloaded = False
            msg = (
                "Install ``trove-classifiers`` to ensure proper validation. "
                "Skipping download of classifiers list from PyPI (NO_NETWORK)."
            )
            _logger.debug(msg)
            return True

        if self.downloaded is None:
            # First call: try to populate the cache from PyPI; on any
            # failure, disable validation instead of erroring out.
            msg = (
                "Install ``trove-classifiers`` to ensure proper validation. "
                "Meanwhile a list of classifiers will be downloaded from PyPI."
            )
            _logger.debug(msg)
            try:
                self.downloaded = set(_download_classifiers().splitlines())
            except Exception:
                self.downloaded = False
                _logger.debug("Problem with download, skipping validation")
                return True

        # ``private ::``-prefixed classifiers are always accepted without lookup.
        return value in self.downloaded or value.lower().startswith("private ::")
# Prefer the official ``trove_classifiers`` package when installed;
# fall back to the PyPI-download-based validator otherwise.
try:
    from trove_classifiers import classifiers as _trove_classifiers

    def trove_classifier(value: str) -> bool:
        """Return ``True`` if *value* is a registered trove classifier
        (or uses the always-accepted ``private ::`` prefix)."""
        return value in _trove_classifiers or value.lower().startswith("private ::")

except ImportError:  # pragma: no cover
    trove_classifier = _TroveClassifier()
# -------------------------------------------------------------------------------------
# Non-PEP related
def url(value: str) -> bool:
    """Return ``True`` when *value* parses as a URL with both a scheme
    and a network location (host)."""
    from urllib.parse import urlparse

    try:
        parsed = urlparse(value)
        if not parsed.scheme:
            # A missing scheme is tolerated (with a warning) for plain
            # host names, but not for filesystem-looking paths or values
            # containing "@" (likely user:pass / VCS references).
            _logger.warning(
                "For maximum compatibility please make sure to include a "
                "`scheme` prefix in your URL (e.g. 'http://'). "
                f"Given value: {value}"
            )
            looks_like_path = value.startswith("/") or value.startswith("\\")
            if not looks_like_path and "@" not in value:
                parsed = urlparse(f"http://{value}")
        return bool(parsed.scheme and parsed.netloc)
    except Exception:
        return False
# https://packaging.python.org/specifications/entry-points/
# What the spec *accepts* for an entry-point name: any run of characters
# with no "=", not starting/ending with whitespace or "[".
ENTRYPOINT_PATTERN = r"[^\[\s=]([^=]*[^\s=])?"
ENTRYPOINT_REGEX = re.compile(f"^{ENTRYPOINT_PATTERN}$", re.I)
# What the spec *recommends*: only letters, digits, "_", "-" and ".".
# NOTE(review): the "RECOMMEDED" misspelling is kept intentionally —
# renaming these module-level constants would break existing importers.
RECOMMEDED_ENTRYPOINT_PATTERN = r"[\w.-]+"
RECOMMEDED_ENTRYPOINT_REGEX = re.compile(f"^{RECOMMEDED_ENTRYPOINT_PATTERN}$", re.I)
# Entry-point groups are dotted identifiers (e.g. "console_scripts").
ENTRYPOINT_GROUP_PATTERN = r"\w+(\.\w+)*"
ENTRYPOINT_GROUP_REGEX = re.compile(f"^{ENTRYPOINT_GROUP_PATTERN}$", re.I)
def python_identifier(value: str) -> bool:
    """Return ``True`` if *value* is a valid Python identifier."""
    return str.isidentifier(value)


def python_qualified_identifier(value: str) -> bool:
    """Return ``True`` if *value* is a dotted chain of valid identifiers
    (no leading/trailing/empty segments)."""
    if value.startswith(".") or value.endswith("."):
        return False
    segments = value.split(".")
    return all(map(python_identifier, segments))


def python_module_name(value: str) -> bool:
    """Return ``True`` if *value* is a valid (possibly dotted) module name."""
    return python_qualified_identifier(value)
def python_entrypoint_group(value: str) -> bool:
    """Return ``True`` if *value* is a valid entry-point group name."""
    return bool(ENTRYPOINT_GROUP_REGEX.match(value))
def python_entrypoint_name(value: str) -> bool:
    """Validate an entry-point name.

    Returns ``False`` only for names the entry-points spec outright
    rejects; names that merely deviate from the *recommended* pattern are
    still accepted, with a warning.
    """
    if not ENTRYPOINT_REGEX.match(value):
        return False
    if not RECOMMEDED_ENTRYPOINT_REGEX.match(value):
        msg = f"Entry point `{value}` does not follow recommended pattern: "
        msg += RECOMMEDED_ENTRYPOINT_PATTERN
        _logger.warning(msg)
    return True
def python_entrypoint_reference(value: str) -> bool:
    """Validate an entry-point object reference of the form
    ``module:attr [extra1, extra2]``.

    The module and attribute must be dotted chains of identifiers;
    extras (discouraged) must be valid PEP 508 identifiers.
    """
    module, _, rest = value.partition(":")
    if "[" in rest:
        obj, _, extras_ = rest.partition("[")
        extras_ = extras_.strip()
        if not extras_ or extras_[-1] != "]":
            # Unterminated extras such as ``pkg:obj[``.  Previously the
            # ``[-1]`` index on the empty string raised IndexError instead
            # of reporting the value as invalid.
            return False
        extras = (x.strip() for x in extras_.strip(string.whitespace + "[]").split(","))
        if not all(pep508_identifier(e) for e in extras):
            return False
        _logger.warning(f"`{value}` - using extras for entry points is not recommended")
    else:
        obj = rest

    module_parts = module.split(".")
    # The attribute part is only considered when a ":" separator was given.
    identifiers = _chain(module_parts, obj.split(".")) if rest else module_parts
    return all(python_identifier(i.strip()) for i in identifiers)
| mit | ed0fea06b8b46692c1128f3079597100 | 32.72973 | 88 | 0.555746 | 4.05007 | false | false | false | false |
pypa/setuptools | pkg_resources/_vendor/pyparsing/results.py | 12 | 25341 | # results.py
from collections.abc import MutableMapping, Mapping, MutableSequence, Iterator
import pprint
from weakref import ref as wkref
from typing import Tuple, Any
str_type: Tuple[type, ...] = (str, bytes)
_generator_type = type((_ for _ in ()))
class _ParseResultsWithOffset:
    """Internal pairing of a parsed value with its integer offset within
    the enclosing token list.  Stored in ``ParseResults._tokdict`` so that
    named results can also be located positionally."""

    __slots__ = ["tup"]

    def __init__(self, p1, p2):
        # p1: the parsed value; p2: its offset in the token list
        self.tup = (p1, p2)

    def __getitem__(self, i):
        return self.tup[i]

    def __getstate__(self):
        # pickle support: the tuple fully describes the instance
        return self.tup

    def __setstate__(self, *args):
        self.tup = args[0]
class ParseResults:
"""Structured parse results, to provide multiple means of access to
the parsed data:
- as a list (``len(results)``)
- by list index (``results[0], results[1]``, etc.)
- by attribute (``results.<results_name>`` - see :class:`ParserElement.set_results_name`)
Example::
integer = Word(nums)
date_str = (integer.set_results_name("year") + '/'
+ integer.set_results_name("month") + '/'
+ integer.set_results_name("day"))
# equivalent form:
# date_str = (integer("year") + '/'
# + integer("month") + '/'
# + integer("day"))
# parse_string returns a ParseResults object
result = date_str.parse_string("1999/12/31")
def test(s, fn=repr):
print("{} -> {}".format(s, fn(eval(s))))
test("list(result)")
test("result[0]")
test("result['month']")
test("result.day")
test("'month' in result")
test("'minutes' in result")
test("result.dump()", str)
prints::
list(result) -> ['1999', '/', '12', '/', '31']
result[0] -> '1999'
result['month'] -> '12'
result.day -> '31'
'month' in result -> True
'minutes' in result -> False
result.dump() -> ['1999', '/', '12', '/', '31']
- day: '31'
- month: '12'
- year: '1999'
"""
_null_values: Tuple[Any, ...] = (None, [], "", ())
__slots__ = [
"_name",
"_parent",
"_all_names",
"_modal",
"_toklist",
"_tokdict",
"__weakref__",
]
class List(list):
"""
Simple wrapper class to distinguish parsed list results that should be preserved
as actual Python lists, instead of being converted to :class:`ParseResults`:
LBRACK, RBRACK = map(pp.Suppress, "[]")
element = pp.Forward()
item = ppc.integer
element_list = LBRACK + pp.delimited_list(element) + RBRACK
# add parse actions to convert from ParseResults to actual Python collection types
def as_python_list(t):
return pp.ParseResults.List(t.as_list())
element_list.add_parse_action(as_python_list)
element <<= item | element_list
element.run_tests('''
100
[2,3,4]
[[2, 1],3,4]
[(2, 1),3,4]
(2,3,4)
''', post_parse=lambda s, r: (r[0], type(r[0])))
prints:
100
(100, <class 'int'>)
[2,3,4]
([2, 3, 4], <class 'list'>)
[[2, 1],3,4]
([[2, 1], 3, 4], <class 'list'>)
(Used internally by :class:`Group` when `aslist=True`.)
"""
def __new__(cls, contained=None):
if contained is None:
contained = []
if not isinstance(contained, list):
raise TypeError(
"{} may only be constructed with a list,"
" not {}".format(cls.__name__, type(contained).__name__)
)
return list.__new__(cls)
    def __new__(cls, toklist=None, name=None, **kwargs):
        # Wrapping an existing ParseResults is idempotent: return it as-is.
        if isinstance(toklist, ParseResults):
            return toklist
        self = object.__new__(cls)
        self._name = None
        self._parent = None
        self._all_names = set()
        if toklist is None:
            self._toklist = []
        elif isinstance(toklist, (list, _generator_type)):
            # A ParseResults.List is deliberately stored as a *single*
            # element (it should remain an actual Python list); any other
            # list/generator is copied element-wise into the token list.
            self._toklist = (
                [toklist[:]]
                if isinstance(toklist, ParseResults.List)
                else list(toklist)
            )
        else:
            # scalar token
            self._toklist = [toklist]
        self._tokdict = dict()
        return self
# Performance tuning: we construct a *lot* of these, so keep this
# constructor as small and fast as possible
    def __init__(
        self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance
    ):
        # NOTE: ``isinstance`` is bound as a default argument purely as a
        # micro-optimization (local lookup) -- see the performance comment
        # above this constructor.
        self._modal = modal
        if name is not None and name != "":
            if isinstance(name, int):
                # integer names are normalized to strings
                name = str(name)
            if not modal:
                # non-modal names accumulate all matches under this name
                self._all_names = {name}
            self._name = name
            if toklist not in self._null_values:
                if isinstance(toklist, (str_type, type)):
                    # wrap bare strings/types so indexing below works
                    toklist = [toklist]
                if asList:
                    # store the named result as a nested ParseResults
                    if isinstance(toklist, ParseResults):
                        self[name] = _ParseResultsWithOffset(
                            ParseResults(toklist._toklist), 0
                        )
                    else:
                        self[name] = _ParseResultsWithOffset(
                            ParseResults(toklist[0]), 0
                        )
                    self[name]._name = name
                else:
                    # store the scalar value under the name; fall back to
                    # the whole toklist when it is not subscriptable
                    try:
                        self[name] = toklist[0]
                    except (KeyError, TypeError, IndexError):
                        if toklist is not self:
                            self[name] = toklist
                        else:
                            self._name = name
    def __getitem__(self, i):
        # int/slice -> positional access into the token list
        if isinstance(i, (int, slice)):
            return self._toklist[i]
        else:
            # string key -> named-results lookup
            if i not in self._all_names:
                # modal name: return only the most recent match
                return self._tokdict[i][-1][0]
            else:
                # list-all-matches name (modal=False): return every match
                return ParseResults([v[0] for v in self._tokdict[i]])
    def __setitem__(self, k, v, isinstance=isinstance):
        # (``isinstance`` bound as a default argument for lookup speed)
        if isinstance(v, _ParseResultsWithOffset):
            # internal use: append a (value, offset) record for a named result
            self._tokdict[k] = self._tokdict.get(k, list()) + [v]
            sub = v[0]
        elif isinstance(k, (int, slice)):
            # positional assignment into the token list
            self._toklist[k] = v
            sub = v
        else:
            # plain named assignment; stored with offset 0 by convention
            self._tokdict[k] = self._tokdict.get(k, list()) + [
                _ParseResultsWithOffset(v, 0)
            ]
            sub = v
        if isinstance(sub, ParseResults):
            # keep a weak back-reference so nested results can resolve
            # their name via the parent (see get_name)
            sub._parent = wkref(self)
    def __delitem__(self, i):
        if isinstance(i, (int, slice)):
            # positional deletion: remove from the token list, then shift
            # the stored offsets of every named result accordingly
            mylen = len(self._toklist)
            del self._toklist[i]

            # convert int to slice
            if isinstance(i, int):
                if i < 0:
                    i += mylen
                i = slice(i, i + 1)
            # get removed indices
            removed = list(range(*i.indices(mylen)))
            removed.reverse()
            # fixup indices in token dictionary
            for name, occurrences in self._tokdict.items():
                for j in removed:
                    for k, (value, position) in enumerate(occurrences):
                        # decrement offsets that pointed past the removed slot
                        occurrences[k] = _ParseResultsWithOffset(
                            value, position - (position > j)
                        )
        else:
            # string key: drop the named result entirely
            del self._tokdict[i]
    def __contains__(self, k) -> bool:
        # membership tests check *named* results, not token values
        return k in self._tokdict

    def __len__(self) -> int:
        return len(self._toklist)

    def __bool__(self) -> bool:
        # truthy when there are tokens *or* named results
        return not not (self._toklist or self._tokdict)

    def __iter__(self) -> Iterator:
        return iter(self._toklist)

    def __reversed__(self) -> Iterator:
        return iter(self._toklist[::-1])

    def keys(self):
        # iterator over results names (dict-like view)
        return iter(self._tokdict)

    def values(self):
        # iterator over named-result values (dict-like view)
        return (self[k] for k in self.keys())

    def items(self):
        # iterator over (name, value) pairs (dict-like view)
        return ((k, self[k]) for k in self.keys())

    def haskeys(self) -> bool:
        """
        Since ``keys()`` returns an iterator, this method is helpful in bypassing
        code that looks for the existence of any defined results names."""
        return bool(self._tokdict)
def pop(self, *args, **kwargs):
"""
Removes and returns item at specified index (default= ``last``).
Supports both ``list`` and ``dict`` semantics for ``pop()``. If
passed no argument or an integer argument, it will use ``list``
semantics and pop tokens from the list of parsed tokens. If passed
a non-integer argument (most likely a string), it will use ``dict``
semantics and pop the corresponding value from any defined results
names. A second default return value argument is supported, just as in
``dict.pop()``.
Example::
numlist = Word(nums)[...]
print(numlist.parse_string("0 123 321")) # -> ['0', '123', '321']
def remove_first(tokens):
tokens.pop(0)
numlist.add_parse_action(remove_first)
print(numlist.parse_string("0 123 321")) # -> ['123', '321']
label = Word(alphas)
patt = label("LABEL") + Word(nums)[1, ...]
print(patt.parse_string("AAB 123 321").dump())
# Use pop() in a parse action to remove named result (note that corresponding value is not
# removed from list form of results)
def remove_LABEL(tokens):
tokens.pop("LABEL")
return tokens
patt.add_parse_action(remove_LABEL)
print(patt.parse_string("AAB 123 321").dump())
prints::
['AAB', '123', '321']
- LABEL: 'AAB'
['AAB', '123', '321']
"""
if not args:
args = [-1]
for k, v in kwargs.items():
if k == "default":
args = (args[0], v)
else:
raise TypeError(
"pop() got an unexpected keyword argument {!r}".format(k)
)
if isinstance(args[0], int) or len(args) == 1 or args[0] in self:
index = args[0]
ret = self[index]
del self[index]
return ret
else:
defaultvalue = args[1]
return defaultvalue
def get(self, key, default_value=None):
"""
Returns named result matching the given key, or if there is no
such name, then returns the given ``default_value`` or ``None`` if no
``default_value`` is specified.
Similar to ``dict.get()``.
Example::
integer = Word(nums)
date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
result = date_str.parse_string("1999/12/31")
print(result.get("year")) # -> '1999'
print(result.get("hour", "not specified")) # -> 'not specified'
print(result.get("hour")) # -> None
"""
if key in self:
return self[key]
else:
return default_value
def insert(self, index, ins_string):
"""
Inserts new element at location index in the list of parsed tokens.
Similar to ``list.insert()``.
Example::
numlist = Word(nums)[...]
print(numlist.parse_string("0 123 321")) # -> ['0', '123', '321']
# use a parse action to insert the parse location in the front of the parsed results
def insert_locn(locn, tokens):
tokens.insert(0, locn)
numlist.add_parse_action(insert_locn)
print(numlist.parse_string("0 123 321")) # -> [0, '0', '123', '321']
"""
self._toklist.insert(index, ins_string)
# fixup indices in token dictionary
for name, occurrences in self._tokdict.items():
for k, (value, position) in enumerate(occurrences):
occurrences[k] = _ParseResultsWithOffset(
value, position + (position > index)
)
def append(self, item):
"""
Add single element to end of ``ParseResults`` list of elements.
Example::
numlist = Word(nums)[...]
print(numlist.parse_string("0 123 321")) # -> ['0', '123', '321']
# use a parse action to compute the sum of the parsed integers, and add it to the end
def append_sum(tokens):
tokens.append(sum(map(int, tokens)))
numlist.add_parse_action(append_sum)
print(numlist.parse_string("0 123 321")) # -> ['0', '123', '321', 444]
"""
self._toklist.append(item)
def extend(self, itemseq):
"""
Add sequence of elements to end of ``ParseResults`` list of elements.
Example::
patt = Word(alphas)[1, ...]
# use a parse action to append the reverse of the matched strings, to make a palindrome
def make_palindrome(tokens):
tokens.extend(reversed([t[::-1] for t in tokens]))
return ''.join(tokens)
patt.add_parse_action(make_palindrome)
print(patt.parse_string("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl'
"""
if isinstance(itemseq, ParseResults):
self.__iadd__(itemseq)
else:
self._toklist.extend(itemseq)
def clear(self):
"""
Clear all elements and results names.
"""
del self._toklist[:]
self._tokdict.clear()
def __getattr__(self, name):
try:
return self[name]
except KeyError:
if name.startswith("__"):
raise AttributeError(name)
return ""
def __add__(self, other) -> "ParseResults":
ret = self.copy()
ret += other
return ret
def __iadd__(self, other) -> "ParseResults":
if other._tokdict:
offset = len(self._toklist)
addoffset = lambda a: offset if a < 0 else a + offset
otheritems = other._tokdict.items()
otherdictitems = [
(k, _ParseResultsWithOffset(v[0], addoffset(v[1])))
for k, vlist in otheritems
for v in vlist
]
for k, v in otherdictitems:
self[k] = v
if isinstance(v[0], ParseResults):
v[0]._parent = wkref(self)
self._toklist += other._toklist
self._all_names |= other._all_names
return self
def __radd__(self, other) -> "ParseResults":
if isinstance(other, int) and other == 0:
# useful for merging many ParseResults using sum() builtin
return self.copy()
else:
# this may raise a TypeError - so be it
return other + self
def __repr__(self) -> str:
return "{}({!r}, {})".format(type(self).__name__, self._toklist, self.as_dict())
def __str__(self) -> str:
return (
"["
+ ", ".join(
[
str(i) if isinstance(i, ParseResults) else repr(i)
for i in self._toklist
]
)
+ "]"
)
def _asStringList(self, sep=""):
out = []
for item in self._toklist:
if out and sep:
out.append(sep)
if isinstance(item, ParseResults):
out += item._asStringList()
else:
out.append(str(item))
return out
def as_list(self) -> list:
"""
Returns the parse results as a nested list of matching tokens, all converted to strings.
Example::
patt = Word(alphas)[1, ...]
result = patt.parse_string("sldkj lsdkj sldkj")
# even though the result prints in string-like form, it is actually a pyparsing ParseResults
print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj']
# Use as_list() to create an actual list
result_list = result.as_list()
print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj']
"""
return [
res.as_list() if isinstance(res, ParseResults) else res
for res in self._toklist
]
def as_dict(self) -> dict:
"""
Returns the named parse results as a nested dictionary.
Example::
integer = Word(nums)
date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
result = date_str.parse_string('12/31/1999')
print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]})
result_dict = result.as_dict()
print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'}
# even though a ParseResults supports dict-like access, sometime you just need to have a dict
import json
print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable
print(json.dumps(result.as_dict())) # -> {"month": "31", "day": "1999", "year": "12"}
"""
def to_item(obj):
if isinstance(obj, ParseResults):
return obj.as_dict() if obj.haskeys() else [to_item(v) for v in obj]
else:
return obj
return dict((k, to_item(v)) for k, v in self.items())
    def copy(self) -> "ParseResults":
        """
        Returns a new copy of a :class:`ParseResults` object.
        """
        # Shallow copy: the token list is re-wrapped and the name dict is
        # copied, but the contained tokens/values are shared with the
        # original.
        ret = ParseResults(self._toklist)
        ret._tokdict = self._tokdict.copy()
        ret._parent = self._parent
        ret._all_names |= self._all_names
        ret._name = self._name
        return ret
def get_name(self):
r"""
Returns the results name for this token expression. Useful when several
different expressions might match at a particular location.
Example::
integer = Word(nums)
ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d")
house_number_expr = Suppress('#') + Word(nums, alphanums)
user_data = (Group(house_number_expr)("house_number")
| Group(ssn_expr)("ssn")
| Group(integer)("age"))
user_info = user_data[1, ...]
result = user_info.parse_string("22 111-22-3333 #221B")
for item in result:
print(item.get_name(), ':', item[0])
prints::
age : 22
ssn : 111-22-3333
house_number : 221B
"""
if self._name:
return self._name
elif self._parent:
par = self._parent()
def find_in_parent(sub):
return next(
(
k
for k, vlist in par._tokdict.items()
for v, loc in vlist
if sub is v
),
None,
)
return find_in_parent(self) if par else None
elif (
len(self) == 1
and len(self._tokdict) == 1
and next(iter(self._tokdict.values()))[0][1] in (0, -1)
):
return next(iter(self._tokdict.keys()))
else:
return None
def dump(self, indent="", full=True, include_list=True, _depth=0) -> str:
"""
Diagnostic method for listing out the contents of
a :class:`ParseResults`. Accepts an optional ``indent`` argument so
that this string can be embedded in a nested display of other data.
Example::
integer = Word(nums)
date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
result = date_str.parse_string('1999/12/31')
print(result.dump())
prints::
['1999', '/', '12', '/', '31']
- day: '31'
- month: '12'
- year: '1999'
"""
out = []
NL = "\n"
out.append(indent + str(self.as_list()) if include_list else "")
if full:
if self.haskeys():
items = sorted((str(k), v) for k, v in self.items())
for k, v in items:
if out:
out.append(NL)
out.append("{}{}- {}: ".format(indent, (" " * _depth), k))
if isinstance(v, ParseResults):
if v:
out.append(
v.dump(
indent=indent,
full=full,
include_list=include_list,
_depth=_depth + 1,
)
)
else:
out.append(str(v))
else:
out.append(repr(v))
if any(isinstance(vv, ParseResults) for vv in self):
v = self
for i, vv in enumerate(v):
if isinstance(vv, ParseResults):
out.append(
"\n{}{}[{}]:\n{}{}{}".format(
indent,
(" " * (_depth)),
i,
indent,
(" " * (_depth + 1)),
vv.dump(
indent=indent,
full=full,
include_list=include_list,
_depth=_depth + 1,
),
)
)
else:
out.append(
"\n%s%s[%d]:\n%s%s%s"
% (
indent,
(" " * (_depth)),
i,
indent,
(" " * (_depth + 1)),
str(vv),
)
)
return "".join(out)
def pprint(self, *args, **kwargs):
"""
Pretty-printer for parsed results as a list, using the
`pprint <https://docs.python.org/3/library/pprint.html>`_ module.
Accepts additional positional or keyword args as defined for
`pprint.pprint <https://docs.python.org/3/library/pprint.html#pprint.pprint>`_ .
Example::
ident = Word(alphas, alphanums)
num = Word(nums)
func = Forward()
term = ident | num | Group('(' + func + ')')
func <<= ident + Group(Optional(delimited_list(term)))
result = func.parse_string("fna a,b,(fnb c,d,200),100")
result.pprint(width=40)
prints::
['fna',
['a',
'b',
['(', 'fnb', ['c', 'd', '200'], ')'],
'100']]
"""
pprint.pprint(self.as_list(), *args, **kwargs)
# add support for pickle protocol
def __getstate__(self):
return (
self._toklist,
(
self._tokdict.copy(),
self._parent is not None and self._parent() or None,
self._all_names,
self._name,
),
)
def __setstate__(self, state):
self._toklist, (self._tokdict, par, inAccumNames, self._name) = state
self._all_names = set(inAccumNames)
if par is not None:
self._parent = wkref(par)
else:
self._parent = None
def __getnewargs__(self):
return self._toklist, self._name
def __dir__(self):
return dir(type(self)) + list(self.keys())
@classmethod
def from_dict(cls, other, name=None) -> "ParseResults":
"""
Helper classmethod to construct a ``ParseResults`` from a ``dict``, preserving the
name-value relations as results names. If an optional ``name`` argument is
given, a nested ``ParseResults`` will be returned.
"""
def is_iterable(obj):
try:
iter(obj)
except Exception:
return False
else:
return not isinstance(obj, str_type)
ret = cls([])
for k, v in other.items():
if isinstance(v, Mapping):
ret += cls.from_dict(v, name=k)
else:
ret += cls([v], name=k, asList=is_iterable(v))
if name is not None:
ret = cls([ret], name=name)
return ret
asList = as_list
asDict = as_dict
getName = get_name
MutableMapping.register(ParseResults)
MutableSequence.register(ParseResults)
| mit | baf13cce8c5de7990d36a4140dcf6b61 | 32.343421 | 183 | 0.470542 | 4.328096 | false | false | false | false |
pypa/setuptools | setuptools/_vendor/importlib_resources/_compat.py | 8 | 2706 | # flake8: noqa
import abc
import sys
import pathlib
from contextlib import suppress
# ``zipfile.Path`` gained the functionality we need in Python 3.10;
# on older interpreters use the vendored ``zipp`` backport instead.
if sys.version_info >= (3, 10):
    from zipfile import Path as ZipPath  # type: ignore
else:
    from ..zipp import Path as ZipPath  # type: ignore


# ``runtime_checkable``/``Protocol`` may be missing from very old
# ``typing`` modules; degrade gracefully to no-op/ABC equivalents.
try:
    from typing import runtime_checkable  # type: ignore
except ImportError:

    def runtime_checkable(cls):  # type: ignore
        return cls


try:
    from typing import Protocol  # type: ignore
except ImportError:
    Protocol = abc.ABC  # type: ignore
class TraversableResourcesLoader:
    """
    Adapt loaders to provide TraversableResources and other
    compatibility.

    Used primarily for Python 3.9 and earlier where the native
    loaders do not yet implement TraversableResources.
    """

    def __init__(self, spec):
        self.spec = spec

    @property
    def path(self):
        # Origin (typically the filesystem location) of the wrapped module.
        return self.spec.origin

    def get_resource_reader(self, name):
        """Return the best available TraversableResources reader for the spec.

        Candidate readers are tried in order; each helper returns ``None``
        (or falls through) when it does not apply to this spec.
        """
        from . import readers, _adapters

        def _zip_reader(spec):
            # Applies when the loader exposes zip-archive attributes.
            with suppress(AttributeError):
                return readers.ZipReader(spec.loader, spec.name)

        def _namespace_reader(spec):
            # Applies to namespace packages (search locations, no loader).
            with suppress(AttributeError, ValueError):
                return readers.NamespaceReader(spec.submodule_search_locations)

        def _available_reader(spec):
            # Whatever reader the loader itself supplies, if any.
            with suppress(AttributeError):
                return spec.loader.get_resource_reader(spec.name)

        def _native_reader(spec):
            # Only trust a loader-provided reader that implements `files`.
            reader = _available_reader(spec)
            return reader if hasattr(reader, 'files') else None

        def _file_reader(spec):
            # Applies when the spec origin is an existing filesystem path.
            try:
                path = pathlib.Path(self.path)
            except TypeError:
                return None
            if path.exists():
                return readers.FileReader(self)

        return (
            # native reader if it supplies 'files'
            _native_reader(self.spec)
            or
            # local ZipReader if a zip module
            _zip_reader(self.spec)
            or
            # local NamespaceReader if a namespace module
            _namespace_reader(self.spec)
            or
            # local FileReader
            _file_reader(self.spec)
            # fallback - adapt the spec ResourceReader to TraversableReader
            or _adapters.CompatibilityFiles(self.spec)
        )
def wrap_spec(package):
    """
    Construct a package spec with traversable compatibility
    on the spec/loader/reader.

    Supersedes _adapters.wrap_spec to use TraversableResourcesLoader
    from above for older Python compatibility (<3.10).
    """
    from . import _adapters

    # Delegate to the generic adapter, substituting our compatibility loader.
    return _adapters.SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)
| mit | 1f6ece2b9e4f3b4b6c476ffc0d72078f | 26.612245 | 84 | 0.623429 | 4.44335 | false | false | false | false |
pypa/setuptools | setuptools/tests/test_config_discovery.py | 1 | 22192 | import os
import sys
from configparser import ConfigParser
from itertools import product
from setuptools.command.sdist import sdist
from setuptools.dist import Distribution
from setuptools.discovery import find_package_path, find_parent_package
from setuptools.errors import PackageDiscoveryError
import setuptools # noqa -- force distutils.core to be patched
import distutils.core
import pytest
import jaraco.path
from path import Path as _Path
from .contexts import quiet
from .integration.helpers import get_sdist_members, get_wheel_members, run
from .textwrap import DALS
class TestFindParentPackage:
    """Unit tests for ``setuptools.discovery.find_parent_package``."""

    def test_single_package(self, tmp_path):
        # find_parent_package should find a non-namespace parent package
        (tmp_path / "src/namespace/pkg/nested").mkdir(exist_ok=True, parents=True)
        (tmp_path / "src/namespace/pkg/nested/__init__.py").touch()
        (tmp_path / "src/namespace/pkg/__init__.py").touch()
        packages = ["namespace", "namespace.pkg", "namespace.pkg.nested"]
        # "namespace" itself lacks __init__.py, so "namespace.pkg" is the
        # expected (deepest non-namespace) common parent
        assert find_parent_package(packages, {"": "src"}, tmp_path) == "namespace.pkg"

    def test_multiple_toplevel(self, tmp_path):
        # find_parent_package should return null if the given list of packages does not
        # have a single parent package
        multiple = ["pkg", "pkg1", "pkg2"]
        for name in multiple:
            (tmp_path / f"src/{name}").mkdir(exist_ok=True, parents=True)
            (tmp_path / f"src/{name}/__init__.py").touch()
        assert find_parent_package(multiple, {"": "src"}, tmp_path) is None
class TestDiscoverPackagesAndPyModules:
    """Make sure discovered values for ``packages`` and ``py_modules`` work
    similarly to explicit configuration for the simple scenarios.
    """
    OPTIONS = {
        # Different options according to the circumstance being tested
        "explicit-src": {
            "package_dir": {"": "src"},
            "packages": ["pkg"]
        },
        "variation-lib": {
            "package_dir": {"": "lib"},  # variation of the source-layout
        },
        "explicit-flat": {
            "packages": ["pkg"]
        },
        "explicit-single_module": {
            "py_modules": ["pkg"]
        },
        "explicit-namespace": {
            "packages": ["ns", "ns.pkg"]
        },
        "automatic-src": {},
        "automatic-flat": {},
        "automatic-single_module": {},
        "automatic-namespace": {}
    }
    # Project files for each layout (the part after "-" in the OPTIONS keys)
    FILES = {
        "src": ["src/pkg/__init__.py", "src/pkg/main.py"],
        "lib": ["lib/pkg/__init__.py", "lib/pkg/main.py"],
        "flat": ["pkg/__init__.py", "pkg/main.py"],
        "single_module": ["pkg.py"],
        "namespace": ["ns/pkg/__init__.py"]
    }
    def _get_info(self, circumstance):
        """Return ``(files, options)`` for the parametrized *circumstance*."""
        _, _, layout = circumstance.partition("-")
        files = self.FILES[layout]
        options = self.OPTIONS[circumstance]
        return files, options
    @pytest.mark.parametrize("circumstance", OPTIONS.keys())
    def test_sdist_filelist(self, tmp_path, circumstance):
        """Every project file should end up in the computed sdist file list."""
        files, options = self._get_info(circumstance)
        _populate_project_dir(tmp_path, files, options)
        _, cmd = _run_sdist_programatically(tmp_path, options)
        manifest = [f.replace(os.sep, "/") for f in cmd.filelist.files]
        for file in files:
            assert any(f.endswith(file) for f in manifest)
    @pytest.mark.parametrize("circumstance", OPTIONS.keys())
    def test_project(self, tmp_path, circumstance):
        """Build sdist + wheel and check their members against the project
        files; pre-existing ``build``/``dist`` directories must be ignored.
        """
        files, options = self._get_info(circumstance)
        _populate_project_dir(tmp_path, files, options)
        # Simulate a pre-existing `build` directory
        (tmp_path / "build").mkdir()
        (tmp_path / "build/lib").mkdir()
        (tmp_path / "build/bdist.linux-x86_64").mkdir()
        (tmp_path / "build/bdist.linux-x86_64/file.py").touch()
        (tmp_path / "build/lib/__init__.py").touch()
        (tmp_path / "build/lib/file.py").touch()
        (tmp_path / "dist").mkdir()
        (tmp_path / "dist/file.py").touch()
        _run_build(tmp_path)
        sdist_files = get_sdist_members(next(tmp_path.glob("dist/*.tar.gz")))
        print("~~~~~ sdist_members ~~~~~")
        print('\n'.join(sdist_files))
        assert sdist_files >= set(files)
        wheel_files = get_wheel_members(next(tmp_path.glob("dist/*.whl")))
        print("~~~~~ wheel_members ~~~~~")
        print('\n'.join(wheel_files))
        orig_files = {f.replace("src/", "").replace("lib/", "") for f in files}
        assert wheel_files >= orig_files
        # Make sure build files are not included by mistake.
        # (Fix: the previous version asserted against ``files`` - the project
        # file list - instead of each wheel member, making both assertions
        # vacuous. We compare path *components* rather than substrings so
        # that e.g. ``pkg-0.0.0.dist-info/RECORD`` does not trip the check.)
        for file in wheel_files:
            parts = file.split("/")
            assert "build" not in parts
            assert "dist" not in parts
    # NOTE: attribute name keeps its historical typo ("EMPY") to avoid
    # breaking any external references to this class attribute.
    PURPOSEFULLY_EMPY = {
        "setup.cfg": DALS(
            """
            [metadata]
            name = myproj
            version = 0.0.0
            [options]
            {param} =
            """
        ),
        "setup.py": DALS(
            """
            __import__('setuptools').setup(
                name="myproj",
                version="0.0.0",
                {param}=[]
            )
            """
        ),
        "pyproject.toml": DALS(
            """
            [build-system]
            requires = []
            build-backend = 'setuptools.build_meta'
            [project]
            name = "myproj"
            version = "0.0.0"
            [tool.setuptools]
            {param} = []
            """
        ),
        "template-pyproject.toml": DALS(
            """
            [build-system]
            requires = []
            build-backend = 'setuptools.build_meta'
            """
        )
    }
    @pytest.mark.parametrize(
        "config_file, param, circumstance",
        product(
            ["setup.cfg", "setup.py", "pyproject.toml"],
            ["packages", "py_modules"],
            FILES.keys()
        )
    )
    def test_purposefully_empty(self, tmp_path, config_file, param, circumstance):
        """An explicitly empty ``packages``/``py_modules`` disables discovery."""
        files = self.FILES[circumstance] + ["mod.py", "other.py", "src/pkg/__init__.py"]
        _populate_project_dir(tmp_path, files, {})
        if config_file == "pyproject.toml":
            template_param = param.replace("_", "-")
        else:
            # Make sure build works with or without setup.cfg
            pyproject = self.PURPOSEFULLY_EMPY["template-pyproject.toml"]
            (tmp_path / "pyproject.toml").write_text(pyproject)
            template_param = param
        config = self.PURPOSEFULLY_EMPY[config_file].format(param=template_param)
        (tmp_path / config_file).write_text(config)
        dist = _get_dist(tmp_path, {})
        # When either parameter package or py_modules is an empty list,
        # then there should be no discovery
        assert getattr(dist, param) == []
        other = {"py_modules": "packages", "packages": "py_modules"}[param]
        assert getattr(dist, other) is None
    @pytest.mark.parametrize(
        "extra_files, pkgs",
        [
            (["venv/bin/simulate_venv"], {"pkg"}),
            (["pkg-stubs/__init__.pyi"], {"pkg", "pkg-stubs"}),
            (["other-stubs/__init__.pyi"], {"pkg", "other-stubs"}),
            (
                # Type stubs can also be namespaced
                ["namespace-stubs/pkg/__init__.pyi"],
                {"pkg", "namespace-stubs", "namespace-stubs.pkg"},
            ),
            (
                # Just the top-level package can have `-stubs`, ignore nested ones
                ["namespace-stubs/pkg-stubs/__init__.pyi"],
                {"pkg", "namespace-stubs"}
            ),
            (["_hidden/file.py"], {"pkg"}),
            (["news/finalize.py"], {"pkg"}),
        ]
    )
    def test_flat_layout_with_extra_files(self, tmp_path, extra_files, pkgs):
        """Known non-package folders must not break flat-layout discovery."""
        files = self.FILES["flat"] + extra_files
        _populate_project_dir(tmp_path, files, {})
        dist = _get_dist(tmp_path, {})
        assert set(dist.packages) == pkgs
    @pytest.mark.parametrize(
        "extra_files",
        [
            ["other/__init__.py"],
            ["other/finalize.py"],
        ]
    )
    def test_flat_layout_with_dangerous_extra_files(self, tmp_path, extra_files):
        """Ambiguous top-level folders must abort discovery with an error."""
        files = self.FILES["flat"] + extra_files
        _populate_project_dir(tmp_path, files, {})
        with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"):
            _get_dist(tmp_path, {})
    def test_flat_layout_with_single_module(self, tmp_path):
        """Files with invalid module names are ignored during discovery."""
        files = self.FILES["single_module"] + ["invalid-module-name.py"]
        _populate_project_dir(tmp_path, files, {})
        dist = _get_dist(tmp_path, {})
        assert set(dist.py_modules) == {"pkg"}
    def test_flat_layout_with_multiple_modules(self, tmp_path):
        """Multiple valid top-level modules must abort discovery with an error."""
        files = self.FILES["single_module"] + ["valid_module_name.py"]
        _populate_project_dir(tmp_path, files, {})
        with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"):
            _get_dist(tmp_path, {})
    def test_py_modules_when_wheel_dir_is_cwd(self, tmp_path):
        """Regression for issue 3692"""
        from setuptools import build_meta
        pyproject = '[project]\nname = "test"\nversion = "1"'
        (tmp_path / "pyproject.toml").write_text(DALS(pyproject), encoding="utf-8")
        (tmp_path / "foo.py").touch()
        with jaraco.path.DirectoryStack().context(tmp_path):
            build_meta.build_wheel(".")
        # Ensure py_modules are found
        wheel_files = get_wheel_members(next(tmp_path.glob("*.whl")))
        assert "foo.py" in wheel_files
class TestNoConfig:
    """Projects with no explicit ``name`` should have it auto-discovered."""

    DEFAULT_VERSION = "0.0.0"  # Default version given by setuptools

    EXAMPLES = {
        "pkg1": ["src/pkg1.py"],
        "pkg2": ["src/pkg2/__init__.py"],
        "pkg3": ["src/pkg3/__init__.py", "src/pkg3-stubs/__init__.py"],
        "pkg4": ["pkg4/__init__.py", "pkg4-stubs/__init__.py"],
        "ns.nested.pkg1": ["src/ns/nested/pkg1/__init__.py"],
        "ns.nested.pkg2": ["ns/nested/pkg2/__init__.py"],
    }

    @pytest.mark.parametrize("example", EXAMPLES.keys())
    def test_discover_name(self, tmp_path, example):
        """The distribution name should match the discovered package/module."""
        project_files = self.EXAMPLES[example]
        _populate_project_dir(tmp_path, project_files, {})
        distribution = _get_dist(tmp_path, {})
        assert distribution.get_name() == example

    def test_build_with_discovered_name(self, tmp_path):
        """Building an sdist should use the auto-discovered project name."""
        _populate_project_dir(tmp_path, ["src/ns/nested/pkg/__init__.py"], {})
        _run_build(tmp_path, "--sdist")
        # Expected distribution file
        expected = tmp_path / f"dist/ns.nested.pkg-{self.DEFAULT_VERSION}.tar.gz"
        assert expected.is_file()
class TestWithAttrDirective:
    """Interaction between auto-discovery and ``attr:``/dynamic metadata."""

    @pytest.mark.parametrize(
        "folder, opts",
        [
            ("src", {}),
            ("lib", {"packages": "find:", "packages.find": {"where": "lib"}}),
        ]
    )
    def test_setupcfg_metadata(self, tmp_path, folder, opts):
        """``version = attr:`` should work together with package discovery."""
        files = [f"{folder}/pkg/__init__.py", "setup.cfg"]
        _populate_project_dir(tmp_path, files, opts)
        (tmp_path / folder / "pkg/__init__.py").write_text("version = 42")

        # Prepend the dynamic version directive to the generated setup.cfg
        setupcfg = tmp_path / "setup.cfg"
        existing = setupcfg.read_text()
        setupcfg.write_text("[metadata]\nversion = attr: pkg.version\n" + existing)

        dist = _get_dist(tmp_path, {})
        assert dist.get_name() == "pkg"
        assert dist.get_version() == "42"
        assert dist.package_dir

        package_path = find_package_path("pkg", dist.package_dir, tmp_path)
        assert os.path.exists(package_path)
        assert folder in _Path(package_path).parts()

        _run_build(tmp_path, "--sdist")
        dist_file = tmp_path / "dist/pkg-42.tar.gz"
        assert dist_file.is_file()

    def test_pyproject_metadata(self, tmp_path):
        """``dynamic = ['version']`` + ``attr`` should work with discovery."""
        _populate_project_dir(tmp_path, ["src/pkg/__init__.py"], {})
        (tmp_path / "src/pkg/__init__.py").write_text("version = 42")
        pyproject_contents = (
            "[project]\nname = 'pkg'\ndynamic = ['version']\n"
            "[tool.setuptools.dynamic]\nversion = {attr = 'pkg.version'}\n"
        )
        (tmp_path / "pyproject.toml").write_text(pyproject_contents)
        dist = _get_dist(tmp_path, {})
        assert dist.get_version() == "42"
        assert dist.package_dir == {"": "src"}
class TestWithCExtension:
    """Auto-discovery behaviour for projects that declare C extensions."""

    def _simulate_package_with_extension(self, tmp_path):
        # This example is based on: https://github.com/nucleic/kiwi/tree/1.4.0
        project_files = [
            "benchmarks/file.py",
            "docs/Makefile",
            "docs/requirements.txt",
            "docs/source/conf.py",
            "proj/header.h",
            "proj/file.py",
            "py/proj.cpp",
            "py/other.cpp",
            "py/file.py",
            "py/py.typed",
            "py/tests/test_proj.py",
            "README.rst",
        ]
        _populate_project_dir(tmp_path, project_files, {})

        setup_script = """
            from setuptools import Extension, setup
            ext_modules = [
                Extension(
                    "proj",
                    ["py/proj.cpp", "py/other.cpp"],
                    include_dirs=["."],
                    language="c++",
                ),
            ]
            setup(ext_modules=ext_modules)
        """
        (tmp_path / "setup.py").write_text(DALS(setup_script))

    def test_skip_discovery_with_setupcfg_metadata(self, tmp_path):
        """Ensure that auto-discovery is not triggered when the project is based on
        C-extensions only, for backward compatibility.
        """
        self._simulate_package_with_extension(tmp_path)

        pyproject = """
            [build-system]
            requires = []
            build-backend = 'setuptools.build_meta'
        """
        (tmp_path / "pyproject.toml").write_text(DALS(pyproject))

        setupcfg = """
            [metadata]
            name = proj
            version = 42
        """
        (tmp_path / "setup.cfg").write_text(DALS(setupcfg))

        dist = _get_dist(tmp_path, {})
        assert dist.get_name() == "proj"
        assert dist.get_version() == "42"
        # No package/module discovery should have happened...
        assert dist.py_modules is None
        assert dist.packages is None
        # ... but the extension must still be registered:
        assert len(dist.ext_modules) == 1
        assert dist.ext_modules[0].name == "proj"

    def test_dont_skip_discovery_with_pyproject_metadata(self, tmp_path):
        """When opting-in to pyproject.toml metadata, auto-discovery will be active if
        the package lists C-extensions, but does not configure py-modules or packages.

        This way we ensure users with complex package layouts that would lead to the
        discovery of multiple top-level modules/packages see errors and are forced to
        explicitly set ``packages`` or ``py-modules``.
        """
        self._simulate_package_with_extension(tmp_path)

        pyproject = """
            [project]
            name = 'proj'
            version = '42'
        """
        (tmp_path / "pyproject.toml").write_text(DALS(pyproject))
        with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"):
            _get_dist(tmp_path, {})
class TestWithPackageData:
    """``include_package_data`` must be honoured under auto-discovery."""

    def _simulate_package_with_data_files(self, tmp_path, src_root):
        data_files = [
            f"{src_root}/proj/__init__.py",
            f"{src_root}/proj/file1.txt",
            f"{src_root}/proj/nested/file2.txt",
        ]
        _populate_project_dir(tmp_path, data_files, {})

        manifest_contents = """
            global-include *.py *.txt
        """
        (tmp_path / "MANIFEST.in").write_text(DALS(manifest_contents))

    EXAMPLE_SETUPCFG = """
    [metadata]
    name = proj
    version = 42
    [options]
    include_package_data = True
    """
    EXAMPLE_PYPROJECT = """
    [project]
    name = "proj"
    version = "42"
    """
    PYPROJECT_PACKAGE_DIR = """
    [tool.setuptools]
    package-dir = {"" = "src"}
    """

    @pytest.mark.parametrize(
        "src_root, files",
        [
            (".", {"setup.cfg": DALS(EXAMPLE_SETUPCFG)}),
            (".", {"pyproject.toml": DALS(EXAMPLE_PYPROJECT)}),
            ("src", {"setup.cfg": DALS(EXAMPLE_SETUPCFG)}),
            ("src", {"pyproject.toml": DALS(EXAMPLE_PYPROJECT)}),
            (
                "src",
                {
                    "setup.cfg": DALS(EXAMPLE_SETUPCFG) + DALS(
                        """
                        packages = find:
                        package_dir =
                            =src
                        [options.packages.find]
                        where = src
                        """
                    )
                }
            ),
            (
                "src",
                {
                    "pyproject.toml": DALS(EXAMPLE_PYPROJECT) + DALS(
                        """
                        [tool.setuptools]
                        package-dir = {"" = "src"}
                        """
                    )
                },
            ),
        ]
    )
    def test_include_package_data(self, tmp_path, src_root, files):
        """
        Make sure auto-discovery does not affect package include_package_data.
        See issue #3196.
        """
        jaraco.path.build(files, prefix=str(tmp_path))
        self._simulate_package_with_data_files(tmp_path, src_root)

        expected = {
            os.path.normpath(f"{src_root}/proj/file1.txt").replace(os.sep, "/"),
            os.path.normpath(f"{src_root}/proj/nested/file2.txt").replace(os.sep, "/"),
        }

        _run_build(tmp_path)

        sdist_members = get_sdist_members(next(tmp_path.glob("dist/*.tar.gz")))
        print("~~~~~ sdist_members ~~~~~")
        print('\n'.join(sdist_members))
        assert sdist_members >= expected

        wheel_members = get_wheel_members(next(tmp_path.glob("dist/*.whl")))
        print("~~~~~ wheel_members ~~~~~")
        print('\n'.join(wheel_members))
        expected_in_wheel = {f.replace("src/", "").replace("lib/", "") for f in expected}
        assert wheel_members >= expected_in_wheel
def test_compatible_with_numpy_configuration(tmp_path):
    """A pre-set ``configuration`` attribute (numpy.distutils style) should
    disable package/module auto-discovery entirely.
    """
    _populate_project_dir(
        tmp_path,
        ["dir1/__init__.py", "dir2/__init__.py", "file.py"],
        {},
    )
    dist = Distribution({})
    dist.configuration = object()
    dist.set_defaults()
    assert dist.py_modules is None
    assert dist.packages is None
def test_name_discovery_doesnt_break_cli(tmpdir_cwd):
    """Running a metadata-only command like ``--name`` must not crash when the
    project name has to be auto-discovered.
    """
    jaraco.path.build({"pkg.py": ""})
    distribution = Distribution({})
    distribution.script_args = ["--name"]
    distribution.set_defaults()
    distribution.parse_command_line()  # <-- no exception should be raised here.
    assert distribution.get_name() == "pkg"
def test_preserve_explicit_name_with_dynamic_version(tmpdir_cwd, monkeypatch):
    """According to #3545 it seems that ``name`` discovery is running,
    even when the project already explicitly sets it.
    This seems to be related to parsing of dynamic versions (via ``attr`` directive),
    which requires the auto-discovery of ``package_dir``.
    """
    project = {
        "src": {
            "pkg": {"__init__.py": "__version__ = 42\n"},
        },
        "pyproject.toml": DALS("""
            [project]
            name = "myproj" # purposefully different from package name
            dynamic = ["version"]
            [tool.setuptools.dynamic]
            version = {"attr" = "pkg.__version__"}
            """)
    }
    jaraco.path.build(project)
    dist = Distribution({})
    orig_analyse_name = dist.set_defaults.analyse_name

    def spy_analyse_name():
        # We can check if name discovery was triggered by ensuring the original
        # name remains instead of the package name.
        orig_analyse_name()
        assert dist.get_name() == "myproj"

    monkeypatch.setattr(dist.set_defaults, "analyse_name", spy_analyse_name)
    dist.parse_config_files()
    assert dist.get_version() == "42"
    assert set(dist.packages) == {"pkg"}
def _populate_project_dir(root, files, options):
    """Create a minimal project under *root* containing the given *files*.

    ``setup.py``, ``README.md`` and ``LICENSE`` are always created, *options*
    are written to ``setup.cfg`` (see ``_write_setupcfg``) and every entry in
    *files* is created empty (including parent directories).
    """
    # NOTE: Currently pypa/build will refuse to build the project if no
    # `pyproject.toml` or `setup.py` is found. So it is impossible to do
    # completely "config-less" projects.
    # Explicit encoding keeps the output independent of the platform locale.
    setup = "import setuptools\nsetuptools.setup()"
    (root / "setup.py").write_text(setup, encoding="utf-8")
    (root / "README.md").write_text("# Example Package", encoding="utf-8")
    (root / "LICENSE").write_text("Copyright (c) 2018", encoding="utf-8")
    _write_setupcfg(root, options)
    for file in files:
        path = root / file
        path.parent.mkdir(exist_ok=True, parents=True)
        path.touch()
def _write_setupcfg(root, options):
    """Serialise *options* into ``<root>/setup.cfg`` under ``[options]``.

    A ``"packages.find"`` key becomes its own ``[options.packages.find]``
    section; list values are comma-joined; dict values become indented
    ``key = value`` lines. Nothing is written when *options* is empty.
    """
    if not options:
        print("~~~~~ **NO** setup.cfg ~~~~~")
        return
    setupcfg = ConfigParser()
    setupcfg.add_section("options")
    for key, value in options.items():
        if key == "packages.find":
            setupcfg.add_section(f"options.{key}")
            setupcfg[f"options.{key}"].update(value)
        elif isinstance(value, list):
            setupcfg["options"][key] = ", ".join(value)
        elif isinstance(value, dict):
            str_value = "\n".join(f"\t{k} = {v}" for k, v in value.items())
            setupcfg["options"][key] = "\n" + str_value
        else:
            setupcfg["options"][key] = str(value)
    # Fix: write/read with an explicit encoding instead of relying on the
    # locale-dependent platform default.
    with open(root / "setup.cfg", "w", encoding="utf-8") as f:
        setupcfg.write(f)
    print("~~~~~ setup.cfg ~~~~~")
    print((root / "setup.cfg").read_text(encoding="utf-8"))
def _run_build(path, *flags):
    """Invoke ``python -m build`` on *path*, forwarding any extra *flags*."""
    args = [sys.executable, "-m", "build", "--no-isolation", *flags, str(path)]
    return run(args, env={'DISTUTILS_DEBUG': ''})
def _get_dist(dist_path, attrs):
    """Return a configured ``Distribution`` for the project at *dist_path*.

    Runs ``setup.py`` (stopping right after initialisation) when one exists,
    otherwise builds a ``Distribution`` directly from *attrs*. In both cases
    config files are parsed and defaults computed while chdir'd into
    *dist_path*.
    """
    posix_root = "/".join(os.path.split(dist_path))  # POSIX-style
    setup_script = dist_path / 'setup.py'
    if setup_script.exists():
        with _Path(dist_path):
            dist = distutils.core.run_setup("setup.py", {}, stop_after="init")
    else:
        dist = Distribution(attrs)
    dist.src_root = posix_root
    dist.script_name = "setup.py"
    with _Path(dist_path):
        dist.parse_config_files()
        dist.set_defaults()
    return dist
def _run_sdist_programatically(dist_path, attrs):
    """Run the ``sdist`` command in-process and return ``(dist, cmd)``.

    (Function name keeps its historical spelling for compatibility.)
    """
    dist = _get_dist(dist_path, attrs)
    sdist_cmd = sdist(dist)
    sdist_cmd.ensure_finalized()
    assert sdist_cmd.distribution.packages or sdist_cmd.distribution.py_modules
    with quiet(), _Path(dist_path):
        sdist_cmd.run()
    return dist, sdist_cmd
| mit | 50776102a6d74d2689666b5cfd697326 | 33.838305 | 88 | 0.547224 | 3.713521 | false | true | false | false |
pypa/setuptools | setuptools/tests/test_packageindex.py | 1 | 10263 | import sys
import os
import distutils.errors
import platform
import urllib.request
import urllib.error
import http.client
from unittest import mock
import pytest
import setuptools.package_index
from .textwrap import DALS
class TestPackageIndex:
    """Tests for ``setuptools.package_index.PackageIndex``."""

    def test_regex(self):
        """``PYPI_MD5`` should recognise an md5-fragment link pair."""
        hash_url = 'http://other_url?:action=show_md5&'
        hash_url += 'digest=0123456789abcdef0123456789abcdef'
        doc = """
            <a href="http://some_url">Name</a>
            (<a title="MD5 hash"
            href="{hash_url}">md5</a>)
        """.lstrip().format(
            **locals()
        )
        assert setuptools.package_index.PYPI_MD5.match(doc)

    def test_bad_url_bad_port(self):
        """Errors for unreachable URLs should mention the offending URL."""
        index = setuptools.package_index.PackageIndex()
        url = 'http://127.0.0.1:0/nonesuch/test_package_index'
        try:
            v = index.open_url(url)
        except Exception as v:
            assert url in str(v)
        else:
            assert isinstance(v, urllib.error.HTTPError)

    def test_bad_url_typo(self):
        # issue 16
        # easy_install inquant.contentmirror.plone breaks because of a typo
        # in its home URL
        index = setuptools.package_index.PackageIndex(hosts=('www.example.com',))

        url = (
            'url:%20https://svn.plone.org/svn'
            '/collective/inquant.contentmirror.plone/trunk'
        )
        try:
            v = index.open_url(url)
        except Exception as v:
            assert url in str(v)
        else:
            assert isinstance(v, urllib.error.HTTPError)

    def test_bad_url_bad_status_line(self):
        """A ``BadStatusLine`` from the opener should surface in the error."""
        index = setuptools.package_index.PackageIndex(hosts=('www.example.com',))

        def _urlopen(*args):
            raise http.client.BadStatusLine('line')

        index.opener = _urlopen
        url = 'http://example.com'
        try:
            index.open_url(url)
        except Exception as exc:
            assert 'line' in str(exc)
        else:
            raise AssertionError('Should have raise here!')

    def test_bad_url_double_scheme(self):
        """
        A bad URL with a double scheme should raise a DistutilsError.
        """
        index = setuptools.package_index.PackageIndex(hosts=('www.example.com',))

        # issue 20
        url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk'
        try:
            index.open_url(url)
        except distutils.errors.DistutilsError as error:
            msg = str(error)
            # The exact error text is platform/resolver dependent:
            assert (
                'nonnumeric port' in msg
                or 'getaddrinfo failed' in msg
                or 'Name or service not known' in msg
            )
            return
        raise RuntimeError("Did not raise")

    def test_bad_url_screwy_href(self):
        index = setuptools.package_index.PackageIndex(hosts=('www.example.com',))

        # issue #160 - this should not fail.
        # (Fix: this body was previously guarded by a check for Python 2.7,
        # which is never true in this Python-3-only test suite, so the
        # regression test silently never executed.)
        url = 'http://example.com'
        page = '<a href="http://www.famfamfam.com](' 'http://www.famfamfam.com/">'
        index.process_index(url, page)

    def test_url_ok(self):
        """``file:`` URLs are always considered OK."""
        index = setuptools.package_index.PackageIndex(hosts=('www.example.com',))
        url = 'file:///tmp/test_package_index'
        assert index.url_ok(url, True)

    def test_parse_bdist_wininst(self):
        """``parse_bdist_wininst`` should split (base, py_ver, platform) for
        each known filename shape."""
        parse = setuptools.package_index.parse_bdist_wininst

        actual = parse('reportlab-2.5.win32-py2.4.exe')
        expected = 'reportlab-2.5', '2.4', 'win32'
        assert actual == expected

        actual = parse('reportlab-2.5.win32.exe')
        expected = 'reportlab-2.5', None, 'win32'
        assert actual == expected

        actual = parse('reportlab-2.5.win-amd64-py2.7.exe')
        expected = 'reportlab-2.5', '2.7', 'win-amd64'
        assert actual == expected

        actual = parse('reportlab-2.5.win-amd64.exe')
        expected = 'reportlab-2.5', None, 'win-amd64'
        assert actual == expected

    def test__vcs_split_rev_from_url(self):
        """
        Test the basic usage of _vcs_split_rev_from_url
        """
        vsrfu = setuptools.package_index.PackageIndex._vcs_split_rev_from_url
        url, rev = vsrfu('https://example.com/bar@2995')
        assert url == 'https://example.com/bar'
        assert rev == '2995'

    def test_local_index(self, tmpdir):
        """
        local_open should be able to read an index from the file system.
        """
        index_file = tmpdir / 'index.html'
        with index_file.open('w') as f:
            f.write('<div>content</div>')
        url = 'file:' + urllib.request.pathname2url(str(tmpdir)) + '/'
        res = setuptools.package_index.local_open(url)
        assert 'content' in res.read()

    def test_egg_fragment(self):
        """
        EGG fragments must comply to PEP 440
        """
        epoch = [
            '',
            '1!',
        ]
        releases = [
            '0',
            '0.0',
            '0.0.0',
        ]
        pre = [
            'a0',
            'b0',
            'rc0',
        ]
        post = ['.post0']
        dev = [
            '.dev0',
        ]
        local = [
            ('', ''),
            ('+ubuntu.0', '+ubuntu.0'),
            ('+ubuntu-0', '+ubuntu.0'),
            ('+ubuntu_0', '+ubuntu.0'),
        ]
        # Pairs of (version as written, canonicalized version)
        versions = [
            [''.join([e, r, p, loc]) for loc in locs]
            for e in epoch
            for r in releases
            for p in sum([pre, post, dev], [''])
            for locs in local
        ]
        for v, vc in versions:
            dists = list(
                setuptools.package_index.distros_for_url(
                    'http://example.com/example.zip#egg=example-' + v
                )
            )
            assert dists[0].version == ''
            assert dists[1].version == vc

    def test_download_git_with_rev(self, tmpdir):
        """A git URL with a revision should clone then checkout."""
        url = 'git+https://github.example/group/project@master#egg=foo'
        index = setuptools.package_index.PackageIndex()

        with mock.patch("os.system") as os_system_mock:
            result = index.download(url, str(tmpdir))

        os_system_mock.assert_called()

        expected_dir = str(tmpdir / 'project@master')
        expected = (
            'git clone --quiet ' 'https://github.example/group/project {expected_dir}'
        ).format(**locals())
        first_call_args = os_system_mock.call_args_list[0][0]
        assert first_call_args == (expected,)

        tmpl = 'git -C {expected_dir} checkout --quiet master'
        expected = tmpl.format(**locals())
        assert os_system_mock.call_args_list[1][0] == (expected,)
        assert result == expected_dir

    def test_download_git_no_rev(self, tmpdir):
        """A git URL without a revision should only clone (no checkout)."""
        url = 'git+https://github.example/group/project#egg=foo'
        index = setuptools.package_index.PackageIndex()

        with mock.patch("os.system") as os_system_mock:
            index.download(url, str(tmpdir))

        os_system_mock.assert_called()

        expected_dir = str(tmpdir / 'project')
        expected = (
            'git clone --quiet ' 'https://github.example/group/project {expected_dir}'
        ).format(**locals())
        os_system_mock.assert_called_once_with(expected)

    def test_download_svn(self, tmpdir):
        """Downloading via svn is deprecated and should emit a warning."""
        url = 'svn+https://svn.example/project#egg=foo'
        index = setuptools.package_index.PackageIndex()

        with pytest.warns(UserWarning):
            with mock.patch("os.system") as os_system_mock:
                index.download(url, str(tmpdir))

        os_system_mock.assert_called()

        expected_dir = str(tmpdir / 'project')
        expected = (
            'svn checkout -q ' 'svn+https://svn.example/project {expected_dir}'
        ).format(**locals())
        os_system_mock.assert_called_once_with(expected)
class TestContentCheckers:
    """Tests for ``setuptools.package_index.HashChecker``."""

    def test_md5(self):
        hash_checker = setuptools.package_index.HashChecker.from_url(
            'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478'
        )
        hash_checker.feed('You should probably not be using MD5'.encode('ascii'))
        assert hash_checker.hash.hexdigest() == 'f12895fdffbd45007040d2e44df98478'
        assert hash_checker.is_valid()

    def test_other_fragment(self):
        "Content checks should succeed silently if no hash is present"
        hash_checker = setuptools.package_index.HashChecker.from_url(
            'http://foo/bar#something%20completely%20different'
        )
        hash_checker.feed('anything'.encode('ascii'))
        assert hash_checker.is_valid()

    def test_blank_md5(self):
        "Content checks should succeed if a hash is empty"
        hash_checker = setuptools.package_index.HashChecker.from_url(
            'http://foo/bar#md5='
        )
        hash_checker.feed('anything'.encode('ascii'))
        assert hash_checker.is_valid()

    def test_get_hash_name_md5(self):
        hash_checker = setuptools.package_index.HashChecker.from_url(
            'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478'
        )
        assert hash_checker.hash_name == 'md5'

    def test_report(self):
        hash_checker = setuptools.package_index.HashChecker.from_url(
            'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478'
        )
        report = hash_checker.report(lambda x: x, 'My message about %s')
        assert report == 'My message about md5'
@pytest.fixture
def temp_home(tmpdir, monkeypatch):
    """Point the current user's home directory at a fresh temporary folder."""
    if platform.system() == 'Windows' and sys.version_info > (3, 8):
        # presumably because expanduser prefers USERPROFILE on Windows
        # since Python 3.8 - TODO confirm
        env_var = 'USERPROFILE'
    else:
        env_var = 'HOME'
    monkeypatch.setitem(os.environ, env_var, str(tmpdir))
    return tmpdir
class TestPyPIConfig:
    """Tests for ``~/.pypirc`` parsing."""

    def test_percent_in_password(self, temp_home):
        """A literal ``%`` in the password must survive config interpolation."""
        pypirc_contents = DALS(
            """
            [pypi]
            repository=https://pypi.org
            username=jaraco
            password=pity%
            """
        )
        (temp_home / '.pypirc').write(pypirc_contents)
        cfg = setuptools.package_index.PyPIConfig()
        cred = cfg.creds_by_repository['https://pypi.org']
        assert cred.username == 'jaraco'
        assert cred.password == 'pity%'
@pytest.mark.timeout(1)
def test_REL_DoS():
    """
    REL should not hang on a contrived attack string.
    """
    attack_string = '< rel=' + ' ' * 2**12
    setuptools.package_index.REL.search(attack_string)
| mit | 3b26d81920b7d21b68dc497c8f285888 | 31.580952 | 86 | 0.565916 | 3.732 | false | true | false | false |
locustio/locust | locust/test/test_log.py | 1 | 6737 | from unittest import mock
import socket
import subprocess
import textwrap
from logging import getLogger
import gevent
from locust import log
from locust.log import greenlet_exception_logger
from .testcases import LocustTestCase
from .util import temporary_file
from . import changed_rlimit
class TestGreenletExceptionLogger(LocustTestCase):
    """Tests for :func:`locust.log.greenlet_exception_logger`."""

    # Gevent outputs all unhandled exceptions to stderr, so we'll suppress that in this test
    @mock.patch("sys.stderr.write")
    def test_greenlet_exception_logger(self, mocked_stderr):
        self.assertFalse(log.unhandled_greenlet_exception)

        def raise_value_error():
            raise ValueError("Boom!?")

        logger = getLogger("greenlet_test_logger")
        greenlet = gevent.spawn(raise_value_error)
        greenlet.link_exception(greenlet_exception_logger(logger))
        greenlet.join()

        # Exactly one critical log entry describing the failure is expected
        self.assertEqual(1, len(self.mocked_log.critical))
        entry = self.mocked_log.critical[0]
        self.assertIn("Unhandled exception in greenlet: ", entry["message"])
        self.assertTrue(isinstance(entry["exc_info"][1], ValueError))
        self.assertIn("Boom!?", str(entry["exc_info"][1]))
        self.assertTrue(log.unhandled_greenlet_exception)
class TestLoggingOptions(LocustTestCase):
    """Integration tests for locust's logging-related CLI options.

    Each test writes a small locustfile, runs the ``locust`` executable in a
    subprocess for one second and inspects the combined stdout/stderr (and,
    for ``--logfile``, the log file contents).
    """
    def test_logging_output(self):
        """Default logging: locust's own messages, user ``print`` output and
        messages from both the root logger and a custom logger must all show
        up in the captured subprocess output.
        """
        with temporary_file(
            textwrap.dedent(
                """
                import logging
                from locust import User, task, constant
                custom_logger = logging.getLogger("custom_logger")
                class MyUser(User):
                    wait_time = constant(2)
                    @task
                    def my_task(self):
                        print("running my_task")
                        logging.info("custom log message")
                        custom_logger.info("test")
                """
            )
        ) as file_path:
            # Run headless for 1s with a single user; stderr is merged into
            # stdout so all log lines can be asserted on one string.
            output = subprocess.check_output(
                [
                    "locust",
                    "-f",
                    file_path,
                    "-u",
                    "1",
                    "-r",
                    "1",
                    "-t",
                    "1",
                    "--headless",
                ],
                stderr=subprocess.STDOUT,
                timeout=10,
                text=True,
            )
        # Locust's own framework messages use the hostname/LEVEL/logger prefix
        self.assertIn(
            f"{socket.gethostname()}/INFO/locust.main: Run time limit set to 1 seconds",
            output,
        )
        self.assertIn(
            f"{socket.gethostname()}/INFO/locust.main: --run-time limit reached, shutting down",
            output,
        )
        self.assertIn(
            f"{socket.gethostname()}/INFO/locust.main: Shutting down (exit code 0)",
            output,
        )
        # plain print() output from the task is passed through unchanged
        self.assertIn(
            "\nrunning my_task\n",
            output,
        )
        # check that custom message of root logger is also printed
        self.assertIn(
            f"{socket.gethostname()}/INFO/root: custom log message",
            output,
        )
        # check that custom message of custom_logger is also printed
        self.assertIn(
            f"{socket.gethostname()}/INFO/custom_logger: test",
            output,
        )
    def test_skip_logging(self):
        """With ``--skip-log-setup`` only the task's ``print`` output should
        appear - no locust log lines at all.
        """
        with temporary_file(
            textwrap.dedent(
                """
                from locust import User, task, constant
                class MyUser(User):
                    wait_time = constant(2)
                    @task
                    def my_task(self):
                        print("running my_task")
                """
            )
        ) as file_path:
            output = subprocess.check_output(
                [
                    "locust",
                    "-f",
                    file_path,
                    "-u",
                    "1",
                    "-r",
                    "1",
                    "-t",
                    "1",
                    "--headless",
                    "--skip-log-setup",
                ],
                stderr=subprocess.STDOUT,
                timeout=10,
                text=True,
            )
            # NOTE(review): when the test harness adjusted the open-files
            # rlimit, extra output presumably precedes the task output, so
            # only the suffix is checked in that case - TODO confirm
            if not changed_rlimit:
                self.assertTrue(output.strip().endswith("running my_task"))
            else:
                self.assertEqual("running my_task", output.strip())
    def test_log_to_file(self):
        """With ``--logfile`` all log messages must go to the file and only
        ``print`` output remains on stdout/stderr.
        """
        with temporary_file(
            textwrap.dedent(
                """
                import logging
                from locust import User, task, constant
                class MyUser(User):
                    wait_time = constant(2)
                    @task
                    def my_task(self):
                        print("running my_task")
                        logging.info("custom log message")
                """
            )
        ) as file_path:
            with temporary_file("", suffix=".log") as log_file_path:
                try:
                    output = subprocess.check_output(
                        [
                            "locust",
                            "-f",
                            file_path,
                            "-u",
                            "1",
                            "-r",
                            "1",
                            "-t",
                            "1",
                            "--headless",
                            "--logfile",
                            log_file_path,
                        ],
                        stderr=subprocess.STDOUT,
                        timeout=10,
                        text=True,
                    )
                except subprocess.CalledProcessError as e:
                    raise AssertionError(f"Running locust command failed. Output was:\n\n{e.stdout}") from e
                with open(log_file_path, encoding="utf-8") as f:
                    log_content = f.read()
        # make sure print still appears in output
        self.assertIn("running my_task", output)
        # check that log messages don't go into output
        self.assertNotIn("Starting Locust", output)
        self.assertNotIn("Run time limit set to 1 seconds", output)
        # check that log messages goes into file
        self.assertIn(
            f"{socket.gethostname()}/INFO/locust.main: Run time limit set to 1 seconds",
            log_content,
        )
        self.assertIn(
            f"{socket.gethostname()}/INFO/locust.main: --run-time limit reached, shutting down",
            log_content,
        )
        self.assertIn(
            f"{socket.gethostname()}/INFO/locust.main: Shutting down (exit code 0)",
            log_content,
        )
        # check that message of custom logger also went into log file
        self.assertIn(
            f"{socket.gethostname()}/INFO/root: custom log message",
            log_content,
        )
hydralabs/pyamf | pyamf/amf3.py | 3 | 44902 | # Copyright (c) The PyAMF Project.
# See LICENSE.txt for details.
"""
AMF3 implementation.
C{AMF3} is the default serialization for
U{ActionScript<http://en.wikipedia.org/wiki/ActionScript>} 3.0 and provides
various advantages over L{AMF0<pyamf.amf0>}, which is used for ActionScript 1.0
and 2.0. It adds support for sending C{int} and C{uint} objects as integers and
supports data types that are available only in ActionScript 3.0, such as
L{ByteArray} and L{ArrayCollection}.
@see: U{Official AMF3 Specification in English
<http://opensource.adobe.com/wiki/download/attachments/1114283/amf3_spec_05_05_08.pdf>}
@see: U{Official AMF3 Specification in Japanese
<http://opensource.adobe.com/wiki/download/attachments/1114283/JP_amf3_spec_121207.pdf>}
@see: U{AMF3 documentation on OSFlash
<http://osflash.org/documentation/amf3>}
@since: 0.1
"""
import datetime
import zlib
import pyamf
from pyamf import codec, util, xml, python
#: Public names exported by ``from pyamf.amf3 import *``.
__all__ = [
    'ByteArray',
    'Context',
    'Encoder',
    'Decoder',
    'use_proxies_default',
]
#: If True encode/decode lists/tuples to L{ArrayCollection
#: <pyamf.flex.ArrayCollection>} and dicts to L{ObjectProxy
#: <pyamf.flex.ObjectProxy>}
use_proxies_default = False
#: The undefined type is represented by the undefined type marker. No further
#: information is encoded for this value.
TYPE_UNDEFINED = '\x00'
#: The null type is represented by the null type marker. No further
#: information is encoded for this value.
TYPE_NULL = '\x01'
#: The false type is represented by the false type marker and is used to
#: encode a Boolean value of C{false}. No further information is encoded for
#: this value.
TYPE_BOOL_FALSE = '\x02'
#: The true type is represented by the true type marker and is used to encode
#: a Boolean value of C{true}. No further information is encoded for this
#: value.
TYPE_BOOL_TRUE = '\x03'
#: In AMF 3 integers are serialized using a variable length signed 29-bit
#: integer.
#: @see: U{Parsing Integers on OSFlash (external)
#: <http://osflash.org/amf3/parsing_integers>}
TYPE_INTEGER = '\x04'
#: This type is used to encode an ActionScript Number or an ActionScript
#: C{int} of value greater than or equal to 2^28 or an ActionScript uint of
#: value greater than or equal to 2^29. The encoded value is is always an 8
#: byte IEEE-754 double precision floating point value in network byte order
#: (sign bit in low memory). The AMF 3 number type is encoded in the same
#: manner as the AMF 0 L{Number<pyamf.amf0.TYPE_NUMBER>} type.
TYPE_NUMBER = '\x05'
#: ActionScript String values are represented using a single string type in
#: AMF 3 - the concept of string and long string types from AMF 0 is not used.
#: Strings can be sent as a reference to a previously occurring String by
#: using an index to the implicit string reference table. Strings are encoding
#: using UTF-8 - however the header may either describe a string literal or a
#: string reference.
TYPE_STRING = '\x06'
#: ActionScript 3.0 introduced a new XML type however the legacy C{XMLDocument}
#: type from ActionScript 1.0 and 2.0.is retained in the language as
#: C{flash.xml.XMLDocument}. Similar to AMF 0, the structure of an
#: C{XMLDocument} needs to be flattened into a string representation for
#: serialization. As with other strings in AMF, the content is encoded in
#: UTF-8. XMLDocuments can be sent as a reference to a previously occurring
#: C{XMLDocument} instance by using an index to the implicit object reference
#: table.
#: @see: U{OSFlash documentation (external)
#: <http://osflash.org/documentation/amf3
#: #x07_-_xml_legacy_flash.xml.xmldocument_class>}
TYPE_XML = '\x07'
#: In AMF 3 an ActionScript Date is serialized as the number of
#: milliseconds elapsed since the epoch of midnight, 1st Jan 1970 in the
#: UTC time zone. Local time zone information is not sent.
TYPE_DATE = '\x08'
#: ActionScript Arrays are described based on the nature of their indices,
#: i.e. their type and how they are positioned in the Array.
TYPE_ARRAY = '\x09'
#: A single AMF 3 type handles ActionScript Objects and custom user classes.
TYPE_OBJECT = '\x0A'
#: ActionScript 3.0 introduces a new top-level XML class that supports
#: U{E4X<http://en.wikipedia.org/wiki/E4X>} syntax.
#: For serialization purposes the XML type needs to be flattened into a
#: string representation. As with other strings in AMF, the content is
#: encoded using UTF-8.
TYPE_XMLSTRING = '\x0B'
#: ActionScript 3.0 introduces the L{ByteArray} type to hold an Array
#: of bytes. AMF 3 serializes this type using a variable length encoding
#: 29-bit integer for the byte-length prefix followed by the raw bytes
#: of the L{ByteArray}.
#: @see: U{Parsing ByteArrays on OSFlash (external)
#: <http://osflash.org/documentation/amf3/parsing_byte_arrays>}
TYPE_BYTEARRAY = '\x0C'
#: Reference bit.
REFERENCE_BIT = 0x01
#: The maximum value for an int that will avoid promotion to an
#: ActionScript Number when sent via AMF 3 is represented by a
#: signed 29 bit integer: 2^28 - 1.
MAX_29B_INT = 0x0FFFFFFF
#: The minimum that can be represented by a signed 29 bit integer.
MIN_29B_INT = -0x10000000
#: Cache of integer values to their encoded AMF3 byte form - presumably
#: populated lazily by the integer encoder to avoid re-encoding common
#: values; verify against the encoder implementation later in this module.
ENCODED_INT_CACHE = {}
class ObjectEncoding:
    """
    AMF object encodings.

    These are the values of the 2-bit trait-encoding field in an AMF3
    object header (see L{ClassDefinition} and L{Decoder.readObject}).
    """
    #: Property list encoding.
    #: The remaining integer-data represents the number of class members that
    #: exist. The property names are read as string-data. The values are then
    #: read as AMF3-data.
    STATIC = 0x00
    #: Externalizable object.
    #: What follows is the value of the "inner" object, including type code.
    #: This value appears for objects that implement IExternalizable, such as
    #: L{ArrayCollection} and L{ObjectProxy}.
    EXTERNAL = 0x01
    #: Name-value encoding.
    #: The property names and values are encoded as string-data followed by
    #: AMF3-data until there is an empty string property name. If there is a
    #: class-def reference there are no property names and the number of values
    #: is equal to the number of properties in the class-def.
    DYNAMIC = 0x02
    #: Proxy object.
    PROXY = 0x03
class DataOutput(object):
    """
    I am a C{StringIO} type object containing byte data from the AMF stream.
    ActionScript 3.0 introduced the C{flash.utils.ByteArray} class to support
    the manipulation of raw data in the form of an Array of bytes.
    I provide a set of methods for writing binary data with ActionScript 3.0.
    This class is the I/O counterpart to the L{DataInput} class, which reads
    binary data.

    All writes go straight through to the encoder's underlying stream.

    @see: U{IDataOutput on Adobe Help (external)
    <http://help.adobe.com/en_US/FlashPlatform/reference/actionscript/3/flash/utils/IDataOutput.html>}
    """
    def __init__(self, encoder):
        """
        @param encoder: Encoder containing the stream.
        @type encoder: L{amf3.Encoder<pyamf.amf3.Encoder>}
        """
        self.encoder = encoder
        self.stream = encoder.stream
    def writeBoolean(self, value):
        """
        Writes a Boolean value.
        @type value: C{bool}
        @param value: A C{Boolean} value determining which byte is written.
        If the parameter is C{True}, C{1} is written; if C{False}, C{0} is
        written.
        @raise ValueError: Non-boolean value found.
        """
        if not isinstance(value, bool):
            raise ValueError("Non-boolean value found")
        if value is True:
            self.stream.write_uchar(1)
        else:
            self.stream.write_uchar(0)
    def writeByte(self, value):
        """
        Writes a byte.
        @type value: C{int}
        """
        self.stream.write_char(value)
    def writeUnsignedByte(self, value):
        """
        Writes an unsigned byte.
        @type value: C{int}
        @since: 0.5
        """
        return self.stream.write_uchar(value)
    def writeDouble(self, value):
        """
        Writes an IEEE 754 double-precision (64-bit) floating
        point number.
        @type value: C{number}
        """
        self.stream.write_double(value)
    def writeFloat(self, value):
        """
        Writes an IEEE 754 single-precision (32-bit) floating
        point number.
        @type value: C{float}
        """
        self.stream.write_float(value)
    def writeInt(self, value):
        """
        Writes a 32-bit signed integer.
        @type value: C{int}
        """
        self.stream.write_long(value)
    def writeMultiByte(self, value, charset):
        """
        Writes a multibyte string to the datastream using the
        specified character set.
        @type value: C{str}
        @param value: The string value to be written.
        @type charset: C{str}
        @param charset: The string denoting the character set to use. Possible
        character set strings include C{shift-jis}, C{cn-gb},
        C{iso-8859-1} and others.
        @see: U{Supported character sets on Adobe Help (external)
        <http://help.adobe.com/en_US/FlashPlatform/reference/actionscript/3/charset-codes.html>}
        """
        # Exact-type check: only ``unicode`` values are encoded to
        # ``charset``; byte strings are written through unchanged.
        if type(value) is unicode:
            value = value.encode(charset)
        self.stream.write(value)
    def writeObject(self, value):
        """
        Writes an object to data stream in AMF serialized format.
        @param value: The object to be serialized.
        """
        self.encoder.writeElement(value)
    def writeShort(self, value):
        """
        Writes a 16-bit integer.
        @type value: C{int}
        @param value: A byte value as an integer.
        """
        self.stream.write_short(value)
    def writeUnsignedShort(self, value):
        """
        Writes a 16-bit unsigned integer.
        @type value: C{int}
        @param value: A byte value as an integer.
        @since: 0.5
        """
        self.stream.write_ushort(value)
    def writeUnsignedInt(self, value):
        """
        Writes a 32-bit unsigned integer.
        @type value: C{int}
        @param value: A byte value as an unsigned integer.
        """
        self.stream.write_ulong(value)
    def writeUTF(self, value):
        """
        Writes a UTF-8 string to the data stream.
        The length of the UTF-8 string in bytes is written first,
        as a 16-bit integer, followed by the bytes representing the
        characters of the string.
        @type value: C{str}
        @param value: The string value to be written.
        """
        # Encode into a scratch buffer first so the *byte* length (not the
        # character count) can be written as the 16-bit prefix.
        buf = util.BufferedByteStream()
        buf.write_utf8_string(value)
        # NOTE: ``bytes`` shadows the builtin name; harmless in this
        # Python 2 module.
        bytes = buf.getvalue()
        self.stream.write_ushort(len(bytes))
        self.stream.write(bytes)
    def writeUTFBytes(self, value):
        """
        Writes a UTF-8 string. Similar to L{writeUTF}, but does
        not prefix the string with a 16-bit length word.
        @type value: C{str}
        @param value: The string value to be written.
        """
        val = None
        if isinstance(value, unicode):
            val = value
        else:
            # Byte string input is decoded as UTF-8 before writing.
            val = unicode(value, 'utf8')
        self.stream.write_utf8_string(val)
class DataInput(object):
    """
    I provide a set of methods for reading binary data with ActionScript 3.0.
    This class is the I/O counterpart to the L{DataOutput} class, which writes
    binary data.

    All reads go straight through to the decoder's underlying stream.

    @see: U{IDataInput on Adobe Help (external)
    <http://help.adobe.com/en_US/FlashPlatform/reference/actionscript/3/flash/utils/IDataInput.html>}
    """
    def __init__(self, decoder=None):
        """
        @param decoder: AMF3 decoder containing the stream.
        @type decoder: L{amf3.Decoder<pyamf.amf3.Decoder>}
        """
        # NOTE(review): ``decoder`` defaults to None but ``decoder.stream``
        # is read unconditionally, so passing the default raises
        # AttributeError -- confirm the default is ever exercised.
        self.decoder = decoder
        self.stream = decoder.stream
    def readBoolean(self):
        """
        Read C{Boolean}.
        @raise ValueError: Error reading Boolean.
        @rtype: C{bool}
        @return: A Boolean value, C{True} if the byte
        is nonzero, C{False} otherwise.
        """
        byte = self.stream.read(1)
        if byte == '\x00':
            return False
        elif byte == '\x01':
            return True
        else:
            raise ValueError("Error reading boolean")
    def readByte(self):
        """
        Reads a signed byte.
        @rtype: C{int}
        @return: The returned value is in the range -128 to 127.
        """
        return self.stream.read_char()
    def readDouble(self):
        """
        Reads an IEEE 754 double-precision floating point number from the
        data stream.
        @rtype: C{number}
        @return: An IEEE 754 double-precision floating point number.
        """
        return self.stream.read_double()
    def readFloat(self):
        """
        Reads an IEEE 754 single-precision floating point number from the
        data stream.
        @rtype: C{number}
        @return: An IEEE 754 single-precision floating point number.
        """
        return self.stream.read_float()
    def readInt(self):
        """
        Reads a signed 32-bit integer from the data stream.
        @rtype: C{int}
        @return: The returned value is in the range -2147483648 to 2147483647.
        """
        return self.stream.read_long()
    def readMultiByte(self, length, charset):
        """
        Reads a multibyte string of specified length from the data stream
        using the specified character set.
        @type length: C{int}
        @param length: The number of bytes from the data stream to read.
        @type charset: C{str}
        @param charset: The string denoting the character set to use.
        @rtype: C{str}
        @return: UTF-8 encoded string.
        """
        # FIXME nick: how to work out the code point byte size (on the fly)?
        bytes = self.stream.read(length)
        return unicode(bytes, charset)
    def readObject(self):
        """
        Reads an object from the data stream.
        @return: The deserialized object.
        """
        return self.decoder.readElement()
    def readShort(self):
        """
        Reads a signed 16-bit integer from the data stream.
        @rtype: C{uint}
        @return: The returned value is in the range -32768 to 32767.
        """
        return self.stream.read_short()
    def readUnsignedByte(self):
        """
        Reads an unsigned byte from the data stream.
        @rtype: C{uint}
        @return: The returned value is in the range 0 to 255.
        """
        return self.stream.read_uchar()
    def readUnsignedInt(self):
        """
        Reads an unsigned 32-bit integer from the data stream.
        @rtype: C{uint}
        @return: The returned value is in the range 0 to 4294967295.
        """
        return self.stream.read_ulong()
    def readUnsignedShort(self):
        """
        Reads an unsigned 16-bit integer from the data stream.
        @rtype: C{uint}
        @return: The returned value is in the range 0 to 65535.
        """
        return self.stream.read_ushort()
    def readUTF(self):
        """
        Reads a UTF-8 string from the data stream.
        The string is assumed to be prefixed with an unsigned
        short indicating the length in bytes.
        @rtype: C{str}
        @return: A UTF-8 string produced by the byte
        representation of characters.
        """
        length = self.stream.read_ushort()
        return self.stream.read_utf8_string(length)
    def readUTFBytes(self, length):
        """
        Reads a sequence of C{length} UTF-8 bytes from the data
        stream and returns a string.
        @type length: C{int}
        @param length: The number of bytes from the data stream to read.
        @rtype: C{str}
        @return: A UTF-8 string produced by the byte representation of
        characters of specified C{length}.
        """
        return self.readMultiByte(length, 'utf-8')
class ByteArray(util.BufferedByteStream, DataInput, DataOutput):
    """
    I am a C{StringIO} type object containing byte data from the AMF stream.
    ActionScript 3.0 introduced the C{flash.utils.ByteArray} class to support
    the manipulation of raw data in the form of an Array of bytes.
    Supports C{zlib} compression.
    Possible uses of the C{ByteArray} class:
    - Creating a custom protocol to connect to a client.
    - Writing your own AMF/Remoting packet.
    - Optimizing the size of your data by using custom data types.
    @see: U{ByteArray on Adobe Help (external)
    <http://help.adobe.com/en_US/FlashPlatform/reference/actionscript/3/flash/utils/ByteArray.html>}
    """
    #: First two bytes of a default zlib stream; used to sniff whether the
    #: supplied buffer is compressed.
    _zlib_header = '\x78\x9c'
    class __amf__:
        # PyAMF class metadata: always (de)serialize this type as AMF3.
        amf3 = True
    def __init__(self, buf=None):
        # Each ByteArray carries its own private codec context so reads and
        # writes do not share reference tables with the outer stream.
        self.context = Context()
        util.BufferedByteStream.__init__(self, buf)
        DataInput.__init__(self, Decoder(self, self.context))
        DataOutput.__init__(self, Encoder(self, self.context))
        # Detect zlib-compressed input by peeking at the first two bytes.
        self.compressed = self.peek(2) == ByteArray._zlib_header
    def readObject(self):
        # Fresh reference tables per top-level read.
        self.context.clear()
        return super(ByteArray, self).readObject()
    def writeObject(self, obj):
        # Fresh reference tables per top-level write.
        self.context.clear()
        return super(ByteArray, self).writeObject(obj)
    def __cmp__(self, other):
        # Python 2 comparison hook: compare raw byte contents.
        if isinstance(other, ByteArray):
            return cmp(self.getvalue(), other.getvalue())
        return cmp(self.getvalue(), other)
    def __str__(self):
        buf = self.getvalue()
        if not self.compressed:
            return buf
        buf = zlib.compress(buf)
        # FIXME nick: hacked
        # Overwrites the second zlib header byte with '\xda' (so the header
        # reads '\x78\xda') -- presumably to match the header Flash's
        # ByteArray emits; confirm before touching.
        return buf[0] + '\xda' + buf[2:]
    def compress(self):
        """
        Forces compression of the underlying stream.
        """
        self.compressed = True
class ClassDefinition(object):
    """
    Internal helper used by L{Encoder}/L{Decoder} to hold the transient
    trait (class) definition for a single alias while a stream is being
    processed.
    """
    def __init__(self, alias):
        """
        @param alias: The class alias whose traits are described here.
        """
        self.alias = alias
        self.reference = None
        alias.compile()
        # Number of statically declared attributes, if any.
        static = alias.static_attrs
        self.attr_len = 0
        if static:
            self.attr_len = len(static)
        # Default to name/value pair encoding; narrow it below.
        self.encoding = ObjectEncoding.DYNAMIC
        if alias.external:
            self.encoding = ObjectEncoding.EXTERNAL
        elif not alias.dynamic:
            encodable = alias.encodable_properties
            if encodable is None:
                self.encoding = ObjectEncoding.STATIC
            elif len(static) == len(encodable):
                # Every encodable property is static, so a pure property
                # list is sufficient.
                self.encoding = ObjectEncoding.STATIC
    def __repr__(self):
        return (
            '<%s.ClassDefinition reference=%r encoding=%r alias=%r at 0x%x>'
        ) % (
            self.__class__.__module__,
            self.reference,
            self.encoding,
            self.alias,
            id(self),
        )
class Context(codec.Context):
    """
    I hold the AMF3 context for en/decoding streams.
    @ivar strings: A list of string references.
    @type strings: L{codec.ByteStringReferenceCollection}
    @ivar classes: A list of L{ClassDefinition}.
    @type classes: C{list}
    """
    def __init__(self, **kwargs):
        # AMF3-specific reference tables on top of the base codec context.
        self.strings = codec.ByteStringReferenceCollection()
        self.classes = {}       # klass -> ClassDefinition
        self.class_ref = {}     # reference index -> ClassDefinition
        self.class_idx = 0      # next class reference index to hand out
        codec.Context.__init__(self, **kwargs)
    def clear(self):
        """
        Clears the context.
        """
        codec.Context.clear(self)
        self.strings.clear()
        # NOTE(review): proxied_objects is (re)created here; __init__ relies
        # on codec.Context to initialise it -- confirm.
        self.proxied_objects = {}
        self.classes = {}
        self.class_ref = {}
        self.class_idx = 0
    def getString(self, ref):
        """
        Gets a string based on a reference C{ref}.
        @param ref: The reference index.
        @type ref: C{str}
        @rtype: C{str} or C{None}
        @return: The referenced string.
        """
        return self.strings.getByReference(ref)
    def getStringReference(self, s):
        """
        Return string reference.
        @type s: C{str}
        @param s: The referenced string.
        @return: The reference index to the string.
        @rtype: C{int} or C{None}
        """
        return self.strings.getReferenceTo(s)
    def addString(self, s):
        """
        Creates a reference to C{s}. If the reference already exists, that
        reference is returned.
        @type s: C{str}
        @param s: The string to be referenced.
        @rtype: C{int}
        @return: The reference index, or C{-1} for the empty string (which
        is never sent by reference in AMF3).
        @raise TypeError: The parameter C{s} is not of C{basestring} type.
        """
        if not isinstance(s, basestring):
            raise TypeError
        if len(s) == 0:
            # empty strings are always encoded inline
            return -1
        return self.strings.append(s)
    def getClassByReference(self, ref):
        """
        Return class reference.
        @return: Class reference.
        """
        return self.class_ref.get(ref)
    def getClass(self, klass):
        """
        Returns a class reference.
        @return: Class reference.
        """
        return self.classes.get(klass)
    def addClass(self, alias, klass):
        """
        Creates a reference to C{class_def}.
        @param alias: C{ClassDefinition} instance.
        @type alias: C{ClassDefinition}
        """
        ref = self.class_idx
        self.class_ref[ref] = alias
        cd = self.classes[klass] = alias
        cd.reference = ref
        self.class_idx += 1
        return ref
    def getObjectForProxy(self, proxy):
        """
        Returns the unproxied version of C{proxy} as stored in the context, or
        unproxies the proxy and returns that 'raw' object.
        @see: L{pyamf.flex.unproxy_object}
        @since: 0.6
        """
        obj = self.proxied_objects.get(id(proxy))
        if obj is None:
            # deferred import to avoid a module-level import cycle
            from pyamf import flex
            obj = flex.unproxy_object(proxy)
            self.addProxyObject(obj, proxy)
        return obj
    def addProxyObject(self, obj, proxied):
        """
        Stores a reference to the unproxied and proxied versions of C{obj} for
        later retrieval.
        @since: 0.6
        """
        # Both directions are stored so either form can be looked up.
        self.proxied_objects[id(obj)] = proxied
        self.proxied_objects[id(proxied)] = obj
    def getProxyForObject(self, obj):
        """
        Returns the proxied version of C{obj} as stored in the context, or
        creates a new proxied object and returns that.
        @see: L{pyamf.flex.proxy_object}
        @since: 0.6
        """
        proxied = self.proxied_objects.get(id(obj))
        if proxied is None:
            # deferred import to avoid a module-level import cycle
            from pyamf import flex
            proxied = flex.proxy_object(obj)
            self.addProxyObject(obj, proxied)
        return proxied
class Decoder(codec.Decoder):
    """
    Decodes an AMF3 data stream.
    """
    def __init__(self, *args, **kwargs):
        # Pop our keyword before the base class sees the remainder.
        self.use_proxies = kwargs.pop('use_proxies', use_proxies_default)
        codec.Decoder.__init__(self, *args, **kwargs)
    def buildContext(self, **kwargs):
        # AMF3 needs its own reference tables (strings / class traits).
        return Context(**kwargs)
    def getTypeFunc(self, data):
        # Maps a type marker byte to the bound method that reads that type.
        # Unrecognised markers fall through and return None.
        if data == TYPE_UNDEFINED:
            return self.readUndefined
        elif data == TYPE_NULL:
            return self.readNull
        elif data == TYPE_BOOL_FALSE:
            return self.readBoolFalse
        elif data == TYPE_BOOL_TRUE:
            return self.readBoolTrue
        elif data == TYPE_INTEGER:
            return self.readInteger
        elif data == TYPE_NUMBER:
            return self.readNumber
        elif data == TYPE_STRING:
            return self.readString
        elif data == TYPE_XML:
            return self.readXML
        elif data == TYPE_DATE:
            return self.readDate
        elif data == TYPE_ARRAY:
            return self.readArray
        elif data == TYPE_OBJECT:
            return self.readObject
        elif data == TYPE_XMLSTRING:
            return self.readXMLString
        elif data == TYPE_BYTEARRAY:
            return self.readByteArray
    def readProxy(self, obj):
        """
        Decodes a proxied object from the stream.
        @since: 0.6
        """
        return self.context.getObjectForProxy(obj)
    def readUndefined(self):
        """
        Read undefined.
        """
        return pyamf.Undefined
    def readNull(self):
        """
        Read null.
        @return: C{None}
        @rtype: C{None}
        """
        return None
    def readBoolFalse(self):
        """
        Returns C{False}.
        @return: C{False}
        @rtype: C{bool}
        """
        return False
    def readBoolTrue(self):
        """
        Returns C{True}.
        @return: C{True}
        @rtype: C{bool}
        """
        return True
    def readNumber(self):
        """
        Read number.
        """
        return self.stream.read_double()
    def readInteger(self, signed=True):
        """
        Reads and returns an integer from the stream.
        @type signed: C{bool}
        @see: U{Parsing integers on OSFlash
        <http://osflash.org/amf3/parsing_integers>} for the AMF3 integer data
        format.
        """
        return decode_int(self.stream, signed)
    def _readLength(self):
        # U29 header: low bit set means an inline value, clear means a
        # reference. Returns (length-or-index, is_reference).
        x = decode_int(self.stream, False)
        return (x >> 1, x & REFERENCE_BIT == 0)
    def readBytes(self):
        """
        Reads and returns a utf-8 encoded byte array.
        """
        length, is_reference = self._readLength()
        if is_reference:
            return self.context.getString(length)
        if length == 0:
            # empty strings are never stored in the reference table
            return ''
        result = self.stream.read(length)
        self.context.addString(result)
        return result
    def readString(self):
        """
        Reads and returns a string from the stream.
        """
        length, is_reference = self._readLength()
        if is_reference:
            result = self.context.getString(length)
            return self.context.getStringForBytes(result)
        if length == 0:
            return ''
        result = self.stream.read(length)
        self.context.addString(result)
        return self.context.getStringForBytes(result)
    def readDate(self):
        """
        Read date from the stream.
        The timezone is ignored as the date is always in UTC.
        """
        ref = self.readInteger(False)
        if ref & REFERENCE_BIT == 0:
            return self.context.getObject(ref >> 1)
        ms = self.stream.read_double()
        result = util.get_datetime(ms / 1000.0)
        if self.timezone_offset is not None:
            result += self.timezone_offset
        self.context.addObject(result)
        return result
    def readArray(self):
        """
        Reads an array from the stream.
        @warning: There is a very specific problem with AMF3 where the first
        three bytes of an encoded empty C{dict} will mirror that of an encoded
        C{{'': 1, '2': 2}}
        """
        size = self.readInteger(False)
        if size & REFERENCE_BIT == 0:
            return self.context.getObject(size >> 1)
        size >>= 1
        # The associative portion is terminated by an empty key.
        key = self.readBytes()
        if key == '':
            # integer indexes only -> python list
            result = []
            self.context.addObject(result)
            for i in xrange(size):
                result.append(self.readElement())
            return result
        # Mixed string/integer keys -> MixedArray: read the associative
        # part first, then the dense part keyed by position.
        result = pyamf.MixedArray()
        self.context.addObject(result)
        while key:
            result[key] = self.readElement()
            key = self.readBytes()
        for i in xrange(size):
            el = self.readElement()
            result[i] = el
        return result
    def _getClassDefinition(self, ref):
        """
        Reads class definition from the stream.
        """
        # After the inline bit is shifted off: bits 0-1 hold the trait
        # encoding and the remaining bits the static attribute count.
        is_ref = ref & REFERENCE_BIT == 0
        ref >>= 1
        if is_ref:
            class_def = self.context.getClassByReference(ref)
            return class_def
        name = self.readBytes()
        alias = None
        if name == '':
            # anonymous object -> generic ASObject
            name = pyamf.ASObject
        try:
            alias = pyamf.get_class_alias(name)
        except pyamf.UnknownClassAlias:
            if self.strict:
                raise
            # non-strict mode: fabricate a typed-object alias on the fly
            alias = pyamf.TypedObjectClassAlias(name)
        class_def = ClassDefinition(alias)
        class_def.encoding = ref & 0x03
        class_def.attr_len = ref >> 2
        class_def.static_properties = []
        if class_def.attr_len > 0:
            for i in xrange(class_def.attr_len):
                key = self.readBytes()
                class_def.static_properties.append(key)
        self.context.addClass(class_def, alias.klass)
        return class_def
    def _readStatic(self, class_def, obj):
        # Static attributes: names come from the trait definition, values
        # follow in declaration order.
        for attr in class_def.static_properties:
            obj[attr] = self.readElement()
    def _readDynamic(self, class_def, obj):
        # Dynamic attributes: name/value pairs until an empty name.
        attr = self.readBytes()
        while attr:
            obj[attr] = self.readElement()
            attr = self.readBytes()
    def readObject(self):
        """
        Reads an object from the stream.
        @raise ReferenceError: Unknown reference found.
        @raise DecodeError: Unknown object encoding detected.
        """
        ref = self.readInteger(False)
        if ref & REFERENCE_BIT == 0:
            obj = self.context.getObject(ref >> 1)
            if obj is None:
                raise pyamf.ReferenceError(
                    'Unknown reference %d' % (ref >> 1,)
                )
            if self.use_proxies is True:
                obj = self.readProxy(obj)
            return obj
        ref >>= 1
        class_def = self._getClassDefinition(ref)
        alias = class_def.alias
        obj = alias.createInstance(codec=self)
        obj_attrs = dict()
        # Register the (still empty) instance first so self-references in
        # the attribute data resolve correctly.
        self.context.addObject(obj)
        if class_def.encoding in (
                ObjectEncoding.EXTERNAL,
                ObjectEncoding.PROXY):
            # externalizable: the object reads its own body
            obj.__readamf__(DataInput(self))
            if self.use_proxies is True:
                obj = self.readProxy(obj)
            return obj
        elif class_def.encoding == ObjectEncoding.DYNAMIC:
            self._readStatic(class_def, obj_attrs)
            self._readDynamic(class_def, obj_attrs)
        elif class_def.encoding == ObjectEncoding.STATIC:
            self._readStatic(class_def, obj_attrs)
        else:
            raise pyamf.DecodeError("Unknown object encoding")
        alias.applyAttributes(obj, obj_attrs, codec=self)
        if self.use_proxies is True:
            obj = self.readProxy(obj)
        return obj
    def readXML(self):
        """
        Reads an xml object from the stream.
        @return: An etree interface compatible object
        @see: L{xml.set_default_interface}
        """
        ref = self.readInteger(False)
        if ref & REFERENCE_BIT == 0:
            return self.context.getObject(ref >> 1)
        xmlstring = self.stream.read(ref >> 1)
        x = xml.fromstring(
            xmlstring,
            forbid_dtd=self.context.forbid_dtd,
            forbid_entities=self.context.forbid_entities,
        )
        self.context.addObject(x)
        return x
    def readXMLString(self):
        """
        Reads a string from the data stream and converts it into
        an XML Tree.
        @see: L{readXML}
        """
        return self.readXML()
    def readByteArray(self):
        """
        Reads a string of data from the stream.
        Detects if the L{ByteArray} was compressed using C{zlib}.
        @see: L{ByteArray}
        @note: This is not supported in ActionScript 1.0 and 2.0.
        """
        ref = self.readInteger(False)
        if ref & REFERENCE_BIT == 0:
            return self.context.getObject(ref >> 1)
        buffer = self.stream.read(ref >> 1)
        compressed = False
        if buffer[0:2] == ByteArray._zlib_header:
            try:
                buffer = zlib.decompress(buffer)
                compressed = True
            except zlib.error:
                # header matched by coincidence; treat as raw bytes
                pass
        obj = ByteArray(buffer)
        obj.compressed = compressed
        self.context.addObject(obj)
        return obj
class Encoder(codec.Encoder):
    """
    Encodes an AMF3 data stream.
    """
    def __init__(self, *args, **kwargs):
        # Pop our keywords before the base class sees the remainder.
        self.use_proxies = kwargs.pop('use_proxies', use_proxies_default)
        self.string_references = kwargs.pop('string_references', True)
        codec.Encoder.__init__(self, *args, **kwargs)
    def buildContext(self, **kwargs):
        # AMF3 needs its own reference tables (strings / class traits).
        return Context(**kwargs)
    def getTypeFunc(self, data):
        """
        @see: L{codec.Encoder.getTypeFunc}
        """
        t = type(data)
        if t in python.int_types:
            return self.writeInteger
        elif t is ByteArray:
            return self.writeByteArray
        elif t is pyamf.MixedArray:
            return self.writeDict
        # everything else is handled by the base implementation
        return codec.Encoder.getTypeFunc(self, data)
    def writeUndefined(self, n):
        """
        Writes an C{pyamf.Undefined} value to the stream.
        """
        self.stream.write(TYPE_UNDEFINED)
    def writeNull(self, n):
        """
        Writes a C{null} value to the stream.
        """
        self.stream.write(TYPE_NULL)
    def writeBoolean(self, n):
        """
        Writes a Boolean to the stream.
        """
        t = TYPE_BOOL_TRUE
        if n is False:
            t = TYPE_BOOL_FALSE
        self.stream.write(t)
    def _writeInteger(self, n):
        """
        AMF3 integers are encoded.
        @param n: The integer data to be encoded to the AMF3 data stream.
        @type n: integer data
        @see: U{Parsing Integers on OSFlash
        <http://osflash.org/documentation/amf3/parsing_integers>}
        for more info.
        """
        self.stream.write(encode_int(n))
    def writeInteger(self, n):
        """
        Writes an integer to the stream.
        @type n: integer data
        @param n: The integer data to be encoded to the AMF3 data stream.
        """
        # Values outside the signed 29-bit range are promoted to doubles.
        if n < MIN_29B_INT or n > MAX_29B_INT:
            self.writeNumber(float(n))
            return
        self.stream.write(TYPE_INTEGER)
        self.stream.write(encode_int(n))
    def writeNumber(self, n):
        """
        Writes a float to the stream.
        @type n: C{float}
        """
        self.stream.write(TYPE_NUMBER)
        self.stream.write_double(n)
    def serialiseBytes(self, b):
        # Writes a U29S string body: reference where possible, otherwise
        # inline (length << 1 | inline bit) followed by the raw bytes.
        if len(b) == 0:
            # the empty string is always encoded inline as 0x01
            self.stream.write_uchar(REFERENCE_BIT)
            return
        if self.string_references:
            ref = self.context.getStringReference(b)
            if ref != -1:
                self._writeInteger(ref << 1)
                return
            self.context.addString(b)
        self._writeInteger((len(b) << 1) | REFERENCE_BIT)
        self.stream.write(b)
    def serialiseString(self, s):
        """
        Writes a raw string to the stream.
        @type s: C{str}
        @param s: The string data to be encoded to the AMF3 data stream.
        """
        if type(s) is unicode:
            s = self.context.getBytesForString(s)
        self.serialiseBytes(s)
    def writeBytes(self, b):
        """
        Writes a raw string to the stream.
        """
        self.stream.write(TYPE_STRING)
        self.serialiseBytes(b)
    def writeString(self, s):
        """
        Writes a string to the stream. It will be B{UTF-8} encoded.
        """
        s = self.context.getBytesForString(s)
        self.writeBytes(s)
    def writeDate(self, n):
        """
        Writes a C{datetime} instance to the stream.
        Does not support C{datetime.time} instances because AMF3 has
        no way to encode time objects, so please use C{datetime.datetime}
        instead.
        @type n: L{datetime}
        @param n: The C{Date} data to be encoded to the AMF3 data stream.
        @raise EncodeError: A datetime.time instance was found
        """
        if isinstance(n, datetime.time):
            raise pyamf.EncodeError(
                'A datetime.time instance was found but AMF3 has no way to '
                'encode time objects. Please use datetime.datetime instead '
                '(got:%r)' % (n,)
            )
        self.stream.write(TYPE_DATE)
        ref = self.context.getObjectReference(n)
        if ref != -1:
            self._writeInteger(ref << 1)
            return
        self.context.addObject(n)
        self.stream.write_uchar(REFERENCE_BIT)
        # Dates are transmitted in UTC: undo any configured offset first.
        if self.timezone_offset is not None:
            n -= self.timezone_offset
        ms = util.get_timestamp(n)
        self.stream.write_double(ms * 1000.0)
    def writeList(self, n, is_proxy=False):
        """
        Writes a C{tuple}, C{set} or C{list} to the stream.
        @type n: One of C{__builtin__.tuple}, C{__builtin__.set}
        or C{__builtin__.list}
        @param n: The C{list} data to be encoded to the AMF3 data stream.
        """
        if self.use_proxies and not is_proxy:
            # encode as an ArrayCollection proxy instead
            self.writeProxy(n)
            return
        self.stream.write(TYPE_ARRAY)
        ref = self.context.getObjectReference(n)
        if ref != -1:
            self._writeInteger(ref << 1)
            return
        self.context.addObject(n)
        self._writeInteger((len(n) << 1) | REFERENCE_BIT)
        # 0x01 == empty string: the associative portion is empty.
        self.stream.write('\x01')
        [self.writeElement(x) for x in n]
    def writeDict(self, n):
        """
        Writes a C{dict} to the stream.
        @type n: C{__builtin__.dict}
        @param n: The C{dict} data to be encoded to the AMF3 data stream.
        @raise ValueError: Non C{int}/C{str} key value found in the C{dict}
        @raise EncodeError: C{dict} contains empty string keys.
        """
        # Design bug in AMF3: the empty string terminates the associative
        # portion on the wire, so empty string keys cannot be represented.
        if '' in n:
            raise pyamf.EncodeError("dicts cannot contain empty string keys")
        if self.use_proxies:
            self.writeProxy(n)
            return
        self.stream.write(TYPE_ARRAY)
        ref = self.context.getObjectReference(n)
        if ref != -1:
            self._writeInteger(ref << 1)
            return
        self.context.addObject(n)
        # The AMF3 spec demands that all str based indices be listed first
        keys = n.keys()
        int_keys = []
        str_keys = []
        for x in keys:
            if isinstance(x, python.int_types):
                int_keys.append(x)
            elif isinstance(x, python.str_types):
                str_keys.append(x)
            else:
                raise ValueError("Non int/str key value found in dict")
        # Make sure the integer keys are within range
        l = len(int_keys)
        for x in int_keys:
            # NOTE(review): `l < x <= 0` can never be true (l >= 0), so
            # out-of-range integer keys are never demoted to string keys
            # here -- looks like a latent bug; confirm the intended test.
            if l < x <= 0:
                # treat as a string key
                str_keys.append(x)
                del int_keys[int_keys.index(x)]
        int_keys.sort()
        # If integer keys don't start at 0, they will be treated as strings
        if len(int_keys) > 0 and int_keys[0] != 0:
            # NOTE(review): mutating int_keys while iterating it skips
            # elements (e.g. [1, 2, 3] leaves 2 behind) -- iterate a copy;
            # confirm intended behaviour.
            for x in int_keys:
                str_keys.append(str(x))
                del int_keys[int_keys.index(x)]
        self._writeInteger(len(int_keys) << 1 | REFERENCE_BIT)
        for x in str_keys:
            self.serialiseString(x)
            self.writeElement(n[x])
        # empty string terminates the associative portion
        self.stream.write_uchar(0x01)
        for k in int_keys:
            self.writeElement(n[k])
    def writeProxy(self, obj):
        """
        Encodes a proxied object to the stream.
        @since: 0.6
        """
        proxy = self.context.getProxyForObject(obj)
        self.writeObject(proxy, is_proxy=True)
    def writeObject(self, obj, is_proxy=False):
        """
        Writes an object to the stream.
        """
        if self.use_proxies and not is_proxy:
            self.writeProxy(obj)
            return
        self.stream.write(TYPE_OBJECT)
        ref = self.context.getObjectReference(obj)
        if ref != -1:
            self._writeInteger(ref << 1)
            return
        self.context.addObject(obj)
        # object is not referenced, serialise it
        kls = obj.__class__
        definition = self.context.getClass(kls)
        alias = None
        class_ref = False  # if the class definition is a reference
        if definition:
            class_ref = True
            alias = definition.alias
        else:
            alias = self.context.getClassAlias(kls)
            definition = ClassDefinition(alias)
            self.context.addClass(definition, alias.klass)
        if class_ref:
            # definition.reference was pre-encoded below on first use, so
            # it can be written out directly here.
            self.stream.write(definition.reference)
        else:
            # Build the U29O-traits header: attribute count in the high
            # bits, trait encoding in bits 2-3, and both low bits set
            # (inline class-def, inline object).
            ref = 0
            if definition.encoding != ObjectEncoding.EXTERNAL:
                ref += definition.attr_len << 4
            final_reference = encode_int(
                ref |
                definition.encoding << 2 |
                REFERENCE_BIT << 1 |
                REFERENCE_BIT
            )
            self.stream.write(final_reference)
            definition.reference = encode_int(
                definition.reference << 2 | REFERENCE_BIT)
            if alias.anonymous:
                # empty string class name
                self.stream.write('\x01')
            else:
                self.serialiseString(alias.alias)
            # work out what the final reference for the class will be.
            # this is okay because the next time an object of the same
            # class is encoded, class_ref will be True and never get here
            # again.
        if alias.external:
            # externalizable: the object writes its own body
            obj.__writeamf__(DataOutput(self))
            return
        attrs = alias.getEncodableAttributes(obj, codec=self)
        if alias.static_attrs:
            if not class_ref:
                # attribute names are only sent with the first definition
                [self.serialiseString(attr) for attr in alias.static_attrs]
            for attr in alias.static_attrs:
                value = attrs.pop(attr)
                self.writeElement(value)
            if definition.encoding == ObjectEncoding.STATIC:
                return
        if definition.encoding == ObjectEncoding.DYNAMIC:
            if attrs:
                for attr, value in attrs.iteritems():
                    if type(attr) in python.int_types:
                        attr = str(attr)
                    self.serialiseString(attr)
                    self.writeElement(value)
            # empty string terminates the dynamic portion
            self.stream.write('\x01')
    def writeByteArray(self, n):
        """
        Writes a L{ByteArray} to the data stream.
        @param n: The L{ByteArray} data to be encoded to the AMF3 data stream.
        @type n: L{ByteArray}
        """
        self.stream.write(TYPE_BYTEARRAY)
        ref = self.context.getObjectReference(n)
        if ref != -1:
            self._writeInteger(ref << 1)
            return
        self.context.addObject(n)
        # str() applies the ByteArray's own (possibly compressed) rendering
        buf = str(n)
        l = len(buf)
        self._writeInteger(l << 1 | REFERENCE_BIT)
        self.stream.write(buf)
    def writeXML(self, n):
        """
        Writes a XML string to the data stream.
        @type n: L{ET<xml.ET>}
        @param n: The XML Document to be encoded to the AMF3 data stream.
        """
        self.stream.write(TYPE_XMLSTRING)
        ref = self.context.getObjectReference(n)
        if ref != -1:
            self._writeInteger(ref << 1)
            return
        self.context.addObject(n)
        self.serialiseString(xml.tostring(n).encode('utf-8'))
def encode_int(n):
    """
    Encodes an int as a variable length signed 29-bit integer as defined by
    the spec.

    Results are memoised in L{ENCODED_INT_CACHE}, keyed on the I{original}
    value of C{n}, so repeated encodings of common integers are free.

    @param n: The integer to be encoded
    @return: The encoded string
    @rtype: C{str}
    @raise OverflowError: C{n} is out of range.
    """
    try:
        return ENCODED_INT_CACHE[n]
    except KeyError:
        pass
    if n < MIN_29B_INT or n > MAX_29B_INT:
        raise OverflowError("Out of range")
    # Remember the caller's value: ``n`` is mutated below, and caching
    # under the mutated value would poison the cache (a negative ``n``
    # would be stored under its unsigned 29-bit twin, which is itself out
    # of range and must keep raising OverflowError).
    key = n
    if n < 0:
        # two's-complement within 29 bits
        n += 0x20000000
    bytes = ''
    real_value = None
    if n > 0x1fffff:
        # Four-byte form: the final byte carries 8 payload bits instead of
        # 7, so shift right once and emit the three high groups from the
        # shifted value.
        real_value = n
        n >>= 1
        bytes += chr(0x80 | ((n >> 21) & 0xff))
    if n > 0x3fff:
        bytes += chr(0x80 | ((n >> 14) & 0xff))
    if n > 0x7f:
        bytes += chr(0x80 | ((n >> 7) & 0xff))
    if real_value is not None:
        n = real_value
    if n > 0x1fffff:
        bytes += chr(n & 0xff)
    else:
        bytes += chr(n & 0x7f)
    ENCODED_INT_CACHE[key] = bytes
    return bytes
def decode_int(stream, signed=False):
    """
    Reads a variable length U29 integer from C{stream}.

    Up to three leading bytes contribute 7 payload bits each while their
    high (continuation) bit is set; if all three have it set, a fourth
    byte contributes a full 8 bits.

    @param stream: Byte source exposing C{read_uchar}.
    @param signed: Interpret the 29-bit value as two's-complement.
    """
    value = 0
    bytes_read = 0
    byte = stream.read_uchar()
    # Accumulate 7-bit groups while the continuation bit is set.
    while bytes_read < 3 and byte & 0x80:
        value = (value << 7) | (byte & 0x7f)
        byte = stream.read_uchar()
        bytes_read += 1
    if bytes_read == 3:
        # Fourth byte: all 8 bits are payload.
        value = (value << 8) | byte
    else:
        value = (value << 7) | byte
    if value & 0x10000000:
        if signed:
            value -= 0x20000000
        else:
            value = (value << 1) + 1
    return value
# Register ByteArray with pyamf's global class registry.
pyamf.register_class(ByteArray)
| mit | 086eb9834738c538e257e82c39f614c5 | 26.479804 | 102 | 0.580375 | 3.975739 | false | false | false | false |
hydralabs/pyamf | pyamf/remoting/amf0.py | 6 | 5208 | # Copyright (c) The PyAMF Project.
# See LICENSE.txt for details.
"""
AMF0 Remoting support.
@since: 0.1.0
"""
import traceback
import sys
from pyamf import remoting
from pyamf.remoting import gateway
class RequestProcessor(object):
    """
    Turns a single AMF0 remoting request into a response on behalf of a
    gateway: endpoint lookup, authentication, preprocessing and the
    service call itself. Any failure along the way is converted into an
    AMF error response rather than propagating to the transport layer.
    """
    def __init__(self, gateway):
        """
        @param gateway: The remoting gateway this processor works for.
        """
        self.gateway = gateway
    @property
    def logger(self):
        # The gateway's logger, or None when logging is disabled.
        if not self.gateway.logger:
            return None
        return self.gateway.logger
    def authenticateRequest(self, request, service_request, *args, **kwargs):
        """
        Authenticates the request against the service.

        Credentials, if supplied, come from the request's C{Credentials}
        header (C{userid}/C{password} keys).

        @param request: The AMF request
        @type request: L{Request<pyamf.remoting.Request>}
        """
        username = password = None
        if 'Credentials' in request.headers:
            cred = request.headers['Credentials']
            username = cred['userid']
            password = cred['password']
        return self.gateway.authenticateRequest(
            service_request,
            username,
            password,
            *args,
            **kwargs
        )
    def buildErrorResponse(self, request, error=None):
        """
        Builds an error response.

        @param request: The AMF request
        @type request: L{Request<pyamf.remoting.Request>}
        @param error: Optional C{sys.exc_info()}-style triple; defaults to
            the exception currently being handled.
        @return: The AMF response
        @rtype: L{Response<pyamf.remoting.Response>}
        """
        if error is not None:
            cls, e, tb = error
        else:
            cls, e, tb = sys.exc_info()
        fault = build_fault(cls, e, tb, self.gateway.debug)
        return remoting.Response(fault, status=remoting.STATUS_ERROR)
    def _getBody(self, request, response, service_request, **kwargs):
        # A 'DescribeService' header short-circuits the call and returns
        # the service description instead of invoking the service.
        if 'DescribeService' in request.headers:
            return service_request.service.description
        return self.gateway.callServiceRequest(
            service_request,
            *request.body,
            **kwargs
        )
    def __call__(self, request, *args, **kwargs):
        """
        Processes an AMF0 request.
        @param request: The request to be processed.
        @type request: L{Request<pyamf.remoting.Request>}
        @return: The response to the request.
        @rtype: L{Response<pyamf.remoting.Response>}
        """
        response = remoting.Response(None)
        # 1. resolve the target endpoint
        try:
            service_request = self.gateway.getServiceRequest(
                request,
                request.target
            )
        except gateway.UnknownServiceError:
            if self.logger:
                self.logger.error(
                    'Unknown endpoint %r' % (request.target,)
                )
            return self.buildErrorResponse(request)
        # we have a valid service, now attempt authentication
        try:
            authd = self.authenticateRequest(
                request,
                service_request,
                *args,
                **kwargs
            )
        except (SystemExit, KeyboardInterrupt):
            raise
        # Broad except is deliberate: every failure must become an AMF
        # error response for the client (interpreter exits re-raised above).
        except:
            if self.logger:
                self.logger.exception(
                    'Unexpected error while authenticating request %r',
                    request.target
                )
            return self.buildErrorResponse(request)
        if not authd:
            # authentication failed
            response.status = remoting.STATUS_ERROR
            response.body = remoting.ErrorFault(
                code='AuthenticationError',
                description='Authentication failed'
            )
            return response
        # authentication succeeded, now fire the preprocessor (if there is one)
        try:
            self.gateway.preprocessRequest(service_request, *args, **kwargs)
        except (SystemExit, KeyboardInterrupt):
            raise
        except:
            if self.logger:
                self.logger.exception(
                    'Unexpected error while pre-processing request %r',
                    request.target
                )
            return self.buildErrorResponse(request)
        # 2. finally call the service itself
        try:
            response.body = self._getBody(
                request,
                response,
                service_request,
                *args,
                **kwargs
            )
            return response
        except (SystemExit, KeyboardInterrupt):
            raise
        except:
            if self.logger:
                self.logger.exception(
                    'Unexpected error while processing request %r',
                    request.target
                )
            return self.buildErrorResponse(request)
def build_fault(cls, e, tb, include_traceback=False):
    """
    Create a L{ErrorFault<pyamf.remoting.ErrorFault>} describing the
    exception that was just raised.

    @param cls: The exception class.
    @param e: The exception instance.
    @param tb: The traceback object.
    @param include_traceback: When C{False} (the default) the formatted
        traceback is left out of the fault's details.
    """
    # An exception class may advertise an explicit AMF fault code via a
    # `_amf_code` attribute; otherwise the class name is used.
    code = getattr(cls, '_amf_code', cls.__name__)
    if include_traceback:
        details = traceback.format_exception(cls, e, tb)
    else:
        details = None
    return remoting.ErrorFault(
        code=code, description=unicode(e), details=details
    )
| mit | 6ff26121b4938e5faf0730fc2737ca21 | 26.267016 | 79 | 0.551651 | 4.725953 | false | false | false | false |
hydralabs/pyamf | pyamf/alias.py | 6 | 17749 | # Copyright (c) The PyAMF Project.
# See LICENSE.txt for details.
"""
Class alias base functionality.
@since: 0.6
"""
import inspect
import pyamf
from pyamf import python, util
class UnknownClassAlias(Exception):
    """
    Raised when an AMF stream references an ActionScript class for which
    no Python class alias has been registered.

    @see: L{register_class}
    """
class ClassAlias(object):
    """
    Class alias. Provides class/instance meta data to the En/Decoder to allow
    fine grain control and some performance increases.
    """
    def __init__(self, klass, alias=None, **kwargs):
        # klass must be an actual class object (old- or new-style).
        if not isinstance(klass, python.class_types):
            raise TypeError('klass must be a class type, got %r' % type(klass))
        # Give subclasses a chance to veto classes pyamf cannot instantiate.
        self.checkClass(klass)
        self.klass = klass
        self.alias = alias
        # Normalise byte-string aliases to unicode.
        if hasattr(self.alias, 'decode'):
            self.alias = self.alias.decode('utf-8')
        # Optional en/decoding controls; all default to "unset" (None) so
        # that values can also be inherited from base-class aliases.
        self.static_attrs = kwargs.pop('static_attrs', None)
        self.exclude_attrs = kwargs.pop('exclude_attrs', None)
        self.readonly_attrs = kwargs.pop('readonly_attrs', None)
        self.proxy_attrs = kwargs.pop('proxy_attrs', None)
        self.amf3 = kwargs.pop('amf3', None)
        self.external = kwargs.pop('external', None)
        self.dynamic = kwargs.pop('dynamic', None)
        self.synonym_attrs = kwargs.pop('synonym_attrs', {})
        self._compiled = False
        self.anonymous = False
        self.sealed = None
        self.bases = None
        if self.alias is None:
            self.anonymous = True
            # we don't set this to None because AMF3 untyped objects have a
            # class name of ''
            self.alias = ''
        else:
            if self.alias == '':
                raise ValueError('Cannot set class alias as \'\'')
        # Unless compilation is explicitly deferred, resolve all of the
        # metadata immediately.
        if not kwargs.pop('defer', False):
            self.compile()
        if kwargs:
            raise TypeError('Unexpected keyword arguments %r' % (kwargs,))
    def _checkExternal(self):
        # Externalised classes must supply IExternalizable-style
        # __readamf__/__writeamf__ callables.
        k = self.klass
        if not hasattr(k, '__readamf__'):
            raise AttributeError(
                "An externalised class was specified, but"
                " no __readamf__ attribute was found for %r" % (k,)
            )
        if not hasattr(k, '__writeamf__'):
            raise AttributeError(
                "An externalised class was specified, but"
                " no __writeamf__ attribute was found for %r" % (k,)
            )
        if not hasattr(k.__readamf__, '__call__'):
            raise TypeError("%s.__readamf__ must be callable" % (k.__name__,))
        if not hasattr(k.__writeamf__, '__call__'):
            raise TypeError("%s.__writeamf__ must be callable" % (k.__name__,))
    def compile(self):
        """
        This compiles the alias into a form that can be of most benefit to the
        en/decoder.
        """
        # Compilation is idempotent.
        if self._compiled:
            return
        self.decodable_properties = set()
        self.encodable_properties = set()
        self.inherited_dynamic = None
        self.inherited_sealed = None
        self.bases = []
        self.exclude_attrs = set(self.exclude_attrs or [])
        self.readonly_attrs = set(self.readonly_attrs or [])
        self.static_attrs = list(self.static_attrs or [])
        self.static_attrs_set = set(self.static_attrs)
        self.proxy_attrs = set(self.proxy_attrs or [])
        self.sealed = util.is_class_sealed(self.klass)
        if self.external:
            self._checkExternal()
            self._finalise_compile()
            # this class is external so no more compiling is necessary
            return
        if hasattr(self.klass, '__slots__'):
            # slots are treated as both encodable and decodable
            self.decodable_properties.update(self.klass.__slots__)
            self.encodable_properties.update(self.klass.__slots__)
        for k, v in self.klass.__dict__.iteritems():
            if not isinstance(v, property):
                continue
            # a property with a getter can be encoded; only one with a
            # setter can be decoded back onto the instance
            if v.fget:
                self.encodable_properties.update([k])
            if v.fset:
                self.decodable_properties.update([k])
            else:
                self.readonly_attrs.update([k])
        # Merge metadata from every base class (excluding the class itself).
        mro = inspect.getmro(self.klass)[1:]
        for c in mro:
            self._compile_base_class(c)
        self.getCustomProperties()
        self._finalise_compile()
    def _compile_base_class(self, klass):
        # Fold one base class's (compiled) alias metadata into this alias.
        if klass is object:
            return
        try:
            alias = pyamf.get_class_alias(klass)
        except UnknownClassAlias:
            # implicitly register an anonymous alias for unknown bases
            alias = pyamf.register_class(klass)
        alias.compile()
        self.bases.append((klass, alias))
        if alias.exclude_attrs:
            self.exclude_attrs.update(alias.exclude_attrs)
        if alias.readonly_attrs:
            self.readonly_attrs.update(alias.readonly_attrs)
        if alias.static_attrs:
            self.static_attrs_set.update(alias.static_attrs)
            # base-class static attrs are prepended, preserving their order
            for a in alias.static_attrs:
                if a not in self.static_attrs:
                    self.static_attrs.insert(0, a)
        if alias.proxy_attrs:
            self.proxy_attrs.update(alias.proxy_attrs)
        if alias.encodable_properties:
            self.encodable_properties.update(alias.encodable_properties)
        if alias.decodable_properties:
            self.decodable_properties.update(alias.decodable_properties)
        if self.amf3 is None and alias.amf3:
            self.amf3 = alias.amf3
        if self.dynamic is None and alias.dynamic is not None:
            self.inherited_dynamic = alias.dynamic
        if alias.sealed is not None:
            self.inherited_sealed = alias.sealed
        if alias.synonym_attrs:
            # this alias's own synonyms take precedence over inherited ones
            x = self.synonym_attrs
            self.synonym_attrs = alias.synonym_attrs.copy()
            self.synonym_attrs.update(x)
    def _finalise_compile(self):
        # Resolve dynamic-ness: an explicit setting wins, then inheritance;
        # sealed classes are never dynamic.
        if self.dynamic is None:
            self.dynamic = True
            if self.inherited_dynamic is not None:
                if not self.inherited_dynamic \
                        and not self.sealed \
                        and self.inherited_sealed:
                    self.dynamic = True
                else:
                    self.dynamic = self.inherited_dynamic
        if self.sealed:
            self.dynamic = False
        if self.amf3 is None:
            self.amf3 = False
        if self.external is None:
            self.external = False
        if self.static_attrs:
            self.static_attrs = list(self.static_attrs)
            self.static_attrs.sort()
            self.encodable_properties.update(self.static_attrs)
            self.decodable_properties.update(self.static_attrs)
        if self.static_attrs:
            self.static_attrs_set.update(self.static_attrs)
            if self.exclude_attrs:
                self.static_attrs_set.difference_update(self.exclude_attrs)
            # NOTE(review): static_attrs_set was just built from static_attrs
            # (then only reduced), so this branch looks unreachable - and
            # remove() on a missing element would raise ValueError. Confirm
            # the intended behaviour here.
            for a in self.static_attrs_set:
                if a not in self.static_attrs:
                    self.static_attrs.remove(a)
        # Normalise each attr collection to either None (empty) or a
        # sorted list, so the en/decoder can iterate deterministically.
        if not self.exclude_attrs:
            self.exclude_attrs = None
        else:
            self.encodable_properties.difference_update(self.exclude_attrs)
            self.decodable_properties.difference_update(self.exclude_attrs)
        if self.exclude_attrs is not None:
            self.exclude_attrs = list(self.exclude_attrs)
            self.exclude_attrs.sort()
        if not self.readonly_attrs:
            self.readonly_attrs = None
        else:
            self.decodable_properties.difference_update(self.readonly_attrs)
        if self.readonly_attrs is not None:
            self.readonly_attrs = list(self.readonly_attrs)
            self.readonly_attrs.sort()
        if not self.proxy_attrs:
            self.proxy_attrs = None
        else:
            self.proxy_attrs = list(self.proxy_attrs)
            self.proxy_attrs.sort()
        if len(self.decodable_properties) == 0:
            self.decodable_properties = None
        else:
            self.decodable_properties = list(self.decodable_properties)
            self.decodable_properties.sort()
        if len(self.encodable_properties) == 0:
            self.encodable_properties = None
        else:
            self.encodable_properties = list(self.encodable_properties)
            self.encodable_properties.sort()
        self.non_static_encodable_properties = None
        if self.encodable_properties:
            self.non_static_encodable_properties = set(
                self.encodable_properties
            )
            if self.static_attrs:
                self.non_static_encodable_properties.difference_update(
                    self.static_attrs
                )
        # Shortcut flags: when no filtering applies at all, the en/decoder
        # can copy __dict__ wholesale instead of attribute-by-attribute.
        self.shortcut_encode = True
        self.shortcut_decode = True
        if (self.encodable_properties or self.static_attrs or
                self.exclude_attrs or self.proxy_attrs or self.external or
                self.synonym_attrs):
            self.shortcut_encode = False
        if (self.decodable_properties or self.static_attrs or
                self.exclude_attrs or self.readonly_attrs or
                not self.dynamic or self.external or self.synonym_attrs):
            self.shortcut_decode = False
        self.is_dict = False
        if issubclass(self.klass, dict) or self.klass is dict:
            self.is_dict = True
        self._compiled = True
    def is_compiled(self):
        # True once compile() has completed.
        return self._compiled
    def __str__(self):
        return self.alias
    def __repr__(self):
        k = self.__class__
        return '<%s.%s alias=%r class=%r @ 0x%x>' % (
            k.__module__,
            k.__name__,
            self.alias,
            self.klass,
            id(self)
        )
    def __eq__(self, other):
        # An alias compares equal to its alias string, to another alias for
        # the same class, or to the class itself.
        if isinstance(other, basestring):
            return self.alias == other
        elif isinstance(other, self.__class__):
            return self.klass == other.klass
        elif isinstance(other, python.class_types):
            return self.klass == other
        else:
            return False
    def __hash__(self):
        # Identity hash: aliases are registered/stored per-instance.
        return id(self)
    def checkClass(self, klass):
        """
        This function is used to check if the class being aliased fits certain
        criteria. The default is to check that C{__new__} is available or the
        C{__init__} constructor does not need additional arguments. If this is
        the case then L{TypeError} will be raised.
        @since: 0.4
        """
        # Check for __new__ support.
        if hasattr(klass, '__new__') and hasattr(klass.__new__, '__call__'):
            # Should be good to go.
            return
        # Check that the constructor of the class doesn't require any additonal
        # arguments.
        if not (
            hasattr(klass, '__init__') and hasattr(klass.__init__, '__call__')
        ):
            return
        # Python 2 unbound-method introspection.
        klass_func = klass.__init__.im_func
        if not hasattr(klass_func, 'func_code'):
            # Can't examine it, assume it's OK.
            return
        if klass_func.func_defaults:
            available_arguments = len(klass_func.func_defaults) + 1
        else:
            available_arguments = 1
        needed_arguments = klass_func.func_code.co_argcount
        if available_arguments >= needed_arguments:
            # Looks good to me.
            return
        spec = inspect.getargspec(klass_func)
        raise TypeError(
            "__init__ doesn't support additional arguments: %s" % (
                inspect.formatargspec(*spec)
            )
        )
    def getAttribute(self, obj, attr, codec=None):
        """
        Get the attribute `attr` from `obj`. If no attribute exists,
        `pyamf.Undefined` is returned.
        @param codec: The current `pyamf.codec.Codec` getting the attribute
            (if there is one).
        """
        # NOTE(review): despite the docstring, a missing attribute raises
        # AttributeError here (callers such as getEncodableAttributes catch
        # it and substitute pyamf.Undefined themselves).
        return getattr(obj, attr)
    def getEncodableAttributes(self, obj, codec=None):
        """
        Must return a C{dict} of attributes to be encoded, even if its empty.
        @param codec: An optional argument that will contain the encoder
            instance calling this function.
        @since: 0.5
        """
        if not self._compiled:
            self.compile()
        if self.is_dict:
            return dict(obj)
        # Fast path: no filtering configured and the class is dynamic.
        if self.shortcut_encode and self.dynamic:
            return obj.__dict__.copy()
        attrs = {}
        if self.static_attrs:
            for attr in self.static_attrs:
                try:
                    attrs[attr] = self.getAttribute(obj, attr, codec=codec)
                except AttributeError:
                    attrs[attr] = pyamf.Undefined
        if not self.dynamic:
            if self.non_static_encodable_properties:
                for attr in self.non_static_encodable_properties:
                    attrs[attr] = self.getAttribute(obj, attr, codec=codec)
            return attrs
        dynamic_props = util.get_properties(obj)
        if not self.shortcut_encode:
            dynamic_props = set(dynamic_props)
            if self.encodable_properties:
                dynamic_props.update(self.encodable_properties)
            if self.static_attrs:
                dynamic_props.difference_update(self.static_attrs)
            if self.exclude_attrs:
                dynamic_props.difference_update(self.exclude_attrs)
        for attr in dynamic_props:
            attrs[attr] = self.getAttribute(obj, attr, codec=codec)
        # Replace proxied attributes with their proxy counterparts.
        if self.proxy_attrs is not None and attrs and codec:
            context = codec.context
            for k, v in attrs.copy().iteritems():
                if k in self.proxy_attrs:
                    attrs[k] = context.getProxyForObject(v)
        # Rename attributes according to the synonym map (python -> AMF).
        if self.synonym_attrs:
            missing = object()
            for k, v in self.synonym_attrs.iteritems():
                value = attrs.pop(k, missing)
                if value is missing:
                    continue
                attrs[v] = value
        return attrs
    def getDecodableAttributes(self, obj, attrs, codec=None):
        """
        Returns a dictionary of attributes for C{obj} that has been filtered,
        based on the supplied C{attrs}. This allows for fine grain control
        over what will finally end up on the object or not.
        @param obj: The object that will recieve the attributes.
        @param attrs: The C{attrs} dictionary that has been decoded.
        @param codec: An optional argument that will contain the decoder
            instance calling this function.
        @return: A dictionary of attributes that can be applied to C{obj}
        @since: 0.5
        """
        if not self._compiled:
            self.compile()
        changed = False
        props = set(attrs.keys())
        if self.static_attrs:
            # All declared static attrs must be present in the stream.
            missing_attrs = self.static_attrs_set.difference(props)
            if missing_attrs:
                raise AttributeError(
                    'Static attributes %r expected when decoding %r' % (
                        missing_attrs, self.klass
                    )
                )
            props.difference_update(self.static_attrs)
        if not props:
            return attrs
        if not self.dynamic:
            # Non-dynamic classes only accept declared decodable properties.
            if not self.decodable_properties:
                props = set()
            else:
                props.intersection_update(self.decodable_properties)
            changed = True
        if self.readonly_attrs:
            props.difference_update(self.readonly_attrs)
            changed = True
        if self.exclude_attrs:
            props.difference_update(self.exclude_attrs)
            changed = True
        # Swap proxied values for the real objects they wrap.
        if self.proxy_attrs is not None and codec:
            context = codec.context
            for k in self.proxy_attrs:
                try:
                    v = attrs[k]
                except KeyError:
                    continue
                attrs[k] = context.getObjectForProxy(v)
        if changed:
            # apply all filters before synonyms
            a = {}
            [a.__setitem__(p, attrs[p]) for p in props]
            attrs = a
        # Rename attributes according to the synonym map (AMF -> python).
        if self.synonym_attrs:
            missing = object()
            for k, v in self.synonym_attrs.iteritems():
                value = attrs.pop(v, missing)
                if value is missing:
                    continue
                attrs[k] = value
        return attrs
    def applyAttributes(self, obj, attrs, codec=None):
        """
        Applies the collection of attributes C{attrs} to aliased object C{obj}.
        Called when decoding reading aliased objects from an AMF byte stream.
        Override this to provide fine grain control of application of
        attributes to C{obj}.
        @param codec: An optional argument that will contain the en/decoder
            instance calling this function.
        """
        if not self._compiled:
            self.compile()
        if not self.shortcut_decode:
            attrs = self.getDecodableAttributes(obj, attrs, codec=codec)
        else:
            # Fast paths: plain dicts and unsealed dynamic objects can take
            # the attrs wholesale without filtering.
            if self.is_dict:
                obj.update(attrs)
                return
            if not self.sealed:
                obj.__dict__.update(attrs)
                return
        util.set_attrs(obj, attrs)
    def getCustomProperties(self):
        """
        Overrride this to provide known static properties based on the aliased
        class.
        @since: 0.5
        """
    def createInstance(self, codec=None):
        """
        Creates an instance of the klass.
        @return: Instance of C{self.klass}.
        """
        # New-style classes are created without running __init__.
        if type(self.klass) is type:
            return self.klass.__new__(self.klass)
        return self.klass()
| mit | 7c47e0f1a778adc4cf36975bbd42040e | 29.236797 | 79 | 0.558285 | 4.317441 | false | false | false | false |
hydralabs/pyamf | doc/tutorials/examples/actionscript/bytearray/python/client.py | 8 | 1278 | # Copyright (c) The PyAMF Project.
# See LICENSE for details.
"""
Python ByteArray example.
@since: 0.5
"""
import os
from optparse import OptionParser
from gateway import images_root
from pyamf.amf3 import ByteArray
from pyamf.remoting.client import RemotingService
# parse commandline options
parser = OptionParser()
parser.add_option("-p", "--port", default=8000,
dest="port", help="port number [default: %default]")
parser.add_option("--host", default="127.0.0.1",
dest="host", help="host address [default: %default]")
(options, args) = parser.parse_args()
# define gateway
url = 'http://%s:%d' % (options.host, int(options.port))
server = RemotingService(url)
service = server.getService('getSnapshots')()
# get list of snapshots
base_path = service[0]
types = service[1]
snapshots = service[2]
print "Found %d snapshot(s):" % (len(snapshots))
for snapshot in snapshots:
print "\t%s%s" % (base_path, snapshot['name'])
# save snapshot
path = 'django-logo.jpg'
image = os.path.join(images_root, path)
file = open(image, 'r').read()
snapshot = ByteArray()
snapshot.write(file)
save_snapshot = server.getService('ByteArray.saveSnapshot')
saved = save_snapshot(snapshot, 'jpg')
print "Saved snapshot:\n\t%s:\t%s" % (saved['name'], saved['url'])
| mit | 290eef0c89aacf9d7977fa237a1eae1a | 22.666667 | 66 | 0.698748 | 3.124694 | false | false | false | false |
hydralabs/pyamf | doc/tutorials/examples/gateways/appengine/demo/simplejson/jsonfilter.py | 178 | 1662 | import simplejson
import cgi
class JSONFilter(object):
    # WSGI middleware: decodes a JSON POST body into environ['jsonfilter.json'],
    # JSON-encodes the wrapped app's return value, and supports JSONP via a
    # ?jsonp=callback query parameter.
    def __init__(self, app, mime_type='text/x-json'):
        self.app = app
        self.mime_type = mime_type
    def __call__(self, environ, start_response):
        # Read JSON POST input to jsonfilter.json if matching mime type
        response = {'status': '200 OK', 'headers': []}
        # Capture the wrapped app's status/headers instead of sending them
        # immediately; they are merged into our own response below.
        def json_start_response(status, headers):
            response['status'] = status
            response['headers'].extend(headers)
        environ['jsonfilter.mime_type'] = self.mime_type
        if environ.get('REQUEST_METHOD', '') == 'POST':
            if environ.get('CONTENT_TYPE', '') == self.mime_type:
                # Pass CONTENT_LENGTH to read() only when it is set and
                # non-empty; otherwise read the whole input stream.
                args = [_ for _ in [environ.get('CONTENT_LENGTH')] if _]
                data = environ['wsgi.input'].read(*map(int, args))
                environ['jsonfilter.json'] = simplejson.loads(data)
        res = simplejson.dumps(self.app(environ, json_start_response))
        jsonp = cgi.parse_qs(environ.get('QUERY_STRING', '')).get('jsonp')
        if jsonp:
            # Wrap the JSON payload in the requested callback: cb(<json>)
            content_type = 'text/javascript'
            res = ''.join(jsonp + ['(', res, ')'])
        elif 'Opera' in environ.get('HTTP_USER_AGENT', ''):
            # Opera has bunk XMLHttpRequest support for most mime types
            content_type = 'text/plain'
        else:
            content_type = self.mime_type
        # NOTE(review): PEP 3333 requires header values to be strings;
        # len(res) here is an int - confirm the server tolerates this.
        headers = [
            ('Content-type', content_type),
            ('Content-length', len(res)),
        ]
        headers.extend(response['headers'])
        start_response(response['status'], headers)
        return [res]
def factory(app, global_conf, **kw):
    """Paste-style filter factory: wrap *app* in a L{JSONFilter}."""
    json_filter = JSONFilter(app, **kw)
    return json_filter
| mit | 6f46f8d726c919255b60cacf2b438c6d | 40.55 | 74 | 0.565584 | 3.929078 | false | false | false | false |
hydralabs/pyamf | pyamf/adapters/_google_appengine_ext_ndb.py | 6 | 10205 | # Copyright (c) The PyAMF Project.
# See LICENSE.txt for details.
"""
Google App Engine ndb adapter module.
"""
import datetime
from google.appengine.ext import ndb
from google.appengine.ext.ndb import polymodel
import pyamf
from pyamf.adapters import util, models as adapter_models, gae_base
NDB_STUB_NAME = 'gae_ndb_stub'
class NDBReferenceCollection(gae_base.EntityReferenceCollection):
    """
    This helper class holds a dict of klass to key/objects loaded from the
    Datastore.
    @since: 0.4.1
    """
    # Only ndb model classes participate in this reference collection.
    base_classes = (ndb.Model, ndb.Expando)
class NDBStubCollection(gae_base.StubCollection):
    def fetchEntities(self):
        """Batch-fetch every queued key, mapping each key to its entity."""
        keys = self.to_fetch
        return dict(zip(keys, ndb.get_multi(keys)))
class NDBClassAlias(gae_base.BaseDatastoreClassAlias):
    """
    This class contains all the business logic to interact with Google's
    Datastore API's. Any C{ndb.Model} or C{ndb.Expando} classes will use this
    class alias for encoding/decoding.
    We also add a number of indexes to the encoder context to aggressively
    decrease the number of Datastore API's that we need to complete.
    """
    base_classes = (ndb.Model, polymodel.PolyModel)
    # Key under which decode stubs are collected on the codec context.
    context_stub_name = NDB_STUB_NAME
    def getEntityRefCollection(self, codec):
        # The per-encode/decode entity cache lives on the codec context.
        return get_ndb_context(codec)
    def makeStubCollection(self):
        return NDBStubCollection()
    def encode_key(self, obj):
        # Entities without a key (unsaved) have no encodable reference.
        key = obj.key
        if not key:
            return None
        return key.urlsafe()
    def decode_key(self, key):
        # Inverse of encode_key: rebuild the ndb.Key from its urlsafe form.
        return ndb.Key(urlsafe=key)
    def getCustomProperties(self):
        # Partition the model's declared properties so the en/decoders can
        # treat repeated, scalar and computed properties differently.
        props = {}
        # list of property names that are considered read only
        read_only_props = []
        repeated_props = {}
        non_repeated_props = {}
        # list of property names that are computed
        computed_props = {}
        for name, prop in self.klass._properties.iteritems():
            props[name] = prop
            if prop._repeated:
                repeated_props[name] = prop
            else:
                non_repeated_props[name] = prop
            if isinstance(prop, ndb.ComputedProperty):
                computed_props[name] = prop
        if issubclass(self.klass, polymodel.PolyModel):
            del props['class']
        # check if the property is a defined as a computed property. These
        # types of properties are read-only
        for name, value in self.klass.__dict__.iteritems():
            if isinstance(value, ndb.ComputedProperty):
                read_only_props.append(name)
        self.encodable_properties.update(props.keys())
        self.decodable_properties.update(props.keys())
        self.readonly_attrs.update(read_only_props)
        # Computed properties can be encoded but never decoded back.
        if computed_props:
            self.decodable_properties.difference_update(computed_props.keys())
        # Normalise empty dicts to None so later truth-tests are cheap.
        self.model_properties = props or None
        self.repeated_properties = repeated_props or None
        self.non_repeated_properties = non_repeated_props or None
        self.computed_properties = computed_props or None
    def getDecodableAttributes(self, obj, attrs, codec=None):
        # Run the base filtering first, then coerce each value through the
        # registered per-property decoders.
        attrs = super(NDBClassAlias, self).getDecodableAttributes(
            obj, attrs, codec=codec
        )
        if self.repeated_properties:
            for name, prop in self.repeated_properties.iteritems():
                try:
                    value = attrs[name]
                except KeyError:
                    continue
                # Missing/falsy repeated values decode to an empty list.
                if not value:
                    attrs[name] = []
                    continue
                # Decode each element of the repeated property in place.
                for idx, val in enumerate(value):
                    value[idx] = adapter_models.decode_model_property(
                        obj,
                        prop,
                        val
                    )
                attrs[name] = value
        if self.non_repeated_properties:
            adapter_models.decode_model_properties(
                obj,
                self.non_repeated_properties,
                attrs,
            )
        return attrs
    def encode_property(self, obj, prop, value):
        # Scalar properties are encoded directly; repeated properties are
        # encoded element-by-element (in place).
        if not prop._repeated:
            return adapter_models.encode_model_property(
                obj,
                prop,
                value
            )
        if not value:
            return []
        for idx, val in enumerate(value):
            value[idx] = adapter_models.encode_model_property(
                obj,
                prop,
                val
            )
        return value
    def getEncodableAttributes(self, obj, codec=None):
        attrs = super(NDBClassAlias, self).getEncodableAttributes(
            obj, codec=codec
        )
        # Run every declared (model-level) property through its encoder.
        if self.model_properties:
            for name in self.encodable_properties:
                prop = self.model_properties.get(name, None)
                if not prop:
                    continue
                try:
                    value = attrs[name]
                except KeyError:
                    value = self.getAttribute(obj, name, codec=codec)
                attrs[name] = self.encode_property(
                    obj,
                    prop,
                    value
                )
        # Expando instances can carry ad-hoc properties that are not in the
        # class-level map; encode those too.
        if isinstance(obj, ndb.Expando):
            for name, prop in obj._properties.iteritems():
                if name in self.model_properties:
                    continue
                value = self.getAttribute(obj, name, codec=codec)
                attrs[name] = self.encode_property(
                    obj,
                    prop,
                    value
                )
        return attrs
def get_ndb_context(context):
    """
    Return the C{gae_ndb_objects} index stored on *context*, creating and
    attaching a fresh one on first access.
    @param context: The context to load the C{gae_ndb_objects} index from.
    @return: The C{gae_ndb_objects} index reference.
    @rtype: Instance of L{GAEReferenceCollection}
    @since: 0.4.1
    """
    if 'gae_ndb_context' not in context:
        context['gae_ndb_context'] = NDBReferenceCollection()
    return context['gae_ndb_context']
def encode_ndb_instance(obj, encoder=None):
    """
    The GAE Datastore creates new instances of objects for each get request.
    This is a problem for PyAMF as it uses the id(obj) of the object to do
    reference checking.
    We could just ignore the problem, but the objects are conceptually the
    same so the effort should be made to attempt to resolve references for a
    given object graph.
    We create a new map on the encoder context object which contains a dict of
    C{object.__class__: {key1: object1, key2: object2, .., keyn: objectn}}. We
    use the datastore key to do the reference checking.
    @since: 0.4.1
    """
    key = obj.key
    # Unsaved entities (no key, or a key without an id) cannot take part in
    # datastore-key based reference resolution; encode them directly.
    if not (key and key.id()):
        encoder.writeObject(obj)
        return
    referenced_object = _get_by_class_key(encoder, obj.__class__, key, obj)
    if referenced_object:
        encoder.writeObject(referenced_object)
    else:
        encoder.writeElement(None)
def encode_ndb_key(key, encoder=None):
    """
    When encountering an L{ndb.Key} instance, find the entity it points at
    (via the per-encode cache or the datastore) and encode that instead.
    """
    # Resolve the model class registered for this key's kind.
    model_class = ndb.Model._kind_map.get(key.kind())
    entity = _get_by_class_key(encoder, model_class, key)
    if entity:
        encoder.writeObject(entity)
    else:
        encoder.writeElement(None)
def _get_by_class_key(codec, klass, key, obj=None):
    """
    Look up the entity for (klass, key) in the per-codec reference cache.
    On a miss, use *obj* when supplied (or fetch the key from the datastore)
    and cache the result before returning it.
    """
    cache = get_ndb_context(codec.context.extra)
    try:
        entity = cache.get(klass, key)
    except KeyError:
        entity = obj if obj else key.get()
        cache.set(klass, key, entity)
    return entity
@adapter_models.register_property_decoder(ndb.KeyProperty)
def decode_key_property(obj, prop, value):
    """Rebuild an L{ndb.Key} from its urlsafe form; empty values decode to None."""
    return ndb.Key(urlsafe=value) if value else None
@adapter_models.register_property_decoder(ndb.DateProperty)
def decode_time_property(obj, prop, value):
    # AMF delivers datetimes; a DateProperty only wants the date portion.
    if hasattr(value, 'date'):
        return value.date()
    return value
@adapter_models.register_property_decoder(ndb.FloatProperty)
def decode_float_property(obj, prop, value):
    # Whole numbers may arrive as ints/longs; coerce them for the property.
    if isinstance(value, (int, long)):
        value = float(value)
    return value
@adapter_models.register_property_decoder(ndb.IntegerProperty)
def decode_int_property(obj, prop, value):
    # AMF3 numbers decode as floats; convert back to an integer only when
    # the value is exact (no fractional part), otherwise leave it untouched
    # and let the datastore complain.
    if isinstance(value, float):
        as_long = long(value)
        if as_long == value:
            return as_long
    return value
@adapter_models.register_property_encoder(ndb.KeyProperty)
def encode_key_property(obj, prop, value):
    # ndb.Key instances are serialised via their urlsafe representation.
    if hasattr(value, 'urlsafe'):
        return value.urlsafe()
    return value
@adapter_models.register_property_encoder(ndb.TimeProperty)
def encode_time_property(obj, prop, value):
    # PyAMF supports datetime.datetime objects and won't decide what date to
    # add to this time value. Users will have to figure it out themselves
    raise pyamf.EncodeError('ndb.TimeProperty is not supported by PyAMF')
@adapter_models.register_property_encoder(ndb.DateProperty)
def encode_date_property(obj, prop, value):
    """Promote a C{datetime.date} to a midnight C{datetime.datetime}."""
    if value:
        return datetime.datetime.combine(value, datetime.time(0, 0, 0))
    return value
def post_ndb_process(payload, context):
    """
    Post-decode hook: materialise any ndb stubs collected while decoding.
    Returns *payload* unchanged.
    """
    stubs = context.get(NDB_STUB_NAME, None)
    if stubs:
        stubs.transform()
    return payload
# small optimisation to compile the ndb.Model base class
if hasattr(ndb.model, '_NotEqualMixin'):
    not_equal_mixin = pyamf.register_class(ndb.model._NotEqualMixin)
    not_equal_mixin.compile()
    del not_equal_mixin
# initialise the module here: hook into pyamf
# - route all ndb.Model/Expando (de)serialisation through NDBClassAlias
pyamf.register_alias_type(NDBClassAlias, ndb.Model, ndb.Expando)
# - queries encode as plain lists of their results
pyamf.add_type(ndb.Query, util.to_list)
# - entities and keys get reference-aware custom encoders
pyamf.add_type(ndb.Model, encode_ndb_instance)
pyamf.add_post_decode_processor(post_ndb_process)
pyamf.add_type(ndb.Key, encode_ndb_key)
| mit | ca35df6180f795590fbbf2ee2d5a7cf0 | 25.997354 | 79 | 0.609309 | 4.006675 | false | false | false | false |
qtile/qtile | libqtile/scripts/start.py | 2 | 4388 | # Copyright (c) 2008, Aldo Cortesi. All rights reserved.
# Copyright (c) 2011, Florian Mounier
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Set the locale before any widgets or anything are imported, so any widget
# whose defaults depend on a reasonable locale sees something reasonable.
import locale
from os import getenv, makedirs, path
from sys import exit
import libqtile.backend
from libqtile import confreader
from libqtile.log_utils import logger
def rename_process():
    """
    Try to rename the qtile process if py-setproctitle is installed:
    http://code.google.com/p/py-setproctitle/
    Will fail silently if it's not installed. Setting the title lets you do
    stuff like "killall qtile".
    """
    try:
        import setproctitle
    except ImportError:
        return
    setproctitle.setproctitle("qtile")
def make_qtile(options):
    """Build and return a configured Qtile instance from parsed CLI options."""
    kore = libqtile.backend.get_core(options.backend)
    # First run: seed the user's config from the bundled default config.
    if not path.isfile(options.configfile):
        try:
            makedirs(path.dirname(options.configfile), exist_ok=True)
            from shutil import copyfile
            default_config_path = path.join(
                path.dirname(__file__), "..", "resources", "default_config.py"
            )
            copyfile(default_config_path, options.configfile)
            logger.info("Copied default_config.py to %s", options.configfile)
        except Exception:
            # Best-effort: Qtile can still start with the (missing) path and
            # fail later in confreader, so only log here.
            logger.exception("Failed to copy default_config.py to %s:", options.configfile)
    config = confreader.Config(options.configfile)
    # XXX: the import is here because we need to call init_log
    # before start importing stuff
    from libqtile.core.manager import Qtile
    return Qtile(
        kore,
        config,
        no_spawn=options.no_spawn,
        state=options.state,
        socket_path=options.socket,
    )
def start(options):
    """Entry point for `qtile start`: set locale, build Qtile and run its loop."""
    # Honour the user's locale so widget defaults render sensibly; ignore
    # failures on misconfigured systems.
    try:
        # NOTE(review): locale.getdefaultlocale() is deprecated in newer
        # Python releases - confirm before relying on it long term.
        locale.setlocale(locale.LC_ALL, locale.getdefaultlocale())
    except locale.Error:
        pass
    rename_process()
    q = make_qtile(options)
    try:
        q.loop()
    except Exception:
        logger.exception("Qtile crashed")
        exit(1)
    logger.info("Exiting...")
def add_subcommand(subparsers, parents):
    """Register the ``qtile start`` subcommand and its options."""
    parser = subparsers.add_parser("start", parents=parents, help="Start the window manager")
    # Default config location honours XDG_CONFIG_HOME, falling back to
    # ~/.config/qtile/config.py.
    default_config = path.expanduser(
        path.join(getenv("XDG_CONFIG_HOME", "~/.config"), "qtile", "config.py")
    )
    parser.add_argument(
        "-c",
        "--config",
        action="store",
        default=default_config,
        dest="configfile",
        help="Use the specified configuration file",
    )
    parser.add_argument(
        "-s",
        "--socket",
        action="store",
        default=None,
        dest="socket",
        help="Path of the Qtile IPC socket.",
    )
    parser.add_argument(
        "-n",
        "--no-spawn",
        action="store_true",
        default=False,
        dest="no_spawn",
        help="Avoid spawning apps. (Used for restart)",
    )
    parser.add_argument(
        "--with-state",
        default=None,
        dest="state",
        help="Pickled QtileState object (typically used only internally)",
    )
    parser.add_argument(
        "-b",
        "--backend",
        default="x11",
        dest="backend",
        choices=libqtile.backend.CORES,
        help="Use specified backend.",
    )
    parser.set_defaults(func=start)
| mit | 5ead7646468a7b78581f9e592b507955 | 30.568345 | 93 | 0.655424 | 4.120188 | false | true | false | false |
qtile/qtile | libqtile/ipc.py | 2 | 9252 | # Copyright (c) 2008, Aldo Cortesi. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
A simple IPC mechanism for communicating between two local processes. We
use marshal to serialize data - this means that both client and server must
run the same Python version, and that clients must be trusted (as
un-marshalling untrusted data can result in arbitrary code execution).
"""
from __future__ import annotations
import asyncio
import fcntl
import json
import marshal
import os.path
import socket
import struct
from typing import Any
from libqtile.log_utils import logger
from libqtile.utils import get_cache_dir
HDRFORMAT = "!L"
HDRLEN = struct.calcsize(HDRFORMAT)
SOCKBASE = "qtilesocket.%s"
class IPCError(Exception):
    """Raised when IPC communication with the qtile server fails."""
def find_sockfile(display: str | None = None):
    """Return the path of the qtile IPC socket file.

    If *display* is given explicitly, the socket path for that display is
    returned directly.  Otherwise the display is taken from
    ``WAYLAND_DISPLAY`` or ``DISPLAY`` (in that order); failing that, the
    socket files for the conventional defaults (``wayland-0`` then ``:0``)
    are probed on disk.  Raises ``IPCError`` when nothing matches.
    """
    cache_directory = get_cache_dir()

    def sockpath(disp: str) -> str:
        # Socket files live in the qtile cache dir, one per display.
        return os.path.join(cache_directory, SOCKBASE % disp)

    if display:
        return sockpath(display)

    # Prefer the display advertised by the environment.
    for env_var in ("WAYLAND_DISPLAY", "DISPLAY"):
        env_display = os.environ.get(env_var)
        if env_display:
            return sockpath(env_display)

    # No environment hint: fall back to conventional default displays,
    # but only if a socket file actually exists for them.
    for fallback in ("wayland-0", ":0"):
        candidate = sockpath(fallback)
        if os.path.exists(candidate):
            return candidate

    raise IPCError("Could not find socket file.")
class _IPC:
    """Serialization helpers for IPC messages.

    Messages travel either as JSON text or as marshal bytes prefixed with a
    length header; ``unpack`` reports which encoding it saw so the reply can
    be packed the same way.
    """

    @staticmethod
    def unpack(data: bytes, *, is_json: bool | None = None) -> tuple[Any, bool]:
        """Decode *data* and return ``(message, was_json)``.

        When *is_json* is ``None``, a JSON decode is attempted first with a
        graceful fallback to the marshal format.  ``True`` forces JSON (and
        raises ``IPCError`` on failure); ``False`` skips straight to marshal.
        """
        if is_json is not False:
            try:
                return json.loads(data.decode()), True
            except ValueError as e:
                # Covers both bad JSON and undecodable bytes
                # (UnicodeDecodeError is a ValueError subclass).
                if is_json:
                    raise IPCError("Unable to decode json data") from e

        try:
            assert len(data) >= HDRLEN
            size = struct.unpack(HDRFORMAT, data[:HDRLEN])[0]
            assert size >= len(data[HDRLEN:])
            return marshal.loads(data[HDRLEN : HDRLEN + size]), False
        except AssertionError as e:
            raise IPCError("error reading reply! (probably the socket was disconnected)") from e

    @staticmethod
    def pack(msg: Any, *, is_json: bool = False) -> bytes:
        """Encode *msg* for the wire, as JSON or length-prefixed marshal."""
        if is_json:
            return json.dumps(msg, default=_IPC._json_encoder).encode()
        payload = marshal.dumps(msg)
        return struct.pack(HDRFORMAT, len(payload)) + payload

    @staticmethod
    def _json_encoder(field: Any) -> Any:
        """Convert non-serializable types to ones understood by stdlib json module"""
        if not isinstance(field, set):
            raise ValueError(f"Tried to JSON serialize unsupported type {type(field)}: {field}")
        return list(field)
class Client:
    """Client side of the qtile IPC connection.

    Connects to the Unix socket of a running qtile ``Server`` and exchanges
    one request/response pair per connection.
    """

    def __init__(self, socket_path: str, is_json=False) -> None:
        """Create a new IPC client

        Parameters
        ----------
        socket_path: str
            The file path to the file that is used to open the connection to
            the running IPC server.
        is_json: bool
            Pack and unpack messages as json
        """
        self.socket_path = socket_path
        self.is_json = is_json

    def call(self, data: Any) -> Any:
        # Alias kept for callers that use the command-interface naming.
        return self.send(data)

    def send(self, msg: Any) -> Any:
        """Send the message and return the response from the server

        If any exception is raised by the server, that will propagate out of
        this call.  This is the synchronous wrapper around
        :meth:`async_send`; it spins up a fresh event loop per call.
        """
        return asyncio.run(self.async_send(msg))

    async def async_send(self, msg: Any) -> Any:
        """Send the message to the server

        Connect to the server, then pack and send the message to the server,
        then wait for and return the response from the server.

        Raises ``IPCError`` if the socket cannot be opened or the server does
        not answer within the timeout.
        """
        try:
            # 3s to establish the connection; a missing/stale socket file
            # surfaces as FileNotFoundError, a dead server as refused.
            reader, writer = await asyncio.wait_for(
                asyncio.open_unix_connection(path=self.socket_path), timeout=3
            )
        except (ConnectionRefusedError, FileNotFoundError):
            raise IPCError("Could not open {}".format(self.socket_path))
        try:
            send_data = _IPC.pack(msg, is_json=self.is_json)
            writer.write(send_data)
            # EOF signals the server that the full request has been sent.
            writer.write_eof()
            read_data = await asyncio.wait_for(reader.read(), timeout=10)
        except asyncio.TimeoutError:
            raise IPCError("Server not responding")
        finally:
            # see the note in Server._server_callback()
            writer.close()
            await writer.wait_closed()
        data, _ = _IPC.unpack(read_data, is_json=self.is_json)
        return data
class Server:
    """Server side of the qtile IPC connection.

    Listens on a Unix socket and answers each client request by calling
    *handler* with the unpacked message and sending back its return value.
    """

    def __init__(self, socket_path: str, handler) -> None:
        self.socket_path = socket_path
        self.handler = handler
        self.server = None  # type: asyncio.AbstractServer | None
        # Remove any stale socket file left over from a previous run;
        # bind() below would otherwise fail with EADDRINUSE.
        if os.path.exists(socket_path):
            os.unlink(socket_path)
        self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM, 0)
        # Mark the socket close-on-exec so spawned clients don't inherit it.
        flags = fcntl.fcntl(self.sock.fileno(), fcntl.F_GETFD)
        fcntl.fcntl(self.sock.fileno(), fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC)
        self.sock.bind(self.socket_path)

    async def _server_callback(
        self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter
    ) -> None:
        """Callback when a connection is made to the server

        Read the data sent from the client, execute the requested command, and
        send the reply back to the client.  Malformed requests are logged and
        the connection is closed without a reply.
        """
        try:
            logger.debug("Connection made to server")
            data = await reader.read()
            logger.debug("EOF received by server")
            req, is_json = _IPC.unpack(data)
        except IPCError:
            logger.warning("Invalid data received, closing connection")
        else:
            rep = self.handler(req)
            # Reply in the same encoding the client used.
            result = _IPC.pack(rep, is_json=is_json)
            logger.debug("Sending result on receive EOF")
            writer.write(result)
            logger.debug("Closing connection on receive EOF")
            writer.write_eof()
        finally:
            writer.close()
            await writer.wait_closed()

    async def __aenter__(self) -> "Server":
        """Start and return the server"""
        await self.start()
        return self

    async def __aexit__(self, _exc_type, _exc_value, _tb) -> None:
        """Close and shutdown the server"""
        await self.close()

    async def start(self) -> None:
        """Start the server"""
        assert self.server is None
        logger.debug("Starting server")
        server_coroutine = asyncio.start_unix_server(self._server_callback, sock=self.sock)
        self.server = await server_coroutine

    async def close(self) -> None:
        """Close and shutdown the server"""
        assert self.server is not None
        logger.debug("Stopping server on close")
        self.server.close()
        await self.server.wait_closed()
        self.server = None
| mit | 1446abf9bd015a50dd5a773d6b329715 | 33.014706 | 96 | 0.628405 | 4.184532 | false | false | false | false |
qtile/qtile | libqtile/widget/countdown.py | 2 | 2363 | # Copyright (c) 2014 Sean Vig
# Copyright (c) 2014 roger
# Copyright (c) 2014 Tycho Andersen
# Copyright (c) 2014 Adi Sieker
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from datetime import datetime
from libqtile.widget import base
class Countdown(base.InLoopPollText):
    """A simple countdown timer text widget"""

    defaults = [
        (
            "format",
            "{D}d {H}h {M}m {S}s",
            "Format of the displayed text. Available variables:"
            "{D} == days, {H} == hours, {M} == minutes, {S} seconds.",
        ),
        ("update_interval", 1.0, "Update interval in seconds for the clock"),
        ("date", datetime.now(), "The datetime for the end of the countdown"),
    ]

    def __init__(self, **config):
        base.InLoopPollText.__init__(self, **config)
        self.add_defaults(Countdown.defaults)

    def poll(self):
        """Return the formatted time remaining until ``self.date``.

        Once the target datetime has passed, the display clamps to all
        zeroes rather than going negative.
        """
        remaining = self.date - datetime.now()
        if remaining.total_seconds() >= 0:
            days = remaining.days
            hours, leftover = divmod(remaining.seconds, 3600)
            minutes, seconds = divmod(leftover, 60)
        else:
            days = hours = minutes = seconds = 0
        return self.format.format(
            D="%02d" % days,
            H="%02d" % hours,
            M="%02d" % minutes,
            S="%02d" % seconds,
        )
| mit | 2eb7614f5cc67cfbaade4f00e1453806 | 35.921875 | 79 | 0.644096 | 4.039316 | false | false | false | false |
qtile/qtile | test/widgets/docs_screenshots/ss_netgraph.py | 2 | 2383 | # Copyright (c) 2022 elParaguayo
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import random
import sys
from importlib import reload
from types import ModuleType
import pytest
# Build a synthetic traffic trace for the screenshot: mostly idle (zero)
# samples with an ~30% chance of a burst between 1000 and 8000 bytes.
values = []
for _ in range(100):
    roll = random.randint(0, 10)
    values.append(random.randint(1000, 8000) if roll >= 8 else 0)
class MockPsutil(ModuleType):
    """Stand-in for the ``psutil`` module exposing fixed network counters."""

    up = 0
    down = 0

    @classmethod
    def net_io_counters(cls, pernic=False, _nowrap=True):
        def make_counters():
            # Fresh object per call/interface, mirroring psutil's API shape.
            class IOCounters:
                def __init__(self):
                    self.bytes_sent = 100
                    self.bytes_recv = 1034

            return IOCounters()

        if pernic:
            return {"wlp58s0": make_counters(), "lo": make_counters()}
        return make_counters()
@pytest.fixture
def widget(monkeypatch):
    """Provide the NetGraph widget class with psutil replaced by a mock.

    The mock is installed in ``sys.modules`` before the graph module is
    reloaded so that its module-level ``import psutil`` picks it up.
    """
    monkeypatch.setitem(sys.modules, "psutil", MockPsutil("psutil"))
    from libqtile.widget import graph

    # Reload so the module re-imports psutil and binds to the mock.
    reload(graph)
    yield graph.NetGraph
# One screenshot per widget configuration (graph type, line width, start
# position); the dicts are forwarded to the widget via the indirect fixture.
@pytest.mark.parametrize(
    "screenshot_manager",
    [
        {},
        {"type": "box"},
        {"type": "line"},
        {"type": "line", "line_width": 1},
        {"start_pos": "top"},
    ],
    indirect=True,
)
def ss_netgraph(screenshot_manager):
    """Render the NetGraph widget with a canned data series and screenshot it."""
    widget = screenshot_manager.c.widget["netgraph"]
    # Inject the pre-generated sample data directly into the running widget
    # so the screenshot is deterministic-looking rather than live traffic.
    widget.eval(f"self.values={values}")
    widget.eval(f"self.maxvalue={max(values)}")
    widget.eval("self.draw()")
    screenshot_manager.take_screenshot()
| mit | c28bac25bb31264246cfd473416e1f75 | 30.773333 | 79 | 0.686949 | 3.938843 | false | false | false | false |
qtile/qtile | libqtile/backend/wayland/drawer.py | 1 | 2980 | from __future__ import annotations
from typing import TYPE_CHECKING
import cairocffi
from libqtile import utils
from libqtile.backend import base
if TYPE_CHECKING:
from libqtile.backend.wayland.window import Internal
from libqtile.core.manager import Qtile
from libqtile.utils import ColorsType
class Drawer(base.Drawer):
    """
    A helper class for drawing and text layout.
    1. We stage drawing operations locally in memory using a cairo RecordingSurface.
    2. Then apply these operations to our ImageSurface self._source.
    3. Then copy the pixels onto the window's wlr_texture.
    """

    def __init__(self, qtile: Qtile, win: Internal, width: int, height: int):
        base.Drawer.__init__(self, qtile, win, width, height)
        # Bytes per row for an ARGB32 surface of this width; required when
        # writing raw pixel data into the wlr_texture below.
        self._stride = cairocffi.ImageSurface.format_stride_for_width(
            cairocffi.FORMAT_ARGB32, self.width
        )
        self._source = cairocffi.ImageSurface(cairocffi.FORMAT_ARGB32, width, height)
        with cairocffi.Context(self._source) as context:
            # Initialise surface to all black
            context.set_source_rgba(*utils.rgb("#000000"))
            context.paint()

    def _draw(
        self,
        offsetx: int = 0,
        offsety: int = 0,
        width: int | None = None,
        height: int | None = None,
    ) -> None:
        """Flush staged drawing operations to the window's texture.

        No-op when the target area starts beyond the window or when nothing
        has changed since the last draw.
        """
        if offsetx > self._win.width:
            return
        # We need to set the current draw area so we can compare to the previous one
        self.current_rect = (offsetx, offsety, width, height)
        # rect_changed = current_rect != self.previous_rect
        if not self.needs_update:
            return
        # Keep track of latest rect covered by this drawer
        self.previous_rect = self.current_rect
        # Make sure geometry doesn't extend beyond texture
        if width is None:
            width = self.width
        if width > self._win.width - offsetx:
            width = self._win.width - offsetx
        if height is None:
            height = self.height
        if height > self._win.height - offsety:
            height = self._win.height - offsety
        # Paint RecordingSurface operations to our window's ImageSurface
        with cairocffi.Context(self._source) as context:
            context.set_source_surface(self.surface)
            context.paint()
        # Copy drawn ImageSurface data into rendered wlr_texture
        self._win.texture.write_pixels(
            self._stride,
            width,
            height,
            cairocffi.cairo.cairo_image_surface_get_data(self._source._pointer),
            dst_x=offsetx,
            dst_y=offsety,
        )
        # Mark the window damaged so the compositor re-renders it.
        self._win.damage()

    def clear(self, colour: ColorsType) -> None:
        """Fill the backing ImageSurface with *colour*, replacing all pixels."""
        # Draw background straight to ImageSurface
        ctx = cairocffi.Context(self._source)
        ctx.save()
        # OPERATOR_SOURCE overwrites rather than blends, so transparent
        # colours replace the previous contents instead of stacking.
        ctx.set_operator(cairocffi.OPERATOR_SOURCE)
        self.set_source_rgb(colour, ctx=ctx)
        ctx.paint()
        ctx.restore()
| mit | 38c8bb38cc9c7a4c1702235de075e0cc | 32.111111 | 85 | 0.620805 | 3.973333 | false | false | false | false |
qtile/qtile | libqtile/backend/x11/drawer.py | 1 | 5970 | from __future__ import annotations
from typing import TYPE_CHECKING
import cairocffi
import xcffib.xproto
from libqtile import utils
from libqtile.backend import base
if TYPE_CHECKING:
from libqtile.backend.base import Internal
from libqtile.core.manager import Qtile
class Drawer(base.Drawer):
    """A helper class for drawing to Internal windows.

    The underlying surface here is an XCBSurface backed by a pixmap. We draw to the
    pixmap starting at offset 0, 0, and when the time comes to display to the window (on
    draw()), we copy the appropriate portion of the pixmap onto the window. In the event
    that our drawing area is resized, we invalidate the underlying surface and pixmap
    and recreate them when we need them again with the new geometry.
    """

    def __init__(self, qtile: Qtile, win: Internal, width: int, height: int):
        base.Drawer.__init__(self, qtile, win, width, height)
        # XCB resources are created lazily on first draw and freed on resize.
        self._xcb_surface = None
        self._pixmap = None
        self._gc = None
        self._depth, self._visual = qtile.core.conn.default_screen._get_depth_and_visual(
            win._depth
        )

    def finalize(self):
        # Release server-side resources before the generic teardown.
        self._free_xcb_surface()
        self._free_pixmap()
        self._free_gc()
        base.Drawer.finalize(self)

    @property
    def width(self):
        return self._width

    @width.setter
    def width(self, width):
        # Only invalidate when growing; a larger-than-needed pixmap can
        # presumably still serve a smaller draw area — confirm if changing.
        if width > self._width:
            self._free_xcb_surface()
            self._free_pixmap()
        self._width = width

    @property
    def height(self):
        return self._height

    @height.setter
    def height(self, height):
        if height > self._height:
            self._free_xcb_surface()
            self._free_pixmap()
        self._height = height

    @property
    def pixmap(self):
        if self._pixmap is None:
            # draw here since the only use case of this function is in the
            # systray widget which expects a filled pixmap.
            self.draw()
        return self._pixmap

    def _create_gc(self):
        """Create an X graphics context bound to this window."""
        gc = self.qtile.core.conn.conn.generate_id()
        self.qtile.core.conn.conn.core.CreateGC(
            gc,
            self._win.wid,
            xcffib.xproto.GC.Foreground | xcffib.xproto.GC.Background,
            [
                self.qtile.core.conn.default_screen.black_pixel,
                self.qtile.core.conn.default_screen.white_pixel,
            ],
        )
        return gc

    def _free_gc(self):
        if self._gc is not None:
            self.qtile.core.conn.conn.core.FreeGC(self._gc)
            self._gc = None

    def _create_xcb_surface(self):
        """Create a cairo XCBSurface wrapping the current pixmap."""
        surface = cairocffi.XCBSurface(
            self.qtile.core.conn.conn,
            self._pixmap,
            self._visual,
            self.width,
            self.height,
        )
        return surface

    def _free_xcb_surface(self):
        if self._xcb_surface is not None:
            self._xcb_surface.finish()
            self._xcb_surface = None

    def _create_pixmap(self):
        """Create a server-side pixmap matching the drawer's geometry."""
        pixmap = self.qtile.core.conn.conn.generate_id()
        self.qtile.core.conn.conn.core.CreatePixmap(
            self._depth,
            pixmap,
            self._win.wid,
            self.width,
            self.height,
        )
        return pixmap

    def _free_pixmap(self):
        if self._pixmap is not None:
            self.qtile.core.conn.conn.core.FreePixmap(self._pixmap)
            self._pixmap = None

    def _check_xcb(self):
        # If the Drawer has been resized/invalidated we need to recreate these
        if self._xcb_surface is None:
            self._pixmap = self._create_pixmap()
            self._xcb_surface = self._create_xcb_surface()

    def _paint(self):
        # Only attempt to run RecordingSurface's operations if we actually need to
        if self.needs_update:
            # Paint RecordingSurface operations to the XCBSurface
            ctx = cairocffi.Context(self._xcb_surface)
            ctx.set_source_surface(self.surface, 0, 0)
            ctx.paint()
            self.previous_rect = self.current_rect

    def _draw(
        self,
        offsetx: int = 0,
        offsety: int = 0,
        width: int | None = None,
        height: int | None = None,
    ):
        """Copy the staged drawing onto the window at the given offset."""
        self.current_rect = (offsetx, offsety, width, height)
        # If this is our first draw, create the gc
        if self._gc is None:
            self._gc = self._create_gc()
        # Check if we need to re-create XCBSurface
        # This may not be needed now that we call in `clear`
        self._check_xcb()
        # paint stored operations(if any) to XCBSurface
        self._paint()
        # Finally, copy XCBSurface's underlying pixmap to the window.
        self.qtile.core.conn.conn.core.CopyArea(
            self._pixmap,
            self._win.wid,
            self._gc,
            0,
            0,  # srcx, srcy
            offsetx,
            offsety,  # dstx, dsty
            self.width if width is None else width,
            self.height if height is None else height,
        )

    def _find_root_visual(self):
        # Returns None if the root visual is not among the allowed depths.
        for i in self.qtile.core.conn.default_screen.allowed_depths:
            for v in i.visuals:
                if v.visual_id == self.qtile.core.conn.default_screen.root_visual:
                    return v

    def clear(self, colour):
        """Fill the pixmap with *colour*, replacing previous contents."""
        # Check if we need to re-create XCBSurface
        self._check_xcb()
        # Draw background straight to XCB surface
        ctx = cairocffi.Context(self._xcb_surface)
        ctx.save()
        # OPERATOR_SOURCE overwrites rather than blends with existing pixels.
        ctx.set_operator(cairocffi.OPERATOR_SOURCE)
        self.set_source_rgb(colour, ctx=ctx)
        ctx.paint()
        ctx.restore()

    def set_source_rgb(self, colour, ctx=None):
        # Remove transparency from non-32 bit windows
        if utils.has_transparency(colour) and self._depth != 32:
            colour = utils.remove_transparency(colour)
        base.Drawer.set_source_rgb(self, colour, ctx)
| mit | 6bea00de0496845c9f5eb8e60a20aaf0 | 29.773196 | 89 | 0.58325 | 3.86658 | false | false | false | false |
qtile/qtile | libqtile/widget/launchbar.py | 1 | 11028 | # Copyright (c) 2014 Tycho Andersen
# Copyright (c) 2014 dequis
# Copyright (c) 2014-2015 Joseph Razik
# Copyright (c) 2014 Sean Vig
# Copyright (c) 2015 reus
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
This module define a widget that displays icons to launch softwares or commands
when clicked -- a launchbar.
Only png icon files are displayed, not xpm because cairo doesn't support
loading of xpm file.
The order of displaying (from left to right) is in the order of the list.
If no icon was found for the name provided and if default_icon is set to None
then the name is printed instead. If default_icon is defined then this icon is
displayed instead.
To execute a software:
- ('thunderbird', 'thunderbird -safe-mode', 'launch thunderbird in safe mode')
To execute a python command in qtile, begin with by 'qshell:'
- ('logout', 'qshell:self.qtile.shutdown()', 'logout from qtile')
"""
from __future__ import annotations
import os.path
import cairocffi
from xdg.IconTheme import getIconPath
from libqtile import bar
from libqtile.images import Img
from libqtile.log_utils import logger
from libqtile.widget import base
class LaunchBar(base._Widget):
    """
    A widget that display icons to launch the associated command.
    Text will displayed when no icon is found.
    Widget requirements: `pyxdg <https://pypi.org/project/pyxdg/>`__.
    """

    orientations = base.ORIENTATION_HORIZONTAL
    defaults = [
        ("padding", 2, "Padding between icons"),
        (
            "default_icon",
            "/usr/share/icons/oxygen/256x256/mimetypes/application-x-executable.png",
            "Default icon not found",
        ),
        ("font", "sans", "Text font"),
        ("fontsize", None, "Font pixel size. Calculated if None."),
        ("fontshadow", None, "Font shadow color, default is None (no shadow)"),
        ("foreground", "#ffffff", "Text colour."),
        (
            "progs",
            [],
            "A list of tuples (software_name, command_to_execute, comment), for example:"
            " [('thunderbird', 'thunderbird -safe-mode', 'launch thunderbird in safe mode'), "
            " ('logout', 'qshell:self.qtile.shutdown()', 'logout from qtile')]",
        ),
        ("text_only", False, "Don't use any icons."),
        ("icon_size", None, "Size of icons. ``None`` to fit to bar."),
        ("padding_y", 0, "Vertical adjustment for icons."),
    ]

    def __init__(
        self, _progs: list[tuple[str, str, str]] | None = None, width=bar.CALCULATED, **config
    ):
        base._Widget.__init__(self, width, **config)
        self.add_defaults(LaunchBar.defaults)
        # NOTE(review): values stored here are cairocffi.SurfacePattern or
        # base._TextBox (see setup_images) — the Img in the annotation looks
        # stale; confirm before tightening.
        self.surfaces: dict[str, Img | base._TextBox] = {}
        self.icons_files: dict[str, str | None] = {}
        self.icons_widths: dict[str, int] = {}
        # Horizontal pixel offset of each icon, filled in during draw() and
        # read by get_icon_in_position() for click handling.
        self.icons_offsets: dict[str, int] = {}
        if _progs:
            logger.warning(
                "The use of a positional argument in LaunchBar is deprecated. "
                "Please update your config to use progs=[...]."
            )
            config["progs"] = _progs
        # For now, ignore the comments but may be one day it will be useful
        self.progs = dict(
            enumerate(
                [
                    {
                        "name": prog[0],
                        "cmd": prog[1],
                        "comment": prog[2] if len(prog) > 2 else None,
                    }
                    for prog in config.get("progs", list())
                ]
            )
        )
        self.progs_name = set([prog["name"] for prog in self.progs.values()])
        self.length_type = bar.STATIC
        self.length = 0

    def _configure(self, qtile, pbar):
        base._Widget._configure(self, qtile, pbar)
        self.lookup_icons()
        self.setup_images()
        self.length = self.calculate_length()

    def setup_images(self):
        """Create image structures for each icon files."""
        # Icon height defaults to the bar height minus a small margin.
        self._icon_size = self.icon_size if self.icon_size is not None else self.bar.height - 4
        self._icon_padding = (self.bar.height - self._icon_size) // 2
        for img_name, iconfile in self.icons_files.items():
            if iconfile is None or self.text_only:
                # Only warn the user that there's no icon if they haven't set text only mode
                if not self.text_only:
                    logger.warning(
                        'No icon found for application "%s" (%s) switch to text mode',
                        img_name,
                        iconfile,
                    )
                # if no icon is found and no default icon was set, we just
                # print the name, based on a textbox.
                textbox = base._TextBox()
                textbox._configure(self.qtile, self.bar)
                textbox.layout = self.drawer.textlayout(
                    textbox.text,
                    self.foreground,
                    self.font,
                    self.fontsize,
                    self.fontshadow,
                    markup=textbox.markup,
                )
                # the name will be displayed
                textbox.text = img_name
                textbox.calculate_length()
                self.icons_widths[img_name] = textbox.width
                self.surfaces[img_name] = textbox
                continue
            else:
                try:
                    img = Img.from_path(iconfile)
                except cairocffi.Error:
                    logger.exception(
                        'Error loading icon for application "%s" (%s)', img_name, iconfile
                    )
                    return
                # Scale the image to _icon_size preserving aspect ratio.
                input_width = img.width
                input_height = img.height
                sp = input_height / (self._icon_size)
                width = int(input_width / sp)
                imgpat = cairocffi.SurfacePattern(img.surface)
                scaler = cairocffi.Matrix()
                scaler.scale(sp, sp)
                scaler.translate(self.padding * -1, -2)
                imgpat.set_matrix(scaler)
                imgpat.set_filter(cairocffi.FILTER_BEST)
                self.surfaces[img_name] = imgpat
                self.icons_widths[img_name] = width

    def _lookup_icon(self, name):
        """Search for the icon corresponding to one command."""
        self.icons_files[name] = None
        # if the software_name is directly an absolute path icon file
        if os.path.isabs(name):
            # name start with '/' thus it's an absolute path
            root, ext = os.path.splitext(name)
            if ext == ".png":
                self.icons_files[name] = name if os.path.isfile(name) else None
            else:
                # try to add the extension
                self.icons_files[name] = name + ".png" if os.path.isfile(name + ".png") else None
        else:
            # Delegate lookup to the XDG icon theme machinery.
            self.icons_files[name] = getIconPath(name)
        # no search method found an icon, so default icon
        if self.icons_files[name] is None:
            self.icons_files[name] = self.default_icon

    def lookup_icons(self):
        """Search for the icons corresponding to the commands to execute."""
        if self.default_icon is not None:
            if not os.path.isfile(self.default_icon):
                # if the default icon provided is not found, switch to
                # text mode
                self.default_icon = None
        for name in self.progs_name:
            self._lookup_icon(name)

    def get_icon_in_position(self, x, y):
        """Determine which icon is clicked according to its position."""
        # Icons are laid out left-to-right in index order, so the first icon
        # whose right edge is past x is the one that was clicked.
        for i in self.progs:
            if x < (
                self.icons_offsets[i]
                + self.icons_widths[self.progs[i]["name"]]
                + self.padding / 2
            ):
                return i

    def button_press(self, x, y, button):
        """Launch the associated command to the clicked icon."""
        base._Widget.button_press(self, x, y, button)
        if button == 1:
            icon = self.get_icon_in_position(x, y)
            if icon is not None:
                cmd = self.progs[icon]["cmd"]
                if cmd.startswith("qshell:"):
                    # "qshell:" commands are evaluated as Python in the
                    # widget's context (documented config feature).
                    exec(cmd[7:].lstrip())
                else:
                    self.qtile.spawn(cmd)
            self.draw()

    def draw(self):
        """Draw the icons in the widget."""
        self.drawer.clear(self.background or self.bar.background)
        xoffset = 0
        for i in sorted(self.progs.keys()):
            self.drawer.ctx.save()
            self.drawer.ctx.translate(xoffset, 0)
            self.icons_offsets[i] = xoffset + self.padding
            name = self.progs[i]["name"]
            icon_width = self.icons_widths[name]
            if isinstance(self.surfaces[name], base._TextBox):
                # display the name if no icon was found and no default icon
                textbox = self.surfaces[name]
                textbox.layout.draw(
                    self.padding + textbox.actual_padding,
                    int((self.bar.height - textbox.layout.height) / 2.0) + 1,
                )
            else:
                # display an icon
                self.drawer.ctx.save()
                self.drawer.ctx.translate(0, self._icon_padding + self.padding_y)
                self.drawer.ctx.set_source(self.surfaces[name])
                self.drawer.ctx.paint()
                self.drawer.ctx.restore()
            self.drawer.ctx.restore()
            self.drawer.draw(
                offsetx=self.offset + xoffset,
                offsety=self.offsety,
                width=icon_width + self.padding,
            )
            xoffset += icon_width + self.padding
        self.drawer.draw(offsetx=self.offset, offsety=self.offsety, width=self.width)

    def calculate_length(self):
        """Compute the width of the widget according to each icon width."""
        return sum(
            self.icons_widths[prg["name"]] for prg in self.progs.values()
        ) + self.padding * (len(self.progs) + 1)
| mit | ca6df1a5f9e82a4c79f4b4e04ccf3ea9 | 38.669065 | 97 | 0.568553 | 4.084444 | false | false | false | false |
qtile/qtile | libqtile/scripts/main.py | 2 | 2254 | import argparse
import logging
import sys
from libqtile.log_utils import get_default_log, init_log
from libqtile.scripts import check, cmd_obj, migrate, run_cmd, shell, start, top
# Resolve the installed qtile version, falling back to "dev" when qtile is
# not installed as a distribution (e.g. running from a source checkout).
try:
    # Python >= 3.8 ships package metadata in the stdlib.
    from importlib.metadata import PackageNotFoundError, distribution

    try:
        VERSION = distribution("qtile").version
    except PackageNotFoundError:
        # importlib.metadata is available but qtile itself is not installed.
        VERSION = "dev"
except ModuleNotFoundError:
    # Python 3.7 fallback: pkg_resources (setuptools) provides the metadata.
    try:
        import pkg_resources

        VERSION = pkg_resources.require("qtile")[0].version
    except ModuleNotFoundError:
        # setuptools itself is unavailable; pkg_resources was never bound, so
        # it must be caught before any clause that references it.
        VERSION = "dev"
    except pkg_resources.DistributionNotFound:
        VERSION = "dev"
def main():
    """Entry point for the ``qtile`` command line interface.

    Builds the argument parser, registers all subcommands, then dispatches
    to the selected subcommand's handler after initialising logging.
    """
    # Shared options (currently just --log-level) inherited by every
    # subcommand via parents=[parent_parser].
    parent_parser = argparse.ArgumentParser(add_help=False)
    parent_parser.add_argument(
        "-l",
        "--log-level",
        default="WARNING",
        dest="log_level",
        type=str.upper,
        choices=("DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"),
        help="Set qtile log level",
    )
    main_parser = argparse.ArgumentParser(
        prog="qtile",
        description="A full-featured, pure-Python tiling window manager.",
    )
    main_parser.add_argument(
        "-v",
        "--version",
        action="version",
        version=VERSION,
    )
    subparsers = main_parser.add_subparsers()
    start.add_subcommand(subparsers, [parent_parser])
    shell.add_subcommand(subparsers, [parent_parser])
    top.add_subcommand(subparsers, [parent_parser])
    run_cmd.add_subcommand(subparsers, [parent_parser])
    cmd_obj.add_subcommand(subparsers, [parent_parser])
    check.add_subcommand(subparsers, [parent_parser])
    migrate.add_subcommand(subparsers, [parent_parser])

    # `qtile help` should print help
    def print_help(options):
        main_parser.print_help()

    help_ = subparsers.add_parser("help", help="Print help information and exit")
    help_.set_defaults(func=print_help)
    options = main_parser.parse_args()
    try:
        log_level = getattr(logging, options.log_level)
        init_log(log_level, log_path=get_default_log())
        # NOTE(review): AttributeError here is meant to catch a missing
        # options.func (no subcommand given), but it will also swallow
        # AttributeErrors raised inside options.func itself — confirm.
        options.func(options)
    except AttributeError:
        main_parser.print_usage()
        print("")
        print("Did you mean:")
        print(" ".join(sys.argv + ["start"]))
        sys.exit(1)


if __name__ == "__main__":
    main()
| mit | a272fb44ad071d417e7fb30605f23a5f | 28.657895 | 81 | 0.64197 | 3.801012 | false | false | false | false |
qtile/qtile | libqtile/scripts/shell.py | 2 | 2316 | # Copyright (c) 2008, Aldo Cortesi. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from libqtile import ipc, sh
from libqtile.command import interface
def qshell(args) -> None:
    """Run the qtile shell against a running qtile instance.

    Connects over IPC (using ``args.socket`` or the auto-detected socket
    file), then either executes a single ``args.command`` or starts the
    interactive loop.
    """
    socket_path = args.socket if args.socket is not None else ipc.find_sockfile()
    client = ipc.Client(socket_path, is_json=args.is_json)
    qsh = sh.QSh(interface.IPCCommandInterface(client))
    if args.command is None:
        qsh.loop()
    else:
        qsh.process_line(args.command)
def add_subcommand(subparsers, parents):
    """Register the ``shell`` subcommand on the qtile CLI parser."""
    parser = subparsers.add_parser("shell", parents=parents, help="shell-like interface to qtile")
    parser.add_argument(
        "-s", "--socket",
        action="store", type=str, default=None,
        help="Use specified socket to connect to qtile.",
    )
    parser.add_argument(
        "-c", "--command",
        action="store", type=str, default=None,
        help="Run the specified qshell command and exit.",
    )
    parser.add_argument(
        "-j", "--json",
        action="store_true", default=False, dest="is_json",
        help="Use json in order to communicate with qtile server.",
    )
    # Dispatch target invoked by scripts/main.py as options.func(options).
    parser.set_defaults(func=qshell)
| mit | 05fb62b1461565de3d549f1219cb0d0e | 34.630769 | 98 | 0.681347 | 4.063158 | false | false | false | false |
qtile/qtile | libqtile/widget/pulseaudio_ffi.py | 2 | 14097 | # -*- coding: utf-8 -*-
from cffi import FFI
pulseaudio_ffi = FFI()
pulseaudio_ffi.set_source(
"libqtile.widget._pulse_audio",
"""
#include "pulse/sample.h"
#include "pulse/volume.h"
#include "pulse/def.h"
#include "pulse/introspect.h"
#include "pulse/mainloop.h"
#include "pulse/mainloop-api.h"
#include "pulse/format.h"
#include "pulse/context.h"
#include "pulse/subscribe.h"
""",
libraries=["pulse"],
)
pulseaudio_ffi.cdef(
"""
/** mainloop-api.h */
typedef struct pa_mainloop_api pa_mainloop_api;
/** mainloop.h */
typedef struct pa_mainloop pa_mainloop;
pa_mainloop *pa_mainloop_new(void);
void pa_mainloop_free(pa_mainloop* m);
int pa_mainloop_prepare(pa_mainloop *m, int timeout);
int pa_mainloop_poll(pa_mainloop *m);
int pa_mainloop_dispatch(pa_mainloop *m);
int pa_mainloop_get_retval(pa_mainloop *m);
int pa_mainloop_iterate(pa_mainloop *m, int block, int *retval);
int pa_mainloop_run(pa_mainloop *m, int *retval);
pa_mainloop_api* pa_mainloop_get_api(pa_mainloop*m);
void pa_mainloop_quit(pa_mainloop *m, int retval);
/** def.h */
typedef enum pa_context_flags {
PA_CONTEXT_NOFLAGS = ...,
PA_CONTEXT_NOAUTOSPAWN = ...,
PA_CONTEXT_NOFAIL = ...
} pa_context_flags_t;
typedef struct pa_spawn_api {
void (*prefork)(void);
void (*postfork)(void);
void (*atfork)(void);
} pa_spawn_api;
typedef enum pa_context_state {
PA_CONTEXT_UNCONNECTED,
PA_CONTEXT_CONNECTING,
PA_CONTEXT_AUTHORIZING,
PA_CONTEXT_SETTING_NAME,
PA_CONTEXT_READY,
PA_CONTEXT_FAILED,
PA_CONTEXT_TERMINATED
} pa_context_state_t;
typedef enum pa_operation_state {
PA_OPERATION_RUNNING,
PA_OPERATION_DONE,
PA_OPERATION_CANCELLED
} pa_operation_state_t;
typedef enum pa_sink_flags {
PA_SINK_NOFLAGS = ...,
PA_SINK_HW_VOLUME_CTRL = ...,
PA_SINK_LATENCY = ...,
PA_SINK_HARDWARE = ...,
PA_SINK_NETWORK = ...,
PA_SINK_HW_MUTE_CTRL = ...,
PA_SINK_DECIBEL_VOLUME = ...,
PA_SINK_FLAT_VOLUME = ...,
PA_SINK_DYNAMIC_LATENCY = ...,
PA_SINK_SET_FORMATS = ...,
} pa_sink_flags_t;
typedef enum pa_sink_state {
PA_SINK_INVALID_STATE = ...,
PA_SINK_RUNNING = ...,
PA_SINK_IDLE = ...,
PA_SINK_SUSPENDED = ...,
PA_SINK_INIT = ...,
PA_SINK_UNLINKED = ...
} pa_sink_state_t;
typedef enum pa_subscription_mask {
PA_SUBSCRIPTION_MASK_NULL = ...,
/**< No events */
PA_SUBSCRIPTION_MASK_SINK = ...,
/**< Sink events */
PA_SUBSCRIPTION_MASK_SOURCE = ...,
/**< Source events */
PA_SUBSCRIPTION_MASK_SINK_INPUT = ...,
/**< Sink input events */
PA_SUBSCRIPTION_MASK_SOURCE_OUTPUT = ...,
/**< Source output events */
PA_SUBSCRIPTION_MASK_MODULE = ...,
/**< Module events */
PA_SUBSCRIPTION_MASK_CLIENT = ...,
/**< Client events */
PA_SUBSCRIPTION_MASK_SAMPLE_CACHE = ...,
/**< Sample cache events */
PA_SUBSCRIPTION_MASK_SERVER = ...,
/**< Other global server changes. */
PA_SUBSCRIPTION_MASK_AUTOLOAD = ...,
/** deprecated Autoload table events. */
PA_SUBSCRIPTION_MASK_CARD = ...,
/** Card events. since 0.9.15 */
PA_SUBSCRIPTION_MASK_ALL = ...,
/**< Catch all events */
} pa_subscription_mask_t;
/** Subscription event types, as used by pa_context_subscribe() */
typedef enum pa_subscription_event_type {
PA_SUBSCRIPTION_EVENT_SINK = ...,
/**< Event type: Sink */
PA_SUBSCRIPTION_EVENT_SOURCE = ...,
/**< Event type: Source */
PA_SUBSCRIPTION_EVENT_SINK_INPUT = ...,
/**< Event type: Sink input */
PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT = ...,
/**< Event type: Source output */
PA_SUBSCRIPTION_EVENT_MODULE = ...,
/**< Event type: Module */
PA_SUBSCRIPTION_EVENT_CLIENT = ...,
/**< Event type: Client */
PA_SUBSCRIPTION_EVENT_SAMPLE_CACHE = ...,
/**< Event type: Sample cache item */
PA_SUBSCRIPTION_EVENT_SERVER = ...,
/**< Event type: Global server change, only occurring with PA_SUBSCRIPTION_EVENT_CHANGE. */
PA_SUBSCRIPTION_EVENT_AUTOLOAD = ...,
/** deprecated Event type: Autoload table changes. */
PA_SUBSCRIPTION_EVENT_CARD = ...,
/**< Event type: Card since 0.9.15 */
PA_SUBSCRIPTION_EVENT_FACILITY_MASK = ...,
/**< A mask to extract the event type from an event value */
PA_SUBSCRIPTION_EVENT_NEW = ...,
/**< A new object was created */
PA_SUBSCRIPTION_EVENT_CHANGE = ...,
/**< A property of the object was modified */
PA_SUBSCRIPTION_EVENT_REMOVE = ...,
/**< An object was removed */
PA_SUBSCRIPTION_EVENT_TYPE_MASK = ...,
/**< A mask to extract the event operation from an event value */
} pa_subscription_event_type_t;
/** context.h */
typedef struct pa_context pa_context;
typedef void (*pa_context_notify_cb_t)(pa_context *c, void *userdata);
typedef void (*pa_context_success_cb_t) (pa_context *c, int success, void *userdata);
pa_context *pa_context_new(pa_mainloop_api *mainloop, const char *name);
int pa_context_connect(pa_context *c, const char *server, pa_context_flags_t flags, const pa_spawn_api *api);
void pa_context_set_state_callback(pa_context *c, pa_context_notify_cb_t cb, void *userdata);
pa_context_state_t pa_context_get_state(pa_context *c);
void pa_context_disconnect(pa_context *c);
void pa_context_unref(pa_context *c);
/** channelmap.h */
typedef enum pa_channel_position {
PA_CHANNEL_POSITION_INVALID = ...,
PA_CHANNEL_POSITION_MONO = ...,
PA_CHANNEL_POSITION_FRONT_LEFT,
PA_CHANNEL_POSITION_FRONT_RIGHT,
PA_CHANNEL_POSITION_FRONT_CENTER,
PA_CHANNEL_POSITION_LEFT = PA_CHANNEL_POSITION_FRONT_LEFT,
PA_CHANNEL_POSITION_RIGHT = PA_CHANNEL_POSITION_FRONT_RIGHT,
PA_CHANNEL_POSITION_CENTER = PA_CHANNEL_POSITION_FRONT_CENTER,
PA_CHANNEL_POSITION_REAR_CENTER,
PA_CHANNEL_POSITION_REAR_LEFT,
PA_CHANNEL_POSITION_REAR_RIGHT,
PA_CHANNEL_POSITION_LFE,
PA_CHANNEL_POSITION_SUBWOOFER = PA_CHANNEL_POSITION_LFE,
PA_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER,
PA_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER,
PA_CHANNEL_POSITION_SIDE_LEFT,
PA_CHANNEL_POSITION_SIDE_RIGHT,
PA_CHANNEL_POSITION_AUX0,
PA_CHANNEL_POSITION_AUX1,
PA_CHANNEL_POSITION_AUX2,
PA_CHANNEL_POSITION_AUX3,
PA_CHANNEL_POSITION_AUX4,
PA_CHANNEL_POSITION_AUX5,
PA_CHANNEL_POSITION_AUX6,
PA_CHANNEL_POSITION_AUX7,
PA_CHANNEL_POSITION_AUX8,
PA_CHANNEL_POSITION_AUX9,
PA_CHANNEL_POSITION_AUX10,
PA_CHANNEL_POSITION_AUX11,
PA_CHANNEL_POSITION_AUX12,
PA_CHANNEL_POSITION_AUX13,
PA_CHANNEL_POSITION_AUX14,
PA_CHANNEL_POSITION_AUX15,
PA_CHANNEL_POSITION_AUX16,
PA_CHANNEL_POSITION_AUX17,
PA_CHANNEL_POSITION_AUX18,
PA_CHANNEL_POSITION_AUX19,
PA_CHANNEL_POSITION_AUX20,
PA_CHANNEL_POSITION_AUX21,
PA_CHANNEL_POSITION_AUX22,
PA_CHANNEL_POSITION_AUX23,
PA_CHANNEL_POSITION_AUX24,
PA_CHANNEL_POSITION_AUX25,
PA_CHANNEL_POSITION_AUX26,
PA_CHANNEL_POSITION_AUX27,
PA_CHANNEL_POSITION_AUX28,
PA_CHANNEL_POSITION_AUX29,
PA_CHANNEL_POSITION_AUX30,
PA_CHANNEL_POSITION_AUX31,
PA_CHANNEL_POSITION_TOP_CENTER,
PA_CHANNEL_POSITION_TOP_FRONT_LEFT,
PA_CHANNEL_POSITION_TOP_FRONT_RIGHT,
PA_CHANNEL_POSITION_TOP_FRONT_CENTER,
PA_CHANNEL_POSITION_TOP_REAR_LEFT,
PA_CHANNEL_POSITION_TOP_REAR_RIGHT,
PA_CHANNEL_POSITION_TOP_REAR_CENTER,
PA_CHANNEL_POSITION_MAX
} pa_channel_position_t;
typedef struct pa_channel_map {
uint8_t channels;
pa_channel_position_t map[...];
} pa_channel_map;
/** sample.h */
#define PA_CHANNELS_MAX ...
typedef enum pa_sample_format {
PA_SAMPLE_U8,
PA_SAMPLE_ALAW,
PA_SAMPLE_ULAW,
PA_SAMPLE_S16LE,
PA_SAMPLE_S16BE,
PA_SAMPLE_FLOAT32LE,
PA_SAMPLE_FLOAT32BE,
PA_SAMPLE_S32LE,
PA_SAMPLE_S32BE,
PA_SAMPLE_S24LE,
PA_SAMPLE_S24BE,
PA_SAMPLE_S24_32LE,
PA_SAMPLE_S24_32BE,
PA_SAMPLE_MAX,
PA_SAMPLE_INVALID = -1
} pa_sample_format_t;
typedef struct pa_sample_spec {
pa_sample_format_t format;
uint32_t rate;
uint8_t channels;
} pa_sample_spec;
typedef uint64_t pa_usec_t;
/** operation.h */
typedef struct pa_operation pa_operation;
pa_operation_state_t pa_operation_get_state(pa_operation *o);
/** volume.h */
#define PA_VOLUME_NORM ...
#define PA_VOLUME_MUTED ...
#define PA_VOLUME_MAX ...
#define PA_CHANNELS_MAX ...
typedef uint32_t pa_volume_t;
typedef struct {
uint8_t channels;
pa_volume_t values[...];
} pa_cvolume;
pa_cvolume* pa_cvolume_init(pa_cvolume *a);
int pa_cvolume_valid(const pa_cvolume *v);
pa_cvolume* pa_cvolume_scale(pa_cvolume *v, pa_volume_t max);
pa_volume_t pa_cvolume_avg(const pa_cvolume *a);
pa_volume_t pa_cvolume_max(const pa_cvolume *a);
pa_cvolume* pa_cvolume_inc(pa_cvolume *v, pa_volume_t inc);
pa_cvolume* pa_cvolume_dec(pa_cvolume *v, pa_volume_t dec);
pa_cvolume* pa_cvolume_set(pa_cvolume *a, unsigned channels, pa_volume_t v);
int pa_cvolume_channels_equal_to(const pa_cvolume *a, pa_volume_t v);
char *pa_cvolume_snprint(char *s, size_t l, const pa_cvolume *c);
/** proplist.h */
typedef struct pa_proplist pa_proplist;
/** format.h */
typedef enum pa_encoding {
PA_ENCODING_ANY,
PA_ENCODING_PCM,
PA_ENCODING_AC3_IEC61937,
PA_ENCODING_EAC3_IEC61937,
PA_ENCODING_MPEG_IEC61937,
PA_ENCODING_DTS_IEC61937,
PA_ENCODING_MPEG2_AAC_IEC61937,
PA_ENCODING_MAX,
PA_ENCODING_INVALID = ...
} pa_encoding_t;
typedef struct pa_format_info {
pa_encoding_t encoding;
pa_proplist *plist;
} pa_format_info;
/** introspect.h */
typedef struct pa_sink_port_info {
const char *name;
const char *description;
uint32_t priority;
int available;
} pa_sink_port_info;
typedef struct pa_sink_info {
const char *name;
uint32_t index;
const char *description;
pa_sample_spec sample_spec;
pa_channel_map channel_map;
uint32_t owner_module;
pa_cvolume volume;
int mute;
uint32_t monitor_source;
const char *monitor_source_name;
pa_usec_t latency;
const char *driver;
pa_sink_flags_t flags;
pa_proplist *proplist;
pa_usec_t configured_latency;
pa_volume_t base_volume;
pa_sink_state_t state;
uint32_t n_volume_steps;
uint32_t card;
uint32_t n_ports;
pa_sink_port_info** ports;
pa_sink_port_info* active_port;
uint8_t n_formats;
pa_format_info **formats;
} pa_sink_info;
typedef void (*pa_sink_info_cb_t)(pa_context *c, const pa_sink_info *i, int eol, void *userdata);
pa_operation* pa_context_get_sink_info_list(pa_context *c, pa_sink_info_cb_t cb, void *userdata);
typedef struct pa_server_info {
const char *user_name;
const char *host_name;
const char *server_version;
const char *server_name;
pa_sample_spec sample_spec;
const char *default_sink_name;
const char *default_source_name;
uint32_t cookie;
pa_channel_map channel_map;
} pa_server_info;
typedef void (*pa_server_info_cb_t) (pa_context *c, const pa_server_info*i, void *userdata);
pa_operation* pa_context_get_server_info(pa_context *c, pa_server_info_cb_t cb, void *userdata);
pa_operation* pa_context_set_sink_volume_by_index(
pa_context *c, uint32_t idx, const pa_cvolume *volume, pa_context_success_cb_t cb, void *userdata);
pa_operation* pa_context_set_sink_volume_by_name(
pa_context *c, const char *name, const pa_cvolume *volume, pa_context_success_cb_t cb, void *userdata);
pa_operation* pa_context_set_sink_mute_by_index(
pa_context *c, uint32_t idx, int mute, pa_context_success_cb_t cb, void *userdata);
pa_operation* pa_context_set_sink_mute_by_name(
pa_context *c, const char *name, int mute, pa_context_success_cb_t cb, void *userdata);
/** subscribe.h */
/** Subscription event callback prototype */
typedef void (*pa_context_subscribe_cb_t)(
pa_context *c, pa_subscription_event_type_t t, uint32_t idx, void *userdata);
/** Enable event notification */
pa_operation* pa_context_subscribe(
pa_context *c, pa_subscription_mask_t m, pa_context_success_cb_t cb, void *userdata);
/** Set the context specific call back function that is called whenever the state of the daemon changes */
void pa_context_set_subscribe_callback(pa_context *c, pa_context_subscribe_cb_t cb, void *userdata);
/** python callbacks */
extern "Python" void qtile_pa_context_changed(pa_context *c, void *userdata);
extern "Python" void qtile_on_sink_info(pa_context *c, const pa_sink_info *i, int eol, void *userdata);
extern "Python" void qtile_on_server_info(pa_context *c, const pa_server_info*i, void *userdata);
extern "Python" void qtile_on_sink_update(
pa_context *c, pa_subscription_event_type_t t, uint32_t idx, void *userdata);
"""
)
if __name__ == "__main__":
    # Run this module directly to (re)compile the _pulse_audio extension.
    pulseaudio_ffi.compile()
| mit | edad8991aad6cee4dfce84ac3d922099 | 32.484561 | 113 | 0.608356 | 3.16218 | false | false | false | false |
# Copyright (c) 2010 Aldo Cortesi
# Copyright (c) 2011 Mounier Florian
# Copyright (c) 2012, 2015 Tycho Andersen
# Copyright (c) 2012 Craig Barnes
# Copyright (c) 2013 Tao Sauvage
# Copyright (c) 2014 Sean Vig
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from libqtile.widget import base
class Sep(base._Widget):
    """A visible widget separator.

    Draws a thin line, centred in the bar and scaled by ``size_percent``,
    with ``padding`` empty pixels on either side of the line.
    """

    orientations = base.ORIENTATION_BOTH
    defaults = [
        ("padding", 2, "Padding on either side of separator."),
        ("linewidth", 1, "Width of separator line."),
        ("foreground", "888888", "Separator line colour."),
        ("size_percent", 80, "Size as a percentage of bar size (0-100)."),
    ]

    def __init__(self, **config):
        # Provisional length from the raw kwargs: padding counts once per
        # side of the line, hence the factor of 2.
        length = config.get("padding", 2) * 2 + config.get("linewidth", 1)
        base._Widget.__init__(self, length, **config)
        self.add_defaults(Sep.defaults)
        # Recompute from the fully-resolved config values.  Padding applies
        # to both sides, so it must be doubled here as well; the previous
        # ``self.padding + self.linewidth`` dropped the factor of 2 and
        # contradicted the provisional length computed above.
        self.length = self.padding * 2 + self.linewidth

    def draw(self):
        """Draw the separator line, inset by ``size_percent`` of the bar."""
        self.drawer.clear(self.background or self.bar.background)
        if self.bar.horizontal:
            # Vertical line in a horizontal bar, inset top and bottom.
            margin_top = (self.bar.height / float(100) * (100 - self.size_percent)) / 2.0
            self.drawer.draw_vbar(
                self.foreground,
                float(self.length) / 2,
                margin_top,
                self.bar.height - margin_top,
                linewidth=self.linewidth,
            )
            self.drawer.draw(offsetx=self.offset, offsety=self.offsety, width=self.length)
        else:
            # Horizontal line in a vertical bar, inset left and right.
            margin_left = (self.bar.width / float(100) * (100 - self.size_percent)) / 2.0
            self.drawer.draw_hbar(
                self.foreground,
                margin_left,
                self.bar.width - margin_left,
                float(self.length) / 2,
                linewidth=self.linewidth,
            )
            self.drawer.draw(offsety=self.offset, offsetx=self.offsetx, height=self.length)
| mit | c34c0e8e9764f6d7a8c7cb04d71ed563 | 42.462687 | 91 | 0.651786 | 3.945799 | false | true | false | false |
# Disable flake8 reporting
# flake8: noqa
import pytest
from jose.backends import RSAKey
from jose.jws import verify
# Payload shared by all RFC 7520 signature examples (UTF-8 encoded quote
# with curly apostrophes, hence the \xe2\x80\x99 byte sequences).
expected_payload = b"It\xe2\x80\x99s a dangerous business, Frodo, going out your door. You step onto the road, and if you don't keep your feet, there\xe2\x80\x99s no knowing where you might be swept off to."
# EC P-521 public verification key from the RFC 7520 example key set.
ec_public_key = {
    "kty": "EC",
    "kid": "bilbo.baggins@hobbiton.example",
    "use": "sig",
    "crv": "P-521",
    "x": "AHKZLLOsCOzz5cY97ewNUajB957y-C-U88c3v13nmGZx6sYl_oJXu9A5RkTKqjqvjyekWF-7ytDyRXYgCF5cj0Kt",
    "y": "AdymlHvOiLxXkEhayXQnNCvDX4h9htZaCJN34kfmC6pV5OhQHiraVySsUdaQkAgDPrwQrJmbnX9cwlGfP-HqHZR1",
}
# RSA public verification key (JWK) from the RFC 7520 example key set.
rsa_public_jwk = {
    "kty": "RSA",
    "kid": "bilbo.baggins@hobbiton.example",
    "use": "sig",
    "n": "n4EPtAOCc9AlkeQHPzHStgAbgs7bTZLwUBZdR8_KuKPEHLd4rHVTeT-O-XV2jRojdNhxJWTDvNd7nqQ0VEiZQHz_AJmSCpMaJMRBSFKrKb2wqVwGU_NsYOYL-QtiWN2lbzcEe6XC0dApr5ydQLrHqkHHig3RBordaZ6Aj-oBHqFEHYpPe7Tpe-OfVfHd1E6cS6M1FZcD1NNLYD5lFHpPI9bTwJlsde3uhGqC0ZCuEHg8lhzwOHrtIQbS0FVbb9k3-tVTU4fg_3L_vniUFAKwuCLqKnS2BYwdq_mzSnbLY7h_qixoR7jig3__kRhuaxwUkRz5iaiQkqgc5gHdrNP5zw",
    "e": "AQAB",
}
# Symmetric HMAC key (JWK, HS256) from the RFC 7520 example key set.
hmac_key = {
    "kty": "oct",
    "kid": "018c0ae5-4d9b-471b-bfd6-eef314bc7037",
    "use": "sig",
    "alg": "HS256",
    "k": "hJtXIZ2uSN5kbQfbtTNWbpdmhkV8FJG-Onbc6mxCcYg",
}
class TestFourOneThree:
    """RFC 7520 section 4.1.3 — RS256-signed JWS verification."""

    token = "eyJhbGciOiJSUzI1NiIsImtpZCI6ImJpbGJvLmJhZ2dpbnNAaG9iYml0b24uZXhhbXBsZSJ9.SXTigJlzIGEgZGFuZ2Vyb3VzIGJ1c2luZXNzLCBGcm9kbywgZ29pbmcgb3V0IHlvdXIgZG9vci4gWW91IHN0ZXAgb250byB0aGUgcm9hZCwgYW5kIGlmIHlvdSBkb24ndCBrZWVwIHlvdXIgZmVldCwgdGhlcmXigJlzIG5vIGtub3dpbmcgd2hlcmUgeW91IG1pZ2h0IGJlIHN3ZXB0IG9mZiB0by4.MRjdkly7_-oTPTS3AXP41iQIGKa80A0ZmTuV5MEaHoxnW2e5CZ5NlKtainoFmKZopdHM1O2U4mwzJdQx996ivp83xuglII7PNDi84wnB-BDkoBwA78185hX-Es4JIwmDLJK3lfWRa-XtL0RnltuYv746iYTh_qHRD68BNt1uSNCrUCTJDt5aAE6x8wW1Kt9eRo4QPocSadnHXFxnt8Is9UzpERV0ePPQdLuW3IS_de3xyIrDaLGdjluPxUAhb6L2aXic1U12podGU0KLUQSE_oI-ZnmKJ3F4uOZDnd6QZWJushZ41Axf_fcIe8u9ipH84ogoree7vjbU5y18kDquDg"

    @pytest.mark.skipif(RSAKey is None, reason="RSA is not available")
    def test_signature(self):
        # Verifying with the RSA public JWK must recover the shared payload.
        assert verify(self.token, rsa_public_jwk, "RS256") == expected_payload
class TestFourThreeThree:
    """RFC 7520 section 4.3.3 — ES512-signed JWS verification."""

    token = "eyJhbGciOiJFUzUxMiIsImtpZCI6ImJpbGJvLmJhZ2dpbnNAaG9iYml0b24uZXhhbXBsZSJ9.SXTigJlzIGEgZGFuZ2Vyb3VzIGJ1c2luZXNzLCBGcm9kbywgZ29pbmcgb3V0IHlvdXIgZG9vci4gWW91IHN0ZXAgb250byB0aGUgcm9hZCwgYW5kIGlmIHlvdSBkb24ndCBrZWVwIHlvdXIgZmVldCwgdGhlcmXigJlzIG5vIGtub3dpbmcgd2hlcmUgeW91IG1pZ2h0IGJlIHN3ZXB0IG9mZiB0by4.AE_R_YZCChjn4791jSQCrdPZCNYqHXCTZH0-JZGYNlaAjP2kqaluUIIUnC9qvbu9Plon7KRTzoNEuT4Va2cmL1eJAQy3mtPBu_u_sDDyYjnAMDxXPn7XrT0lw-kvAD890jl8e2puQens_IEKBpHABlsbEPX6sFY8OcGDqoRuBomu9xQ2"

    def test_signature(self):
        # Verifying with the EC P-521 key must recover the shared payload.
        assert verify(self.token, ec_public_key, "ES512") == expected_payload
class TestFourFourThree:
    """RFC 7520 section 4.4.3 — HS256-signed JWS verification."""

    token = "eyJhbGciOiJIUzI1NiIsImtpZCI6IjAxOGMwYWU1LTRkOWItNDcxYi1iZmQ2LWVlZjMxNGJjNzAzNyJ9.SXTigJlzIGEgZGFuZ2Vyb3VzIGJ1c2luZXNzLCBGcm9kbywgZ29pbmcgb3V0IHlvdXIgZG9vci4gWW91IHN0ZXAgb250byB0aGUgcm9hZCwgYW5kIGlmIHlvdSBkb24ndCBrZWVwIHlvdXIgZmVldCwgdGhlcmXigJlzIG5vIGtub3dpbmcgd2hlcmUgeW91IG1pZ2h0IGJlIHN3ZXB0IG9mZiB0by4.s0h6KThzkfBBBkLspW1h84VsJZFTsPPqMDA7g1Md7p0"

    def test_signature(self):
        # Verifying with the shared HMAC key must recover the shared payload.
        assert verify(self.token, hmac_key, "HS256") == expected_payload
| mit | 4750236416e1e0ad3f12bf54e09078f4 | 47.884058 | 653 | 0.830418 | 1.779947 | false | true | false | false |
import pytest
import redis.asyncio as redis
from redis import exceptions
from redis.commands.json.path import Path
from tests.conftest import skip_ifmodversion_lt
@pytest.mark.redismod
async def test_json_setbinarykey(modclient: redis.Redis):
    """JSON.SET must reject bytes keys unless decode_keys=True is passed."""
    d = {"hello": "world", b"some": "value"}
    with pytest.raises(TypeError):
        # No await needed: the TypeError is raised synchronously while the
        # payload is serialised, before any command is sent.
        modclient.json().set("somekey", Path.root_path(), d)
    assert await modclient.json().set("somekey", Path.root_path(), d, decode_keys=True)
@pytest.mark.redismod
async def test_json_setgetdeleteforget(modclient: redis.Redis):
    """Round-trip JSON.SET/GET, then JSON.DEL and JSON.FORGET semantics."""
    assert await modclient.json().set("foo", Path.root_path(), "bar")
    assert await modclient.json().get("foo") == "bar"
    assert await modclient.json().get("baz") is None
    assert await modclient.json().delete("foo") == 1
    assert await modclient.json().forget("foo") == 0  # second delete
    assert await modclient.exists("foo") == 0
@pytest.mark.redismod
async def test_jsonget(modclient: redis.Redis):
    """JSON.GET returns the scalar previously stored at the root path."""
    await modclient.json().set("foo", Path.root_path(), "bar")
    assert await modclient.json().get("foo") == "bar"
@pytest.mark.redismod
async def test_json_get_jset(modclient: redis.Redis):
    """JSON.SET/GET/DEL round trip; missing keys read back as None."""
    assert await modclient.json().set("foo", Path.root_path(), "bar")
    assert "bar" == await modclient.json().get("foo")
    assert await modclient.json().get("baz") is None
    assert 1 == await modclient.json().delete("foo")
    assert await modclient.exists("foo") == 0
@pytest.mark.redismod
async def test_nonascii_setgetdelete(modclient: redis.Redis):
    """Non-ASCII values survive a JSON round trip (no_escape keeps UTF-8)."""
    assert await modclient.json().set("notascii", Path.root_path(), "hyvää-élève")
    assert "hyvää-élève" == await modclient.json().get("notascii", no_escape=True)
    assert 1 == await modclient.json().delete("notascii")
    assert await modclient.exists("notascii") == 0
@pytest.mark.redismod
async def test_jsonsetexistentialmodifiersshouldsucceed(modclient: redis.Redis):
    """JSON.SET nx/xx flags gate updates on (non-)existence of the path."""
    obj = {"foo": "bar"}
    assert await modclient.json().set("obj", Path.root_path(), obj)
    # Test that flags prevent updates when conditions are unmet
    assert await modclient.json().set("obj", Path("foo"), "baz", nx=True) is None
    assert await modclient.json().set("obj", Path("qaz"), "baz", xx=True) is None
    # Test that flags allow updates when conditions are met
    assert await modclient.json().set("obj", Path("foo"), "baz", xx=True)
    assert await modclient.json().set("obj", Path("qaz"), "baz", nx=True)
    # Test that flags are mutually exclusive
    with pytest.raises(Exception):
        await modclient.json().set("obj", Path("foo"), "baz", nx=True, xx=True)
@pytest.mark.redismod
async def test_mgetshouldsucceed(modclient: redis.Redis):
    """JSON.MGET returns one value per requested key."""
    await modclient.json().set("1", Path.root_path(), 1)
    await modclient.json().set("2", Path.root_path(), 2)
    assert await modclient.json().mget(["1"], Path.root_path()) == [1]
    assert await modclient.json().mget([1, 2], Path.root_path()) == [1, 2]
@pytest.mark.redismod
@skip_ifmodversion_lt("99.99.99", "ReJSON")  # todo: update after the release
async def test_clear(modclient: redis.Redis):
    """JSON.CLEAR empties a container in place and reports 1 key cleared."""
    await modclient.json().set("arr", Path.root_path(), [0, 1, 2, 3, 4])
    assert 1 == await modclient.json().clear("arr", Path.root_path())
    assert [] == await modclient.json().get("arr")
@pytest.mark.redismod
async def test_type(modclient: redis.Redis):
    """JSON.TYPE reports the stored type, with or without an explicit path."""
    await modclient.json().set("1", Path.root_path(), 1)
    assert "integer" == await modclient.json().type("1", Path.root_path())
    assert "integer" == await modclient.json().type("1")
@pytest.mark.redismod
async def test_numincrby(modclient: redis.Redis):
    """JSON.NUMINCRBY supports int, float and negative increments."""
    await modclient.json().set("num", Path.root_path(), 1)
    assert 2 == await modclient.json().numincrby("num", Path.root_path(), 1)
    assert 2.5 == await modclient.json().numincrby("num", Path.root_path(), 0.5)
    assert 1.25 == await modclient.json().numincrby("num", Path.root_path(), -1.25)
@pytest.mark.redismod
async def test_nummultby(modclient: redis.Redis):
    """JSON.NUMMULTBY multiplies in place; the command itself is deprecated."""
    await modclient.json().set("num", Path.root_path(), 1)
    with pytest.deprecated_call():
        assert 2 == await modclient.json().nummultby("num", Path.root_path(), 2)
    # NOTE(review): only the first call is wrapped in deprecated_call(); the
    # two below also emit DeprecationWarning — confirm warning filters allow it.
    assert 5 == await modclient.json().nummultby("num", Path.root_path(), 2.5)
    assert 2.5 == await modclient.json().nummultby("num", Path.root_path(), 0.5)
@pytest.mark.redismod
@skip_ifmodversion_lt("99.99.99", "ReJSON")  # todo: update after the release
async def test_toggle(modclient: redis.Redis):
    """JSON.TOGGLE flips booleans and errors on non-boolean values."""
    await modclient.json().set("bool", Path.root_path(), False)
    assert await modclient.json().toggle("bool", Path.root_path())
    assert await modclient.json().toggle("bool", Path.root_path()) is False
    # check non-boolean value
    await modclient.json().set("num", Path.root_path(), 1)
    with pytest.raises(exceptions.ResponseError):
        await modclient.json().toggle("num", Path.root_path())
@pytest.mark.redismod
async def test_strappend(modclient: redis.Redis):
    """JSON.STRAPPEND returns the new string length."""
    await modclient.json().set("jsonkey", Path.root_path(), "foo")
    assert 6 == await modclient.json().strappend("jsonkey", "bar")
    assert "foobar" == await modclient.json().get("jsonkey", Path.root_path())
@pytest.mark.redismod
async def test_strlen(modclient: redis.Redis):
    """JSON.STRLEN tracks string length before and after an append."""
    await modclient.json().set("str", Path.root_path(), "foo")
    assert 3 == await modclient.json().strlen("str", Path.root_path())
    await modclient.json().strappend("str", "bar", Path.root_path())
    assert 6 == await modclient.json().strlen("str", Path.root_path())
    assert 6 == await modclient.json().strlen("str")
@pytest.mark.redismod
async def test_arrappend(modclient: redis.Redis):
    """JSON.ARRAPPEND accepts single, multiple and splatted values."""
    await modclient.json().set("arr", Path.root_path(), [1])
    assert 2 == await modclient.json().arrappend("arr", Path.root_path(), 2)
    assert 4 == await modclient.json().arrappend("arr", Path.root_path(), 3, 4)
    assert 7 == await modclient.json().arrappend("arr", Path.root_path(), *[5, 6, 7])
@pytest.mark.redismod
async def test_arrindex(modclient: redis.Redis):
    """JSON.ARRINDEX finds a value, honouring the start offset (-1 = missing)."""
    await modclient.json().set("arr", Path.root_path(), [0, 1, 2, 3, 4])
    assert 1 == await modclient.json().arrindex("arr", Path.root_path(), 1)
    assert -1 == await modclient.json().arrindex("arr", Path.root_path(), 1, 2)
@pytest.mark.redismod
async def test_arrinsert(modclient: redis.Redis):
    """JSON.ARRINSERT splices values at an index and returns the new length."""
    await modclient.json().set("arr", Path.root_path(), [0, 4])
    # The original ``assert 5 - -await ...`` evaluated ``5 + result`` which is
    # truthy for any return value other than -5, so the assertion could never
    # fail; assert the returned array length explicitly instead.
    assert 5 == await modclient.json().arrinsert("arr", Path.root_path(), 1, *[1, 2, 3])
    assert [0, 1, 2, 3, 4] == await modclient.json().get("arr")
    # test prepends
    await modclient.json().set("val2", Path.root_path(), [5, 6, 7, 8, 9])
    await modclient.json().arrinsert("val2", Path.root_path(), 0, ["some", "thing"])
    assert await modclient.json().get("val2") == [["some", "thing"], 5, 6, 7, 8, 9]
@pytest.mark.redismod
async def test_arrlen(modclient: redis.Redis):
    """JSON.ARRLEN with/without an explicit path; None for a missing key."""
    await modclient.json().set("arr", Path.root_path(), [0, 1, 2, 3, 4])
    assert 5 == await modclient.json().arrlen("arr", Path.root_path())
    assert 5 == await modclient.json().arrlen("arr")
    assert await modclient.json().arrlen("fakekey") is None
@pytest.mark.redismod
async def test_arrpop(modclient: redis.Redis):
    """JSON.ARRPOP from both ends, out-of-range indices and empty arrays."""
    await modclient.json().set("arr", Path.root_path(), [0, 1, 2, 3, 4])
    assert 4 == await modclient.json().arrpop("arr", Path.root_path(), 4)
    assert 3 == await modclient.json().arrpop("arr", Path.root_path(), -1)
    assert 2 == await modclient.json().arrpop("arr", Path.root_path())
    assert 0 == await modclient.json().arrpop("arr", Path.root_path(), 0)
    assert [1] == await modclient.json().get("arr")
    # test out of bounds: the index is clamped to the last element
    await modclient.json().set("arr", Path.root_path(), [0, 1, 2, 3, 4])
    assert 4 == await modclient.json().arrpop("arr", Path.root_path(), 99)
    # popping from an empty array yields None
    await modclient.json().set("arr", Path.root_path(), [])
    assert await modclient.json().arrpop("arr") is None
@pytest.mark.redismod
async def test_arrtrim(modclient: redis.Redis):
    """JSON.ARRTRIM keeps [start, stop] and clamps out-of-range bounds."""
    await modclient.json().set("arr", Path.root_path(), [0, 1, 2, 3, 4])
    assert 3 == await modclient.json().arrtrim("arr", Path.root_path(), 1, 3)
    assert [1, 2, 3] == await modclient.json().get("arr")
    # <0 test, should be 0 equivalent
    await modclient.json().set("arr", Path.root_path(), [0, 1, 2, 3, 4])
    assert 0 == await modclient.json().arrtrim("arr", Path.root_path(), -1, 3)
    # testing stop > end
    await modclient.json().set("arr", Path.root_path(), [0, 1, 2, 3, 4])
    assert 2 == await modclient.json().arrtrim("arr", Path.root_path(), 3, 99)
    # start > array size and stop
    await modclient.json().set("arr", Path.root_path(), [0, 1, 2, 3, 4])
    assert 0 == await modclient.json().arrtrim("arr", Path.root_path(), 9, 1)
    # all larger
    await modclient.json().set("arr", Path.root_path(), [0, 1, 2, 3, 4])
    assert 0 == await modclient.json().arrtrim("arr", Path.root_path(), 9, 11)
@pytest.mark.redismod
async def test_resp(modclient: redis.Redis):
    """JSON.RESP returns the RESP representation of scalars and containers."""
    obj = {"foo": "bar", "baz": 1, "qaz": True}
    await modclient.json().set("obj", Path.root_path(), obj)
    assert "bar" == await modclient.json().resp("obj", Path("foo"))
    assert 1 == await modclient.json().resp("obj", Path("baz"))
    assert await modclient.json().resp("obj", Path("qaz"))
    assert isinstance(await modclient.json().resp("obj"), list)
@pytest.mark.redismod
async def test_objkeys(modclient: redis.Redis):
    """JSON.OBJKEYS lists an object's keys; None for a missing key."""
    obj = {"foo": "bar", "baz": "qaz"}
    await modclient.json().set("obj", Path.root_path(), obj)
    keys = await modclient.json().objkeys("obj", Path.root_path())
    keys.sort()
    exp = list(obj.keys())
    exp.sort()
    assert exp == keys
    await modclient.json().set("obj", Path.root_path(), obj)
    keys = await modclient.json().objkeys("obj")
    assert keys == list(obj.keys())
    assert await modclient.json().objkeys("fakekey") is None
@pytest.mark.redismod
async def test_objlen(modclient: redis.Redis):
    """JSON.OBJLEN matches len() of the stored object."""
    obj = {"foo": "bar", "baz": "qaz"}
    await modclient.json().set("obj", Path.root_path(), obj)
    assert len(obj) == await modclient.json().objlen("obj", Path.root_path())
    await modclient.json().set("obj", Path.root_path(), obj)
    assert len(obj) == await modclient.json().objlen("obj")
# @pytest.mark.redismod
# async def test_json_commands_in_pipeline(modclient: redis.Redis):
# async with modclient.json().pipeline() as p:
# p.set("foo", Path.root_path(), "bar")
# p.get("foo")
# p.delete("foo")
# assert [True, "bar", 1] == await p.execute()
# assert await modclient.keys() == []
# assert await modclient.get("foo") is None
# # now with a true, json object
# await modclient.flushdb()
# p = await modclient.json().pipeline()
# d = {"hello": "world", "oh": "snap"}
# with pytest.deprecated_call():
# p.jsonset("foo", Path.root_path(), d)
# p.jsonget("foo")
# p.exists("notarealkey")
# p.delete("foo")
# assert [True, d, 0, 1] == p.execute()
# assert await modclient.keys() == []
# assert await modclient.get("foo") is None
@pytest.mark.redismod
async def test_json_delete_with_dollar(modclient: redis.Redis):
    """JSON.DEL with a JSONPath ($) expression removes all matching nodes."""
    doc1 = {"a": 1, "nested": {"a": 2, "b": 3}}
    assert await modclient.json().set("doc1", "$", doc1)
    assert await modclient.json().delete("doc1", "$..a") == 2
    r = await modclient.json().get("doc1", "$")
    assert r == [{"nested": {"b": 3}}]
    doc2 = {"a": {"a": 2, "b": 3}, "b": ["a", "b"], "nested": {"b": [True, "a", "b"]}}
    assert await modclient.json().set("doc2", "$", doc2)
    assert await modclient.json().delete("doc2", "$..a") == 1
    res = await modclient.json().get("doc2", "$")
    assert res == [{"nested": {"b": [True, "a", "b"]}, "b": ["a", "b"]}]
    doc3 = [
        {
            "ciao": ["non ancora"],
            "nested": [
                {"ciao": [1, "a"]},
                {"ciao": [2, "a"]},
                {"ciaoc": [3, "non", "ciao"]},
                {"ciao": [4, "a"]},
                {"e": [5, "non", "ciao"]},
            ],
        }
    ]
    assert await modclient.json().set("doc3", "$", doc3)
    assert await modclient.json().delete("doc3", '$.[0]["nested"]..ciao') == 3
    doc3val = [
        [
            {
                "ciao": ["non ancora"],
                "nested": [
                    {},
                    {},
                    {"ciaoc": [3, "non", "ciao"]},
                    {},
                    {"e": [5, "non", "ciao"]},
                ],
            }
        ]
    ]
    res = await modclient.json().get("doc3", "$")
    assert res == doc3val
    # Test async default path
    assert await modclient.json().delete("doc3") == 1
    assert await modclient.json().get("doc3", "$") is None
    # Deleting a path on a missing document is a silent no-op.
    await modclient.json().delete("not_a_document", "..a")
@pytest.mark.redismod
async def test_json_forget_with_dollar(modclient: redis.Redis):
    """JSON.FORGET (alias of JSON.DEL) with a JSONPath ($) expression."""
    doc1 = {"a": 1, "nested": {"a": 2, "b": 3}}
    assert await modclient.json().set("doc1", "$", doc1)
    assert await modclient.json().forget("doc1", "$..a") == 2
    r = await modclient.json().get("doc1", "$")
    assert r == [{"nested": {"b": 3}}]
    doc2 = {"a": {"a": 2, "b": 3}, "b": ["a", "b"], "nested": {"b": [True, "a", "b"]}}
    assert await modclient.json().set("doc2", "$", doc2)
    assert await modclient.json().forget("doc2", "$..a") == 1
    res = await modclient.json().get("doc2", "$")
    assert res == [{"nested": {"b": [True, "a", "b"]}, "b": ["a", "b"]}]
    doc3 = [
        {
            "ciao": ["non ancora"],
            "nested": [
                {"ciao": [1, "a"]},
                {"ciao": [2, "a"]},
                {"ciaoc": [3, "non", "ciao"]},
                {"ciao": [4, "a"]},
                {"e": [5, "non", "ciao"]},
            ],
        }
    ]
    assert await modclient.json().set("doc3", "$", doc3)
    assert await modclient.json().forget("doc3", '$.[0]["nested"]..ciao') == 3
    doc3val = [
        [
            {
                "ciao": ["non ancora"],
                "nested": [
                    {},
                    {},
                    {"ciaoc": [3, "non", "ciao"]},
                    {},
                    {"e": [5, "non", "ciao"]},
                ],
            }
        ]
    ]
    res = await modclient.json().get("doc3", "$")
    assert res == doc3val
    # Test async default path
    assert await modclient.json().forget("doc3") == 1
    assert await modclient.json().get("doc3", "$") is None
    # Forgetting a path on a missing document is a silent no-op.
    await modclient.json().forget("not_a_document", "..a")
@pytest.mark.redismod
async def test_json_mget_dollar(modclient: redis.Redis):
    """JSON.MGET with JSONPath across several keys, including missing ones."""
    # Test mget with multi paths
    await modclient.json().set(
        "doc1",
        "$",
        {"a": 1, "b": 2, "nested": {"a": 3}, "c": None, "nested2": {"a": None}},
    )
    await modclient.json().set(
        "doc2",
        "$",
        {"a": 4, "b": 5, "nested": {"a": 6}, "c": None, "nested2": {"a": [None]}},
    )
    # Compare also to single JSON.GET
    assert await modclient.json().get("doc1", "$..a") == [1, 3, None]
    assert await modclient.json().get("doc2", "$..a") == [4, 6, [None]]
    # Test mget with single path
    # NOTE(review): no ``assert`` here — the comparison result is discarded,
    # so this line verifies nothing; confirm the intended expectation before
    # turning it into a real assertion.
    await modclient.json().mget("doc1", "$..a") == [1, 3, None]
    # Test mget with multi path
    res = await modclient.json().mget(["doc1", "doc2"], "$..a")
    assert res == [[1, 3, None], [4, 6, [None]]]
    # Test missing key
    res = await modclient.json().mget(["doc1", "missing_doc"], "$..a")
    assert res == [[1, 3, None], None]
    res = await modclient.json().mget(["missing_doc1", "missing_doc2"], "$..a")
    assert res == [None, None]
@pytest.mark.redismod
async def test_numby_commands_dollar(modclient: redis.Redis):
    """JSON.NUMINCRBY / JSON.NUMMULTBY with JSONPath and legacy paths."""
    # Test NUMINCRBY
    await modclient.json().set(
        "doc1", "$", {"a": "b", "b": [{"a": 2}, {"a": 5.0}, {"a": "c"}]}
    )
    # Test multi
    assert await modclient.json().numincrby("doc1", "$..a", 2) == [None, 4, 7.0, None]
    res = await modclient.json().numincrby("doc1", "$..a", 2.5)
    assert res == [None, 6.5, 9.5, None]
    # Test single
    assert await modclient.json().numincrby("doc1", "$.b[1].a", 2) == [11.5]
    assert await modclient.json().numincrby("doc1", "$.b[2].a", 2) == [None]
    assert await modclient.json().numincrby("doc1", "$.b[1].a", 3.5) == [15.0]
    # Test NUMMULTBY
    await modclient.json().set(
        "doc1", "$", {"a": "b", "b": [{"a": 2}, {"a": 5.0}, {"a": "c"}]}
    )
    # test list
    with pytest.deprecated_call():
        res = await modclient.json().nummultby("doc1", "$..a", 2)
        assert res == [None, 4, 10, None]
    res = await modclient.json().nummultby("doc1", "$..a", 2.5)
    assert res == [None, 10.0, 25.0, None]
    # Test single
    with pytest.deprecated_call():
        assert await modclient.json().nummultby("doc1", "$.b[1].a", 2) == [50.0]
    assert await modclient.json().nummultby("doc1", "$.b[2].a", 2) == [None]
    assert await modclient.json().nummultby("doc1", "$.b[1].a", 3) == [150.0]
    # test missing keys
    with pytest.raises(exceptions.ResponseError):
        await modclient.json().numincrby("non_existing_doc", "$..a", 2)
        # NOTE(review): unreachable — the call above raises, so this second
        # statement never executes inside the raises() block.
        await modclient.json().nummultby("non_existing_doc", "$..a", 2)
    # Test legacy NUMINCRBY
    await modclient.json().set(
        "doc1", "$", {"a": "b", "b": [{"a": 2}, {"a": 5.0}, {"a": "c"}]}
    )
    # NOTE(review): comparison result discarded (missing ``assert``).
    await modclient.json().numincrby("doc1", ".b[0].a", 3) == 5
    # Test legacy NUMMULTBY
    await modclient.json().set(
        "doc1", "$", {"a": "b", "b": [{"a": 2}, {"a": 5.0}, {"a": "c"}]}
    )
    with pytest.deprecated_call():
        # NOTE(review): comparison result discarded (missing ``assert``).
        await modclient.json().nummultby("doc1", ".b[0].a", 3) == 6
@pytest.mark.redismod
async def test_strappend_dollar(modclient: redis.Redis):
    """JSON.STRAPPEND with JSONPath ($) and legacy (.) paths."""
    await modclient.json().set(
        "doc1", "$", {"a": "foo", "nested1": {"a": "hello"}, "nested2": {"a": 31}}
    )
    # Test multi
    # NOTE(review): the following comparisons are discarded (no assert), so
    # they can never fail; some of the expected values below look stale
    # (e.g. "a": "foo" after an append) — verify before adding asserts.
    await modclient.json().strappend("doc1", "bar", "$..a") == [6, 8, None]
    await modclient.json().get("doc1", "$") == [
        {"a": "foobar", "nested1": {"a": "hellobar"}, "nested2": {"a": 31}}
    ]
    # Test single
    await modclient.json().strappend("doc1", "baz", "$.nested1.a") == [11]
    await modclient.json().get("doc1", "$") == [
        {"a": "foobar", "nested1": {"a": "hellobarbaz"}, "nested2": {"a": 31}}
    ]
    # Test missing key
    with pytest.raises(exceptions.ResponseError):
        await modclient.json().strappend("non_existing_doc", "$..a", "err")
    # Test multi (legacy path); comparison results are discarded here too.
    await modclient.json().strappend("doc1", "bar", ".*.a") == 8
    await modclient.json().get("doc1", "$") == [
        {"a": "foo", "nested1": {"a": "hellobar"}, "nested2": {"a": 31}}
    ]
    # Test missing path
    with pytest.raises(exceptions.ResponseError):
        await modclient.json().strappend("doc1", "piu")
@pytest.mark.redismod
async def test_strlen_dollar(modclient: redis.Redis):
    """JSON.STRLEN with JSONPath ($): multi-match, single-match and missing key."""
    # Test multi
    await modclient.json().set(
        "doc1", "$", {"a": "foo", "nested1": {"a": "hello"}, "nested2": {"a": 31}}
    )
    assert await modclient.json().strlen("doc1", "$..a") == [3, 5, None]
    # STRAPPEND returns the new lengths, so it must agree with STRLEN.
    res2 = await modclient.json().strappend("doc1", "bar", "$..a")
    res1 = await modclient.json().strlen("doc1", "$..a")
    assert res1 == res2
    # Test single
    # BUG FIX: these two comparisons were previously discarded (missing assert).
    # After the append above nested1.a == "hellobar" (length 8); nested2.a is
    # not a string, so its length is None.
    assert await modclient.json().strlen("doc1", "$.nested1.a") == [8]
    assert await modclient.json().strlen("doc1", "$.nested2.a") == [None]
    # Test missing key
    with pytest.raises(exceptions.ResponseError):
        await modclient.json().strlen("non_existing_doc", "$..a")
@pytest.mark.redismod
async def test_arrappend_dollar(modclient: redis.Redis):
    """JSON.ARRAPPEND with JSONPath ($) and legacy (.) paths."""
    await modclient.json().set(
        "doc1",
        "$",
        {
            "a": ["foo"],
            "nested1": {"a": ["hello", None, "world"]},
            "nested2": {"a": 31},
        },
    )
    # Test multi
    # BUG FIX: this comparison was previously discarded (missing assert); the
    # expected new lengths match the document state asserted just below.
    assert await modclient.json().arrappend("doc1", "$..a", "bar", "racuda") == [
        3,
        5,
        None,
    ]
    assert await modclient.json().get("doc1", "$") == [
        {
            "a": ["foo", "bar", "racuda"],
            "nested1": {"a": ["hello", None, "world", "bar", "racuda"]},
            "nested2": {"a": 31},
        }
    ]
    # Test single
    assert await modclient.json().arrappend("doc1", "$.nested1.a", "baz") == [6]
    assert await modclient.json().get("doc1", "$") == [
        {
            "a": ["foo", "bar", "racuda"],
            "nested1": {"a": ["hello", None, "world", "bar", "racuda", "baz"]},
            "nested2": {"a": 31},
        }
    ]
    # Test missing key
    with pytest.raises(exceptions.ResponseError):
        await modclient.json().arrappend("non_existing_doc", "$..a")
    # Test legacy
    await modclient.json().set(
        "doc1",
        "$",
        {
            "a": ["foo"],
            "nested1": {"a": ["hello", None, "world"]},
            "nested2": {"a": 31},
        },
    )
    # Test multi (all paths are updated, but return result of last path)
    assert await modclient.json().arrappend("doc1", "..a", "bar", "racuda") == 5
    assert await modclient.json().get("doc1", "$") == [
        {
            "a": ["foo", "bar", "racuda"],
            "nested1": {"a": ["hello", None, "world", "bar", "racuda"]},
            "nested2": {"a": 31},
        }
    ]
    # Test single
    assert await modclient.json().arrappend("doc1", ".nested1.a", "baz") == 6
    assert await modclient.json().get("doc1", "$") == [
        {
            "a": ["foo", "bar", "racuda"],
            "nested1": {"a": ["hello", None, "world", "bar", "racuda", "baz"]},
            "nested2": {"a": 31},
        }
    ]
    # Test missing key
    with pytest.raises(exceptions.ResponseError):
        await modclient.json().arrappend("non_existing_doc", "$..a")
@pytest.mark.redismod
async def test_arrinsert_dollar(modclient: redis.Redis):
    """JSON.ARRINSERT with JSONPath ($): multi-match, single-match, missing key."""
    await modclient.json().set(
        "doc1",
        "$",
        {
            "a": ["foo"],
            "nested1": {"a": ["hello", None, "world"]},
            "nested2": {"a": 31},
        },
    )
    # Test multi: insert at index 1 in every matched array.
    res = await modclient.json().arrinsert("doc1", "$..a", "1", "bar", "racuda")
    assert res == [3, 5, None]
    assert await modclient.json().get("doc1", "$") == [
        {
            "a": ["foo", "bar", "racuda"],
            "nested1": {"a": ["hello", "bar", "racuda", None, "world"]},
            "nested2": {"a": 31},
        }
    ]
    # Test single (negative index counts from the end)
    assert await modclient.json().arrinsert("doc1", "$.nested1.a", -2, "baz") == [6]
    assert await modclient.json().get("doc1", "$") == [
        {
            "a": ["foo", "bar", "racuda"],
            "nested1": {"a": ["hello", "bar", "racuda", "baz", None, "world"]},
            "nested2": {"a": 31},
        }
    ]
    # Test missing key
    # BUG FIX: this check previously called arrappend (copy-paste from the
    # arrappend test); exercise arrinsert's own missing-key error instead.
    with pytest.raises(exceptions.ResponseError):
        await modclient.json().arrinsert("non_existing_doc", "$..a", "1", "err")
@pytest.mark.redismod
async def test_arrlen_dollar(modclient: redis.Redis):
    """JSON.ARRLEN with JSONPath ($) and legacy (.) paths."""
    await modclient.json().set(
        "doc1",
        "$",
        {
            "a": ["foo"],
            "nested1": {"a": ["hello", None, "world"]},
            "nested2": {"a": 31},
        },
    )
    # Test multi
    assert await modclient.json().arrlen("doc1", "$..a") == [1, 3, None]
    res = await modclient.json().arrappend("doc1", "$..a", "non", "abba", "stanza")
    assert res == [4, 6, None]
    await modclient.json().clear("doc1", "$.a")
    assert await modclient.json().arrlen("doc1", "$..a") == [0, 6, None]
    # Test single
    assert await modclient.json().arrlen("doc1", "$.nested1.a") == [6]
    # Test missing key
    # NOTE(review): this raises-check calls arrappend, not arrlen — it looks
    # copy-pasted from the arrappend test; confirm intent before changing.
    with pytest.raises(exceptions.ResponseError):
        await modclient.json().arrappend("non_existing_doc", "$..a")
    await modclient.json().set(
        "doc1",
        "$",
        {
            "a": ["foo"],
            "nested1": {"a": ["hello", None, "world"]},
            "nested2": {"a": 31},
        },
    )
    # Test multi (return result of last path)
    assert await modclient.json().arrlen("doc1", "$..a") == [1, 3, None]
    assert await modclient.json().arrappend("doc1", "..a", "non", "abba", "stanza") == 6
    # Test single
    assert await modclient.json().arrlen("doc1", ".nested1.a") == 6
    # Test missing key (legacy path returns None instead of raising)
    assert await modclient.json().arrlen("non_existing_doc", "..a") is None
@pytest.mark.redismod
async def test_arrpop_dollar(modclient: redis.Redis):
    """JSON.ARRPOP with JSONPath ($) and legacy (.) paths."""
    await modclient.json().set(
        "doc1",
        "$",
        {
            "a": ["foo"],
            "nested1": {"a": ["hello", None, "world"]},
            "nested2": {"a": 31},
        },
    )
    # # # Test multi: popped values come back JSON-serialized ('"foo"').
    assert await modclient.json().arrpop("doc1", "$..a", 1) == ['"foo"', None, None]
    assert await modclient.json().get("doc1", "$") == [
        {"a": [], "nested1": {"a": ["hello", "world"]}, "nested2": {"a": 31}}
    ]
    # Test missing key
    with pytest.raises(exceptions.ResponseError):
        await modclient.json().arrpop("non_existing_doc", "..a")
    # # Test legacy
    await modclient.json().set(
        "doc1",
        "$",
        {
            "a": ["foo"],
            "nested1": {"a": ["hello", None, "world"]},
            "nested2": {"a": 31},
        },
    )
    # Test multi (all paths are updated, but return result of last path)
    # NOTE(review): this comparison is discarded (no assert); the expected
    # `is None` is unverified, so only the side effect below is checked.
    await modclient.json().arrpop("doc1", "..a", "1") is None
    assert await modclient.json().get("doc1", "$") == [
        {"a": [], "nested1": {"a": ["hello", "world"]}, "nested2": {"a": 31}}
    ]
    # # Test missing key
    with pytest.raises(exceptions.ResponseError):
        await modclient.json().arrpop("non_existing_doc", "..a")
@pytest.mark.redismod
async def test_arrtrim_dollar(modclient: redis.Redis):
    """JSON.ARRTRIM with JSONPath ($) and legacy (.) paths."""
    await modclient.json().set(
        "doc1",
        "$",
        {
            "a": ["foo"],
            "nested1": {"a": ["hello", None, "world"]},
            "nested2": {"a": 31},
        },
    )
    # Test multi: trim to [start, stop]; returns the new length per match.
    assert await modclient.json().arrtrim("doc1", "$..a", "1", -1) == [0, 2, None]
    assert await modclient.json().get("doc1", "$") == [
        {"a": [], "nested1": {"a": [None, "world"]}, "nested2": {"a": 31}}
    ]
    assert await modclient.json().arrtrim("doc1", "$..a", "1", "1") == [0, 1, None]
    assert await modclient.json().get("doc1", "$") == [
        {"a": [], "nested1": {"a": ["world"]}, "nested2": {"a": 31}}
    ]
    # Test single: start > stop empties the array.
    assert await modclient.json().arrtrim("doc1", "$.nested1.a", 1, 0) == [0]
    assert await modclient.json().get("doc1", "$") == [
        {"a": [], "nested1": {"a": []}, "nested2": {"a": 31}}
    ]
    # Test missing key
    with pytest.raises(exceptions.ResponseError):
        await modclient.json().arrtrim("non_existing_doc", "..a", "0", 1)
    # Test legacy
    await modclient.json().set(
        "doc1",
        "$",
        {
            "a": ["foo"],
            "nested1": {"a": ["hello", None, "world"]},
            "nested2": {"a": 31},
        },
    )
    # Test multi (all paths are updated, but return result of last path)
    assert await modclient.json().arrtrim("doc1", "..a", "1", "-1") == 2
    # Test single
    assert await modclient.json().arrtrim("doc1", ".nested1.a", "1", "1") == 1
    assert await modclient.json().get("doc1", "$") == [
        {"a": [], "nested1": {"a": ["world"]}, "nested2": {"a": 31}}
    ]
    # Test missing key
    with pytest.raises(exceptions.ResponseError):
        await modclient.json().arrtrim("non_existing_doc", "..a", 1, 1)
@pytest.mark.redismod
async def test_objkeys_dollar(modclient: redis.Redis):
    """JSON.OBJKEYS with JSONPath ($) and legacy (.) paths."""
    await modclient.json().set(
        "doc1",
        "$",
        {
            "nested1": {"a": {"foo": 10, "bar": 20}},
            "a": ["foo"],
            "nested2": {"a": {"baz": 50}},
        },
    )
    # Test single
    assert await modclient.json().objkeys("doc1", "$.nested1.a") == [["foo", "bar"]]
    # Test legacy
    assert await modclient.json().objkeys("doc1", ".*.a") == ["foo", "bar"]
    # Test single
    assert await modclient.json().objkeys("doc1", ".nested2.a") == ["baz"]
    # Test missing key (legacy path returns None instead of raising)
    assert await modclient.json().objkeys("non_existing_doc", "..a") is None
    # Test non existing doc ($ path raises)
    # NOTE(review): the assert inside this block is never evaluated when the
    # call raises, so only the exception itself is actually checked here.
    with pytest.raises(exceptions.ResponseError):
        assert await modclient.json().objkeys("non_existing_doc", "$..a") == []
    # Non-matching path on an existing doc yields an empty result list.
    assert await modclient.json().objkeys("doc1", "$..nowhere") == []
@pytest.mark.redismod
async def test_objlen_dollar(modclient: redis.Redis):
    """JSON.OBJLEN with JSONPath ($) and legacy (.) paths."""
    await modclient.json().set(
        "doc1",
        "$",
        {
            "nested1": {"a": {"foo": 10, "bar": 20}},
            "a": ["foo"],
            "nested2": {"a": {"baz": 50}},
        },
    )
    # Test multi (non-object matches yield None)
    assert await modclient.json().objlen("doc1", "$..a") == [None, 2, 1]
    # Test single
    assert await modclient.json().objlen("doc1", "$.nested1.a") == [2]
    # Test missing key, and path
    with pytest.raises(exceptions.ResponseError):
        await modclient.json().objlen("non_existing_doc", "$..a")
    assert await modclient.json().objlen("doc1", "$.nowhere") == []
    # Test legacy
    assert await modclient.json().objlen("doc1", ".*.a") == 2
    # Test single
    assert await modclient.json().objlen("doc1", ".nested2.a") == 1
    # Test missing key (legacy path returns None instead of raising)
    assert await modclient.json().objlen("non_existing_doc", "..a") is None
    # Test missing path
    # with pytest.raises(exceptions.ResponseError):
    # NOTE(review): the result here is discarded — this only checks the call
    # does not raise; the commented-out raises-block above suggests the
    # expected behavior was unsettled.
    await modclient.json().objlen("doc1", ".nowhere")
def load_types_data(nested_key_name):
    """Build a fixture document containing one JSON value of every type.

    Returns a tuple ``(jdata, types)`` where ``jdata`` maps "nested1".."nested7"
    to ``{nested_key_name: <value>}`` and ``types`` lists the corresponding
    RedisJSON type names in insertion order.

    NOTE: the original carried a stray ``@pytest.mark.redismod`` decorator;
    this is a plain helper (not collected as a test), so the mark was a no-op
    and has been removed.
    """
    td = {
        "object": {},
        "array": [],
        "string": "str",
        "integer": 42,
        "number": 1.2,
        "boolean": False,
        "null": None,
    }
    jdata = {}
    types = []
    # enumerate(..., 1) replaces the old zip(range(1, len(td) + 1), iter(td.items())).
    for i, (k, v) in enumerate(td.items(), 1):
        jdata["nested" + str(i)] = {nested_key_name: v}
        types.append(k)
    return jdata, types
@pytest.mark.redismod
async def test_type_dollar(modclient: redis.Redis):
    """JSON.TYPE with JSONPath ($) over one value of every JSON type."""
    jdata, jtypes = load_types_data("a")
    await modclient.json().set("doc1", "$", jdata)
    # Test multi
    assert await modclient.json().type("doc1", "$..a") == jtypes
    # Test single ("nested2" holds the array value, i.e. jtypes[1])
    assert await modclient.json().type("doc1", "$.nested2.a") == [jtypes[1]]
    # Test missing key (legacy path returns None instead of raising)
    assert await modclient.json().type("non_existing_doc", "..a") is None
@pytest.mark.redismod
async def test_clear_dollar(modclient: redis.Redis):
    """JSON.CLEAR with JSONPath ($): empties containers, leaves scalars alone."""
    await modclient.json().set(
        "doc1",
        "$",
        {
            "nested1": {"a": {"foo": 10, "bar": 20}},
            "a": ["foo"],
            "nested2": {"a": "claro"},
            "nested3": {"a": {"baz": 50}},
        },
    )
    # Test multi: 3 containers cleared; the string value is untouched.
    assert await modclient.json().clear("doc1", "$..a") == 3
    assert await modclient.json().get("doc1", "$") == [
        {"nested1": {"a": {}}, "a": [], "nested2": {"a": "claro"}, "nested3": {"a": {}}}
    ]
    # Test single
    await modclient.json().set(
        "doc1",
        "$",
        {
            "nested1": {"a": {"foo": 10, "bar": 20}},
            "a": ["foo"],
            "nested2": {"a": "claro"},
            "nested3": {"a": {"baz": 50}},
        },
    )
    assert await modclient.json().clear("doc1", "$.nested1.a") == 1
    assert await modclient.json().get("doc1", "$") == [
        {
            "nested1": {"a": {}},
            "a": ["foo"],
            "nested2": {"a": "claro"},
            "nested3": {"a": {"baz": 50}},
        }
    ]
    # Test missing path (async defaults to root)
    assert await modclient.json().clear("doc1") == 1
    assert await modclient.json().get("doc1", "$") == [{}]
    # Test missing key
    with pytest.raises(exceptions.ResponseError):
        await modclient.json().clear("non_existing_doc", "$..a")
@pytest.mark.redismod
async def test_toggle_dollar(modclient: redis.Redis):
    """JSON.TOGGLE with JSONPath ($): flips booleans, returns None elsewhere."""
    await modclient.json().set(
        "doc1",
        "$",
        {
            "a": ["foo"],
            "nested1": {"a": False},
            "nested2": {"a": 31},
            "nested3": {"a": True},
        },
    )
    # Test multi: new values are reported as 1/0; non-booleans give None.
    assert await modclient.json().toggle("doc1", "$..a") == [None, 1, None, 0]
    assert await modclient.json().get("doc1", "$") == [
        {
            "a": ["foo"],
            "nested1": {"a": True},
            "nested2": {"a": 31},
            "nested3": {"a": False},
        }
    ]
    # Test missing key
    with pytest.raises(exceptions.ResponseError):
        await modclient.json().toggle("non_existing_doc", "$..a")
| mit | 9341c126d2d1fa45939fa5990225e25c | 33.137317 | 88 | 0.544478 | 3.211419 | false | true | false | false |
gwu-libraries/sfm-ui | sfm/ui/forms.py | 1 | 59182 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import forms
from django.contrib.auth.models import Group
from django.forms.widgets import DateTimeInput
from django.urls import reverse
from django.utils import timezone
from django.core.exceptions import ValidationError
from django.core.validators import MinValueValidator
from django.conf import settings
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Fieldset, Button, Submit, Div, HTML, Field
from crispy_forms.bootstrap import FormActions
from .models import CollectionSet, Collection, Seed, Credential, Export, User
from .utils import clean_token, clean_blogname
import json
import logging
import math
import re
log = logging.getLogger(__name__)
# Shared labels / help text so wording stays consistent across all the forms
# defined in this module.
HISTORY_NOTE_LABEL = "Change Note"
HISTORY_NOTE_HELP = "Explain why you made these changes at this time."
HISTORY_NOTE_HELP_ADD = "Further information about this addition."
HISTORY_NOTE_WIDGET = forms.Textarea(attrs={'rows': 4})
SCHEDULE_HELP = "How frequently you want data to be retrieved."
INCREMENTAL_LABEL = "Incremental harvest"
INCREMENTAL_HELP = "Only collect new items since the last data retrieval."
GROUP_HELP = "Your default group is your username, unless the SFM team has added you to another group."
class CollectionSetForm(forms.ModelForm):
    """Create/update form for a CollectionSet, restricted to the user's groups."""

    # Queryset is filled in per-request in __init__, so it starts as None.
    group = forms.ModelChoiceField(queryset=None)

    class Meta:
        model = CollectionSet
        fields = ['name', 'description', 'group', 'history_note']
        exclude = []
        widgets = {
            'history_note': HISTORY_NOTE_WIDGET
        }
        localized_fields = None
        labels = {
            'history_note': HISTORY_NOTE_LABEL
        }
        help_texts = {
            'history_note': HISTORY_NOTE_HELP,
        }
        error_messages = {}

    def __init__(self, *args, **kwargs):
        # The view passes the current request so group choices can be limited.
        request = kwargs.pop('request')
        super(CollectionSetForm, self).__init__(*args, **kwargs)
        # limiting groups in dropdown to user's and setting default if only 1 value.
        group_queryset = Group.objects.filter(pk__in=request.user.groups.all())
        if len(group_queryset) == 1:
            self.initial['group'] = group_queryset[0]
        self.fields['group'].queryset = group_queryset
        self.fields['group'].help_text = GROUP_HELP
        # check whether it's a CreateView and offer different help text
        if self.instance.pk is None:
            self.fields['history_note'].help_text = HISTORY_NOTE_HELP_ADD
        # set up crispy forms helper
        self.helper = FormHelper(self)
        self.helper.layout = Layout(
            Fieldset(
                '',
                'name',
                'description',
                'group',
                'history_note',
                css_class='crispy-form-custom'
            ),
            FormActions(
                Submit('submit', 'Save'),
                Button('cancel', 'Cancel', onclick="window.history.back()")
            )
        )
class NameModelChoiceField(forms.ModelChoiceField):
    """ModelChoiceField that renders each choice as the object's ``name``
    attribute instead of the default ``str(obj)``."""

    def label_from_instance(self, obj):
        return obj.name
class BaseCollectionForm(forms.ModelForm):
    """Shared form for all collection harvest types.

    Subclasses set the harvest type in save() and append harvest-specific
    option fields to the empty placeholder Div in the crispy layout, which
    they reach via ``self.helper.layout[0][5]``.
    """

    credential = NameModelChoiceField(None)
    link = forms.URLField(required=False, label="Public link",
                          help_text="Link to a public version of this collection, e.g., in a data repository.")

    class Meta:
        model = Collection
        fields = ['name', 'description', 'link', 'collection_set', 'visibility',
                  'schedule_minutes', 'credential', 'end_date',
                  'history_note']
        exclude = []
        widgets = {'collection_set': forms.HiddenInput,
                   'history_note': HISTORY_NOTE_WIDGET}
        labels = {
            'history_note': HISTORY_NOTE_LABEL,
            'visibility': 'Sharing'
        }
        help_texts = {
            'history_note': HISTORY_NOTE_HELP,
            'schedule_minutes': SCHEDULE_HELP
        }
        error_messages = {}

    def __init__(self, *args, **kwargs):
        # Collection set pk (for the cancel link) and the credentials the
        # current user may choose from, both supplied by the view.
        self.coll = kwargs.pop("coll", None)
        self.credential_list = kwargs.pop('credential_list', None)
        super(BaseCollectionForm, self).__init__(*args, **kwargs)
        # Set default if only 1 value.
        if self.credential_list and self.credential_list.count() == 1:
            self.initial['credential'] = self.credential_list[0]
        self.fields['credential'].queryset = self.credential_list
        # check whether it's a create view and offer different help text
        if self.instance.pk is None:
            self.fields['history_note'].help_text = HISTORY_NOTE_HELP_ADD
        cancel_url = reverse('collection_set_detail', args=[self.coll])
        self.helper = FormHelper(self)
        self.helper.layout = Layout(
            Fieldset(
                '',
                'name',
                'description',
                'link',
                'credential',
                Div(css_id='credential_warning'),
                # Empty placeholder Div: harvest-type subclasses extend it
                # (via self.helper.layout[0][5]) with their option fields.
                Div(),
                'schedule_minutes',
                Field('end_date', css_class='datepicker'),
                'collection_set',
                'visibility',
                'history_note',
                css_class='crispy-form-custom'
            ),
            FormActions(
                Submit('submit', 'Save'),
                Button('cancel', 'Cancel',
                       onclick="window.location.href='{0}'".format(cancel_url))
            )
        )

    def clean_end_date(self):
        """Reject end dates that are not in the future."""
        data = self.cleaned_data.get('end_date', None)
        if data:
            if data < timezone.now():
                raise forms.ValidationError(
                    'End date must be later than current date and time.')
        return data
class CollectionTwitterUserTimelineForm(BaseCollectionForm):
    """Collection form for the Twitter (v1.1) user timeline harvest type."""

    incremental = forms.BooleanField(initial=True, required=False, label=INCREMENTAL_LABEL,
                                     help_text=INCREMENTAL_HELP)
    deleted_accounts_option = forms.BooleanField(
        initial=False, required=False,
        label="Automatically delete seeds for deleted / not found accounts.")
    suspended_accounts_option = forms.BooleanField(
        initial=False, required=False,
        label="Automatically delete seeds for suspended accounts.")
    protected_accounts_options = forms.BooleanField(
        initial=False, required=False,
        label="Automatically delete seeds for protected accounts.")

    def __init__(self, *args, **kwargs):
        super(CollectionTwitterUserTimelineForm, self).__init__(*args, **kwargs)
        # Insert the option checkboxes into the layout's placeholder slot.
        self.helper.layout[0][5].extend(
            ('incremental', 'deleted_accounts_option',
             'suspended_accounts_option', 'protected_accounts_options'))
        # When editing, seed each checkbox from the stored harvest options.
        if self.instance and self.instance.harvest_options:
            stored = json.loads(self.instance.harvest_options)
            for json_key, field_name in (
                    ("incremental", "incremental"),
                    ("deactivate_not_found_seeds", "deleted_accounts_option"),
                    ("deactivate_unauthorized_seeds", "protected_accounts_options"),
                    ("deactivate_suspended_seeds", "suspended_accounts_option")):
                if json_key in stored:
                    self.fields[field_name].initial = stored[json_key]

    def save(self, commit=True):
        collection = super(CollectionTwitterUserTimelineForm, self).save(commit=False)
        collection.harvest_type = Collection.TWITTER_USER_TIMELINE
        collection.harvest_options = json.dumps(
            {
                "incremental": self.cleaned_data["incremental"],
                "deactivate_not_found_seeds": self.cleaned_data["deleted_accounts_option"],
                "deactivate_unauthorized_seeds": self.cleaned_data["protected_accounts_options"],
                "deactivate_suspended_seeds": self.cleaned_data["suspended_accounts_option"],
            },
            sort_keys=True)
        collection.save()
        return collection
class CollectionTwitterUserTimeline2Form(BaseCollectionForm):
    """Collection form for the Twitter API v2 user timeline harvest type.

    Same options as the v1.1 user timeline form, but saves harvest type
    TWITTER_USER_TIMELINE_2.
    """

    incremental = forms.BooleanField(initial=True, required=False, label=INCREMENTAL_LABEL, help_text=INCREMENTAL_HELP)
    deleted_accounts_option = forms.BooleanField(initial=False, required=False, label="Automatically delete seeds "
                                                                                      "for deleted / not found "
                                                                                      "accounts.")
    suspended_accounts_option = forms.BooleanField(initial=False, required=False, label="Automatically delete seeds "
                                                                                        "for suspended accounts.")
    protected_accounts_options = forms.BooleanField(initial=False, required=False, label="Automatically delete seeds "
                                                                                         "for protected accounts.")

    def __init__(self, *args, **kwargs):
        super(CollectionTwitterUserTimeline2Form, self).__init__(*args, **kwargs)
        # Insert the option checkboxes into the layout's placeholder slot.
        self.helper.layout[0][5].extend(('incremental',
                                         'deleted_accounts_option', 'suspended_accounts_option',
                                         'protected_accounts_options'))
        # When editing, seed each checkbox from the stored harvest options.
        if self.instance and self.instance.harvest_options:
            harvest_options = json.loads(self.instance.harvest_options)
            if "incremental" in harvest_options:
                self.fields['incremental'].initial = harvest_options["incremental"]
            if "deactivate_not_found_seeds" in harvest_options:
                self.fields['deleted_accounts_option'].initial = harvest_options["deactivate_not_found_seeds"]
            if "deactivate_unauthorized_seeds" in harvest_options:
                self.fields['protected_accounts_options'].initial = harvest_options["deactivate_unauthorized_seeds"]
            if "deactivate_suspended_seeds" in harvest_options:
                self.fields['suspended_accounts_option'].initial = harvest_options["deactivate_suspended_seeds"]

    def save(self, commit=True):
        m = super(CollectionTwitterUserTimeline2Form, self).save(commit=False)
        m.harvest_type = Collection.TWITTER_USER_TIMELINE_2
        harvest_options = {
            "incremental": self.cleaned_data["incremental"],
            "deactivate_not_found_seeds": self.cleaned_data["deleted_accounts_option"],
            "deactivate_unauthorized_seeds": self.cleaned_data["protected_accounts_options"],
            "deactivate_suspended_seeds": self.cleaned_data["suspended_accounts_option"]
        }
        m.harvest_options = json.dumps(harvest_options, sort_keys=True)
        m.save()
        return m
class CollectionTwitterSearchForm(BaseCollectionForm):
    """Collection form for the Twitter (v1.1) search harvest type."""

    incremental = forms.BooleanField(initial=True, required=False, label=INCREMENTAL_LABEL,
                                     help_text=INCREMENTAL_HELP)

    def __init__(self, *args, **kwargs):
        super(CollectionTwitterSearchForm, self).__init__(*args, **kwargs)
        # Show the incremental checkbox in the layout's option slot.
        self.helper.layout[0][5].extend(('incremental',))
        # When editing, restore the stored incremental setting.
        stored_json = self.instance.harvest_options if self.instance else None
        if stored_json:
            stored = json.loads(stored_json)
            if "incremental" in stored:
                self.fields['incremental'].initial = stored["incremental"]

    def save(self, commit=True):
        collection = super(CollectionTwitterSearchForm, self).save(commit=False)
        collection.harvest_type = Collection.TWITTER_SEARCH
        collection.harvest_options = json.dumps(
            {"incremental": self.cleaned_data["incremental"]}, sort_keys=True)
        collection.save()
        return collection
class CollectionTwitterSearch2Form(BaseCollectionForm):
    """Collection form for the Twitter API v2 search harvest type."""

    incremental = forms.BooleanField(initial=True, required=False, label=INCREMENTAL_LABEL, help_text=INCREMENTAL_HELP)

    def __init__(self, *args, **kwargs):
        super(CollectionTwitterSearch2Form, self).__init__(*args, **kwargs)
        # Show the incremental checkbox in the layout's option slot.
        self.helper.layout[0][5].extend(('incremental',))
        # When editing, restore the stored incremental setting.
        if self.instance and self.instance.harvest_options:
            harvest_options = json.loads(self.instance.harvest_options)
            if "incremental" in harvest_options:
                self.fields['incremental'].initial = harvest_options["incremental"]

    def save(self, commit=True):
        m = super(CollectionTwitterSearch2Form, self).save(commit=False)
        m.harvest_type = Collection.TWITTER_SEARCH_2
        harvest_options = {
            "incremental": self.cleaned_data["incremental"],
        }
        m.harvest_options = json.dumps(harvest_options, sort_keys=True)
        m.save()
        return m
class CollectionTwitterAcademicSearchForm(BaseCollectionForm):
    """Collection form for the Twitter academic (full-archive) search harvest type."""

    incremental = forms.BooleanField(initial=True, required=False, label=INCREMENTAL_LABEL, help_text=INCREMENTAL_HELP)

    def __init__(self, *args, **kwargs):
        super(CollectionTwitterAcademicSearchForm, self).__init__(*args, **kwargs)
        # Show the incremental checkbox in the layout's option slot.
        self.helper.layout[0][5].extend(('incremental',))
        # When editing, restore the stored incremental setting.
        if self.instance and self.instance.harvest_options:
            harvest_options = json.loads(self.instance.harvest_options)
            if "incremental" in harvest_options:
                self.fields['incremental'].initial = harvest_options["incremental"]

    def save(self, commit=True):
        m = super(CollectionTwitterAcademicSearchForm, self).save(commit=False)
        m.harvest_type = Collection.TWITTER_ACADEMIC_SEARCH
        harvest_options = {
            "incremental": self.cleaned_data["incremental"],
            # Extra flag marking this as an academic-search harvest.
            "twitter_academic_search": True
        }
        m.harvest_options = json.dumps(harvest_options, sort_keys=True)
        m.save()
        return m
class CollectionTwitterSampleForm(BaseCollectionForm):
    """Collection form for the Twitter sample stream harvest type.

    Streaming collections run continuously, so the schedule field is
    excluded and schedule_minutes is forced to None on save.
    """

    class Meta(BaseCollectionForm.Meta):
        exclude = ('schedule_minutes',)

    # CLEANUP: removed a no-op __init__ override that only called super().

    def save(self, commit=True):
        m = super(CollectionTwitterSampleForm, self).save(commit=False)
        m.harvest_type = Collection.TWITTER_SAMPLE
        m.schedule_minutes = None
        m.save()
        return m
class CollectionTwitterFilterForm(BaseCollectionForm):
    """Collection form for the Twitter filter stream harvest type.

    Streaming collections run continuously, so the schedule field is
    excluded and schedule_minutes is forced to None on save.
    """

    class Meta(BaseCollectionForm.Meta):
        exclude = ('schedule_minutes',)

    # CLEANUP: removed a no-op __init__ override that only called super().

    def save(self, commit=True):
        m = super(CollectionTwitterFilterForm, self).save(commit=False)
        m.harvest_type = Collection.TWITTER_FILTER
        m.schedule_minutes = None
        m.save()
        return m
class CollectionFlickrUserForm(BaseCollectionForm):
    """Collection form for the Flickr user harvest type."""

    incremental = forms.BooleanField(initial=True, required=False, label=INCREMENTAL_LABEL, help_text=INCREMENTAL_HELP)

    def __init__(self, *args, **kwargs):
        super(CollectionFlickrUserForm, self).__init__(*args, **kwargs)
        # Show the incremental checkbox in the layout's option slot.
        self.helper.layout[0][5].extend(('incremental',))
        # When editing, restore the stored incremental setting.
        if self.instance and self.instance.harvest_options:
            harvest_options = json.loads(self.instance.harvest_options)
            if "incremental" in harvest_options:
                self.fields['incremental'].initial = harvest_options["incremental"]

    def save(self, commit=True):
        m = super(CollectionFlickrUserForm, self).save(commit=False)
        m.harvest_type = Collection.FLICKR_USER
        harvest_options = {
            "incremental": self.cleaned_data["incremental"],
        }
        # CONSISTENCY FIX: serialize with sort_keys=True like every other
        # collection form in this module, so stored harvest_options strings
        # are stable and comparable.
        m.harvest_options = json.dumps(harvest_options, sort_keys=True)
        m.save()
        return m
class CollectionWeiboTimelineForm(BaseCollectionForm):
    """Collection form for the Weibo timeline harvest type."""

    incremental = forms.BooleanField(initial=True, required=False, label=INCREMENTAL_LABEL,
                                     help_text=INCREMENTAL_HELP)

    def __init__(self, *args, **kwargs):
        super(CollectionWeiboTimelineForm, self).__init__(*args, **kwargs)
        # Place the incremental checkbox into the layout's option slot.
        self.helper.layout[0][5].extend(('incremental',))
        # When editing an existing collection, restore the saved option.
        stored_json = self.instance.harvest_options if self.instance else None
        if stored_json:
            stored = json.loads(stored_json)
            if "incremental" in stored:
                self.fields['incremental'].initial = stored["incremental"]

    def save(self, commit=True):
        collection = super(CollectionWeiboTimelineForm, self).save(commit=False)
        collection.harvest_type = Collection.WEIBO_TIMELINE
        collection.harvest_options = json.dumps(
            {"incremental": self.cleaned_data["incremental"]}, sort_keys=True)
        collection.save()
        return collection
class CollectionWeiboSearchForm(BaseCollectionForm):
    """Collection form for the Weibo search harvest type."""

    incremental = forms.BooleanField(initial=True, required=False, help_text=INCREMENTAL_HELP, label=INCREMENTAL_LABEL)

    def __init__(self, *args, **kwargs):
        super(CollectionWeiboSearchForm, self).__init__(*args, **kwargs)
        # Show the incremental checkbox in the layout's option slot.
        self.helper.layout[0][5].extend(('incremental',))
        # When editing, restore the stored incremental setting.
        if self.instance and self.instance.harvest_options:
            harvest_options = json.loads(self.instance.harvest_options)
            if "incremental" in harvest_options:
                self.fields['incremental'].initial = harvest_options["incremental"]

    def save(self, commit=True):
        m = super(CollectionWeiboSearchForm, self).save(commit=False)
        m.harvest_type = Collection.WEIBO_SEARCH
        harvest_options = {
            "incremental": self.cleaned_data["incremental"],
        }
        m.harvest_options = json.dumps(harvest_options, sort_keys=True)
        m.save()
        return m
class CollectionTumblrBlogPostsForm(BaseCollectionForm):
    """Collection form for the Tumblr blog posts harvest type."""

    incremental = forms.BooleanField(initial=True, required=False, label=INCREMENTAL_LABEL, help_text=INCREMENTAL_HELP)

    def __init__(self, *args, **kwargs):
        super(CollectionTumblrBlogPostsForm, self).__init__(*args, **kwargs)
        # Show the incremental checkbox in the layout's option slot.
        self.helper.layout[0][5].extend(('incremental',))
        # When editing, restore the stored incremental setting.
        if self.instance and self.instance.harvest_options:
            harvest_options = json.loads(self.instance.harvest_options)
            if "incremental" in harvest_options:
                self.fields['incremental'].initial = harvest_options["incremental"]

    def save(self, commit=True):
        m = super(CollectionTumblrBlogPostsForm, self).save(commit=False)
        m.harvest_type = Collection.TUMBLR_BLOG_POSTS
        harvest_options = {
            "incremental": self.cleaned_data["incremental"],
        }
        m.harvest_options = json.dumps(harvest_options, sort_keys=True)
        m.save()
        return m
class BaseSeedForm(forms.ModelForm):
    """Shared form for all seed types.

    Subclasses add their token/uid fields to the layout placeholder
    (``self.helper.layout[0][0]``) and may override clean_token/clean_uid.
    """

    class Meta:
        model = Seed
        fields = ['collection',
                  'history_note']
        exclude = []
        widgets = {
            'collection': forms.HiddenInput,
            'history_note': HISTORY_NOTE_WIDGET
        }
        labels = {
            'history_note': HISTORY_NOTE_LABEL
        }
        help_texts = {
            'history_note': HISTORY_NOTE_HELP
        }

    def __init__(self, *args, **kwargs):
        # Collection pk, used for uniqueness checks and the cancel link.
        self.collection = kwargs.pop("collection", None)
        # for createView and updateView
        self.view_type = kwargs.pop("view_type", None)
        # for updateView check the updates for the original token and uid
        self.entry = kwargs.pop("entry", None)
        super(BaseSeedForm, self).__init__(*args, **kwargs)
        cancel_url = reverse('collection_detail', args=[self.collection])
        # check whether it's a create view and offer different help text
        if self.instance.pk is None:
            self.fields['history_note'].help_text = HISTORY_NOTE_HELP_ADD
        self.helper = FormHelper(self)
        self.helper.layout = Layout(
            Fieldset(
                '',
                # Empty placeholder Div: seed-type subclasses extend it
                # (via self.helper.layout[0][0]) with their token/uid fields.
                Div(),
                'history_note',
                'collection',
                css_class='crispy-form-custom'
            ),
            FormActions(
                Submit('submit', 'Save'),
                Button('cancel', 'Cancel',
                       onclick="window.location.href='{0}'".format(cancel_url))
            )
        )

    def clean_token(self):
        # NOTE(review): assumes "token" is present and non-None in
        # cleaned_data; .strip() would fail on None — confirm field config.
        token_val = self.cleaned_data.get("token")
        return token_val.strip()

    def clean_uid(self):
        # NOTE(review): same None assumption as clean_token above.
        uid_val = self.cleaned_data.get("uid")
        return uid_val.strip()

    def clean(self):
        """Cross-field validation: require a token or uid (when the subclass
        declares those fields) and enforce per-collection, case-insensitive
        uniqueness of both."""
        fields = self._meta.fields
        uid_val, token_val = '', ''
        uid_label, token_label = '', ''
        if "uid" in fields:
            uid_val = self.cleaned_data.get("uid")
            uid_label = self._meta.labels["uid"]
        if "token" in fields:
            token_val = self.cleaned_data.get("token")
            token_label = self._meta.labels["token"]
        # if has invalid error before, directly not check deep error
        if self.errors:
            return
        # should not both empty if has token or uid fields, the twitter filter should deal with separately
        if (uid_label or token_label) and (not uid_val and not token_val):
            # 'or' only appears in the message when both labels are present.
            or_text = 'or' * (1 if uid_label and token_label else 0)
            raise ValidationError(
                u'One of the following fields is required :{} {} {}.'.format(token_label, or_text, uid_label))
        # for the update view
        if self.view_type == Seed.UPDATE_VIEW:
            # check updated seeds exist in db if changes
            # case insensitive match, and user can't add 'token:TeSt' or 'token:teSt', etc if 'token:test exist.',
            # but can update to 'token:TeSt' or other.
            if token_val.lower() != self.entry.token.lower() and \
                    token_val and Seed.objects.filter(collection=self.collection,
                                                      token__iexact=token_val).exists():
                raise ValidationError(u'{}: {} already exist.'.format(token_label, token_val))
            # check updated uid whether exist in db if changes
            if uid_val.lower() != self.entry.uid.lower() and \
                    uid_val and Seed.objects.filter(collection=self.collection,
                                                    uid__iexact=uid_val).exists():
                raise ValidationError(u'{}: {} already exist.'.format(uid_label, uid_val))
        else:
            if token_val and Seed.objects.filter(collection=self.collection, token__iexact=token_val).exists():
                raise ValidationError(u'{}: {} already exist.'.format(token_label, token_val))
            if uid_val and Seed.objects.filter(collection=self.collection, uid__iexact=uid_val).exists():
                raise ValidationError(u'{}: {} already exist.'.format(uid_label, uid_val))
class SeedTwitterUserTimelineForm(BaseSeedForm):
    """Seed form for Twitter (v1.1) user-timeline harvests.

    A seed is identified by a screen name (token) and/or a numeric user id
    (uid).
    """

    class Meta(BaseSeedForm.Meta):
        fields = ['token', 'uid'] + list(BaseSeedForm.Meta.fields)
        labels = dict(BaseSeedForm.Meta.labels,
                      token="Screen name", uid="User id")
        widgets = dict(BaseSeedForm.Meta.widgets,
                       token=forms.TextInput(attrs={'size': '40'}),
                       uid=forms.TextInput(attrs={'size': '40'}))

    def __init__(self, *args, **kwargs):
        super(SeedTwitterUserTimelineForm, self).__init__(*args, **kwargs)
        # Append the platform-specific inputs to the shared crispy layout.
        self.helper.layout[0][0].extend(('token', 'uid'))

    def clean_uid(self):
        """Accept only all-digit user ids."""
        uid = self.cleaned_data.get("uid")
        if uid and not uid.isdigit():
            raise ValidationError('Uid should be numeric.', code='invalid')
        return uid

    def clean_token(self):
        """Normalize the screen name and reject purely numeric values."""
        screen_name = clean_token(self.cleaned_data.get("token")).split(" ")[0]
        if screen_name and screen_name.isdigit():
            raise ValidationError('Screen name may not be numeric.', code='invalid')
        return screen_name
class SeedTwitterUserTimeline2Form(BaseSeedForm):
    """Seed form for Twitter v2 API user-timeline harvests.

    A seed is identified by a screen name (token) and/or a numeric user id
    (uid).
    """

    class Meta(BaseSeedForm.Meta):
        fields = ['token', 'uid'] + list(BaseSeedForm.Meta.fields)
        labels = dict(BaseSeedForm.Meta.labels,
                      token="Screen name", uid="User id")
        widgets = dict(BaseSeedForm.Meta.widgets,
                       token=forms.TextInput(attrs={'size': '40'}),
                       uid=forms.TextInput(attrs={'size': '40'}))

    def __init__(self, *args, **kwargs):
        super(SeedTwitterUserTimeline2Form, self).__init__(*args, **kwargs)
        # Append the platform-specific inputs to the shared crispy layout.
        self.helper.layout[0][0].extend(('token', 'uid'))

    def clean_uid(self):
        """Accept only all-digit user ids."""
        uid = self.cleaned_data.get("uid")
        if uid and not uid.isdigit():
            raise ValidationError('Uid should be numeric.', code='invalid')
        return uid

    def clean_token(self):
        """Normalize the screen name and reject purely numeric values."""
        screen_name = clean_token(self.cleaned_data.get("token")).split(" ")[0]
        if screen_name and screen_name.isdigit():
            raise ValidationError('Screen name may not be numeric.', code='invalid')
        return screen_name
class SeedTwitterSearchForm(BaseSeedForm):
    """Seed form for Twitter (v1.1) search harvests.

    The seed's ``token`` column stores a JSON object with optional
    ``query`` and ``geocode`` keys; this form presents the two values as
    separate inputs and serializes them back on save.
    """
    query = forms.CharField(required=False, widget=forms.Textarea(attrs={'rows': 4}),
                            help_text='See <a href="https://developer.twitter.com/en/docs/tweets/search/guides/'
                                      'standard-operators" target="_blank">'
                                      'these instructions</a> for writing a query. '
                                      'Example: firefly OR "lightning bug"')
    geocode = forms.CharField(required=False,
                              help_text='Geocode in the format latitude,longitude,radius. '
                                        'Example: 38.899434,-77.036449,50mi')
    def __init__(self, *args, **kwargs):
        super(SeedTwitterSearchForm, self).__init__(*args, **kwargs)
        self.helper.layout[0][0].extend(('query', 'geocode'))
        # When editing, pre-populate the form fields from the stored token.
        if self.instance and self.instance.token:
            try:
                token = json.loads(self.instance.token)
            # This except handling converts over old-style tokens that stored
            # the bare query string rather than a JSON object.
            except ValueError:
                token = {'query': self.instance.token}
            if 'query' in token:
                self.fields['query'].initial = token['query']
            if 'geocode' in token:
                self.fields['geocode'].initial = token['geocode']
    def clean_query(self):
        """Strip surrounding whitespace from the query."""
        query_val = self.cleaned_data.get("query")
        return query_val.strip()
    def clean_geocode(self):
        """Strip surrounding whitespace from the geocode."""
        geocode_val = self.cleaned_data.get("geocode")
        return geocode_val.strip()
    def clean(self):
        """Require at least one of query/geocode."""
        # NOTE(review): original author observed that stripping here left a
        # trailing space; stripping is done in the clean_<field> methods.
        query_val = self.cleaned_data.get("query")
        geocode_val = self.cleaned_data.get("geocode")
        # should not all be empty
        if not query_val and not geocode_val:
            raise ValidationError(u'One of the following fields is required: query, geocode.')
    def save(self, commit=True):
        """Serialize query/geocode into the JSON token and persist the seed."""
        m = super(SeedTwitterSearchForm, self).save(commit=False)
        token = dict()
        if self.cleaned_data['query']:
            token['query'] = self.cleaned_data['query']
        if self.cleaned_data['geocode']:
            token['geocode'] = self.cleaned_data['geocode']
        m.token = json.dumps(token, ensure_ascii=False)
        m.save()
        return m
class SeedTwitterSearch2Form(BaseSeedForm):
    """Seed form for Twitter v2 API search harvests.

    The seed's ``token`` column stores a JSON object with ``query`` plus
    optional ``start_time``/``end_time`` (ISO-8601) and ``limit`` keys.
    """
    query = forms.CharField(required=True, widget=forms.Textarea(attrs={'rows': 4}),
                            help_text="See Twitter's <a href='https://developer.twitter.com/en/docs/twitter-api/tweets/counts/integrate/build-a-query' target='_blank'>instructions for building a query</a>. "
                                      "Example: (happy OR happiness) lang:en -is:retweet")
    start_time = forms.DateTimeField(required=False, help_text="Earliest date of tweets searched. Will be converted to UTC.", widget=DateTimeInput(attrs={'class': 'datepicker'}))
    end_time= forms.DateTimeField(required=False, help_text="Most recent date of tweets searched. Will be converted to UTC.", widget=DateTimeInput(attrs={'class': 'datepicker'}))
    limit = forms.IntegerField(required=False, validators=[MinValueValidator(1)], help_text="Maximum number of tweets to be retrieved. Will be rounded up to a multiple of 100. Limits are approximate; actual results may exceed the limit slightly.")
    def __init__(self, *args, **kwargs):
        super(SeedTwitterSearch2Form, self).__init__(*args, **kwargs)
        self.helper.layout[0][0].extend(('query','start_time','end_time','limit'))
        # When editing, pre-populate the form fields from the stored token.
        if self.instance and self.instance.token:
            token = json.loads(self.instance.token)
            if 'query' in token:
                self.fields['query'].initial = token['query']
            if 'start_time' in token:
                self.fields['start_time'].initial = token['start_time']
            if 'end_time' in token:
                self.fields['end_time'].initial = token['end_time']
            if 'limit' in token:
                self.fields['limit'].initial = token['limit']
    def clean_query(self):
        """Strip surrounding whitespace from the query."""
        query_val = self.cleaned_data.get("query")
        return query_val.strip()
    def save(self, commit=True):
        """Serialize the fields into the JSON token and persist the seed.

        ``limit`` is rounded up to a multiple of 100 to match twarc2's
        ``max_results`` page size.
        """
        m = super(SeedTwitterSearch2Form, self).save(commit=False)
        token = dict()
        if self.cleaned_data['query']:
            token['query'] = self.cleaned_data['query']
        if self.cleaned_data['start_time']:
            token['start_time'] = self.cleaned_data['start_time'].isoformat()
        if self.cleaned_data['end_time']:
            token['end_time'] = self.cleaned_data['end_time'].isoformat()
        if self.cleaned_data['limit']:
            limit = self.cleaned_data['limit']
            token['limit'] = _round_up(limit)
        m.token = json.dumps(token, ensure_ascii=False)
        m.save()
        return m
class SeedTwitterAcademicSearchForm(BaseSeedForm):
    """Seed form for Twitter v2 academic (full-archive) search harvests.

    Like :class:`SeedTwitterSearch2Form` but additionally supports a
    ``geocode`` point-radius filter; all values are serialized into the
    seed's JSON ``token``.
    """
    query = forms.CharField(required=False, widget=forms.Textarea(attrs={'rows': 4}),
                            help_text="See Twitter's <a href='https://developer.twitter.com/en/docs/twitter-api/tweets/counts/integrate/build-a-query' target='_blank'>instructions for building a query</a>. "
                                      "Example: (happy OR happiness) lang:en -is:retweet")
    start_time = forms.DateTimeField(required=False,help_text="Earliest date of tweets searched. Will be converted to UTC.", widget=DateTimeInput(attrs={'class': 'datepicker'}))
    end_time= forms.DateTimeField(required=False, help_text="Most recent date of tweets searched. Will be converted to UTC.", widget=DateTimeInput(attrs={'class': 'datepicker'}))
    limit = forms.IntegerField(required=False, validators=[MinValueValidator(1)], help_text="Maximum number of tweets to be retrieved. Will be rounded up to a multiple of 100. Limits are approximate; actual results may exceed the limit slightly.")
    geocode = forms.CharField(required=False,
                              help_text='Geocode point radius in the format: longitude latitude radius. '
                                        'Example: -77.036449 38.899434 50mi')
    def __init__(self, *args, **kwargs):
        super(SeedTwitterAcademicSearchForm, self).__init__(*args, **kwargs)
        # The inline HTML warning is rendered between the query input and the
        # date-range inputs in the crispy layout.
        self.helper.layout[0][0].extend(('query',HTML("""
            <div class="alert alert-warning">
            Use start and end times in order to avoid using up monthly limit imposed by the API. See Twitter's <a href='https://developer.twitter.com/en/docs/twitter-api/getting-started/about-twitter-api#v2-access-level' target='_blank'>API access levels and versions here</a>.</div>"""),'start_time', 'end_time','limit', 'geocode'))
        # When editing, pre-populate the form fields from the stored token.
        if self.instance and self.instance.token:
            token = json.loads(self.instance.token)
            if 'query' in token:
                self.fields['query'].initial = token['query']
            if 'start_time' in token:
                self.fields['start_time'].initial = token['start_time']
            if 'end_time' in token:
                self.fields['end_time'].initial = token['end_time']
            if 'geocode' in token:
                self.fields['geocode'].initial = token['geocode']
            if 'limit' in token:
                self.fields['limit'].initial = token['limit']
    def clean(self):
        """Require at least one of query/geocode."""
        query_val = self.cleaned_data.get("query")
        geocode_val = self.cleaned_data.get("geocode")
        # should not all be empty
        if not query_val and not geocode_val:
            raise ValidationError(u'One of the following fields is required: query, geocode.')
    def save(self, commit=True):
        """Serialize the fields into the JSON token and persist the seed.

        ``limit`` is rounded up to a multiple of 100 to match twarc2's
        ``max_results`` page size.
        """
        m = super(SeedTwitterAcademicSearchForm, self).save(commit=False)
        token = dict()
        if self.cleaned_data['query']:
            token['query'] = self.cleaned_data['query']
        if self.cleaned_data['geocode']:
            token['geocode'] = self.cleaned_data['geocode']
        if self.cleaned_data['start_time']:
            token['start_time'] = self.cleaned_data['start_time'].isoformat()
        if self.cleaned_data['end_time']:
            token['end_time'] = self.cleaned_data['end_time'].isoformat()
        if self.cleaned_data['limit']:
            limit = self.cleaned_data['limit']
            token['limit'] = _round_up(limit)
        m.token = json.dumps(token, ensure_ascii=False)
        m.save()
        return m
class SeedWeiboSearchForm(BaseSeedForm):
    """Seed form for Weibo topic-search harvests; the token is the topic."""

    class Meta(BaseSeedForm.Meta):
        fields = ['token'] + list(BaseSeedForm.Meta.fields)
        labels = dict(BaseSeedForm.Meta.labels, token="Topic")
        help_texts = dict(BaseSeedForm.Meta.help_texts)
        help_texts["token"] = (
            u'See <a href="http://open.weibo.com/wiki/2/search/topics" target="_blank">'
            u'API documents</a> for query Weibo related on a topic. '
            u'Example: "科技".'
        )
        widgets = dict(BaseSeedForm.Meta.widgets,
                       token=forms.TextInput(attrs={'size': '40'}))

    def __init__(self, *args, **kwargs):
        super(SeedWeiboSearchForm, self).__init__(*args, **kwargs)
        # Append the topic input to the shared crispy layout.
        self.helper.layout[0][0].append('token')
class SeedTwitterFilterForm(BaseSeedForm):
    """Seed form for the Twitter streaming (filter) API.

    A filter seed is a JSON token combining up to four stream parameters:
    ``track``, ``follow``, ``locations``, ``language``.  At least one must
    be supplied; uniqueness of the combined token within the collection is
    enforced in :meth:`clean`.
    """
    track = forms.CharField(required=False, widget=forms.Textarea(attrs={'rows': 4}),
                            help_text="""Separate keywords and phrases with commas. See Twitter <a
                            target="_blank" href="https://developer.twitter.com/en/docs/tweets/filter-realtime/guides/basic-stream-parameters#track">
                            track</a> for more information.""")
    follow = forms.CharField(required=False, widget=forms.Textarea(attrs={'rows': 4}),
                             help_text="""Use commas to separate user IDs (e.g. 1233718,6378678) of accounts whose
                             tweets, retweets, and replies will be collected. See Twitter <a
                             target="_blank"
                             href="https://developer.twitter.com/en/docs/tweets/filter-realtime/guides/basic-stream-parameters#follow">
                             follow</a>
                             documentation for a full list of what is returned. User <a target="_blank"
                             href="https://tweeterid.com/">TweeterID</a> to get the user ID for a screen name.""")
    locations = forms.CharField(required=False, widget=forms.Textarea(attrs={'rows': 2}),
                                help_text="""Provide a longitude and latitude (e.g. -74,40,-73,41) of a geographic
                                bounding box. See Twitter <a target="blank"
                                href="https://developer.twitter.com/en/docs/tweets/filter-realtime/guides/basic-stream-parameters#locations">
                                locations</a> for more information.""")
    language = forms.CharField(required=False, widget=forms.Textarea(attrs={'rows': 2}),
                               help_text="""Provide a comma-separated list of two-letter <a target="blank"
                               href="https://datahub.io/core/language-codes">BCP 47 language codes</a> (e.g. en,es). See Twitter <a target="blank"
                               href="https://developer.twitter.com/en/docs/tweets/filter-realtime/guides/basic-stream-parameters#language">
                               language</a> for more information.""")
    def __init__(self, *args, **kwargs):
        super(SeedTwitterFilterForm, self).__init__(*args, **kwargs)
        self.helper.layout[0][0].extend(('track', 'follow', 'locations', 'language'))
        # When editing, pre-populate the form fields from the stored token.
        if self.instance and self.instance.token:
            token = json.loads(self.instance.token)
            if 'track' in token:
                self.fields['track'].initial = token['track']
            if 'follow' in token:
                self.fields['follow'].initial = token['follow']
            if 'locations' in token:
                self.fields['locations'].initial = token['locations']
            if 'language' in token:
                self.fields['language'].initial = token['language']
    def clean_track(self):
        """Strip whitespace and enforce the streaming-API keyword limit."""
        track_val = self.cleaned_data.get("track").strip()
        if len(track_val.split(",")) > 400:
            raise ValidationError("Can only track 400 keywords.")
        return track_val
    def clean_locations(self):
        """Strip surrounding whitespace from locations."""
        return self.cleaned_data.get("locations").strip()
    def clean_language(self):
        """Strip surrounding whitespace from language."""
        return self.cleaned_data.get("language").strip()
    def clean_follow(self):
        """Strip whitespace and enforce the streaming-API follow limit."""
        follow_val = self.cleaned_data.get("follow").strip()
        if len(follow_val.split(",")) > 5000:
            raise ValidationError("Can only follow 5000 users.")
        return follow_val
    def clean(self):
        """Require at least one parameter and enforce token uniqueness."""
        # NOTE(review): original author observed that stripping here left a
        # trailing space; stripping is done in the clean_<field> methods.
        track_val = self.cleaned_data.get("track")
        follow_val = self.cleaned_data.get("follow")
        locations_val = self.cleaned_data.get("locations")
        language_val = self.cleaned_data.get("language")
        # should not all be empty
        if not track_val and not follow_val and not locations_val and not language_val:
            raise ValidationError(u'One of the following fields is required: track, follow, locations, language.')
        # follow must contain only numeric user ids (digits, commas, spaces)
        if re.compile(r'[^0-9, ]').search(follow_val):
            raise ValidationError('Follow must be user ids', code='invalid_follow')
        # Rebuild the JSON token exactly as save() will, so the duplicate
        # check below compares against what would be stored.
        token_val = {}
        if track_val:
            token_val['track'] = track_val
        if follow_val:
            token_val['follow'] = follow_val
        if locations_val:
            token_val['locations'] = locations_val
        if language_val:
            token_val['language'] = language_val
        token_val = json.dumps(token_val, ensure_ascii=False)
        # for the update view
        if self.view_type == Seed.UPDATE_VIEW:
            # Case-insensitive duplicate check, but a seed may update its own
            # casing (e.g. 'track:Test' -> 'track:test').
            if token_val.lower() != self.entry.token.lower() and \
                    token_val and Seed.objects.filter(collection=self.collection,
                                                      token__iexact=token_val).exists():
                raise ValidationError(u'Seed: {} already exist.'.format(token_val))
        else:
            # Create view: any case-insensitive match is a duplicate.
            if token_val and Seed.objects.filter(collection=self.collection, token__iexact=token_val).exists():
                raise ValidationError(u'Seed: {} already exist.'.format(token_val))
    def save(self, commit=True):
        """Serialize the filter parameters into the JSON token and persist."""
        m = super(SeedTwitterFilterForm, self).save(commit=False)
        token = dict()
        if self.cleaned_data['track']:
            token['track'] = self.cleaned_data['track']
        if self.cleaned_data['follow']:
            token['follow'] = self.cleaned_data['follow']
        if self.cleaned_data['locations']:
            token['locations'] = self.cleaned_data['locations']
        if self.cleaned_data['language']:
            token['language'] = self.cleaned_data['language']
        m.token = json.dumps(token, ensure_ascii=False)
        m.save()
        return m
class SeedFlickrUserForm(BaseSeedForm):
    """Seed form for Flickr user harvests (username and/or NSID)."""

    class Meta(BaseSeedForm.Meta):
        fields = ['token', 'uid'] + list(BaseSeedForm.Meta.fields)
        labels = dict(BaseSeedForm.Meta.labels,
                      token="Username", uid="NSID")
        help_texts = dict(BaseSeedForm.Meta.help_texts)
        help_texts["token"] = (
            'A string name for the user account. Finding this on the Flickr website can be '
            'confusing, so see NSID below.'
        )
        help_texts["uid"] = (
            'An unchanging identifier for a user account, e.g., 80136838@N05. To find the NSID for a '
            'user account, use <a href="http://www.webpagefx.com/tools/idgettr/">idGettr</a>.'
        )
        widgets = dict(BaseSeedForm.Meta.widgets,
                       token=forms.TextInput(attrs={'size': '40'}),
                       uid=forms.TextInput(attrs={'size': '40'}))

    def __init__(self, *args, **kwargs):
        super(SeedFlickrUserForm, self).__init__(*args, **kwargs)
        # Append the platform-specific inputs to the shared crispy layout.
        self.helper.layout[0][0].extend(('token', 'uid'))
class SeedTumblrBlogPostsForm(BaseSeedForm):
    """Seed form for Tumblr blog-post harvests; the uid is the hostname."""

    class Meta(BaseSeedForm.Meta):
        fields = ['uid'] + list(BaseSeedForm.Meta.fields)
        labels = dict(BaseSeedForm.Meta.labels, uid="Blog hostname")
        help_texts = dict(BaseSeedForm.Meta.help_texts)
        help_texts["uid"] = (
            'Please provide the standard blog hostname, eg. codingjester or codingjester.tumblr.com.'
            'If blog hostname is codingjester.tumblr.com, it would be considered as codingjester. '
            'To better understand standard blog hostname, See '
            '<a target="_blank" href="https://www.tumblr.com/docs/en/api/v2#hostname">'
            'these instructions</a>.'
        )
        widgets = dict(BaseSeedForm.Meta.widgets,
                       uid=forms.TextInput(attrs={'size': '40'}))

    def __init__(self, *args, **kwargs):
        super(SeedTumblrBlogPostsForm, self).__init__(*args, **kwargs)
        # Append the hostname input to the shared crispy layout.
        self.helper.layout[0][0].append('uid')

    def clean_uid(self):
        """Normalize the hostname (strip the .tumblr.com suffix, etc.)."""
        return clean_blogname(self.cleaned_data.get("uid"))
class BaseBulkSeedForm(forms.Form):
    """Base form for adding many seeds at once (one seed per line).

    Subclasses override the ``seeds_type`` choices for their platform.
    """
    # Default choices; subclasses replace these in __init__.
    TYPES = (('token', 'Username'), ('uid', 'NSID'))
    seeds_type = forms.ChoiceField(required=True, choices=TYPES, widget=forms.RadioSelect)
    tokens = forms.CharField(required=True, widget=forms.Textarea(attrs={'rows': 20}),
                             help_text="Enter each seed on a separate line.", label="Bulk Seeds")
    history_note = forms.CharField(label=HISTORY_NOTE_LABEL, widget=HISTORY_NOTE_WIDGET, help_text=HISTORY_NOTE_HELP,
                                   required=False)
    def __init__(self, *args, **kwargs):
        # The target collection pk is injected by the view.
        self.collection = kwargs.pop("collection", None)
        super(BaseBulkSeedForm, self).__init__(*args, **kwargs)
        self.fields['history_note'].help_text = HISTORY_NOTE_HELP_ADD
        cancel_url = reverse('collection_detail', args=[self.collection])
        # Build the crispy-forms layout: one fieldset plus Save/Cancel.
        self.helper = FormHelper(self)
        self.helper.layout = Layout(
            Fieldset(
                '',
                'seeds_type',
                'tokens',
                'history_note',
                css_class='crispy-form-custom'
            ),
            FormActions(
                Submit('submit', 'Save'),
                Button('cancel', 'Cancel',
                       onclick="window.location.href='{0}'".format(cancel_url))
            )
        )
class BulkSeedTwitterUserTimelineForm(BaseBulkSeedForm):
    """Bulk seed form for Twitter user timelines (screen names or uids)."""
    def __init__(self, *args, **kwargs):
        super(BulkSeedTwitterUserTimelineForm, self).__init__(*args, **kwargs)
        self.fields['seeds_type'].choices = (('token', 'Screen Name'), ('uid', 'User id'))
    def clean_tokens(self):
        """Validate one seed per line against the selected seed type.

        Screen names (token) must not be purely numeric; uids must be.
        Returns the cleaned seeds re-joined with newlines.
        """
        seed_type = self.cleaned_data.get("seeds_type")
        tokens = self.cleaned_data.get("tokens")
        # NOTE(review): ''.join() on a string is a no-op; this just splits
        # the textarea contents into lines.
        splittoken = ''.join(tokens).splitlines()
        numtoken, strtoken, finaltokens = [], [], []
        for t in splittoken:
            clean_t = clean_token(t)
            # Keep only the first whitespace-separated word of each line.
            clean_t = clean_t.split(" ")[0]
            if clean_t and clean_t.isdigit():
                numtoken.append(clean_t)
            elif clean_t and not clean_t.isdigit():
                strtoken.append(clean_t)
            # NOTE(review): lines that clean to '' are still appended as
            # blank lines here — confirm downstream handles/skips them.
            finaltokens.append(clean_t + "\n")
        if seed_type == 'token' and numtoken:
            raise ValidationError(
                'Screen names may not be numeric. Please correct the following seeds: ' + ', '.join(numtoken) + '.')
        elif seed_type == 'uid' and strtoken:
            raise ValidationError(
                'UIDs must be numeric. Please correct the following seeds: ' + ', '.join(strtoken) + '.')
        return ''.join(finaltokens)
class BulkSeedFlickrUserForm(BaseBulkSeedForm):
    """Bulk seed form for Flickr users.

    Uses the default 'Username'/'NSID' seed-type choices from
    BaseBulkSeedForm unchanged.  The previous ``__init__`` override only
    delegated to ``super()`` with no customization, so it was dead code
    and has been removed; construction behavior is identical.
    """
class BulkSeedTumblrBlogPostsForm(BaseBulkSeedForm):
    """Bulk seed form for Tumblr blogs; only blog hostnames (uids) apply."""

    def __init__(self, *args, **kwargs):
        super(BulkSeedTumblrBlogPostsForm, self).__init__(*args, **kwargs)
        seeds_type_field = self.fields['seeds_type']
        # Tumblr seeds are always identified by uid, so lock the choice.
        seeds_type_field.choices = (('uid', 'Blog hostnames'),)
        seeds_type_field.initial = 'uid'
class BaseCredentialForm(forms.ModelForm):
    """Base form for platform API credentials.

    Subclasses declare the platform-specific secret fields, implement
    ``to_token()`` to serialize them, and set the platform in ``save()``.
    Duplicate credentials (identical serialized tokens) are rejected.
    """
    class Meta:
        model = Credential
        fields = ['name', 'history_note']
        exclude = []
        widgets = {
            'history_note': HISTORY_NOTE_WIDGET
        }
        localized_fields = None
        labels = {
            'history_note': HISTORY_NOTE_LABEL
        }
        help_texts = {
            'history_note': HISTORY_NOTE_HELP
        }
        error_messages = {}
    def __init__(self, *args, **kwargs):
        # for createView and updateView
        self.view_type = kwargs.pop("view_type", None)
        # for updateView check the updates for the original token
        self.entry = kwargs.pop("entry", None)
        super(BaseCredentialForm, self).__init__(*args, **kwargs)
        # check whether it's a create view and offer different help text
        if self.instance.pk is None:
            self.fields['history_note'].help_text = HISTORY_NOTE_HELP_ADD
        # set up crispy forms helper
        self.helper = FormHelper(self)
        # The empty Div() is a placeholder; subclasses extend
        # self.helper.layout[0][1] with their credential fields.
        self.helper.layout = Layout(
            Fieldset(
                '',
                'name',
                Div(),
                'history_note',
                css_class='crispy-form-custom'
            ),
            FormActions(
                Submit('submit', 'Save'),
                Button('cancel', 'Cancel', onclick="window.history.back()")
            )
        )
    def clean(self):
        """Reject credentials whose serialized token already exists."""
        cleaned_data = super(BaseCredentialForm, self).clean()
        token = json.dumps(self.to_token())
        # for the update view
        if self.view_type == Credential.UPDATE_VIEW:
            # Only a *changed* token is checked, so a credential can be
            # re-saved with its own token.
            if token != self.entry.token and Credential.objects.filter(token=token).exists():
                raise ValidationError(u'This is a duplicate of an existing credential!')
        else:
            if Credential.objects.filter(token=token).exists():
                raise ValidationError(u'This is a duplicate of an existing credential!')
        return cleaned_data
class CredentialFlickrForm(BaseCredentialForm):
    """Credential form for the Flickr API (key/secret pair)."""

    key = forms.CharField(required=True)
    secret = forms.CharField(required=True)

    def __init__(self, *args, **kwargs):
        super(CredentialFlickrForm, self).__init__(*args, **kwargs)
        self.helper.layout[0][1].extend(['key', 'secret'])
        # When editing, seed the visible fields from the stored JSON token.
        if self.instance and self.instance.token:
            stored = json.loads(self.instance.token)
            for name in ('key', 'secret'):
                self.fields[name].initial = stored.get(name)

    def to_token(self):
        """Serialize the credential fields into the token dict."""
        return {name: self.cleaned_data.get(name, "").strip()
                for name in ("key", "secret")}

    def save(self, commit=True):
        """Persist the credential with platform FLICKR and the JSON token."""
        credential = super(CredentialFlickrForm, self).save(commit=False)
        credential.platform = Credential.FLICKR
        credential.token = json.dumps(self.to_token())
        credential.save()
        return credential
class CredentialTwitterForm(BaseCredentialForm):
    """Credential form for the Twitter (v1.1) API (OAuth 1.0a keys)."""

    # Field names double as the keys of the serialized token.
    TOKEN_FIELDS = ('consumer_key', 'consumer_secret',
                    'access_token', 'access_token_secret')

    consumer_key = forms.CharField(required=True)
    consumer_secret = forms.CharField(required=True)
    access_token = forms.CharField(required=True)
    access_token_secret = forms.CharField(required=True)

    def __init__(self, *args, **kwargs):
        super(CredentialTwitterForm, self).__init__(*args, **kwargs)
        self.helper.layout[0][1].extend(list(self.TOKEN_FIELDS))
        # When editing, seed the visible fields from the stored JSON token.
        if self.instance and self.instance.token:
            stored = json.loads(self.instance.token)
            for name in self.TOKEN_FIELDS:
                self.fields[name].initial = stored.get(name)

    def to_token(self):
        """Serialize the credential fields into the token dict."""
        return {name: self.cleaned_data.get(name, "").strip()
                for name in self.TOKEN_FIELDS}

    def save(self, commit=True):
        """Persist the credential with platform TWITTER and the JSON token."""
        credential = super(CredentialTwitterForm, self).save(commit=False)
        credential.platform = Credential.TWITTER
        credential.token = json.dumps(self.to_token())
        credential.save()
        return credential
class CredentialTwitter2Form(BaseCredentialForm):
    """Credential form for the Twitter v2 API.

    All fields are optional because a credential may supply either the
    OAuth 1.0a key set or just a bearer token.
    """

    consumer_key = forms.CharField(required=False, label="API key")
    consumer_secret = forms.CharField(required=False, label="API key secret")
    access_token = forms.CharField(required=False)
    access_token_secret = forms.CharField(required=False)
    bearer_token = forms.CharField(required=False)

    def __init__(self, *args, **kwargs):
        super(CredentialTwitter2Form, self).__init__(*args, **kwargs)
        self.helper.layout[0][1].extend(['consumer_key', 'consumer_secret', 'access_token',
                                         'access_token_secret', 'bearer_token'])
        # When editing, seed the visible fields from the stored JSON token.
        if self.instance and self.instance.token:
            token = json.loads(self.instance.token)
            self.fields['consumer_key'].initial = token.get('consumer_key')
            self.fields['consumer_secret'].initial = token.get('consumer_secret')
            self.fields['access_token'].initial = token.get('access_token')
            # BUG FIX: this was ".inital" (typo), which silently set a junk
            # attribute and left the access token secret blank when editing.
            self.fields['access_token_secret'].initial = token.get('access_token_secret')
            self.fields['bearer_token'].initial = token.get('bearer_token')

    def to_token(self):
        """Serialize the credential fields into the token dict.

        Unlike the v1.1 form, empty fields are stored as None (not
        stripped strings) because each is individually optional.
        """
        return {
            "consumer_key": self.cleaned_data.get("consumer_key", None),
            "consumer_secret": self.cleaned_data.get("consumer_secret", None),
            "access_token": self.cleaned_data.get("access_token", None),
            "access_token_secret": self.cleaned_data.get("access_token_secret", None),
            "bearer_token": self.cleaned_data.get("bearer_token", None)
        }

    def save(self, commit=True):
        """Persist the credential with platform TWITTER2 and the JSON token."""
        m = super(CredentialTwitter2Form, self).save(commit=False)
        m.platform = Credential.TWITTER2
        m.token = json.dumps(self.to_token())
        m.save()
        return m
class CredentialTumblrForm(BaseCredentialForm):
    """Credential form for the Tumblr API (single API key)."""

    api_key = forms.CharField(required=True)

    def __init__(self, *args, **kwargs):
        super(CredentialTumblrForm, self).__init__(*args, **kwargs)
        self.helper.layout[0][1].extend(['api_key'])
        # When editing, seed the visible field from the stored JSON token.
        if self.instance and self.instance.token:
            stored = json.loads(self.instance.token)
            self.fields['api_key'].initial = stored.get('api_key')

    def to_token(self):
        """Serialize the credential field into the token dict."""
        return {"api_key": self.cleaned_data.get("api_key", "").strip()}

    def save(self, commit=True):
        """Persist the credential with platform TUMBLR and the JSON token."""
        credential = super(CredentialTumblrForm, self).save(commit=False)
        credential.platform = Credential.TUMBLR
        credential.token = json.dumps(self.to_token())
        credential.save()
        return credential
class CredentialWeiboForm(BaseCredentialForm):
    """Credential form for the Weibo API (single access token)."""

    access_token = forms.CharField(required=True)

    def __init__(self, *args, **kwargs):
        super(CredentialWeiboForm, self).__init__(*args, **kwargs)
        self.helper.layout[0][1].extend(['access_token'])
        # When editing, seed the visible field from the stored JSON token.
        if self.instance and self.instance.token:
            stored = json.loads(self.instance.token)
            self.fields['access_token'].initial = stored.get('access_token')

    def to_token(self):
        """Serialize the credential field into the token dict."""
        return {"access_token": self.cleaned_data.get("access_token", "").strip()}

    def save(self, commit=True):
        """Persist the credential with platform WEIBO and the JSON token."""
        credential = super(CredentialWeiboForm, self).save(commit=False)
        credential.platform = Credential.WEIBO
        credential.token = json.dumps(self.to_token())
        credential.save()
        return credential
class SeedChoiceField(forms.ModelMultipleChoiceField):
    """Multiple-choice field that labels each option via Seed.label()."""
    def label_from_instance(self, obj):
        return obj.label()
class ExportForm(forms.ModelForm):
    """Form for requesting an export of a collection's harvested data.

    Users choose which seeds to export (all / active / a selected subset),
    the export format, and optional item/harvest date-range filters.
    """
    seeds = SeedChoiceField(None, required=False, widget=forms.SelectMultiple, label="")
    seed_choice = forms.ChoiceField(choices=(('ALL', 'All seeds'), ('ACTIVE', 'Active seeds only'),
                                             ('SELECTED', 'Selected seeds only'),),
                                    initial='ALL',
                                    widget=forms.RadioSelect)
    class Meta:
        model = Export
        fields = ['seeds', 'seed_choice', 'export_format', 'export_segment_size', 'dedupe',
                  'item_date_start', 'item_date_end',
                  'harvest_date_start', 'harvest_date_end']
        localized_fields = None
        error_messages = {}
        labels = {
            'dedupe': "Deduplicate (remove duplicate posts)",
            'export_segment_size': "Maximum number of items per file"
        }
    def __init__(self, *args, **kwargs):
        # The collection pk is injected by the view.
        self.collection = Collection.objects.get(pk=kwargs.pop("collection"))
        super(ExportForm, self).__init__(*args, **kwargs)
        self.fields["seeds"].queryset = self.collection.seeds.all()
        cancel_url = reverse('collection_detail', args=[self.collection.pk])
        self.helper = FormHelper(self)
        self.helper.layout = Layout(
            Fieldset(
                '',
                'seed_choice',
                Div('seeds', css_class="longseed"),
                'export_format',
                'export_segment_size',
                'dedupe',
                Div(
                    HTML("""<h4>Limit by item date range</h4>"""),
                    Field('item_date_start', css_class='datepicker'),
                    Field('item_date_end', css_class='datepicker'),
                    HTML("""<p class="help-block">The timezone for dates entered here are {}. Adjustments will be
                    made to match the time zone of the items. For example, dates in
                    tweets are UTC.</p>""".format(settings.TIME_ZONE)),
                    css_class="card panel-default card-body mb-3"),
                Div(
                    HTML("""<h4>Limit by harvest date range</h4>"""),
                    Field('harvest_date_start', css_class='datepicker'),
                    Field('harvest_date_end', css_class='datepicker'),
                    css_class="card panel-default card-body mb-3"),
                css_class='crispy-form-custom'
            ),
            FormActions(
                Submit('submit', 'Export'),
                Button('cancel', 'Cancel',
                       onclick="window.location.href='{0}'".format(cancel_url))
            )
        )
        # With fewer than 2 seeds there is nothing to select, so drop the
        # seed-selection UI.  The two pop(0) calls remove 'seed_choice' and
        # the seeds Div, which are the first two entries of the fieldset.
        if len(self.fields["seeds"].queryset) < 2:
            del self.fields["seeds"]
            del self.fields["seed_choice"]
            self.helper.layout[0].pop(0)
            self.helper.layout[0].pop(0)
    def clean_seeds(self):
        """Resolve the seed list according to the seed_choice radio value."""
        seeds = self.cleaned_data["seeds"]
        if self.data.get("seed_choice") == "SELECTED" and not seeds:
            raise ValidationError("At least one seed must be selected")
        if self.data.get("seed_choice", "ALL") == "ALL":
            seeds = []
        elif self.data["seed_choice"] == "ACTIVE":
            seeds = list(self.collection.seeds.filter(is_active=True))
        return seeds
    def save(self, commit=True):
        """Persist the export, linking the collection for whole-collection
        ('ALL') exports; seed-limited exports rely on the seeds m2m instead.
        """
        m = super(ExportForm, self).save(commit=False)
        m.export_type = self.collection.harvest_type
        if self.cleaned_data.get("seed_choice", "ALL") == "ALL":
            m.collection = self.collection
        m.save()
        self.save_m2m()
        return m
class UserProfileForm(forms.ModelForm):
    """Form for a user to edit their own profile and notification settings."""
    class Meta:
        model = User
        fields = ['username', 'email', 'email_frequency', 'harvest_notifications']
        widgets = {
            "username": forms.TextInput(attrs={'size': '40'}),
            "email": forms.TextInput(attrs={'size': '40'})
        }
        help_texts = {
            'harvest_notifications': "Receive an email when there is a problem with a harvest.",
        }
    def __init__(self, *args, **kwargs):
        super(UserProfileForm, self).__init__(*args, **kwargs)
        # Build the crispy-forms layout: one fieldset plus Save/Cancel.
        self.helper = FormHelper(self)
        self.helper.layout = Layout(
            Fieldset(
                '',
                'username',
                'email',
                'email_frequency',
                'harvest_notifications',
                Div(),
                css_class='crispy-form-custom'
            ),
            FormActions(
                Submit('submit', 'Save'),
                Button('cancel', 'Cancel', onclick="window.history.back()")
            )
        )
def _round_up(x):
'''
Rounds up limit parameter to 100 to align with max_results=100 in twarc2 calls
'''
return int(math.ceil(x / 100.0)) * 100 | mit | 89d98a430bb393dc59c03b5adb296c35 | 43.596081 | 333 | 0.593751 | 4.092248 | false | false | false | false |
adrn/gala | gala/dynamics/actionangle/tests/test_actionangle_staeckel.py | 1 | 5520 | # Third-party
from astropy.constants import G
import astropy.units as u
import numpy as np
import pytest
# gala
from gala.dynamics import PhaseSpacePosition
from gala.dynamics.actionangle import (
get_staeckel_fudge_delta,
find_actions_o2gf
)
from gala.dynamics.actionangle.tests.staeckel_helpers import galpy_find_actions_staeckel
import gala.potential as gp
from gala.units import galactic
from gala.tests.optional_deps import HAS_GALPY
@pytest.mark.skipif(not HAS_GALPY,
                    reason="requires galpy to run this test")
def test_staeckel_fudge_delta():
    """Check gala's Staeckel-fudge focal distance against galpy's estimator.

    For three analytic potentials (Miyamoto-Nagai, Hernquist, NFW), build
    the equivalent galpy potential (converting gala parameters into
    galpy's internal ro/vo scalings) and compare
    ``get_staeckel_fudge_delta`` with galpy's ``estimateDeltaStaeckel``
    at a cloud of sampled phase-space points.
    """
    import galpy.potential as galpy_pot
    from galpy.actionAngle import estimateDeltaStaeckel
    # galpy's internal distance and velocity unit scalings.
    ro = 8.1 * u.kpc
    vo = 229 * u.km/u.s
    paired_potentials = []
    # Miyamoto-Nagai
    potential = gp.MiyamotoNagaiPotential(
        m=6e10*u.Msun, a=3*u.kpc, b=0.3*u.kpc, units=galactic)
    # galpy amplitudes are G*M expressed in vo**2 * ro units.
    amp = (G * potential.parameters['m']).to_value(vo**2 * ro)
    a = potential.parameters['a'].to_value(ro)
    b = potential.parameters['b'].to_value(ro)
    galpy_potential = galpy_pot.MiyamotoNagaiPotential(amp=amp, a=a, b=b,
                                                       ro=ro, vo=vo)
    paired_potentials.append((potential, galpy_potential))
    # Hernquist
    potential = gp.HernquistPotential(m=6e10*u.Msun, c=0.3*u.kpc,
                                      units=galactic)
    amp = (G * potential.parameters['m']).to_value(vo**2 * ro)
    a = potential.parameters['c'].to_value(ro)
    galpy_potential = galpy_pot.HernquistPotential(amp=amp, a=a,
                                                   ro=ro, vo=vo)
    paired_potentials.append((potential, galpy_potential))
    # NFW
    potential = gp.NFWPotential(m=6e11*u.Msun, r_s=15.6*u.kpc,
                                units=galactic)
    amp = (G * potential.parameters['m']).to_value(vo**2 * ro)
    a = potential.parameters['r_s'].to_value(ro)
    galpy_potential = galpy_pot.NFWPotential(amp=amp, a=a, ro=ro, vo=vo)
    paired_potentials.append((potential, galpy_potential))
    # TEST:
    # TODO: remove the randomness here (the rng is seeded, so the test is
    # reproducible, but fixed fiducial points would be clearer)
    N = 1024
    rnd = np.random.default_rng(42)
    w = PhaseSpacePosition(
        pos=rnd.uniform(-10, 10, size=(3, N)) * u.kpc,
        vel=rnd.uniform(-100, 100, size=(3, N)) * u.km/u.s
    )
    # galpy wants cylindrical R, z in units of ro.
    R = w.cylindrical.rho.to_value(ro)
    z = w.z.to_value(ro)
    for p, galpy_p in paired_potentials:
        galpy_deltas = estimateDeltaStaeckel(galpy_p, R, z,
                                             no_median=True)
        gala_deltas = get_staeckel_fudge_delta(p, w).value
        assert np.allclose(gala_deltas, galpy_deltas, atol=1e-5, rtol=1e-3)
@pytest.mark.skipif(not HAS_GALPY,
                    reason="requires galpy to run this test")
def test_find_actions_staeckel():
    """
    This test function performs some unit test checks of the API:
    the Staeckel action finder should accept single/multiple phase-space
    positions and orbits, and return result tables of the right shape.
    """
    disk = gp.MiyamotoNagaiPotential(5e10, 3.5, 0.3, units=galactic)
    halo = gp.NFWPotential.from_M200_c(1e12*u.Msun, 15, units=galactic)
    pot = disk + halo
    # 16 near-circular orbits at radii from 4 to 20 kpc, with small
    # radial and vertical velocity perturbations.
    xyz = (np.zeros((3, 16)) + 1e-5) * u.kpc
    xyz[0] = np.linspace(4, 20, xyz.shape[1]) * u.kpc
    vxyz = np.zeros((3, 16)) * u.km/u.s
    vxyz[0] = 15 * u.km/u.s
    vxyz[1] = pot.circular_velocity(xyz)
    vxyz[2] = 15 * u.km/u.s
    w0_one = PhaseSpacePosition(xyz[:, 0], vxyz[:, 0])
    w0_many = PhaseSpacePosition(xyz, vxyz)
    orbit_one = pot.integrate_orbit(w0_one, dt=1., n_steps=1000)
    orbit_many = pot.integrate_orbit(w0_many, dt=1., n_steps=1000)
    # All four input flavors should be accepted.
    inputs = [
        w0_one,
        w0_many,
        orbit_one,
        orbit_many
    ]
    # Expected (n_orbits, 3) shapes for the default (mean=True) output.
    shapes = [
        (1, 3),
        (xyz.shape[1], 3),
        (1, 3),
        (xyz.shape[1], 3)
    ]
    for w, colshape in zip(inputs, shapes):
        aaf = galpy_find_actions_staeckel(pot, w)
        for colname in ['actions', 'freqs']:
            assert aaf[colname].shape == colshape
    # Check that mean=False returns per-timestep values with shape
    # (n_orbits, n_times, 3).
    aaf = galpy_find_actions_staeckel(pot, orbit_one, mean=False)
    for colname in ['actions', 'freqs', 'angles']:
        assert aaf[colname].shape == (1, orbit_one.ntimes, 3)
    aaf = galpy_find_actions_staeckel(pot, orbit_many, mean=False)
    for colname in ['actions', 'freqs', 'angles']:
        assert aaf[colname].shape == (xyz.shape[1], orbit_one.ntimes, 3)
@pytest.mark.skipif(not HAS_GALPY,
                    reason="requires galpy to run this test")
def test_compare_staeckel_o2gf():
    """
    This test function performs some comparisons between actions, angles,
    and frequencies solved from the staeckel fudge and O2GF.
    """
    # Disk + halo potential; 16 near-planar, near-circular orbits started
    # along the x axis.
    disk = gp.MiyamotoNagaiPotential(5e10, 3.5, 0.3, units=galactic)
    halo = gp.NFWPotential.from_M200_c(1e12*u.Msun, 15, units=galactic)
    pot = disk + halo
    xyz = (np.zeros((3, 16)) + 1e-5) * u.kpc
    xyz[0] = np.linspace(4, 20, xyz.shape[1]) * u.kpc
    vxyz = np.zeros((3, 16)) * u.km/u.s
    vxyz[0] = 15 * u.km/u.s
    vxyz[1] = pot.circular_velocity(xyz)
    vxyz[2] = 15 * u.km/u.s
    orbits = pot.integrate_orbit(
        PhaseSpacePosition(xyz, vxyz),
        dt=1., n_steps=20_000
    )
    # Solve with both methods and require agreement within the stated
    # tolerances (angles get a looser tolerance than actions/frequencies).
    aaf_staeckel = galpy_find_actions_staeckel(pot, orbits)
    aaf_o2gf = find_actions_o2gf(orbits, N_max=10)
    assert u.allclose(aaf_staeckel['actions'], aaf_o2gf['actions'], rtol=1e-3)
    # dimensionless_angles() lets radian-valued quantities compare against
    # dimensionless ones during allclose().
    with u.set_enabled_equivalencies(u.dimensionless_angles()):
        assert u.allclose(aaf_staeckel['freqs'], aaf_o2gf['freqs'], rtol=1e-3)
        assert u.allclose(aaf_staeckel['angles'], aaf_o2gf['angles'], rtol=1.5e-2)
| mit | 97f54279963b33df431724979c9e5971 | 34.159236 | 88 | 0.611594 | 2.79777 | false | true | false | false |
adrn/gala | gala/potential/potential/tests/test_interop_galpy.py | 1 | 6598 | """
Test converting the builtin Potential classes to other packages
"""
# Third-party
from astropy.coordinates import CylindricalRepresentation
from astropy.tests.helper import catch_warnings
import astropy.units as u
import numpy as np
import pytest
# This project
import gala.potential as gp
from gala.units import galactic
from gala.tests.optional_deps import HAS_GALPY
from gala.potential.potential.interop import galpy_to_gala_potential
# Set these globally!
ro = 8.122 * u.kpc
vo = 245 * u.km/u.s
if HAS_GALPY:
import galpy.potential as galpy_gp
from gala.potential.potential.interop import (
_gala_to_galpy,
_galpy_to_gala
)
def pytest_generate_tests(metafunc):
    """Parametrize every test in this module with (gala_pot, galpy_pot)
    pairs, built in both conversion directions (gala -> galpy and
    galpy -> gala). Does nothing when galpy is not installed."""
    # Some magic, semi-random numbers below!
    gala_pots = []
    galpy_pots = []
    if not HAS_GALPY:
        return
    # Test the Gala -> Galpy direction
    for Potential in _gala_to_galpy.keys():
        init = {}
        len_scale = 1.
        # Pick a plausible value for each parameter based on its physical
        # type; parameters of other physical types are left unset.
        for k, par in Potential._parameters.items():
            if k == 'm':
                val = 1.43e10 * u.Msun
            elif par.physical_type == 'length':
                # Halve successive length scales so they stay distinct.
                val = 5.12 * u.kpc * len_scale
                len_scale *= 0.5
            elif par.physical_type == 'dimensionless':
                val = 1.
            elif par.physical_type == 'speed':
                val = 201.41 * u.km/u.s
            else:
                continue
            init[k] = val
        pot = Potential(**init, units=galactic)
        galpy_pot = pot.to_galpy_potential(ro=ro, vo=vo)
        gala_pots.append(pot)
        galpy_pots.append(galpy_pot)
        # Custom settings in the MN3 potential:
        if isinstance(pot, gp.MN3ExponentialDiskPotential):
            pot = Potential(**init, units=galactic, sech2_z=False)
            galpy_pot = pot.to_galpy_potential(ro=ro, vo=vo)
            gala_pots.append(pot)
            galpy_pots.append(galpy_pot)
    # Make a composite potential too:
    gala_pots.append(gala_pots[0] + gala_pots[1])
    galpy_pots.append([galpy_pots[0], galpy_pots[1]])
    # Test the Galpy -> Gala direction
    for Potential in _galpy_to_gala.keys():
        galpy_pot = Potential(ro=ro, vo=vo)  # use defaults
        with catch_warnings(RuntimeWarning) as warns:
            pot = galpy_to_gala_potential(galpy_pot, ro=ro, vo=vo)
        # Converting galpy's MN3 potential is expected to emit at least one
        # RuntimeWarning (asserted here).
        if isinstance(galpy_pot, galpy_gp.MN3ExponentialDiskPotential):
            assert len(warns) > 0
        gala_pots.append(pot)
        galpy_pots.append(galpy_pot)
    # Human-readable test ids like "NFWPotential:NFWPotential".
    test_names = [f'{g1.__class__.__name__}:{g2.__class__.__name__}'
                  for g1, g2 in zip(gala_pots, galpy_pots)]
    metafunc.parametrize(['gala_pot', 'galpy_pot'],
                         list(zip(gala_pots, galpy_pots)),
                         ids=test_names)
@pytest.mark.skipif(not HAS_GALPY,
                    reason="must have galpy installed to run these tests")
class TestGalpy:
    """Compare gala and galpy potential evaluations (density, energy,
    gradient, circular velocity) at a common set of random test points.

    The (gala_pot, galpy_pot) pairs are injected by pytest_generate_tests.
    """

    def setup(self):
        # Test points:
        rng = np.random.default_rng(42)
        ntest = 4
        Rs = rng.uniform(1, 15, size=ntest) * u.kpc
        phis = rng.uniform(0, 2*np.pi, size=ntest) * u.radian
        zs = rng.uniform(1, 15, size=ntest) * u.kpc
        cyl = CylindricalRepresentation(Rs, phis, zs)
        xyz = cyl.to_cartesian().xyz
        # galpy works in internal units scaled by ro/vo, so keep the
        # cylindrical coordinates in those units:
        self.Rs = Rs.to_value(ro)
        self.phis = phis.to_value(u.rad)
        self.zs = zs.to_value(ro)
        # Per-point (R, phi, z) rows for iterating galpy's evaluate* calls.
        self.Rpz_iter = np.array(list(zip(self.Rs, self.phis, self.zs))).copy()
        self.xyz = xyz.copy()
        # Jacobian mapping galpy's (dR, dphi, dz) force components to
        # cartesian gradient components (used in test_gradient).
        Jac = np.zeros((len(cyl), 3, 3))
        Jac[:, 0, 0] = xyz[0] / cyl.rho
        Jac[:, 0, 1] = xyz[1] / cyl.rho
        Jac[:, 1, 0] = (-xyz[1] / cyl.rho**2).to_value(1 / ro)
        Jac[:, 1, 1] = (xyz[0] / cyl.rho**2).to_value(1 / ro)
        Jac[:, 2, 2] = 1.
        self.Jac = Jac

    def test_density(self, gala_pot, galpy_pot):
        # NOTE(review): LogarithmicPotential is skipped here — presumably a
        # known density-convention mismatch between the packages; confirm.
        if isinstance(gala_pot, gp.LogarithmicPotential):
            pytest.skip()
        gala_val = gala_pot.density(self.xyz).to_value(u.Msun / u.pc**3)
        galpy_val = np.array([galpy_gp.evaluateDensities(galpy_pot,
                                                         R=RR, z=zz, phi=pp)
                              for RR, pp, zz in self.Rpz_iter])
        assert np.allclose(gala_val, galpy_val)

    def test_energy(self, gala_pot, galpy_pot):
        gala_val = gala_pot.energy(self.xyz).to_value(u.km**2 / u.s**2)
        galpy_val = np.array([galpy_gp.evaluatePotentials(galpy_pot,
                                                          R=RR, z=zz, phi=pp)
                              for RR, pp, zz in self.Rpz_iter])
        if isinstance(gala_pot, gp.LogarithmicPotential):
            # Logarithms are weird: the log potential differs by a constant
            # that depends on the length-unit choice; subtract it before
            # comparing.
            gala_val -= (0.5 * gala_pot.parameters['v_c']**2 *
                         np.log(ro.value**2)).to_value((u.km / u.s)**2)
        assert np.allclose(gala_val, galpy_val)

    def test_gradient(self, gala_pot, galpy_pot):
        gala_grad = gala_pot.gradient(self.xyz)
        gala_grad = gala_grad.to_value(u.km/u.s/u.Myr)
        # TODO: Starting with galpy 1.7, this has been failing because of a
        # units issue with dPhi/dphi
        if isinstance(gala_pot, gp.LongMuraliBarPotential):
            pytest.skip()
        # galpy returns forces (negative gradients) component-wise:
        galpy_dR = np.array([-galpy_gp.evaluateRforces(galpy_pot,
                                                       R=RR, z=zz, phi=pp)
                             for RR, pp, zz in self.Rpz_iter])
        galpy_dp = np.array([-galpy_gp.evaluatephiforces(galpy_pot,
                                                         R=RR, z=zz, phi=pp)
                             for RR, pp, zz in self.Rpz_iter])
        galpy_dp = (galpy_dp*(u.km/u.s)**2).to_value(vo**2)
        galpy_dz = np.array([-galpy_gp.evaluatezforces(galpy_pot,
                                                       R=RR, z=zz, phi=pp)
                             for RR, pp, zz in self.Rpz_iter])
        galpy_dRpz = np.stack((galpy_dR, galpy_dp, galpy_dz),
                              axis=1)
        # Map (R, phi, z) components to cartesian via the Jacobian from
        # setup().
        galpy_grad = np.einsum('nij,ni->nj', self.Jac, galpy_dRpz).T
        assert np.allclose(gala_grad, galpy_grad)

    def test_vcirc(self, gala_pot, galpy_pot):
        # Evaluate in the z=0 plane, where circular velocity is defined.
        tmp = self.xyz.copy()
        tmp[2] = 0.
        if (not hasattr(galpy_pot, 'vcirc')
                or isinstance(gala_pot, gp.LongMuraliBarPotential)):
            pytest.skip()
        gala_vcirc = gala_pot.circular_velocity(tmp).to_value(u.km/u.s)
        galpy_vcirc = np.array([galpy_pot.vcirc(R=RR)
                                for RR, *_ in self.Rpz_iter])
        assert np.allclose(gala_vcirc, galpy_vcirc)
| mit | e52d0e57baafe3563e1908b34912b0aa | 34.095745 | 79 | 0.541982 | 3.046168 | false | true | false | false |
adrn/gala | gala/coordinates/velocity_frame_transforms.py | 1 | 2265 | """ Miscellaneous astronomical velocity transformations. """
import astropy.coordinates as coord
__all__ = ["vgsr_to_vhel", "vhel_to_vgsr"]
def _get_vproj(c, vsun):
    # Project the solar velocity vector onto the unit vector pointing from
    # the Sun toward the coordinate `c` (i.e. the line-of-sight component).
    cart = c.transform_to(coord.Galactic()).data.to_cartesian()
    los_unit_vector = cart / cart.norm()
    return coord.CartesianRepresentation(vsun).dot(los_unit_vector)
def vgsr_to_vhel(coordinate, vgsr, vsun=None):
    """
    Convert a radial velocity from the Galactic standard of rest (GSR) to a
    barycentric (heliocentric) radial velocity.

    Parameters
    ----------
    coordinate : :class:`~astropy.coordinates.SkyCoord`
        Sky position; anything the SkyCoord initializer accepts.
    vgsr : :class:`~astropy.units.Quantity`
        GSR line-of-sight velocity.
    vsun : :class:`~astropy.units.Quantity`, optional
        Full-space velocity of the Sun in a Galactocentric frame. Defaults
        to the value assumed by Astropy's
        `~astropy.coordinates.Galactocentric` frame.

    Returns
    -------
    vhel : :class:`~astropy.units.Quantity`
        Radial velocity in a barycentric rest frame.
    """
    if vsun is None:
        vsun = coord.Galactocentric().galcen_v_sun.to_cartesian().xyz
    return vgsr - _get_vproj(coordinate, vsun)
def vhel_to_vgsr(coordinate, vhel, vsun=None):
    """
    Convert a velocity from a heliocentric radial velocity to
    the Galactic standard of rest (GSR).

    Parameters
    ----------
    coordinate : :class:`~astropy.coordinates.SkyCoord`
        An Astropy SkyCoord object or anything object that can be passed
        to the SkyCoord initializer.
    vhel : :class:`~astropy.units.Quantity`
        Barycentric line-of-sight velocity.
    vsun : :class:`~astropy.units.Quantity`, optional
        Full-space velocity of the sun in a Galactocentric frame. By default,
        uses the value assumed by Astropy in
        `~astropy.coordinates.Galactocentric`.

    Returns
    -------
    vgsr : :class:`~astropy.units.Quantity`
        Radial velocity in a galactocentric rest frame.
    """
    # BUG FIX: `vsun` was a required positional argument even though the
    # body handles None and the docstring advertises a default; this also
    # makes the signature consistent with vgsr_to_vhel().
    if vsun is None:
        galcen = coord.Galactocentric()
        vsun = galcen.galcen_v_sun.to_cartesian().xyz
    return vhel + _get_vproj(coordinate, vsun)
| mit | 9dd3336a7e0cbd31454ec986ba5c4513 | 29.608108 | 77 | 0.659161 | 3.406015 | false | false | false | false |
adrn/gala | gala/potential/potential/builtin/pybuiltin.py | 1 | 3183 | # Third-party
import numpy as np
from gala.potential.potential.core import PotentialBase
from gala.potential.potential.util import sympy_wrap
from gala.potential.common import PotentialParameter
__all__ = ["HarmonicOscillatorPotential"]
class HarmonicOscillatorPotential(PotentialBase):
    r"""
    Represents an N-dimensional harmonic oscillator.

    .. math::

        \Phi = \frac{1}{2}\omega^2 x^2

    Parameters
    ----------
    omega : numeric
        Frequency.
    units : iterable(optional)
        Unique list of non-reducable units that specify (at minimum) the
        length, mass, time, and angle units.
    """
    omega = PotentialParameter('omega', physical_type='frequency')

    def _setup_potential(self, parameters, origin=None, R=None, units=None):
        # One frequency per dimension: the length of `omega` sets ndim.
        parameters['omega'] = np.atleast_1d(parameters['omega'])
        super()._setup_potential(parameters, origin=origin, R=R, units=units)
        self.ndim = len(self.parameters['omega'])

    def _energy(self, q, t=0.):
        # Phi(q) = sum_i 0.5 * omega_i**2 * q_i**2, summed over dimensions.
        om = np.atleast_1d(self.parameters['omega'].value)
        return np.sum(0.5 * om[None]**2 * q**2, axis=1)

    def _gradient(self, q, t=0.):
        # dPhi/dq_i = omega_i**2 * q_i
        om = np.atleast_1d(self.parameters['omega'].value)
        return om[None]**2 * q

    def _hessian(self, q, t=0.):
        # NOTE(review): the Hessian of 0.5*omega**2*x**2 is diag(omega**2),
        # but this returns diag(omega) tiled over the input points — looks
        # like a possible bug; confirm against upstream.
        om = np.atleast_1d(self.parameters['omega'].value)
        return np.tile(np.diag(om)[:, :, None], reps=(1, 1, q.shape[0]))

    @classmethod
    @sympy_wrap(var='x')
    def to_sympy(cls, v, p):
        # Symbolic 1D form of the potential for use with sympy tooling.
        expr = 1/2 * p['omega']**2 * v['x']**2
        return expr, v, p

    def action_angle(self, w):
        """
        Transform the input cartesian position and velocity to action-angle
        coordinates the Harmonic Oscillator potential. This transformation
        is analytic and can be used as a "toy potential" in the
        Sanders & Binney 2014 formalism for computing action-angle coordinates
        in _any_ potential.

        Adapted from Jason Sanders' code
        `genfunc <https://github.com/jlsanders/genfunc>`_.

        Parameters
        ----------
        w : :class:`gala.dynamics.PhaseSpacePosition`, :class:`gala.dynamics.Orbit`
            The positions or orbit to compute the actions, angles, and frequencies at.
        """
        # Imported lazily to avoid a circular import at module load time.
        from gala.dynamics.actionangle import harmonic_oscillator_xv_to_aa
        return harmonic_oscillator_xv_to_aa(w, self)

    # def phase_space(self, actions, angles):
    #     """
    #     Transform the input action-angle coordinates to cartesian position and velocity
    #     assuming a Harmonic Oscillator potential. This transformation
    #     is analytic and can be used as a "toy potential" in the
    #     Sanders & Binney 2014 formalism for computing action-angle coordinates
    #     in _any_ potential.
    #     Adapted from Jason Sanders' code
    #     `genfunc <https://github.com/jlsanders/genfunc>`_.
    #     Parameters
    #     ----------
    #     x : array_like
    #         Positions.
    #     v : array_like
    #         Velocities.
    #     """
    #     from gala.dynamics.actionangle import harmonic_oscillator_aa_to_xv
    #     return harmonic_oscillator_aa_to_xv(actions, angles, self)
| mit | 0b5322582939fb9b5a3aec2529fb0240 | 34.366667 | 89 | 0.626139 | 3.608844 | false | false | false | false |
redis/redis-py | redis/commands/graph/node.py | 4 | 2315 | from ..helpers import quote_string
class Node:
    """
    A node within the graph.
    """

    def __init__(self, node_id=None, alias=None, label=None, properties=None):
        """
        Create a new node.

        ``label`` may be None, a single string, or a list of strings; empty
        strings are discarded. ``self.label`` keeps the first label (for
        backwards compatibility) while ``self.labels`` keeps them all.
        Raises AssertionError for any other ``label`` type.
        """
        self.id = node_id
        self.alias = alias
        if isinstance(label, list):
            label = [inner_label for inner_label in label if inner_label != ""]
        if (
            label is None
            or label == ""
            or (isinstance(label, list) and len(label) == 0)
        ):
            self.label = None
            self.labels = None
        elif isinstance(label, str):
            self.label = label
            self.labels = [label]
        elif isinstance(label, list) and all(
            [isinstance(inner_label, str) for inner_label in label]
        ):
            self.label = label[0]
            self.labels = label
        else:
            raise AssertionError(
                "label should be either None, " "string or a list of strings"
            )
        self.properties = properties or {}

    def _formatted_properties(self):
        # Render the property map as "{k1:v1,k2:v2}" with deterministic
        # (sorted) key order. Shared by to_string() and __str__(), which
        # previously duplicated this logic.
        props = ",".join(
            key + ":" + str(quote_string(val))
            for key, val in sorted(self.properties.items())
        )
        return "{" + props + "}"

    def to_string(self):
        """Return the node's property string, or '' if it has no properties."""
        if self.properties:
            return self._formatted_properties()
        return ""

    def __str__(self):
        """Render the node in Cypher-like syntax, e.g. '(alias:label{props})'."""
        res = "("
        if self.alias:
            res += self.alias
        if self.labels:
            res += ":" + ":".join(self.labels)
        if self.properties:
            res += self._formatted_properties()
        res += ")"
        return res

    def __eq__(self, rhs):
        # Short-circuit: two distinct known IDs can never be equal.
        # (Comment fixed: this is a quick *negative* check, not a positive one.)
        if self.id is not None and rhs.id is not None and self.id != rhs.id:
            return False
        # Label should match.
        if self.label != rhs.label:
            return False
        # Quick check for number of properties.
        if len(self.properties) != len(rhs.properties):
            return False
        # Compare properties.
        if self.properties != rhs.properties:
            return False
        return True
| mit | 3c21e67872502b2322027f6adcc79508 | 26.559524 | 79 | 0.483801 | 4.401141 | false | false | false | false |
gwu-libraries/sfm-ui | sfm/ui/notifications.py | 1 | 16697 | import logging
from datetime import date, datetime, timedelta, time
from collections import OrderedDict
from smtplib import SMTPException
from subprocess import check_output, CalledProcessError
import pytz
from itertools import chain
from django.template.loader import get_template
from django.core.mail import EmailMultiAlternatives
from django.db.models import Sum, Q
from django.conf import settings
from django.urls import reverse
from .models import User, CollectionSet, Collection, HarvestStat, Harvest
from .sched import next_run_time
from .utils import get_admin_email_addresses, get_site_url
from . import monitoring
log = logging.getLogger(__name__)
class MonitorSpace(object):
    """Monitor the free disk space of one mounted volume via ``df``.

    Collects usage statistics for a single mount point and decides whether a
    low-free-space warning email should be sent, based on a configured
    threshold string like ``"10GB"`` (suffix must be MB, GB, or TB).
    """

    def __init__(self, volume_dir, threshold):
        """
        :param volume_dir: the volume's mount directory, used as the id of the record.
        :param threshold: the free space threshold string, e.g. "10GB".
        """
        # Fall back to placeholder defaults when settings are empty strings.
        if not volume_dir:
            volume_dir = 'None'
        if not threshold:
            threshold = '10GB'
        self.space_msg_cache = {'volume_id': volume_dir, 'threshold': threshold, 'bar_color': 'progress-bar-success'}

    def analysis_space(self):
        """
        Parse the output of ``df -h -BM`` for this volume and update the
        cached totals. Returns the free space in MB (0 if no output).
        """
        total_free_space = total_space = 0
        res = self.run_check_cmd()
        split_lines = res.split('\n')
        for line in split_lines:
            line_units = list(filter(None, line.split(' ')))
            if line_units:
                # df -BM columns: filesystem, size, used, available, use%,
                # mount point; values carry an 'M' suffix that is stripped
                # before int(), e.g.
                # ['/dev/sda1', '208074M', '47203M', '150279M', '24%', '/sfm-data']
                total_free_space = int(line_units[3][:-1])
                total_space = int(line_units[1][:-1])
        self.space_msg_cache['total_space'] = self._size_readable_fmt(total_space)
        self.space_msg_cache['total_free_space'] = self._size_readable_fmt(total_free_space)
        self.space_msg_cache['percentage'] = 0 if not total_space else int(
            float(total_space - total_free_space) / float(total_space) * 100)
        # update bar color with percentage
        self.space_msg_cache['bar_color'] = self._get_bar_color(self.space_msg_cache['percentage'])
        return total_free_space

    def get_space_info(self):
        """
        Refresh the space info and set ``send_email`` when free space is
        below the configured threshold. Returns the info dict.
        """
        self.space_msg_cache['send_email'] = False
        # get the free space info
        total_free_space = self.analysis_space()
        # No usable `df` output for this volume; nothing to compare against.
        if self.space_msg_cache['total_space'] == '0.0MB':
            return self.space_msg_cache
        # Validate the threshold suffix before converting.
        suffix = self.space_msg_cache['threshold'][-2:]
        if suffix not in {'MB', 'GB', 'TB'}:
            log.error("Free Space threshold %s, configure suffix error.",
                      self.space_msg_cache['threshold'])
            return self.space_msg_cache
        # Normalize the threshold to MB so it compares against df's -BM output.
        space_threshold = int(self.space_msg_cache['threshold'][:-2])
        if suffix == 'GB':
            space_threshold *= 1024
        elif suffix == 'TB':
            # BUG FIX: was 11048576; 1 TB == 1024 * 1024 == 1048576 MB.
            space_threshold *= 1048576
        log.debug("total space %s, space threshold %s,", self.space_msg_cache['total_free_space'],
                  self.space_msg_cache['threshold'])
        if total_free_space < space_threshold:
            self.space_msg_cache['send_email'] = True
        return self.space_msg_cache

    def run_check_cmd(self):
        """Run ``df`` for this volume and return its stdout decoded as text."""
        cmd = "df -h -BM {volume_id} | grep -w {volume_id}".format(volume_id=self.space_msg_cache['volume_id'])
        # BUG FIX: start from bytes (was ''): when the command fails, `res`
        # keeps its initial value and str has no .decode() in Python 3.
        res = b''
        try:
            res = check_output(cmd, shell=True)
            log.debug("Running %s completed.", cmd)
        except CalledProcessError as e:
            log.error("%s returned %s: %s", cmd, e.returncode, e.output)
        return res.decode('utf-8')

    @staticmethod
    def _size_readable_fmt(num, suffix='B'):
        # Format `num` (in MB) as a human-readable size string, e.g. "1.5GB".
        for unit in ['M', 'G', 'T', 'P', 'E', 'Z']:
            if abs(num) < 1024.0:
                return "%3.1f%s%s" % (num, unit, suffix)
            num /= 1024.0
        return "%.1f%s%s" % (num, 'Y', suffix)

    @staticmethod
    def _get_bar_color(percentage):
        # Map a used-space percentage to a Bootstrap progress-bar CSS class.
        if 70 <= percentage <= 80:
            return 'bg-warning'
        elif percentage > 80:
            return 'bg-danger'
        return 'bg-success'
def get_free_space():
    """Collect free-space info for every monitored volume.

    Checks the data directories (db, mq, export, containers, collection set),
    the optional shared directory, and the processing directory, in that
    order, and returns a list of their space-info dicts.
    """
    monitors = [
        MonitorSpace(settings.SFM_DB_DATA_DIR, settings.DATA_THRESHOLD_DB),
        MonitorSpace(settings.SFM_MQ_DATA_DIR, settings.DATA_THRESHOLD_MQ),
        MonitorSpace(settings.SFM_EXPORT_DATA_DIR, settings.DATA_THRESHOLD_EXPORT),
        MonitorSpace(settings.SFM_CONTAINERS_DATA_DIR, settings.DATA_THRESHOLD_CONTAINERS),
        MonitorSpace(settings.SFM_COLLECTION_SET_DATA_DIR, settings.DATA_THRESHOLD_COLLECTION_SET),
    ]
    if is_shared():
        monitors.append(MonitorSpace(settings.SFM_SHARED_DIR, settings.DATA_THRESHOLD_SHARED))
    monitors.append(MonitorSpace(settings.SFM_PROCESSING_DIR, settings.PROCESSING_THRESHOLD))
    return [monitor.get_space_info() for monitor in monitors]
def send_free_space_emails():
    """Check disk usage on all monitored volumes and email the admins when
    any volume has fallen below its free-space threshold."""
    log.info("Sending free space emails")
    msg_cache = {
        # Current usage statistics for every monitored volume.
        'space_data': get_free_space()
    }
    if _should_send_space_email(msg_cache):
        email_addresses = get_admin_email_addresses()
        for email_address in email_addresses:
            msg = _create_space_email(email_address, msg_cache)
            try:
                log.debug("Sending email to %s: %s", msg.to, msg.subject)
                msg.send()
            # Log-and-continue: one failed recipient should not stop the rest.
            except SMTPException as ex:
                log.error("Error sending email: %s", ex)
            except IOError as ex:
                log.error("Error sending email: %s", ex)
def _should_send_space_email(msg_cache):
# if any volume need send email, return true
return any(msg['send_email'] for msg in msg_cache['space_data'])
def _create_space_email(email_address, msg_cache):
    """Build the low-free-space warning email (plain-text body with an HTML
    alternative) for a single recipient. Returns the unsent message."""
    text_template = get_template('email/free_space_email.txt')
    html_template = get_template('email/free_space_email.html')
    # Absolute link back to the SFM home page for the email body.
    msg_cache["url"] = _create_url(reverse('home'))
    d = msg_cache
    msg = EmailMultiAlternatives("[WARNING] Low free space on SFM server",
                                 text_template.render(d), settings.EMAIL_FROM, [email_address])
    msg.attach_alternative(html_template.render(d), "text/html")
    return msg
def get_queue_data():
    """Return the (queue name, message count) pairs that are at or above
    their configured length thresholds."""
    return get_warn_queue(settings.QUEUE_LENGTH_THRESHOLD,
                          settings.QUEUE_LENGTH_THRESHOLD_OTHER)
def get_warn_queue(q_th_map, q_th_other):
    """Return (queue name, count) pairs whose count meets or exceeds the
    per-queue threshold in *q_th_map*, falling back to *q_th_other*."""
    hqs, eqs, uqs = monitoring.monitor_queues()
    warned = []
    for name, count in chain(hqs.items(), eqs.items(), uqs.items()):
        threshold = int(q_th_map[name] if name in q_th_map else q_th_other)
        if count >= threshold:
            warned.append((name, count))
    return warned
def send_queue_warn_emails():
    """Email the admins when any message queue exceeds its length threshold."""
    log.info("Sending queue length warning emails")
    # get queue data and determine whether to send email
    msg_cache = {
        'queue_data': get_queue_data()
    }
    if len(msg_cache['queue_data']):
        email_addresses = get_admin_email_addresses()
        for email_address in email_addresses:
            msg = _create_queue_warn_email(email_address, msg_cache)
            try:
                log.debug("Sending email to %s: %s", msg.to, msg.subject)
                msg.send()
            # Log-and-continue: one failed recipient should not stop the rest.
            except SMTPException as ex:
                log.error("Error sending email: %s", ex)
            except IOError as ex:
                log.error("Error sending email: %s", ex)
def _create_queue_warn_email(email_address, msg_cache):
    """Build the long-queue warning email (plain-text body with an HTML
    alternative) for a single recipient. Returns the unsent message."""
    text_template = get_template('email/queue_length_email.txt')
    html_template = get_template('email/queue_length_email.html')
    # Absolute links to the home and monitoring pages for the email body.
    msg_cache["url"] = _create_url(reverse('home'))
    msg_cache["monitor_url"] = _create_url(reverse('monitor'))
    d = msg_cache
    msg = EmailMultiAlternatives("[WARNING] Long message queue on SFM server",
                                 text_template.render(d), settings.EMAIL_FROM, [email_address])
    msg.attach_alternative(html_template.render(d), "text/html")
    return msg
def send_user_harvest_emails(users=None):
    """Send harvest summary emails to users whose schedule is due today.

    :param users: users to consider; defaults to all users.
    """
    log.info("Sending user harvest emails")
    # Shared across users so each collection set's stats are computed once.
    collection_set_cache = {}
    if users is None:
        users = User.objects.all()
    for user in users:
        if _should_send_email(user):
            msg = _create_email(user, collection_set_cache)
            try:
                log.debug("Sending email to %s: %s", msg.to, msg.subject)
                msg.send()
            # Log-and-continue: one failed recipient should not stop the rest.
            except SMTPException as ex:
                log.error("Error sending email: %s", ex)
            except IOError as ex:
                log.error("Error sending email: %s", ex)
        else:
            log.debug("Not sending email to %s", user.username)
def _should_send_email(user, today=None):
    """Decide whether *user* should receive a harvest summary email today.

    Emails go only to users with an email address and at least one active
    collection, on the schedule implied by their email_frequency: every day,
    the 1st of the month, or Sundays (weekday 6) for weekly.

    :param today: date to evaluate against; defaults to the current date.
    """
    if today is None:
        today = date.today()
    has_active_collections = Collection.objects.filter(
        collection_set__group__in=user.groups.all(), is_on=True).exists()
    if not (user.email and has_active_collections):
        return False
    frequency = user.email_frequency
    return bool(
        frequency == User.DAILY
        or (frequency == User.MONTHLY and today.day == 1)
        or (frequency == User.WEEKLY and today.weekday() == 6)
    )
def _create_email(user, collection_set_cache):
    """Build the per-user harvest summary email (plain-text body with an
    HTML alternative). Returns the unsent message."""
    text_template = get_template('email/user_harvest_email.txt')
    html_template = get_template('email/user_harvest_email.html')
    d = _create_context(user, collection_set_cache)
    msg = EmailMultiAlternatives("Update on your Social Feed Manager harvests", text_template.render(d),
                                 settings.EMAIL_FROM, [user.email])
    msg.attach_alternative(html_template.render(d), "text/html")
    return msg
def _create_context(user, collection_set_cache):
    """Assemble the template context for a user's harvest summary email.

    Builds harvest statistics over six time windows (yesterday / previous
    day, last 7 / previous 7 days, last 30 / previous 30 days) for every
    active collection in the user's collection sets. *collection_set_cache*
    is shared across users so each collection set's stats are computed once
    per run.
    """
    # Start and end are datetimes. The range is inclusive.
    today = datetime.utcnow().date()
    # Yesterday
    yesterday = today + timedelta(days=-1)
    yesterday_start = datetime.combine(yesterday,
                                       time(time.min.hour, time.min.minute, time.min.second, tzinfo=pytz.utc))
    yesterday_end = datetime.combine(yesterday, time(time.max.hour, time.max.minute, time.max.second, tzinfo=pytz.utc))
    # Previous day
    prev_day_start = yesterday_start + timedelta(days=-1)
    prev_day_end = yesterday_end + timedelta(days=-1)
    # Rolling 7-day window ending yesterday, and the 7 days before that.
    last_7_start = yesterday_start + timedelta(days=-6)
    last_7_end = yesterday_end
    prev_7_start = last_7_start + timedelta(days=-7)
    prev_7_end = yesterday_end + timedelta(days=-7)
    # Rolling 30-day window ending yesterday, and the 30 days before that.
    last_30_start = yesterday_start + timedelta(days=-29)
    last_30_end = yesterday_end
    prev_30_start = last_30_start + timedelta(days=-30)
    prev_30_end = last_30_end + timedelta(days=-30)
    time_ranges = (
        ('yesterday', yesterday_start, yesterday_end),
        ('prev_day', prev_day_start, prev_day_end),
        ('last_7', last_7_start, last_7_end),
        ('prev_7', prev_7_start, prev_7_end),
        ('last_30', last_30_start, last_30_end),
        ('prev_30', prev_30_start, prev_30_end)
    )
    c = {
        "url": _create_url(reverse('home'))
    }
    # Ordered list of collection sets
    collection_sets = OrderedDict()
    for collection_set in CollectionSet.objects.filter(group__in=user.groups.all()).filter(
            collections__is_active=True).order_by('name'):
        # Using a cache to avoid regenerating the data repeatedly.
        if collection_set in collection_set_cache:
            collections = collection_set_cache[collection_set]
        else:
            collections = OrderedDict()
            for collection in Collection.objects.filter(collection_set=collection_set).filter(is_active=True).order_by(
                    'name'):
                collection_info = {
                    "url": _create_url(reverse('collection_detail', args=(collection.id,)))
                }
                if collection.is_on:
                    collection_info['next_run_time'] = next_run_time(collection.id)
                # First pass fills per-window counts; second pass marks
                # windows with no harvest at all as "N/A".
                stats = {}
                for name, range_start, range_end in time_ranges:
                    _add_stats(stats, name, collection, range_start, range_end)
                for name, range_start, range_end in time_ranges:
                    _update_stats_for_na(stats, name, collection, range_start, range_end)
                collection_info['stats'] = stats
                collections[collection] = collection_info
            collection_set_cache[collection_set] = collections
        collection_sets[collection_set] = {
            "collections": collections,
            "url": _create_url(reverse('collection_set_detail', args=(collection_set.id,)))
        }
    c['collection_sets'] = collection_sets
    return c
def _add_stats(stats, name, collection, range_start, range_end):
    """Accumulate per-item harvest counts for one time window.

    Sums HarvestStat counts for *collection* between the given dates
    (inclusive), grouped by item, and records each total in *stats* under
    the window key *name*. Items not seen before get a zeroed row for all
    six windows first.
    """
    rows = (
        HarvestStat.objects.filter(
            harvest__collection=collection,
            harvest_date__gte=range_start.date(),
            harvest_date__lte=range_end.date(),
        )
        .values('item')
        .annotate(count=Sum('count'))
    )
    zeroed = {
        'yesterday': 0,
        'prev_day': 0,
        'last_7': 0,
        'prev_7': 0,
        'last_30': 0,
        'prev_30': 0
    }
    for row in rows:
        stats.setdefault(row['item'], dict(zeroed))[name] = row['count']
def _update_stats_for_na(stats, name, collection, range_start, range_end):
    """Replace zero counts with "N/A" for windows with no harvest at all.

    A zero is only meaningful if a harvest actually ran in the window;
    otherwise the count is unknown. "web resource" rows are left as-is.
    """
    for item, item_stats in stats.items():
        if item == "web resource" or item_stats[name] != 0:
            continue
        if not _was_harvest_in_range(range_start, range_end, collection):
            item_stats[name] = "N/A"
def _was_harvest_in_range(range_start, range_end, collection):
    """Return True if a non-web harvest for *collection* overlapped the
    inclusive [range_start, range_end] window."""
    # Harvests that have start and end (i.e., completed)
    # Overlap = started in window, ended in window, or spans the window.
    if Harvest.objects.filter(Q(collection=collection)
                              & Q(date_started__isnull=False)
                              & Q(date_ended__isnull=False)
                              & (Q(date_started__range=(range_start, range_end))
                                 | Q(date_ended__range=(range_start, range_end))
                                 | (Q(date_started__lt=range_start) & Q(date_ended__gt=range_end)))
                              & ~Q(harvest_type='web')).exists():
        return True
    # Harvests that are still running
    # Using status=RUNNING to try to filter out some
    if Harvest.objects.filter(Q(collection=collection)
                              & Q(status=Harvest.RUNNING)
                              & Q(date_started__isnull=False)
                              & Q(date_ended__isnull=True)
                              & Q(date_started__range=(range_start, range_end))
                              & ~Q(harvest_type='web')).exists():
        return True
    return False
def _create_url(path):
    """Prefix a site-relative *path* with the configured site URL."""
    return f"{get_site_url()}{path}"
def is_shared():
    """Return True when a shared data volume is configured (both the
    directory and its free-space threshold settings must be non-empty)."""
    # Idiom fix: collapse the if/return True/return False pattern.
    return bool(settings.SFM_SHARED_DIR and settings.DATA_THRESHOLD_SHARED)
| mit | e15b25ba2010c9a94f16ccbaad0b442b | 39.625304 | 146 | 0.597892 | 3.78188 | false | false | false | false |
gwu-libraries/sfm-ui | sfm/ui/management/commands/resendwarccreatedmsgs.py | 2 | 2500 | from django.core.management.base import BaseCommand, CommandError
from ui.models import Warc
from ui.rabbit import RabbitWorker
import json
class Command(BaseCommand):
    help = 'Resends warc_created messages to the messaging service.'

    def add_arguments(self, parser):
        """Register the CLI arguments for this management command."""
        parser.add_argument("routing_key", help="The name of the routing key. May not be warc_created.")
        parser.add_argument("--collection-set", help="Limit to collection set with this collection set id.")
        parser.add_argument("--collection", help="Limit to collection with this collection id.")
        parser.add_argument("--harvest-type", help="Limit to this harvest_type.")
        parser.add_argument("--test", action="store_true", help="Print out the messages instead of sending")

    def handle(self, *args, **options):
        """Build the WARC queryset from the filter options and either print
        the messages (--test) or publish them to the given routing key."""
        # Refuse the real warc_created key because (per the message below)
        # re-emitting it may have unintended consequences downstream.
        if options["routing_key"] == "warc_created":
            raise CommandError("Cannot send messages to warc_created since they may have unintended consequeunces.")
        warcs = Warc.objects.all()
        # Narrow the queryset with whichever filters were provided.
        if options["collection_set"]:
            warcs = warcs.filter(harvest__collection__collection_set__collection_set_id=options["collection_set"])
        if options["collection"]:
            warcs = warcs.filter(harvest__collection__collection_id=options["collection"])
        if options["harvest_type"]:
            warcs = warcs.filter(harvest__harvest_type=options["harvest_type"])
        if options["test"]:
            # Dry run: print each message as JSON instead of sending.
            for msg in self.message_generator(warcs):
                self.stdout.write(json.dumps(msg))
        else:
            RabbitWorker().send_messages(self.message_generator(warcs), options["routing_key"])
            self.stdout.write("Messages sent")

    @staticmethod
    def message_generator(warcs):
        """Yield one warc_created-style message dict per WARC record."""
        for warc in warcs:
            yield {
                "warc": {
                    "path": warc.path,
                    "sha1": warc.sha1,
                    "bytes": warc.bytes,
                    "id": warc.warc_id,
                    "date_created": warc.date_created.isoformat()
                },
                "collection_set": {
                    "id": warc.harvest.collection.collection_set.collection_set_id
                },
                "collection": {
                    "id": warc.harvest.collection.collection_id
                },
                "harvest": {
                    "id": warc.harvest.harvest_id,
                    "type": warc.harvest.harvest_type
                }
            }
| mit | 901c97e96758598ee54e9beaa247d13d | 42.859649 | 116 | 0.5832 | 4.295533 | false | false | false | false |
aio-libs/aioredis | aioredis/connection.py | 1 | 59561 | import asyncio
import errno
import inspect
import io
import os
import socket
import ssl
import threading
import time
import warnings
from distutils.version import StrictVersion
from itertools import chain
from typing import (
Any,
Iterable,
List,
Mapping,
Optional,
Set,
Tuple,
Type,
TypeVar,
Union,
)
from urllib.parse import ParseResult, parse_qs, unquote, urlparse
import async_timeout
from .compat import Protocol, TypedDict
from .exceptions import (
AuthenticationError,
AuthenticationWrongNumberOfArgsError,
BusyLoadingError,
ChildDeadlockedError,
ConnectionError,
DataError,
ExecAbortError,
InvalidResponse,
ModuleError,
NoPermissionError,
NoScriptError,
ReadOnlyError,
RedisError,
ResponseError,
TimeoutError,
)
from .utils import str_if_bytes
# Exceptions that signal a non-blocking socket would block, mapped to an
# errno-like code used when inspecting them.
NONBLOCKING_EXCEPTION_ERROR_NUMBERS = {
    BlockingIOError: errno.EWOULDBLOCK,
    ssl.SSLWantReadError: 2,
    ssl.SSLWantWriteError: 2,
    ssl.SSLError: 2,
}
NONBLOCKING_EXCEPTIONS = tuple(NONBLOCKING_EXCEPTION_ERROR_NUMBERS.keys())
# Use the optional hiredis C parser only when a new-enough (>= 1.0.0)
# version is installed; otherwise fall back to the pure-Python parser.
try:
    import hiredis
except (ImportError, ModuleNotFoundError):
    HIREDIS_AVAILABLE = False
else:
    HIREDIS_AVAILABLE = True
    hiredis_version = StrictVersion(hiredis.__version__)
    if hiredis_version < StrictVersion("1.0.0"):
        warnings.warn(
            "aioredis supports hiredis @ 1.0.0 or higher. "
            f"You have hiredis @ {hiredis.__version__}. "
            "Pure-python parser will be used instead."
        )
        HIREDIS_AVAILABLE = False
# RESP protocol framing bytes.
SYM_STAR = b"*"
SYM_DOLLAR = b"$"
SYM_CRLF = b"\r\n"
SYM_LF = b"\n"
SYM_EMPTY = b""
SERVER_CLOSED_CONNECTION_ERROR = "Connection closed by server."
SENTINEL = object()  # unique marker distinct from any user-supplied value
# Server error strings used to classify MODULE command failures.
MODULE_LOAD_ERROR = "Error loading the extension. Please check the server logs."
NO_SUCH_MODULE_ERROR = "Error unloading module: no such module with that name"
MODULE_UNLOAD_NOT_POSSIBLE_ERROR = "Error unloading module: operation not possible."
MODULE_EXPORTS_DATA_TYPES_ERROR = (
    "Error unloading module: the module "
    "exports one or more module-side data "
    "types, can't unload"
)
# Type aliases for values that can be encoded to / decoded from the wire.
EncodedT = Union[bytes, memoryview]
DecodedT = Union[str, int, float]
EncodableT = Union[EncodedT, DecodedT, None]
class Encoder:
    """Encode strings to bytes-like and decode bytes-like to strings."""

    __slots__ = "encoding", "encoding_errors", "decode_responses"

    def __init__(self, encoding: str, encoding_errors: str, decode_responses: bool):
        self.encoding = encoding  # codec name, e.g. "utf-8"
        self.encoding_errors = encoding_errors  # error handler, e.g. "strict"
        self.decode_responses = decode_responses  # decode replies by default?

    def encode(self, value: EncodableT) -> EncodedT:
        """Return a bytestring or bytes-like representation of the value.

        Raises DataError for types with no unambiguous wire encoding;
        bool is deliberately rejected even though it is an int subclass.
        """
        if isinstance(value, (bytes, memoryview)):
            return value
        if isinstance(value, bool):
            # special case bool since it is a subclass of int
            raise DataError(
                "Invalid input of type: 'bool'. "
                "Convert to a bytes, string, int or float first."
            )
        if isinstance(value, (int, float)):
            return repr(value).encode()
        if not isinstance(value, str):
            # a value we don't know how to deal with. throw an error
            typename = value.__class__.__name__
            raise DataError(
                f"Invalid input of type: {typename!r}. "
                "Convert to a bytes, string, int or float first."
            )
        # Only str can reach this point; the original's redundant
        # `isinstance(value, str)` check and unreachable trailing
        # `return value` have been removed.
        return value.encode(self.encoding, self.encoding_errors)

    def decode(self, value: EncodableT, force=False) -> EncodableT:
        """Return a unicode string from the bytes-like representation.

        Decoding happens only when ``decode_responses`` is set or ``force``
        is true; any non-bytes value is passed through unchanged.
        """
        if self.decode_responses or force:
            if isinstance(value, memoryview):
                return value.tobytes().decode(self.encoding, self.encoding_errors)
            if isinstance(value, bytes):
                return value.decode(self.encoding, self.encoding_errors)
        return value
# An error prefix maps either directly to an exception class, or to a nested
# mapping keyed by the remainder of the error message.
ExceptionMappingT = Mapping[str, Union[Type[Exception], Mapping[str, Type[Exception]]]]


class BaseParser:
    """Plain Python parsing class"""

    __slots__ = "_stream", "_buffer", "_read_size"

    EXCEPTION_CLASSES: ExceptionMappingT = {
        "ERR": {
            "max number of clients reached": ConnectionError,
            "Client sent AUTH, but no password is set": AuthenticationError,
            "invalid password": AuthenticationError,
            # some Redis server versions report invalid command syntax
            # in lowercase
            "wrong number of arguments for 'auth' command": AuthenticationWrongNumberOfArgsError,
            # some Redis server versions report invalid command syntax
            # in uppercase
            "wrong number of arguments for 'AUTH' command": AuthenticationWrongNumberOfArgsError,
            MODULE_LOAD_ERROR: ModuleError,
            MODULE_EXPORTS_DATA_TYPES_ERROR: ModuleError,
            NO_SUCH_MODULE_ERROR: ModuleError,
            MODULE_UNLOAD_NOT_POSSIBLE_ERROR: ModuleError,
        },
        "EXECABORT": ExecAbortError,
        "LOADING": BusyLoadingError,
        "NOSCRIPT": NoScriptError,
        "READONLY": ReadOnlyError,
        "NOAUTH": AuthenticationError,
        "NOPERM": NoPermissionError,
    }

    def __init__(self, socket_read_size: int):
        self._read_size = socket_read_size
        self._stream: Optional[asyncio.StreamReader] = None
        self._buffer: Optional[SocketBuffer] = None

    def __del__(self):
        # Finalizers must never raise; disconnect best-effort.
        try:
            self.on_disconnect()
        except Exception:
            pass

    def parse_error(self, response: str) -> ResponseError:
        """Map a "-ERR ..." style reply onto the matching exception instance."""
        prefix, _, detail = response.partition(" ")
        mapped = self.EXCEPTION_CLASSES.get(prefix)
        if mapped is None:
            # Unknown prefix: wrap the whole message generically.
            return ResponseError(response)
        if isinstance(mapped, dict):
            # Nested table: dispatch on the remainder of the message.
            mapped = mapped.get(detail, ResponseError)
        return mapped(detail)

    def on_disconnect(self):
        raise NotImplementedError()

    def on_connect(self, connection: "Connection"):
        raise NotImplementedError()

    async def can_read(self, timeout: float) -> bool:
        raise NotImplementedError()

    async def read_response(self) -> Union[EncodableT, ResponseError, None]:
        raise NotImplementedError()
class SocketBuffer:
    """Async-friendly re-impl of redis-py's SocketBuffer.
    TODO: We're currently passing through two buffers,
    the asyncio.StreamReader and this. I imagine we can reduce the layers here
    while maintaining compliance with prior art.
    """

    def __init__(
        self,
        stream_reader: asyncio.StreamReader,
        socket_read_size: int,
        socket_timeout: float,
    ):
        self._stream = stream_reader
        self.socket_read_size = socket_read_size
        self.socket_timeout = socket_timeout
        # Raw bytes read off the socket accumulate here until consumed.
        self._buffer = io.BytesIO()
        # number of bytes written to the buffer from the socket
        self.bytes_written = 0
        # number of bytes read from the buffer
        self.bytes_read = 0

    @property
    def length(self):
        # Count of buffered-but-unconsumed bytes.
        return self.bytes_written - self.bytes_read

    async def _read_from_socket(
        self,
        length: Optional[int] = None,
        timeout: Optional[float] = SENTINEL,  # type: ignore
        raise_on_timeout: bool = True,
    ) -> bool:
        # Pull at least ``length`` bytes (or a single chunk when length is
        # None) from the stream into the internal buffer. SENTINEL means
        # "use the configured socket_timeout"; an explicit None disables it.
        buf = self._buffer
        buf.seek(self.bytes_written)
        marker = 0
        timeout = timeout if timeout is not SENTINEL else self.socket_timeout
        try:
            while True:
                async with async_timeout.timeout(timeout):
                    data = await self._stream.read(self.socket_read_size)
                    # an empty string indicates the server shutdown the socket
                    if isinstance(data, bytes) and len(data) == 0:
                        raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
                    buf.write(data)
                    data_length = len(data)
                    self.bytes_written += data_length
                    marker += data_length
                    if length is not None and length > marker:
                        # caller asked for a minimum byte count; keep reading
                        continue
                    return True
        except (socket.timeout, asyncio.TimeoutError):
            if raise_on_timeout:
                raise TimeoutError("Timeout reading from socket")
            return False
        except NONBLOCKING_EXCEPTIONS as ex:
            # if we're in nonblocking mode and the recv raises a
            # blocking error, simply return False indicating that
            # there's no data to be read. otherwise raise the
            # original exception.
            allowed = NONBLOCKING_EXCEPTION_ERROR_NUMBERS.get(ex.__class__, -1)
            if not raise_on_timeout and ex.errno == allowed:
                return False
            raise ConnectionError(f"Error while reading from socket: {ex.args}")

    async def can_read(self, timeout: float) -> bool:
        # Data is readable if buffered already, or if the socket yields
        # some within ``timeout`` (without raising on timeout).
        return bool(self.length) or await self._read_from_socket(
            timeout=timeout, raise_on_timeout=False
        )

    async def read(self, length: int) -> bytes:
        # Read exactly ``length`` payload bytes plus the trailing CRLF,
        # returning the payload without the terminator.
        length = length + 2  # make sure to read the \r\n terminator
        # make sure we've read enough data from the socket
        if length > self.length:
            await self._read_from_socket(length - self.length)
        self._buffer.seek(self.bytes_read)
        data = self._buffer.read(length)
        self.bytes_read += len(data)
        # purge the buffer when we've consumed it all so it doesn't
        # grow forever
        if self.bytes_read == self.bytes_written:
            self.purge()
        return data[:-2]

    async def readline(self) -> bytes:
        # Read one CRLF-terminated line, pulling more socket data until the
        # terminator appears; returns the line without the CRLF.
        buf = self._buffer
        buf.seek(self.bytes_read)
        data = buf.readline()
        while not data.endswith(SYM_CRLF):
            # there's more data in the socket that we need
            await self._read_from_socket()
            buf.seek(self.bytes_read)
            data = buf.readline()
        self.bytes_read += len(data)
        # purge the buffer when we've consumed it all so it doesn't
        # grow forever
        if self.bytes_read == self.bytes_written:
            self.purge()
        return data[:-2]

    def purge(self):
        # Reset the BytesIO and both offsets; all buffered data is dropped.
        self._buffer.seek(0)
        self._buffer.truncate()
        self.bytes_written = 0
        self.bytes_read = 0

    def close(self):
        # Release the buffer and drop the stream reference.
        try:
            self.purge()
            self._buffer.close()
        except Exception:
            # issue #633 suggests the purge/close somehow raised a
            # BadFileDescriptor error. Perhaps the client ran out of
            # memory or something else? It's probably OK to ignore
            # any error being raised from purge/close since we're
            # removing the reference to the instance below.
            pass
        self._buffer = None
        self._stream = None
class PythonParser(BaseParser):
    """Plain Python parsing class"""

    __slots__ = BaseParser.__slots__ + ("encoder",)

    def __init__(self, socket_read_size: int):
        super().__init__(socket_read_size)
        # Borrowed from the Connection in on_connect(); decodes bulk replies.
        self.encoder: Optional[Encoder] = None

    def on_connect(self, connection: "Connection"):
        """Called when the stream connects"""
        self._stream = connection._reader
        self._buffer = SocketBuffer(
            self._stream, self._read_size, connection.socket_timeout
        )
        self.encoder = connection.encoder

    def on_disconnect(self):
        """Called when the stream disconnects"""
        if self._stream is not None:
            self._stream = None
        if self._buffer is not None:
            self._buffer.close()
            self._buffer = None
        self.encoder = None

    async def can_read(self, timeout: float):
        # True only when connected and data is buffered/arrives in time.
        return self._buffer and bool(await self._buffer.can_read(timeout))

    async def read_response(self) -> Union[EncodableT, ResponseError, None]:
        # Read and parse a single RESP reply. The first byte of each line
        # selects the reply type; arrays recurse into this method.
        if not self._buffer:
            raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
        raw = await self._buffer.readline()
        if not raw:
            raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
        response: Any
        byte, response = raw[:1], raw[1:]
        if byte not in (b"-", b"+", b":", b"$", b"*"):
            raise InvalidResponse(f"Protocol Error: {raw!r}")
        # server returned an error
        if byte == b"-":
            response = response.decode("utf-8", errors="replace")
            error = self.parse_error(response)
            # if the error is a ConnectionError, raise immediately so the user
            # is notified
            if isinstance(error, ConnectionError):
                raise error
            # otherwise, we're dealing with a ResponseError that might belong
            # inside a pipeline response. the connection's read_response()
            # and/or the pipeline's execute() will raise this error if
            # necessary, so just return the exception instance here.
            return error
        # single value
        elif byte == b"+":
            pass
        # int value
        elif byte == b":":
            response = int(response)
        # bulk response
        elif byte == b"$":
            length = int(response)
            if length == -1:
                # RESP nil bulk string
                return None
            response = await self._buffer.read(length)
        # multi-bulk response
        elif byte == b"*":
            length = int(response)
            if length == -1:
                # RESP nil array
                return None
            response = [(await self.read_response()) for i in range(length)]
        if isinstance(response, bytes):
            response = self.encoder.decode(response)
        return response
class HiredisParser(BaseParser):
    """Parser class for connections using Hiredis"""

    __slots__ = BaseParser.__slots__ + ("_next_response", "_reader", "_socket_timeout")

    def __init__(self, socket_read_size: int):
        if not HIREDIS_AVAILABLE:
            raise RedisError("Hiredis is not available.")
        super().__init__(socket_read_size=socket_read_size)
        # Ellipsis is a pre-connect placeholder; after on_connect() this is
        # either False ("nothing cached") or a reply cached by can_read().
        self._next_response = ...
        self._reader: Optional[hiredis.Reader] = None
        self._socket_timeout: Optional[float] = None

    def on_connect(self, connection: "Connection"):
        # Bind the stream and build a hiredis Reader that reports protocol
        # and reply errors through this client's exception types.
        self._stream = connection._reader
        kwargs = {
            "protocolError": InvalidResponse,
            "replyError": self.parse_error,
        }
        if connection.encoder.decode_responses:
            kwargs.update(
                encoding=connection.encoder.encoding,
                errors=connection.encoder.encoding_errors,
            )
        self._reader = hiredis.Reader(**kwargs)
        self._next_response = False
        self._socket_timeout = connection.socket_timeout

    def on_disconnect(self):
        # Drop stream/reader references and clear any cached reply.
        self._stream = None
        self._reader = None
        self._next_response = False

    async def can_read(self, timeout: float):
        if not self._reader:
            raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
        if self._next_response is False:
            # hiredis gets() returns False while a full reply is not yet fed
            self._next_response = self._reader.gets()
        if self._next_response is False:
            return await self.read_from_socket(timeout=timeout, raise_on_timeout=False)
        return True

    async def read_from_socket(
        self, timeout: Optional[float] = SENTINEL, raise_on_timeout: bool = True
    ):
        # SENTINEL -> fall back to the connection's configured socket timeout.
        timeout = self._socket_timeout if timeout is SENTINEL else timeout
        try:
            async with async_timeout.timeout(timeout):
                buffer = await self._stream.read(self._read_size)
            if not isinstance(buffer, bytes) or len(buffer) == 0:
                raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) from None
            self._reader.feed(buffer)
            # data was read from the socket and added to the buffer.
            # return True to indicate that data was read.
            return True
        except asyncio.CancelledError:
            raise
        except (socket.timeout, asyncio.TimeoutError):
            if raise_on_timeout:
                raise TimeoutError("Timeout reading from socket") from None
            return False
        except NONBLOCKING_EXCEPTIONS as ex:
            # if we're in nonblocking mode and the recv raises a
            # blocking error, simply return False indicating that
            # there's no data to be read. otherwise raise the
            # original exception.
            allowed = NONBLOCKING_EXCEPTION_ERROR_NUMBERS.get(ex.__class__, -1)
            if not raise_on_timeout and ex.errno == allowed:
                return False
            raise ConnectionError(f"Error while reading from socket: {ex.args}")

    async def read_response(self) -> EncodableT:
        if not self._stream or not self._reader:
            self.on_disconnect()
            raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) from None
        # _next_response might be cached from a can_read() call
        if self._next_response is not False:
            response = self._next_response
            self._next_response = False
            return response
        response = self._reader.gets()
        while response is False:
            # keep feeding the reader until a complete reply is available
            await self.read_from_socket()
            response = self._reader.gets()
        # if the response is a ConnectionError or the response is a list and
        # the first item is a ConnectionError, raise it as something bad
        # happened
        if isinstance(response, ConnectionError):
            raise response
        elif (
            isinstance(response, list)
            and response
            and isinstance(response[0], ConnectionError)
        ):
            raise response[0]
        return response
# Prefer the C-accelerated hiredis parser when it is importable and recent
# enough; otherwise fall back to the pure-Python implementation.
DefaultParser: Type[Union[PythonParser, HiredisParser]]
if HIREDIS_AVAILABLE:
    DefaultParser = HiredisParser
else:
    DefaultParser = PythonParser
class ConnectCallbackProtocol(Protocol):
    """Structural type of a synchronous on-connect callback."""

    def __call__(self, connection: "Connection"):
        ...


class AsyncConnectCallbackProtocol(Protocol):
    """Structural type of an awaitable on-connect callback."""

    async def __call__(self, connection: "Connection"):
        ...


# A connect callback may be sync or async; Connection.connect() awaits the
# returned value only when it is awaitable.
ConnectCallbackT = Union[ConnectCallbackProtocol, AsyncConnectCallbackProtocol]
class Connection:
    """Manages TCP communication to and from a Redis server"""

    __slots__ = (
        "pid",
        "host",
        "port",
        "db",
        "username",
        "client_name",
        "password",
        "socket_timeout",
        "socket_connect_timeout",
        "socket_keepalive",
        "socket_keepalive_options",
        "socket_type",
        "retry_on_timeout",
        "health_check_interval",
        "next_health_check",
        "last_active_at",
        "encoder",
        "ssl_context",
        "_reader",
        "_writer",
        "_parser",
        "_connect_callbacks",
        "_buffer_cutoff",
        "_loop",
        "__dict__",
    )

    def __init__(
        self,
        *,
        host: str = "localhost",
        port: Union[str, int] = 6379,
        db: Union[str, int] = 0,
        password: str = None,
        socket_timeout: float = None,
        socket_connect_timeout: float = None,
        socket_keepalive: bool = False,
        socket_keepalive_options: dict = None,
        socket_type: int = 0,
        retry_on_timeout: bool = False,
        encoding: str = "utf-8",
        encoding_errors: str = "strict",
        decode_responses: bool = False,
        parser_class: Type[BaseParser] = DefaultParser,
        socket_read_size: int = 65536,
        health_check_interval: int = 0,
        client_name: str = None,
        username: str = None,
        encoder_class: Type[Encoder] = Encoder,
        loop: asyncio.AbstractEventLoop = None,
    ):
        self.pid = os.getpid()
        self.host = host
        self.port = int(port)
        self.db = db
        self.username = username
        self.client_name = client_name
        self.password = password
        self.socket_timeout = socket_timeout
        # connect timeout falls back to the read timeout when not given
        self.socket_connect_timeout = socket_connect_timeout or socket_timeout or None
        self.socket_keepalive = socket_keepalive
        self.socket_keepalive_options = socket_keepalive_options or {}
        self.socket_type = socket_type
        self.retry_on_timeout = retry_on_timeout
        self.health_check_interval = health_check_interval
        self.next_health_check = 0
        # NOTE(review): when set (by SSLConnection) this is a RedisSSLContext
        # wrapper, and it is passed straight to asyncio.open_connection()'s
        # ``ssl`` argument below -- verify that an ssl.SSLContext (via .get())
        # is what actually reaches asyncio.
        self.ssl_context: Optional[RedisSSLContext] = None
        self.encoder = encoder_class(encoding, encoding_errors, decode_responses)
        self._reader: Optional[asyncio.StreamReader] = None
        self._writer: Optional[asyncio.StreamWriter] = None
        self._parser = parser_class(
            socket_read_size=socket_read_size,
        )
        self._connect_callbacks: List[ConnectCallbackT] = []
        self._buffer_cutoff = 6000
        self._loop = loop

    def __repr__(self):
        repr_args = ",".join((f"{k}={v}" for k, v in self.repr_pieces()))
        return f"{self.__class__.__name__}<{repr_args}>"

    def repr_pieces(self):
        """Key/value pairs shown in __repr__."""
        pieces = [("host", self.host), ("port", self.port), ("db", self.db)]
        if self.client_name:
            pieces.append(("client_name", self.client_name))
        return pieces

    def __del__(self):
        # Best-effort cleanup; a finalizer must never raise.
        try:
            if self.is_connected:
                loop = self._loop or asyncio.get_event_loop()
                coro = self.disconnect()
                if loop.is_running():
                    loop.create_task(coro)
                else:
                    # BUGFIX: reuse the coroutine created above instead of
                    # creating a second self.disconnect() coroutine, which
                    # left ``coro`` never awaited.
                    loop.run_until_complete(coro)
        except Exception:
            pass

    @property
    def is_connected(self):
        # Connected iff both halves of the stream pair exist.
        return bool(self._reader and self._writer)

    def register_connect_callback(self, callback):
        """Add a callback invoked after every (re)connect handshake."""
        self._connect_callbacks.append(callback)

    def clear_connect_callbacks(self):
        """Remove all registered connect callbacks."""
        self._connect_callbacks = []

    async def connect(self):
        """Connects to the Redis server if not already connected"""
        if self.is_connected:
            return
        try:
            await self._connect()
        except asyncio.CancelledError:
            raise
        except (socket.timeout, asyncio.TimeoutError):
            raise TimeoutError("Timeout connecting to server")
        except OSError as e:
            raise ConnectionError(self._error_message(e))
        except Exception as exc:
            raise ConnectionError(exc) from exc
        try:
            await self.on_connect()
        except RedisError:
            # clean up after any error in on_connect
            await self.disconnect()
            raise
        # run any user callbacks. right now the only internal callback
        # is for pubsub channel/pattern resubscription
        for callback in self._connect_callbacks:
            task = callback(self)
            if task and inspect.isawaitable(task):
                await task

    async def _connect(self):
        """Create a TCP socket connection"""
        with async_timeout.timeout(self.socket_connect_timeout):
            reader, writer = await asyncio.open_connection(
                host=self.host, port=self.port, ssl=self.ssl_context, loop=self._loop
            )
        self._reader = reader
        self._writer = writer
        sock = writer.transport.get_extra_info("socket")
        if sock is not None:
            sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
            try:
                # TCP_KEEPALIVE
                if self.socket_keepalive:
                    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
                    for k, v in self.socket_keepalive_options.items():
                        sock.setsockopt(socket.SOL_TCP, k, v)
                # set the socket_timeout now that we're connected
                if self.socket_timeout is not None:
                    sock.settimeout(self.socket_timeout)
            except (OSError, TypeError):
                # `socket_keepalive_options` might contain invalid options
                # causing an error. Do not leave the connection open.
                writer.close()
                raise

    def _error_message(self, exception):
        # args for socket.error can either be (errno, "message")
        # or just "message"
        if len(exception.args) == 1:
            return f"Error connecting to {self.host}:{self.port}. {exception.args[0]}."
        else:
            # BUGFIX: the message portion is args[1]; previously args[0]
            # (the errno) was printed twice.
            return (
                f"Error {exception.args[0]} connecting to {self.host}:{self.port}. "
                f"{exception.args[1]}."
            )

    async def on_connect(self):
        """Initialize the connection, authenticate and select a database"""
        self._parser.on_connect(self)
        # if username and/or password are set, authenticate
        if self.username or self.password:
            if self.username:
                auth_args = (self.username, self.password or "")
            else:
                auth_args = (self.password,)
            # avoid checking health here -- PING will fail if we try
            # to check the health prior to the AUTH
            await self.send_command("AUTH", *auth_args, check_health=False)
            try:
                auth_response = await self.read_response()
            except AuthenticationWrongNumberOfArgsError:
                # a username and password were specified but the Redis
                # server seems to be < 6.0.0 which expects a single password
                # arg. retry auth with just the password.
                # https://github.com/andymccurdy/redis-py/issues/1274
                await self.send_command("AUTH", self.password, check_health=False)
                auth_response = await self.read_response()
            if str_if_bytes(auth_response) != "OK":
                raise AuthenticationError("Invalid Username or Password")
        # if a client_name is given, set it
        if self.client_name:
            await self.send_command("CLIENT", "SETNAME", self.client_name)
            # BUGFIX: the response must be awaited; previously the coroutine
            # object itself was compared against "OK", which always failed
            # and raised the error below whenever client_name was set.
            if str_if_bytes(await self.read_response()) != "OK":
                raise ConnectionError("Error setting client name")
        # if a database is specified, switch to it
        if self.db:
            await self.send_command("SELECT", self.db)
            if str_if_bytes(await self.read_response()) != "OK":
                raise ConnectionError("Invalid Database")

    async def disconnect(self):
        """Disconnects from the Redis server"""
        try:
            async with async_timeout.timeout(self.socket_connect_timeout):
                self._parser.on_disconnect()
                if not self.is_connected:
                    return
                try:
                    if os.getpid() == self.pid:
                        self._writer.close()
                        # py3.6 doesn't have this method
                        if hasattr(self._writer, "wait_closed"):
                            await self._writer.wait_closed()
                except OSError:
                    pass
                self._reader = None
                self._writer = None
        except asyncio.TimeoutError:
            raise TimeoutError(
                f"Timed out closing connection after {self.socket_connect_timeout}"
            ) from None

    async def check_health(self):
        """Check the health of the connection with a PING/PONG"""
        if self.health_check_interval and time.time() > self.next_health_check:
            try:
                await self.send_command("PING", check_health=False)
                if str_if_bytes(await self.read_response()) != "PONG":
                    raise ConnectionError("Bad response from PING health check")
            except (ConnectionError, TimeoutError) as err:
                # one reconnect-and-retry attempt before giving up
                await self.disconnect()
                try:
                    await self.send_command("PING", check_health=False)
                    if str_if_bytes(await self.read_response()) != "PONG":
                        raise ConnectionError(
                            "Bad response from PING health check"
                        ) from None
                except BaseException as err2:
                    raise err2 from err

    async def send_packed_command(
        self,
        command: Union[bytes, str, Iterable[Union[bytes, str]]],
        check_health: bool = True,
    ):
        """Send an already packed command to the Redis server"""
        if not self._writer:
            await self.connect()
        # guard against health check recursion
        if check_health:
            await self.check_health()
        try:
            if isinstance(command, str):
                command = command.encode()
            if isinstance(command, bytes):
                command = [command]
            self._writer.writelines(command)
            await self._writer.drain()
        except asyncio.TimeoutError:
            await self.disconnect()
            raise TimeoutError("Timeout writing to socket") from None
        except OSError as e:
            await self.disconnect()
            if len(e.args) == 1:
                errno, errmsg = "UNKNOWN", e.args[0]
            else:
                errno = e.args[0]
                errmsg = e.args[1]
            raise ConnectionError(
                f"Error {errno} while writing to socket. {errmsg}."
            ) from e
        except BaseException:
            await self.disconnect()
            raise

    async def send_command(self, *args, **kwargs):
        """Pack and send a command to the Redis server"""
        if not self.is_connected:
            await self.connect()
        await self.send_packed_command(
            self.pack_command(*args), check_health=kwargs.get("check_health", True)
        )

    async def can_read(self, timeout: float = 0):
        """Poll the socket to see if there's data that can be read."""
        if not self.is_connected:
            await self.connect()
        return await self._parser.can_read(timeout)

    async def read_response(self):
        """Read the response from a previously sent command"""
        try:
            with async_timeout.timeout(self.socket_timeout):
                response = await self._parser.read_response()
        except asyncio.TimeoutError:
            await self.disconnect()
            raise TimeoutError(f"Timeout reading from {self.host}:{self.port}")
        except BaseException:
            await self.disconnect()
            raise
        if self.health_check_interval:
            self.next_health_check = time.time() + self.health_check_interval
        if isinstance(response, ResponseError):
            raise response from None
        return response

    def pack_command(self, *args: EncodableT) -> List[bytes]:
        """Pack a series of arguments into the Redis protocol"""
        output = []
        # the client might have included 1 or more literal arguments in
        # the command name, e.g., 'CONFIG GET'. The Redis server expects these
        # arguments to be sent separately, so split the first argument
        # manually. These arguments should be bytestrings so that they are
        # not encoded.
        if isinstance(args[0], str):
            args = tuple(args[0].encode().split()) + args[1:]
        elif b" " in args[0]:
            args = tuple(args[0].split()) + args[1:]
        buff = SYM_EMPTY.join((SYM_STAR, str(len(args)).encode(), SYM_CRLF))
        buffer_cutoff = self._buffer_cutoff
        for arg in map(self.encoder.encode, args):
            # to avoid large string mallocs, chunk the command into the
            # output list if we're sending large values or memoryviews
            arg_length = len(arg)
            if (
                len(buff) > buffer_cutoff
                or arg_length > buffer_cutoff
                or isinstance(arg, memoryview)
            ):
                buff = SYM_EMPTY.join(
                    (buff, SYM_DOLLAR, str(arg_length).encode(), SYM_CRLF)
                )
                output.append(buff)
                output.append(arg)
                buff = SYM_CRLF
            else:
                buff = SYM_EMPTY.join(
                    (
                        buff,
                        SYM_DOLLAR,
                        str(arg_length).encode(),
                        SYM_CRLF,
                        arg,
                        SYM_CRLF,
                    )
                )
        output.append(buff)
        return output

    def pack_commands(self, commands: Iterable[Iterable[EncodableT]]) -> List[bytes]:
        """Pack multiple commands into the Redis protocol"""
        output: List[bytes] = []
        pieces: List[bytes] = []
        buffer_length = 0
        buffer_cutoff = self._buffer_cutoff
        for cmd in commands:
            for chunk in self.pack_command(*cmd):
                chunklen = len(chunk)
                if (
                    buffer_length > buffer_cutoff
                    or chunklen > buffer_cutoff
                    or isinstance(chunk, memoryview)
                ):
                    # flush the accumulated small pieces before a big chunk
                    output.append(SYM_EMPTY.join(pieces))
                    buffer_length = 0
                    pieces = []
                if chunklen > buffer_cutoff or isinstance(chunk, memoryview):
                    output.append(chunk)
                else:
                    pieces.append(chunk)
                    buffer_length += chunklen
        if pieces:
            output.append(SYM_EMPTY.join(pieces))
        return output
class SSLConnection(Connection):
    """A TCP Connection whose stream is wrapped in TLS.

    SSL parameters are collected into a RedisSSLContext stored on
    ``self.ssl_context``; the read-only properties below simply delegate
    to that wrapper.
    """

    def __init__(
        self,
        ssl_keyfile: str = None,
        ssl_certfile: str = None,
        ssl_cert_reqs: str = "required",
        ssl_ca_certs: str = None,
        ssl_check_hostname: bool = False,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.ssl_context = RedisSSLContext(
            keyfile=ssl_keyfile,
            certfile=ssl_certfile,
            cert_reqs=ssl_cert_reqs,
            ca_certs=ssl_ca_certs,
            check_hostname=ssl_check_hostname,
        )

    @property
    def keyfile(self):
        # Path to the client private-key file, if any.
        return self.ssl_context.keyfile

    @property
    def certfile(self):
        # Path to the client certificate file, if any.
        return self.ssl_context.certfile

    @property
    def cert_reqs(self):
        # Resolved ssl verify-mode constant.
        return self.ssl_context.cert_reqs

    @property
    def ca_certs(self):
        # Path to the CA bundle used for verification, if any.
        return self.ssl_context.ca_certs

    @property
    def check_hostname(self):
        # Whether the server hostname is matched against its certificate.
        return self.ssl_context.check_hostname
class RedisSSLContext:
    """Lazily builds and caches an ssl.SSLContext from redis-style SSL kwargs."""

    __slots__ = (
        "keyfile",
        "certfile",
        "cert_reqs",
        "ca_certs",
        "context",
        "check_hostname",
    )

    def __init__(
        self,
        keyfile: str = None,
        certfile: str = None,
        cert_reqs: str = None,
        ca_certs: str = None,
        check_hostname: bool = False,
    ):
        self.keyfile = keyfile
        self.certfile = certfile
        if cert_reqs is None:
            self.cert_reqs = ssl.CERT_NONE
        elif isinstance(cert_reqs, str):
            # map the user-facing spellings onto ssl verify-mode constants
            CERT_REQS = {
                "none": ssl.CERT_NONE,
                "optional": ssl.CERT_OPTIONAL,
                "required": ssl.CERT_REQUIRED,
            }
            if cert_reqs not in CERT_REQS:
                raise RedisError(
                    f"Invalid SSL Certificate Requirements Flag: {cert_reqs}"
                )
            self.cert_reqs = CERT_REQS[cert_reqs]
        else:
            # BUGFIX: an already-resolved ssl verify-mode constant (e.g.
            # ssl.CERT_REQUIRED) previously fell through without assigning
            # self.cert_reqs at all, causing AttributeError later in get().
            self.cert_reqs = cert_reqs
        self.ca_certs = ca_certs
        self.check_hostname = check_hostname
        self.context = None  # built on first get()

    def get(self) -> ssl.SSLContext:
        """Return the cached SSLContext, building it on first use."""
        if not self.context:
            context = ssl.create_default_context()
            # check_hostname must be set before relaxing verify_mode
            context.check_hostname = self.check_hostname
            context.verify_mode = self.cert_reqs
            if self.certfile and self.keyfile:
                context.load_cert_chain(certfile=self.certfile, keyfile=self.keyfile)
            if self.ca_certs:
                context.load_verify_locations(self.ca_certs)
            self.context = context
        return self.context
class UnixDomainSocketConnection(Connection):  # lgtm [py/missing-call-to-init]
    """Connection over a Unix domain socket rather than TCP.

    Deliberately does not call ``Connection.__init__`` (there is no
    host/port), so it must initialize every attribute that the inherited
    methods (``is_connected``, ``disconnect``, ...) read.
    """

    def __init__(
        self,
        *,
        path: str = "",
        db: Union[str, int] = 0,
        username: str = None,
        password: str = None,
        socket_timeout: float = None,
        encoding: str = "utf-8",
        encoding_errors: str = "strict",
        decode_responses: bool = False,
        retry_on_timeout: bool = False,
        parser_class: Type[BaseParser] = DefaultParser,
        socket_read_size: int = 65536,
        health_check_interval: float = 0.0,
        client_name=None,
        loop: asyncio.AbstractEventLoop = None,
    ):
        self.pid = os.getpid()
        self.path = path
        self.db = db
        self.username = username
        self.client_name = client_name
        self.password = password
        self.socket_timeout = socket_timeout
        # BUGFIX: inherited disconnect() reads socket_connect_timeout and
        # _connect() below needs a connect timeout; neither was ever set
        # before, raising AttributeError. Mirror Connection.__init__'s
        # fallback to the read timeout.
        self.socket_connect_timeout = socket_timeout or None
        self.retry_on_timeout = retry_on_timeout
        self.health_check_interval = health_check_interval
        self.next_health_check = 0
        self.encoder = Encoder(encoding, encoding_errors, decode_responses)
        self._sock = None
        # BUGFIX: inherited is_connected/__del__ expect these to exist
        # before the first connect
        self._reader: Optional[asyncio.StreamReader] = None
        self._writer: Optional[asyncio.StreamWriter] = None
        self._parser = parser_class(socket_read_size=socket_read_size)
        self._connect_callbacks = []
        self._buffer_cutoff = 6000
        self._loop = loop

    def repr_pieces(self) -> Iterable[Tuple[str, Union[str, int]]]:
        """Key/value pairs shown in __repr__."""
        pieces = [
            ("path", self.path),
            ("db", self.db),
        ]
        if self.client_name:
            pieces.append(("client_name", self.client_name))
        return pieces

    async def _connect(self):
        """Open the unix-domain stream and run the connection handshake."""
        # BUGFIX: was ``self._connect_timeout``, an attribute that never
        # existed anywhere in the class hierarchy.
        with async_timeout.timeout(self.socket_connect_timeout):
            reader, writer = await asyncio.open_unix_connection(path=self.path)
        self._reader = reader
        self._writer = writer
        await self.on_connect()

    def _error_message(self, exception):
        # args for socket.error can either be (errno, "message")
        # or just "message"
        if len(exception.args) == 1:
            return f"Error connecting to unix socket: {self.path}. {exception.args[0]}."
        else:
            return (
                f"Error {exception.args[0]} connecting to unix socket: "
                f"{self.path}. {exception.args[1]}."
            )
# Querystring spellings (upper-cased before comparison) that mean False.
FALSE_STRINGS = ("0", "F", "FALSE", "N", "NO")


def to_bool(value) -> Optional[bool]:
    """Coerce a URL querystring value to a bool.

    Returns None for a missing/empty value so the connection default applies
    (the previous ``-> bool`` annotation was wrong about this); recognizes the
    FALSE_STRINGS spellings case-insensitively as False, everything else via
    ``bool()``.
    """
    if value is None or value == "":
        return None
    if isinstance(value, str) and value.upper() in FALSE_STRINGS:
        return False
    return bool(value)


# How each supported querystring option is coerced to its Python type.
URL_QUERY_ARGUMENT_PARSERS = {
    "db": int,
    "socket_timeout": float,
    "socket_connect_timeout": float,
    "socket_keepalive": to_bool,
    "retry_on_timeout": to_bool,
    "max_connections": int,
    "health_check_interval": int,
    "ssl_check_hostname": to_bool,
}


class ConnectKwargs(TypedDict, total=False):
    """Keyword arguments that parse_url() may produce."""

    username: str
    password: str
    # forward reference: Connection may be defined after/elsewhere
    connection_class: Type["Connection"]
    host: str
    port: int
    db: int


def parse_url(url: str) -> ConnectKwargs:
    """Parse a redis://, rediss:// or unix:// URL into connection kwargs.

    Querystring options are coerced via URL_QUERY_ARGUMENT_PARSERS; unknown
    options pass through as strings. Raises ValueError for an unsupported
    scheme or an uncoercible option value.
    """
    parsed: ParseResult = urlparse(url)
    kwargs: ConnectKwargs = {}
    for name, value in parse_qs(parsed.query).items():
        # parse_qs never yields empty lists, but stay defensive
        # (the former ``value and len(value) > 0`` was redundant)
        if value:
            value = unquote(value[0])
            parser = URL_QUERY_ARGUMENT_PARSERS.get(name)
            if parser:
                try:
                    kwargs[name] = parser(value)
                except (TypeError, ValueError) as exc:
                    # chain the original coercion failure for debuggability
                    raise ValueError(
                        f"Invalid value for `{name}` in connection URL."
                    ) from exc
            else:
                kwargs[name] = value
    if parsed.username:
        kwargs["username"] = unquote(parsed.username)
    if parsed.password:
        kwargs["password"] = unquote(parsed.password)
    # We only support redis://, rediss:// and unix:// schemes.
    if parsed.scheme == "unix":
        if parsed.path:
            kwargs["path"] = unquote(parsed.path)
        kwargs["connection_class"] = UnixDomainSocketConnection
    elif parsed.scheme in ("redis", "rediss"):
        if parsed.hostname:
            kwargs["host"] = unquote(parsed.hostname)
        if parsed.port:
            kwargs["port"] = int(parsed.port)
        # If there's a path argument, use it as the db argument if a
        # querystring value wasn't specified
        if parsed.path and "db" not in kwargs:
            try:
                kwargs["db"] = int(unquote(parsed.path).replace("/", ""))
            except (AttributeError, ValueError):
                pass
        if parsed.scheme == "rediss":
            kwargs["connection_class"] = SSLConnection
    else:
        valid_schemes = "redis://, rediss://, unix://"
        raise ValueError(
            f"Redis URL must specify one of the following schemes ({valid_schemes})"
        )
    return kwargs
# TypeVar so ConnectionPool.from_url() returns the subclass it is called on.
_CP = TypeVar("_CP")
class ConnectionPool:
"""
Create a connection pool. ``If max_connections`` is set, then this
object raises :py:class:`~redis.ConnectionError` when the pool's
limit is reached.
By default, TCP connections are created unless ``connection_class``
is specified. Use :py:class:`~redis.UnixDomainSocketConnection` for
unix sockets.
Any additional keyword arguments are passed to the constructor of
``connection_class``.
"""
@classmethod
def from_url(cls: Type[_CP], url: str, **kwargs) -> _CP:
"""
Return a connection pool configured from the given URL.
For example::
redis://[[username]:[password]]@localhost:6379/0
rediss://[[username]:[password]]@localhost:6379/0
unix://[[username]:[password]]@/path/to/socket.sock?db=0
Three URL schemes are supported:
- `redis://` creates a TCP socket connection. See more at:
<https://www.iana.org/assignments/uri-schemes/prov/redis>
- `rediss://` creates a SSL wrapped TCP socket connection. See more at:
<https://www.iana.org/assignments/uri-schemes/prov/rediss>
- ``unix://``: creates a Unix Domain Socket connection.
The username, password, hostname, path and all querystring values
are passed through urllib.parse.unquote in order to replace any
percent-encoded values with their corresponding characters.
There are several ways to specify a database number. The first value
found will be used:
1. A ``db`` querystring option, e.g. redis://localhost?db=0
2. If using the redis:// or rediss:// schemes, the path argument
of the url, e.g. redis://localhost/0
3. A ``db`` keyword argument to this function.
If none of these options are specified, the default db=0 is used.
All querystring options are cast to their appropriate Python types.
Boolean arguments can be specified with string values "True"/"False"
or "Yes"/"No". Values that cannot be properly cast cause a
``ValueError`` to be raised. Once parsed, the querystring arguments
and keyword arguments are passed to the ``ConnectionPool``'s
class initializer. In the case of conflicting arguments, querystring
arguments always win.
"""
url_options = parse_url(url)
kwargs.update(url_options)
return cls(**kwargs)
def __init__(
self,
connection_class: Type[Connection] = Connection,
max_connections: int = None,
**connection_kwargs,
):
max_connections = max_connections or 2 ** 31
if not isinstance(max_connections, int) or max_connections < 0:
raise ValueError('"max_connections" must be a positive integer')
self.connection_class = connection_class
self.connection_kwargs = connection_kwargs
self.max_connections = max_connections
# a lock to protect the critical section in _checkpid().
# this lock is acquired when the process id changes, such as
# after a fork. during this time, multiple threads in the child
# process could attempt to acquire this lock. the first thread
# to acquire the lock will reset the data structures and lock
# object of this pool. subsequent threads acquiring this lock
# will notice the first thread already did the work and simply
# release the lock.
self._fork_lock = threading.Lock()
self._lock: asyncio.Lock
self._created_connections: int
self._available_connections: List[Connection]
self._in_use_connections: Set[Connection]
self.reset() # lgtm [py/init-calls-subclass]
self.loop = self.connection_kwargs.get("loop")
self.encoder_class = self.connection_kwargs.get("encoder_class", Encoder)
def __repr__(self):
return (
f"{self.__class__.__name__}"
f"<{self.connection_class(**self.connection_kwargs)!r}>"
)
def reset(self):
    """(Re)initialize the pool's bookkeeping for the current process.

    Called from ``__init__`` and from ``_checkpid()`` after a fork.
    """
    self._lock = asyncio.Lock()
    self._created_connections = 0
    self._available_connections = []
    self._in_use_connections = set()
    # this must be the last operation in this method. while reset() is
    # called when holding _fork_lock, other threads in this process
    # can call _checkpid() which compares self.pid and os.getpid() without
    # holding any lock (for performance reasons). keeping this assignment
    # as the last operation ensures that those other threads will also
    # notice a pid difference and block waiting for the first thread to
    # release _fork_lock. when each of these threads eventually acquire
    # _fork_lock, they will notice that another thread already called
    # reset() and they will immediately release _fork_lock and continue on.
    self.pid = os.getpid()
def _checkpid(self):
    """Keep the pool fork-safe: reset it once in the child after a fork.

    Called by every pool method that touches pool state (e.g.
    ``get_connection()`` and ``release()``).  Compares the pid recorded at
    the last ``reset()`` with the current pid; when they differ we are in a
    forked child that must not reuse the parent's sockets, so ``reset()``
    is invoked exactly once under ``_fork_lock``.

    :raises ChildDeadlockedError: if ``_fork_lock`` cannot be acquired
        within 5 seconds (see the race described below).
    """
    # _checkpid() attempts to keep ConnectionPool fork-safe on modern
    # systems. this is called by all ConnectionPool methods that
    # manipulate the pool's state such as get_connection() and release().
    #
    # _checkpid() determines whether the process has forked by comparing
    # the current process id to the process id saved on the ConnectionPool
    # instance. if these values are the same, _checkpid() simply returns.
    #
    # when the process ids differ, _checkpid() assumes that the process
    # has forked and that we're now running in the child process. the child
    # process cannot use the parent's file descriptors (e.g., sockets).
    # therefore, when _checkpid() sees the process id change, it calls
    # reset() in order to reinitialize the child's ConnectionPool. this
    # will cause the child to make all new connection objects.
    #
    # _checkpid() is protected by self._fork_lock to ensure that multiple
    # threads in the child process do not call reset() multiple times.
    #
    # there is an extremely small chance this could fail in the following
    # scenario:
    #   1. process A calls _checkpid() for the first time and acquires
    #      self._fork_lock.
    #   2. while holding self._fork_lock, process A forks (the fork()
    #      could happen in a different thread owned by process A)
    #   3. process B (the forked child process) inherits the
    #      ConnectionPool's state from the parent. that state includes
    #      a locked _fork_lock. process B will not be notified when
    #      process A releases the _fork_lock and will thus never be
    #      able to acquire the _fork_lock.
    #
    # to mitigate this possible deadlock, _checkpid() will only wait 5
    # seconds to acquire _fork_lock. if _fork_lock cannot be acquired in
    # that time it is assumed that the child is deadlocked and a
    # redis.ChildDeadlockedError error is raised.
    if self.pid != os.getpid():
        acquired = self._fork_lock.acquire(timeout=5)
        if not acquired:
            raise ChildDeadlockedError
        # reset() the instance for the new process if another thread
        # hasn't already done so
        try:
            if self.pid != os.getpid():
                self.reset()
        finally:
            self._fork_lock.release()
async def get_connection(self, command_name, *keys, **options):
    """Get a connection from the pool.

    Pops an idle connection (or creates a new one, up to
    ``max_connections``) and verifies it is connected and has no stale
    unread data before handing it out.  ``command_name``/``keys``/
    ``options`` are accepted for interface compatibility but unused here.
    """
    self._checkpid()
    async with self._lock:
        try:
            connection = self._available_connections.pop()
        except IndexError:
            connection = self.make_connection()
        self._in_use_connections.add(connection)
    # NOTE: the health check below runs *outside* self._lock on purpose —
    # release() re-acquires the same lock, so holding it here would deadlock.
    try:
        # ensure this connection is connected to Redis
        await connection.connect()
        # connections that the pool provides should be ready to send
        # a command. if not, the connection was either returned to the
        # pool before all data has been read or the socket has been
        # closed. either way, reconnect and verify everything is good.
        try:
            if await connection.can_read():
                raise ConnectionError("Connection has data") from None
        except ConnectionError:
            await connection.disconnect()
            await connection.connect()
            if await connection.can_read():
                raise ConnectionError("Connection not ready") from None
    except BaseException:
        # release the connection back to the pool so that we don't
        # leak it
        await self.release(connection)
        raise
    return connection
def get_encoder(self):
    """Build an encoder configured from this pool's connection kwargs.

    Falls back to utf-8 / strict / no response decoding when the
    corresponding options were not supplied.
    """
    options = self.connection_kwargs
    encoding = options.get("encoding", "utf-8")
    errors = options.get("encoding_errors", "strict")
    decode = options.get("decode_responses", False)
    return self.encoder_class(encoding=encoding, encoding_errors=errors, decode_responses=decode)
def make_connection(self):
    """Instantiate a brand-new connection, enforcing the pool size limit.

    :raises ConnectionError: when ``max_connections`` has been reached.
    """
    remaining = self.max_connections - self._created_connections
    if remaining <= 0:
        raise ConnectionError("Too many connections")
    self._created_connections += 1
    return self.connection_class(**self.connection_kwargs)
async def release(self, connection: Connection):
    """Release the connection back to the pool.

    Connections created in another process (i.e. before a fork) are not
    returned to the pool; they are disconnected and their slot is freed.
    """
    self._checkpid()
    async with self._lock:
        try:
            self._in_use_connections.remove(connection)
        except KeyError:
            # Gracefully fail when a connection is returned to this pool
            # that the pool doesn't actually own
            pass
        if self.owns_connection(connection):
            self._available_connections.append(connection)
        else:
            # pool doesn't own this connection. do not add it back
            # to the pool and decrement the count so that another
            # connection can take its place if needed
            self._created_connections -= 1
            await connection.disconnect()
            return
def owns_connection(self, connection: Connection):
    """Return whether *connection* was created by this pool in this process."""
    return self.pid == connection.pid
async def disconnect(self, inuse_connections: bool = True):
    """
    Disconnects connections in the pool

    If ``inuse_connections`` is True, disconnect connections that are
    current in use, potentially by other tasks. Otherwise only disconnect
    connections that are idle in the pool.

    If any individual disconnect raised, the first such exception is
    re-raised after all disconnects have been attempted.
    """
    self._checkpid()
    async with self._lock:
        if inuse_connections:
            connections = chain(
                self._available_connections, self._in_use_connections
            )
        else:
            connections = self._available_connections
        # run all disconnects concurrently; collect exceptions instead of
        # aborting on the first one so every connection gets a chance
        resp = await asyncio.gather(
            *(connection.disconnect() for connection in connections),
            return_exceptions=True,
        )
        exc = next((r for r in resp if isinstance(r, BaseException)), None)
        if exc:
            raise exc
class BlockingConnectionPool(ConnectionPool):
    """
    Thread-safe blocking connection pool::

        >>> from aioredis.client import Redis
        >>> client = Redis(connection_pool=BlockingConnectionPool())

    It performs the same function as the default
    :py:class:`~redis.ConnectionPool` implementation, in that,
    it maintains a pool of reusable connections that can be shared by
    multiple redis clients (safely across threads if required).

    The difference is that, in the event that a client tries to get a
    connection from the pool when all of connections are in use, rather than
    raising a :py:class:`~redis.ConnectionError` (as the default
    :py:class:`~redis.ConnectionPool` implementation does), it
    makes the client wait ("blocks") for a specified number of seconds until
    a connection becomes available.

    Use ``max_connections`` to increase / decrease the pool size::

        >>> pool = BlockingConnectionPool(max_connections=10)

    Use ``timeout`` to tell it either how many seconds to wait for a connection
    to become available, or to block forever:

        >>> # Block forever.
        >>> pool = BlockingConnectionPool(timeout=None)

        >>> # Raise a ``ConnectionError`` after five seconds if a connection is
        >>> # not available.
        >>> pool = BlockingConnectionPool(timeout=5)
    """

    def __init__(
        self,
        max_connections: int = 50,
        timeout: Optional[int] = 20,
        connection_class: Type[Connection] = Connection,
        queue_class: Type[asyncio.Queue] = asyncio.LifoQueue,
        **connection_kwargs,
    ):
        # ``queue_class`` defaults to a LIFO queue so recently used (and thus
        # already-connected) connections are preferred over the ``None``
        # placeholders initially put into the queue by reset().
        self.queue_class = queue_class
        self.timeout = timeout
        self._connections: List[Connection]
        super().__init__(
            connection_class=connection_class,
            max_connections=max_connections,
            **connection_kwargs,
        )

    def reset(self):
        # Create and fill up a thread safe queue with ``None`` values.
        # Each ``None`` is a placeholder meaning "a connection may be
        # created on demand"; see get_connection().
        self.pool = self.queue_class(self.max_connections)
        while True:
            try:
                self.pool.put_nowait(None)
            except asyncio.QueueFull:
                break
        # Keep a list of actual connection instances so that we can
        # disconnect them later.
        self._connections = []
        # this must be the last operation in this method. while reset() is
        # called when holding _fork_lock, other threads in this process
        # can call _checkpid() which compares self.pid and os.getpid() without
        # holding any lock (for performance reasons). keeping this assignment
        # as the last operation ensures that those other threads will also
        # notice a pid difference and block waiting for the first thread to
        # release _fork_lock. when each of these threads eventually acquire
        # _fork_lock, they will notice that another thread already called
        # reset() and they will immediately release _fork_lock and continue on.
        self.pid = os.getpid()

    def make_connection(self):
        """Make a fresh connection."""
        connection = self.connection_class(**self.connection_kwargs)
        self._connections.append(connection)
        return connection

    async def get_connection(self, command_name, *keys, **options):
        """
        Get a connection, blocking for ``self.timeout`` until a connection
        is available from the pool.

        If the connection returned is ``None`` then creates a new connection.
        Because we use a last-in first-out queue, the existing connections
        (having been returned to the pool after the initial ``None`` values
        were added) will be returned before ``None`` values. This means we only
        create new connections when we need to, i.e.: the actual number of
        connections will only increase in response to demand.
        """
        # Make sure we haven't changed process.
        self._checkpid()

        # Try and get a connection from the pool. If one isn't available within
        # self.timeout then raise a ``ConnectionError``.
        connection = None
        try:
            async with async_timeout.timeout(self.timeout):
                connection = await self.pool.get()
        except (asyncio.QueueEmpty, asyncio.TimeoutError):
            # Note that this is not caught by the redis client and will be
            # raised unless handled by application code. If you want never to
            # time out, construct the pool with ``timeout=None``.
            raise ConnectionError("No connection available.")

        # If the ``connection`` is actually ``None`` then that's a cue to make
        # a new connection to add to the pool.
        if connection is None:
            connection = self.make_connection()

        try:
            # ensure this connection is connected to Redis
            await connection.connect()
            # connections that the pool provides should be ready to send
            # a command. if not, the connection was either returned to the
            # pool before all data has been read or the socket has been
            # closed. either way, reconnect and verify everything is good.
            try:
                if await connection.can_read():
                    raise ConnectionError("Connection has data") from None
            except ConnectionError:
                await connection.disconnect()
                await connection.connect()
                if await connection.can_read():
                    raise ConnectionError("Connection not ready") from None
        except BaseException:
            # release the connection back to the pool so that we don't leak it
            await self.release(connection)
            raise

        return connection

    async def release(self, connection: Connection):
        """Releases the connection back to the pool."""
        # Make sure we haven't changed process.
        self._checkpid()
        if not self.owns_connection(connection):
            # pool doesn't own this connection. do not add it back
            # to the pool. instead add a None value which is a placeholder
            # that will cause the pool to recreate the connection if
            # its needed.
            await connection.disconnect()
            self.pool.put_nowait(None)
            return

        # Put the connection back into the pool.
        try:
            self.pool.put_nowait(connection)
        except asyncio.QueueFull:
            # perhaps the pool has been reset() after a fork? regardless,
            # we don't want this connection
            pass

    async def disconnect(self, inuse_connections: bool = True):
        """Disconnects all connections in the pool."""
        # NOTE: ``inuse_connections`` is accepted for interface compatibility
        # with ConnectionPool.disconnect() but this implementation always
        # disconnects every connection it ever created.
        self._checkpid()
        async with self._lock:
            resp = await asyncio.gather(
                *(connection.disconnect() for connection in self._connections),
                return_exceptions=True,
            )
            exc = next((r for r in resp if isinstance(r, BaseException)), None)
            if exc:
                raise exc
| mit | e1bda9df1edfa2e1613c3366429638ef | 36.155958 | 97 | 0.589312 | 4.512539 | false | false | false | false |
aio-libs/aioredis | docs/examples/pipeline.py | 1 | 1759 | import asyncio
import aioredis
async def main():
    """Demonstrate the different ways of batching commands with aioredis."""
    redis = aioredis.from_url("redis://localhost")

    # No pipelining: one network round trip per command.
    async def wait_each_command():
        val = await redis.get("foo")  # wait until `val` is available
        cnt = await redis.incr("bar")  # wait until `cnt` is available
        return val, cnt

    # Sending multiple commands and then gathering results
    async def concurrent():
        fut1 = redis.get("foo")  # issue command and return future
        fut2 = redis.incr("bar")  # issue command and return future
        # block until results are available
        val, cnt = await asyncio.gather(fut1, fut2)
        return val, cnt

    # Explicit pipeline: commands are buffered and sent in one round trip.
    async def explicit_pipeline():
        pipe = redis.pipeline()
        pipe.get("foo").incr("bar")
        result = await pipe.execute()
        return result

    # Same as above, but using the pipeline as an async context manager.
    async def context_pipeline():
        async with redis.pipeline() as pipe:
            pipe.get("foo").incr("bar")
            result = await pipe.execute()
        return result

    # Pipeline wrapped in a MULTI/EXEC transaction.
    async def pipeline_transaction():
        async with redis.pipeline(transaction=True) as pipe:
            pipe.get("foo").incr("bar")
            result = await pipe.execute()
        return result

    # Transaction built from a callback that queues the commands.
    def callback(pipe: aioredis.client.Pipeline):
        pipe.get("foo").incr("bar")

    async def transaction():
        return await redis.transaction(callback)

    res = await wait_each_command()
    print(res)
    res = await concurrent()
    print(res)
    res = await explicit_pipeline()
    print(res)
    res = await context_pipeline()
    print(res)
    res = await pipeline_transaction()
    print(res)
    res = await transaction()
    print(res)
if __name__ == "__main__":
    # Entry point: run the demo coroutine on a fresh event loop.
    asyncio.run(main())
| mit | 61787c724729e1fe45876104867b485d | 26.920635 | 70 | 0.609437 | 4.148585 | false | false | false | false |
indico/indico | indico/web/http_api/handlers.py | 2 | 12034 | # This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
"""
HTTP API - Handlers
"""
import hashlib
import hmac
import posixpath
import re
import time
from urllib.parse import parse_qs, urlencode
from uuid import UUID
import sentry_sdk
from authlib.oauth2 import OAuth2Error
from flask import current_app, g, request, session
from werkzeug.exceptions import BadRequest, NotFound
from indico.core.cache import make_scoped_cache
from indico.core.db import db
from indico.core.logger import Logger
from indico.core.oauth import require_oauth
from indico.modules.api import APIMode, api_settings
from indico.modules.api.models.keys import APIKey
from indico.web.http_api import HTTPAPIHook
from indico.web.http_api.metadata.serializer import Serializer
from indico.web.http_api.responses import HTTPAPIError, HTTPAPIResult, HTTPAPIResultSchema
from indico.web.http_api.util import get_query_parameter
# Remove the extension at the end or before the querystring
RE_REMOVE_EXTENSION = re.compile(r'\.(\w+)(?:$|(?=\?))')
API_CACHE = make_scoped_cache('legacy-http-api')
def normalizeQuery(path, query, remove=('signature',), separate=False):
    """Normalize request path and query so it can be used for caching and signing.

    Query parameter names are lowercased, parameters listed in *remove*
    (e.g. dynamic ones such as ``signature``/``timestamp``) are dropped,
    and the remainder is sorted (case-insensitively by name, then by value).

    :param separate: when true, return ``(path, encoded_query)`` instead of
        a single joined string.
    """
    params = parse_qs(query)
    kept = []
    for name, values in sorted(params.items(), key=lambda item: item[0].lower()):
        name = name.lower()
        if name in remove:
            continue
        kept.extend((name, value) for value in sorted(values))
    if separate:
        return path, kept and urlencode(kept)
    if not kept:
        return path
    return f'{path}?{urlencode(kept)}'
def validateSignature(ak, signature, timestamp, path, query):
    """Validate the HMAC-SHA1 signature of an API request.

    :param ak: the API key object whose secret signs the request
    :param signature: hex digest supplied by the client (truthy; callers
        only invoke this when a signature is present)
    :param timestamp: unix timestamp sent by the client (``0`` if absent)
    :param path: request path
    :param query: raw query string (``signature`` is stripped before signing)
    :raises HTTPAPIError: with code 403 when the timestamp is missing and
        persistent signatures are not allowed, when the timestamp is outside
        the configured TTL, or when the signature does not match.
    """
    ttl = api_settings.get('signature_ttl')
    if not timestamp and not (ak.is_persistent_allowed and api_settings.get('allow_persistent')):
        raise HTTPAPIError('Signature invalid (no timestamp)', 403)
    elif timestamp and abs(timestamp - int(time.time())) > ttl:
        raise HTTPAPIError('Signature invalid (bad timestamp)', 403)
    digest = hmac.new(ak.secret.encode(), normalizeQuery(path, query).encode(), hashlib.sha1).hexdigest()
    # use a constant-time comparison so a failing check does not leak how
    # many leading characters of the signature were correct (timing attack)
    if not hmac.compare_digest(signature, digest):
        raise HTTPAPIError('Signature invalid', 403)
def checkAK(apiKey, signature, timestamp, path, query):
    """Resolve and validate the API key (if any) used by a request.

    Returns a ``(api_key, only_public)`` tuple: ``api_key`` is the matching
    key object (``None`` when no key was supplied and the security mode
    allows keyless access) and ``only_public`` indicates whether the request
    must be restricted to public data.

    :raises HTTPAPIError: if a key is required but missing, malformed,
        unknown, blocked, or its signature is invalid/missing.
    """
    security_mode = api_settings.get('security_mode')
    if not apiKey:
        if security_mode in {APIMode.ONLYKEY, APIMode.ONLYKEY_SIGNED, APIMode.ALL_SIGNED}:
            raise HTTPAPIError('API key is missing', 403)
        # keyless access is permitted, but only for public data
        return None, True
    try:
        UUID(hex=apiKey)
    except ValueError:
        raise HTTPAPIError('Malformed API key', 400)
    ak = APIKey.query.filter_by(token=apiKey, is_active=True).first()
    if not ak:
        raise HTTPAPIError('Invalid API key', 403)
    if ak.is_blocked:
        raise HTTPAPIError('API key is blocked', 403)
    # Signature validation
    if signature:
        validateSignature(ak, signature, timestamp, path, query)
        return ak, False
    if security_mode == APIMode.ALL_SIGNED:
        raise HTTPAPIError('Signature missing', 403)
    return ak, security_mode in {APIMode.SIGNED, APIMode.ONLYKEY_SIGNED}
def handler(prefix, path):
    """Serve a legacy HTTP API request.

    Authenticates the caller (API key, OAuth bearer token or session
    cookie), dispatches to the matching :class:`HTTPAPIHook`, caches the
    result where allowed and serializes it in the requested format.
    """
    path = posixpath.join('/', prefix, path)
    logger = Logger.get('httpapi')
    if request.method == 'POST':
        # Convert POST data to a query string
        queryParams = list(request.form.lists())
        query = urlencode(queryParams, doseq=1)
        # we only need/keep multiple values so we can properly validate the signature.
        # the legacy code below expects a dict with just the first value.
        # if you write a new api endpoint that needs multiple values get them from
        # ``request.values.getlist()`` directly
        queryParams = {key: values[0] for key, values in queryParams}
    else:
        # Parse the actual query string
        queryParams = {key: value for key, value in request.args.items()}
        query = request.query_string.decode()
    # extract the well-known control parameters (short + long spellings)
    apiKey = get_query_parameter(queryParams, ['ak', 'apikey'], None)
    cookieAuth = get_query_parameter(queryParams, ['ca', 'cookieauth'], 'no') == 'yes'
    signature = get_query_parameter(queryParams, ['signature'])
    timestamp = get_query_parameter(queryParams, ['timestamp'], 0, integer=True)
    noCache = get_query_parameter(queryParams, ['nc', 'nocache'], 'no') == 'yes'
    pretty = get_query_parameter(queryParams, ['p', 'pretty'], 'no') == 'yes'
    onlyPublic = get_query_parameter(queryParams, ['op', 'onlypublic'], 'no') == 'yes'
    onlyAuthed = get_query_parameter(queryParams, ['oa', 'onlyauthed'], 'no') == 'yes'
    scope = 'read:legacy_api' if request.method == 'GET' else 'write:legacy_api'
    oauth_token = None
    if request.headers.get('Authorization', '').lower().startswith('bearer '):
        try:
            oauth_token = require_oauth.acquire_token([scope])
        except OAuth2Error as exc:
            raise BadRequest(f'OAuth error: {exc}')
    # Get our handler function and its argument and response type
    hook, dformat = HTTPAPIHook.parseRequest(path, queryParams)
    if hook is None or dformat is None:
        raise NotFound
    # Disable caching if we are not just retrieving data (or the hook requires it)
    if request.method == 'POST' or hook.NO_CACHE:
        noCache = True
    ak = error = result = None
    ts = int(time.time())
    typeMap = {}
    status_code = None
    is_response = False
    try:
        used_session = None
        if cookieAuth:
            used_session = session
            if not used_session.user:  # ignore guest sessions
                used_session = None
        if apiKey or oauth_token or not used_session:
            # XXX: ``auth_token`` is never reassigned below; the oauth cache
            # key branch further down is effectively dead code — review.
            auth_token = None
            if not oauth_token:
                # Validate the API key (and its signature)
                ak, enforceOnlyPublic = checkAK(apiKey, signature, timestamp, path, query)
                if enforceOnlyPublic:
                    onlyPublic = True
                # Create an access wrapper for the API key's user
                user = ak.user if ak and not onlyPublic else None
            else:  # Access Token (OAuth)
                user = oauth_token.user if not onlyPublic else None
            # Get rid of API key in cache key if we did not impersonate a user
            if ak and user is None:
                cacheKey = normalizeQuery(path, query,
                                          remove=('_', 'ak', 'apiKey', 'signature', 'timestamp', 'nc', 'nocache',
                                                  'oa', 'onlyauthed', 'access_token'))
            else:
                cacheKey = normalizeQuery(path, query,
                                          remove=('_', 'signature', 'timestamp', 'nc', 'nocache', 'oa', 'onlyauthed',
                                                  'access_token'))
            if signature:
                # in case the request was signed, store the result under a different key
                cacheKey = 'signed_' + cacheKey
            if auth_token:
                # if oauth was used, we also make the cache key unique
                cacheKey = f'oauth-{auth_token.id}_{cacheKey}'
        else:
            # We authenticated using a session cookie.
            # XXX: This is not used anymore within indico and should be removed whenever we rewrite
            # the code here.
            token = request.headers.get('X-CSRF-Token', get_query_parameter(queryParams, ['csrftoken']))
            if used_session.csrf_protected and used_session.csrf_token != token:
                raise HTTPAPIError('Invalid CSRF token', 403)
            user = used_session.user if not onlyPublic else None
            cacheKey = normalizeQuery(path, query,
                                      remove=('_', 'nc', 'nocache', 'ca', 'cookieauth', 'oa', 'onlyauthed',
                                              'csrftoken'))
        if user is not None:
            # We *always* prefix the cache key with the user ID so we never get an overlap between
            # authenticated and unauthenticated requests
            cacheKey = f'user-{user.id}_{cacheKey}'
            sentry_sdk.set_user({
                'id': user.id,
                'email': user.email,
                'name': user.full_name,
                'source': 'http_api'
            })
        else:
            cacheKey = f'public_{cacheKey}'
        # Bail out if the user requires authentication but is not authenticated
        if onlyAuthed and not user:
            raise HTTPAPIError('Not authenticated', 403)
        addToCache = not hook.NO_CACHE
        cacheKey = RE_REMOVE_EXTENSION.sub('', cacheKey)
        if not noCache:
            obj = API_CACHE.get(cacheKey)
            if obj is not None:
                result, extra, ts, complete, typeMap = obj
                addToCache = False
        if result is None:
            g.current_api_user = user
            # Perform the actual exporting
            res = hook(user)
            # hooks may return a full Flask response, a 4-tuple or a bare result
            if isinstance(res, current_app.response_class):
                addToCache = False
                is_response = True
                result, extra, complete, typeMap = res, {}, True, {}
            elif isinstance(res, tuple) and len(res) == 4:
                result, extra, complete, typeMap = res
            else:
                result, extra, complete, typeMap = res, {}, True, {}
            if result is not None and addToCache:
                ttl = api_settings.get('cache_ttl')
                if ttl > 0:
                    API_CACHE.set(cacheKey, (result, extra, ts, complete, typeMap), ttl)
    except HTTPAPIError as e:
        error = e
        if e.code:
            status_code = e.code
    if result is None and error is None:
        raise NotFound
    else:
        if ak and error is None:
            # Commit only if there was an API key and no error
            norm_path, norm_query = normalizeQuery(path, query, remove=('signature', 'timestamp'), separate=True)
            uri = '?'.join(_f for _f in (norm_path, norm_query) if _f)
            ak.register_used(request.remote_addr, uri, not onlyPublic)
            db.session.commit()
        else:
            # No need to commit stuff if we didn't use an API key (nothing was written)
            # XXX do we even need this?
            db.session.rollback()
    # Log successful POST api requests
    if error is None and request.method == 'POST':
        logger.info('API request: %s?%s', path, query)
    if is_response:
        # the hook already produced a complete Flask response
        return result
    serializer = Serializer.create(dformat, query_params=queryParams, pretty=pretty, typeMap=typeMap,
                                   **hook.serializer_args)
    if error:
        if not serializer.schemaless:
            # if our serializer has a specific schema (HTML, ICAL, etc...)
            # use JSON, since it is universal
            serializer = Serializer.create('json')
        result = {'message': error.message}
    elif serializer.encapsulate:
        result = HTTPAPIResultSchema().dump(HTTPAPIResult(result, path, query, ts, extra))
    try:
        data = serializer(result)
        response = current_app.make_response(data)
        content_type = serializer.get_response_content_type()
        if content_type:
            response.content_type = content_type
        if status_code:
            response.status_code = status_code
        return response
    except Exception:
        logger.exception('Serialization error in request %s?%s', path, query)
        raise
| mit | 14a48bca97e2c96b7df17543c569d9b6 | 41.522968 | 117 | 0.604537 | 4.203283 | false | false | false | false |
indico/indico | indico/modules/events/persons/blueprint.py | 1 | 2358 | # This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from indico.modules.events.persons.controllers import (RHAPIEmailEventPersonsMetadata, RHAPIEmailEventPersonsSend,
RHDeleteUnusedEventPerson, RHEmailEventPersonsPreview,
RHEventPersonSearch, RHGrantModificationRights,
RHGrantSubmissionRights, RHManagePersonLists, RHPersonsList,
RHRevokeSubmissionRights, RHUpdateEventPerson)
from indico.web.flask.wrappers import IndicoBlueprint
# Blueprint for event-person management pages and their JSON endpoints.
_bp = IndicoBlueprint('persons', __name__, template_folder='templates', virtual_template_folder='events/persons',
                      url_prefix='/event/<int:event_id>/manage')

# Person list page
_bp.add_url_rule('/persons/', 'person_list', RHPersonsList)

# Emailing event persons (JSON API used by the email dialog)
_bp.add_url_rule('/api/persons/email/send', 'api_email_event_persons_send', RHAPIEmailEventPersonsSend,
                 methods=('POST',))
_bp.add_url_rule('/api/persons/email/metadata', 'api_email_event_persons_metadata', RHAPIEmailEventPersonsMetadata,
                 methods=('POST',))
_bp.add_url_rule('/api/persons/email/preview', 'email_event_persons_preview', RHEmailEventPersonsPreview,
                 methods=('POST',))

# Bulk granting/revoking of submission & modification privileges
_bp.add_url_rule('/persons/grant-submission', 'grant_submission_rights', RHGrantSubmissionRights, methods=('POST',))
_bp.add_url_rule('/persons/grant-modification', 'grant_modification_rights', RHGrantModificationRights,
                 methods=('POST',))
_bp.add_url_rule('/persons/revoke-submission', 'revoke_submission_rights', RHRevokeSubmissionRights,
                 methods=('POST',))

# EventPerson operations
_bp.add_url_rule('/persons/<int:person_id>', 'update_person', RHUpdateEventPerson, methods=('PATCH',))
_bp.add_url_rule('/persons/<int:person_id>', 'delete_unused_person', RHDeleteUnusedEventPerson, methods=('DELETE',))
_bp.add_url_rule('/api/persons/search', 'event_person_search', RHEventPersonSearch)

# Manage person list settings
_bp.add_url_rule('/persons/person-lists', 'manage_person_lists', RHManagePersonLists, methods=('GET', 'POST'))
| mit | 6e70784b345c671f53d8b1e0ea09cade | 57.95 | 116 | 0.666243 | 3.778846 | false | false | false | false |
indico/indico | indico/modules/events/registration/controllers/display.py | 1 | 22392 | # This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from uuid import UUID
from flask import flash, jsonify, redirect, request, session
from sqlalchemy.orm import contains_eager, joinedload, lazyload, load_only, subqueryload
from webargs import fields
from werkzeug.exceptions import Forbidden, NotFound
from indico.core.db import db
from indico.modules.auth.util import redirect_to_login
from indico.modules.core.captcha import get_captcha_settings, invalidate_captcha
from indico.modules.events.controllers.base import RegistrationRequired, RHDisplayEventBase
from indico.modules.events.models.events import EventType
from indico.modules.events.payment import payment_event_settings
from indico.modules.events.registration import registration_settings
from indico.modules.events.registration.controllers import RegistrationEditMixin, RegistrationFormMixin
from indico.modules.events.registration.models.forms import RegistrationForm
from indico.modules.events.registration.models.invitations import InvitationState, RegistrationInvitation
from indico.modules.events.registration.models.items import PersonalDataType
from indico.modules.events.registration.models.registrations import Registration, RegistrationState
from indico.modules.events.registration.util import (check_registration_email, create_registration, generate_ticket,
get_event_regforms_registrations, get_flat_section_submission_data,
get_initial_form_values, get_user_data, make_registration_schema)
from indico.modules.events.registration.views import (WPDisplayRegistrationFormConference,
WPDisplayRegistrationFormSimpleEvent,
WPDisplayRegistrationParticipantList)
from indico.modules.files.controllers import UploadFileMixin
from indico.modules.users.util import send_avatar, send_default_avatar
from indico.util.fs import secure_filename
from indico.util.i18n import _
from indico.util.marshmallow import LowercaseString, UUIDString
from indico.web.args import parser, use_kwargs
from indico.web.flask.util import send_file, url_for
from indico.web.util import ExpectedError
class RHRegistrationFormDisplayBase(RHDisplayEventBase):
    """Base class for all registration form display pages.

    Adds support for "restricted access": certain pages may be shown to
    users who cannot access the event itself (e.g. to let them register).
    """

    #: Whether to allow access for users who cannot access the event itself.
    ALLOW_PROTECTED_EVENT = False
    #: Whether the current request is accessing this page in restricted mode
    #: due to lack of access to the event.
    is_restricted_access = False

    @property
    def view_class(self):
        # conferences get the full conference page layout; meetings/lectures
        # use the simpler event page
        return (WPDisplayRegistrationFormConference
                if self.event.type_ == EventType.conference
                else WPDisplayRegistrationFormSimpleEvent)

    def _check_access(self):
        try:
            RHDisplayEventBase._check_access(self)
        except RegistrationRequired:
            # the user cannot access the event; allow them in only if this
            # page opted in AND the subclass-specific check passes
            self.is_restricted_access = True
            if not self.ALLOW_PROTECTED_EVENT or not self._check_restricted_event_access():
                raise Forbidden

    def _check_restricted_event_access(self):
        # hook for subclasses; by default restricted access is always allowed
        return True
class RHRegistrationFormBase(RegistrationFormMixin, RHRegistrationFormDisplayBase):
    """Base class for pages tied to a specific registration form."""

    def _process_args(self):
        RHDisplayEventBase and None  # noqa: B018 -- placeholder comment removed
        RHRegistrationFormDisplayBase._process_args(self)
        RegistrationFormMixin._process_args(self)

    def _check_restricted_event_access(self):
        # restricted access is only allowed if this regform is listed in the
        # event's ACL (i.e. registering in it grants event access)
        return self.regform.in_event_acls.filter_by(event_id=self.event.id).has_rows()
class RHRegistrationFormRegistrationBase(RHRegistrationFormBase):
    """Base for RHs handling individual registrations."""

    #: Whether the page requires an existing registration to be accessible.
    REGISTRATION_REQUIRED = True

    def _process_args(self):
        RHRegistrationFormBase._process_args(self)
        # a registration can be addressed either via an access token in the
        # query string or via the currently logged-in user
        self.token = request.args.get('token')
        if self.token:
            self.registration = self.regform.get_registration(uuid=self.token)
            if not self.registration:
                raise NotFound
        else:
            self.registration = self.regform.get_registration(user=session.user) if session.user else None
        if self.REGISTRATION_REQUIRED and not self.registration:
            raise Forbidden

    def _check_access(self):
        # a valid token grants access on its own; otherwise fall back to the
        # regular event/regform access checks
        if not self.token:
            RHRegistrationFormBase._check_access(self)
class RHRegistrationFormList(RHRegistrationFormDisplayBase):
    """List of all registration forms in the event."""

    ALLOW_PROTECTED_EVENT = True

    def _process(self):
        # in restricted mode only forms whitelisted in the event ACL are shown
        displayed_regforms, user_registrations = get_event_regforms_registrations(self.event, session.user,
                                                                                  only_in_acl=self.is_restricted_access)
        # with a single form there is nothing to choose from; go straight to it
        if len(displayed_regforms) == 1:
            return redirect(url_for('.display_regform', displayed_regforms[0]))
        return self.view_class.render_template('display/regform_list.html', self.event,
                                               regforms=displayed_regforms,
                                               user_registrations=user_registrations,
                                               is_restricted_access=self.is_restricted_access)
class RHParticipantList(RHRegistrationFormDisplayBase):
"""List of all public registrations."""
view_class = WPDisplayRegistrationParticipantList
@staticmethod
def _is_checkin_visible(reg):
    """Return whether the check-in state of *reg* may be shown publicly."""
    form = reg.registration_form
    return form.publish_checkin_enabled and reg.checked_in
def _merged_participant_list_table(self):
    """Build one merged participant table across all the event's regforms.

    Returns a dict with ``headers``, deduplicated ``rows`` sorted
    case-insensitively, ``show_checkin`` and ``num_participants``.
    """
    def _process_registration(reg, column_names):
        personal_data = reg.get_personal_data()
        columns = [{'text': personal_data.get(column_name, '')} for column_name in column_names]
        return {'checked_in': self._is_checkin_visible(reg), 'columns': columns}

    def _deduplicate_reg_data(reg_data_iter):
        # a person may be registered in several forms; show them only once
        used = set()
        for reg_data in reg_data_iter:
            reg_data_hash = tuple(tuple(sorted(x.items())) for x in reg_data['columns'])
            if reg_data_hash not in used:
                used.add(reg_data_hash)
                yield reg_data

    # only the personal-data columns configured for the merged list are shown
    column_names = registration_settings.get(self.event, 'participant_list_columns')
    headers = [PersonalDataType[column_name].get_title() for column_name in column_names]

    query = (Registration.query.with_parent(self.event)
             .filter(Registration.is_state_publishable,
                     ~RegistrationForm.is_deleted,
                     ~RegistrationForm.participant_list_disabled)
             .join(Registration.registration_form)
             .options(subqueryload('data').joinedload('field_data'),
                      contains_eager('registration_form'))
             .signal_query('merged-participant-list-publishable-registrations', event=self.event))
    is_participant = self.event.is_user_registered(session.user)
    registrations = sorted(_deduplicate_reg_data(_process_registration(reg, column_names)
                                                 for reg in query if reg.is_publishable(is_participant)),
                           key=lambda reg: tuple(x['text'].lower() for x in reg['columns']))
    return {'headers': headers,
            'rows': registrations,
            'show_checkin': any(registration['checked_in'] for registration in registrations),
            'num_participants': query.count()}
def _participant_list_table(self, regform):
    """Build the participant table for a single registration form.

    Returns a dict with ``headers``, ``rows`` (sorted by name via the DB
    query), the form ``title``, ``show_checkin`` and ``num_participants``.
    """
    def _process_registration(reg, column_ids, active_fields):
        data_by_field = reg.data_by_field

        def _content(column_id):
            if column_id in data_by_field:
                return data_by_field[column_id].get_friendly_data(for_humans=True)
            elif (column_id in active_fields and active_fields[column_id].personal_data_type is not None and
                    active_fields[column_id].personal_data_type.column is not None):
                # some legacy registrations have no data in the firstname/lastname/email field
                # so we need to get it from the registration object itself
                return getattr(reg, active_fields[column_id].personal_data_type.column)
            else:
                # no data available for the field
                return ''

        def _sort_key_date(column_id):
            # date fields get an extra machine-sortable key for the client
            data = data_by_field.get(column_id)
            if data and data.field_data.field.input_type == 'date':
                return data.data
            else:
                return None

        columns = [{'text': _content(column_id), 'sort_key': _sort_key_date(column_id)} for column_id in column_ids]
        return {'checked_in': self._is_checkin_visible(reg), 'columns': columns}

    active_fields = {field.id: field for field in regform.active_fields}
    # keep only the configured columns that still exist on the form
    column_ids = [column_id
                  for column_id in registration_settings.get_participant_list_columns(self.event, regform)
                  if column_id in active_fields]
    headers = [active_fields[column_id].title.title() for column_id in column_ids]
    query = (Registration.query.with_parent(regform)
             .filter(Registration.is_state_publishable)
             .options(subqueryload('data'))
             .order_by(db.func.lower(Registration.first_name),
                       db.func.lower(Registration.last_name),
                       Registration.friendly_id)
             .signal_query('participant-list-publishable-registrations', regform=regform))
    is_participant = self.event.is_user_registered(session.user)
    registrations = [_process_registration(reg, column_ids, active_fields) for reg in query
                     if reg.is_publishable(is_participant)]
    return {'headers': headers,
            'rows': registrations,
            'title': regform.title,
            'show_checkin': any(registration['checked_in'] for registration in registrations),
            'num_participants': query.count()}
def _process(self):
regforms = (RegistrationForm.query.with_parent(self.event)
.filter(RegistrationForm.is_participant_list_visible(self.event.is_user_registered(session.user)),
~RegistrationForm.participant_list_disabled)
.options(subqueryload('registrations').subqueryload('data').joinedload('field_data'))
.signal_query('participant-list-publishable-regforms', event=self.event)
.all())
if registration_settings.get(self.event, 'merge_registration_forms'):
tables = [self._merged_participant_list_table()]
else:
tables = []
regforms_dict = {regform.id: regform for regform in regforms}
for form_id in registration_settings.get_participant_list_form_ids(self.event):
try:
regform = regforms_dict.pop(form_id)
except KeyError:
# The settings might reference forms that are not available
# anymore (publishing was disabled, etc.)
continue
tables.append(self._participant_list_table(regform))
# There might be forms that have not been sorted by the user yet
tables.extend(map(self._participant_list_table, regforms_dict.values()))
num_participants = sum(table['num_participants'] for table in tables)
return self.view_class.render_template(
'display/participant_list.html',
self.event,
tables=tables,
published=bool(regforms),
num_participants=num_participants
)
class InvitationMixin:
    """Mixin for RHs that accept an invitation token.

    Sets ``self.invitation`` from the ``invitation`` query argument; it stays
    ``None`` when the argument is missing, malformed or unknown (flashing a
    warning in the latter two cases).
    """

    def _process_args(self):
        self.invitation = None
        token = request.args.get('invitation')
        if token is None:
            return
        try:
            UUID(hex=token)
        except ValueError:
            flash(_('Your invitation code is not valid.'), 'warning')
            return
        self.invitation = (RegistrationInvitation.query
                           .filter_by(uuid=token)
                           .with_parent(self.regform)
                           .first())
        if self.invitation is None:
            flash(_('This invitation does not exist or has been withdrawn.'), 'warning')
class RHRegistrationFormCheckEmail(RHRegistrationFormBase):
    """Check how an email will affect the registration."""

    ALLOW_PROTECTED_EVENT = True

    @use_kwargs({
        'email': LowercaseString(required=True),
        'update': UUIDString(load_default=None),  # uuid of an existing registration being modified
        'management': fields.Bool(load_default=False),
    }, location='query')
    def _process_args(self, email, update, management):
        RHRegistrationFormBase._process_args(self)
        self.email = email
        self.update = update
        self.management = management
        self.existing_registration = self.regform.get_registration(uuid=self.update) if self.update else None

    def _check_access(self):
        # NOTE(review): when a valid registration uuid is supplied the normal
        # access check is skipped — presumably the token itself acts as
        # authorization; confirm this is intended.
        if not self.existing_registration:
            RHRegistrationFormBase._check_access(self)

    def _process(self):
        if self.update:
            return jsonify(check_registration_email(self.regform, self.email, self.existing_registration,
                                                    management=self.management))
        else:
            return jsonify(check_registration_email(self.regform, self.email, management=self.management))
class RHRegistrationForm(InvitationMixin, RHRegistrationFormRegistrationBase):
    """Display a registration form and registrations, and process submissions."""

    REGISTRATION_REQUIRED = False
    ALLOW_PROTECTED_EVENT = True
    normalize_url_spec = {
        'locators': {
            lambda self: self.regform
        }
    }

    def _check_access(self):
        RHRegistrationFormRegistrationBase._check_access(self)
        # GET (viewing the form) is always allowed; only submitting requires login.
        if self.regform.require_login and not session.user and request.method != 'GET':
            raise Forbidden(response=redirect_to_login(reason=_('You are trying to register with a form '
                                                                'that requires you to be logged in')))

    def _process_args(self):
        RHRegistrationFormRegistrationBase._process_args(self)
        InvitationMixin._process_args(self)
        # An already-accepted invitation goes straight to the resulting registration.
        if self.invitation and self.invitation.state == InvitationState.accepted and self.invitation.registration:
            return redirect(url_for('.display_regform', self.invitation.registration.locator.registrant))

    @property
    def _captcha_required(self):
        """Whether a CAPTCHA should be displayed when registering."""
        return session.user is None and self.regform.require_captcha

    def _can_register(self):
        # All conditions must hold; invitations bypass the "form not active" check.
        if self.regform.limit_reached:
            return False
        elif self.regform.is_purged:
            return False
        elif not self.regform.is_active and self.invitation is None:
            return False
        elif session.user and self.regform.get_registration(user=session.user):
            # users may only register once per form
            return False
        return True

    def _process_POST(self):
        if not self._can_register():
            raise ExpectedError(_('You cannot register for this event'))
        schema = make_registration_schema(self.regform, captcha_required=self._captcha_required)()
        form_data = parser.parse(schema)
        registration = create_registration(self.regform, form_data, self.invitation)
        # a solved captcha is single-use
        invalidate_captcha()
        return jsonify({'redirect': url_for('.display_regform', registration.locator.registrant)})

    def _process_GET(self):
        user_data = get_user_data(self.regform, session.user, self.invitation)
        # known user/invitation data overrides the form's defaults
        initial_values = get_initial_form_values(self.regform) | user_data
        if self._captcha_required:
            initial_values |= {'captcha': None}
        return self.view_class.render_template('display/regform_display.html', self.event,
                                               regform=self.regform,
                                               form_data=get_flat_section_submission_data(self.regform),
                                               initial_values=initial_values,
                                               payment_conditions=payment_event_settings.get(self.event, 'conditions'),
                                               payment_enabled=self.event.has_feature('payment'),
                                               invitation=self.invitation,
                                               registration=self.registration,
                                               management=False,
                                               login_required=self.regform.require_login and not session.user,
                                               is_restricted_access=self.is_restricted_access,
                                               captcha_required=self._captcha_required,
                                               captcha_settings=get_captcha_settings())
class RHUploadRegistrationFile(UploadFileMixin, RHRegistrationFormBase):
    """Upload a file from a registration form.

    Regform file fields do not wait for the regform to be submitted,
    but upload the selected files immediately, saving just the generated uuid.
    Only this uuid is then sent when the regform is submitted.
    """

    def get_file_context(self):
        # Namespacing tuple used by UploadFileMixin to group stored files.
        return 'event', self.event.id, 'regform', self.regform.id, 'registration'
class RHRegistrationDisplayEdit(RegistrationEditMixin, RHRegistrationFormRegistrationBase):
    """Modify a previously submitted registration from the event display area."""

    template_file = 'display/registration_modify.html'
    management = False
    REGISTRATION_REQUIRED = False
    ALLOW_PROTECTED_EVENT = True

    def _check_access(self):
        RHRegistrationFormRegistrationBase._check_access(self)
        if not self.registration.can_be_modified:
            raise Forbidden

    def _process_args(self):
        RHRegistrationFormRegistrationBase._process_args(self)
        if self.registration is None:
            # fix: messages previously read "If have already registered" (missing "you")
            if session.user:
                flash(_('We could not find a registration for you. If you have already registered, please use the '
                        'direct access link from the email you received after registering.'), 'warning')
            else:
                flash(_('We could not find a registration for you. If you have already registered, please use the '
                        'direct access link from the email you received after registering or log in to your Indico '
                        'account.'), 'warning')
            return redirect(url_for('.display_regform', self.regform))

    @property
    def success_url(self):
        """URL to redirect to after a successful modification."""
        return url_for('.display_regform', self.registration.locator.registrant)
class RHRegistrationWithdraw(RHRegistrationFormRegistrationBase):
    """Withdraw a registration."""

    def _check_access(self):
        RHRegistrationFormRegistrationBase._check_access(self)
        if self.registration.can_be_withdrawn:
            return
        raise Forbidden

    def _process(self):
        registration = self.registration
        registration.update_state(withdrawn=True)
        flash(_('Your registration has been withdrawn.'), 'success')
        return redirect(url_for('.display_regform', registration.locator.registrant))
class RHRegistrationFormDeclineInvitation(InvitationMixin, RHRegistrationFormBase):
    """Decline an invitation to register."""

    ALLOW_PROTECTED_EVENT = True

    def _process_args(self):
        RHRegistrationFormBase._process_args(self)
        InvitationMixin._process_args(self)

    def _process(self):
        invitation = self.invitation
        # NOTE(review): `invitation` may be None when no (or an invalid) token was
        # supplied, which would raise AttributeError here — confirm the route
        # always provides a valid token.
        if invitation.state == InvitationState.pending:
            invitation.state = InvitationState.declined
            flash(_('You declined the invitation to register.'))
        return redirect(self.event.url)
class RHTicketDownload(RHRegistrationFormRegistrationBase):
    """Generate ticket for a given registration."""

    def _check_access(self):
        RHRegistrationFormRegistrationBase._check_access(self)
        regform = self.regform
        registration = self.registration
        # Tickets are only available for complete registrations of forms with
        # ticketing enabled.
        if registration.state != RegistrationState.complete or not regform.tickets_enabled:
            raise Forbidden
        # The ticket must be exposed somewhere on the display pages, unless the
        # user can manage registrations for the event.
        if (not regform.ticket_on_event_page and not regform.ticket_on_summary_page
                and not regform.event.can_manage(session.user, 'registration')):
            raise Forbidden
        if registration.is_ticket_blocked:
            raise Forbidden

    def _process(self):
        pdf_filename = secure_filename(f'{self.event.title}-Ticket.pdf', 'ticket.pdf')
        return send_file(pdf_filename, generate_ticket(self.registration), 'application/pdf')
class RHRegistrationAvatar(RHDisplayEventBase):
    """Display a standard avatar for a registration based on the full name."""

    normalize_url_spec = {
        'locators': {
            lambda self: self.registration
        }
    }

    def _process_args(self):
        RHDisplayEventBase._process_args(self)
        # Load only the columns needed to render an avatar; everything else is
        # lazy-loaded away on purpose to keep this endpoint cheap.
        self.registration = (Registration.query
                             .filter(Registration.id == request.view_args['registration_id'],
                                     ~Registration.is_deleted,
                                     ~RegistrationForm.is_deleted)
                             .join(Registration.registration_form)
                             .options(load_only('id', 'registration_form_id', 'first_name', 'last_name'),
                                      lazyload('*'),
                                      joinedload('registration_form').load_only('id', 'event_id'),
                                      joinedload('user').load_only('id', 'first_name', 'last_name', 'title',
                                                                   'picture_source', 'picture_metadata', 'picture'))
                             .one())

    def _process(self):
        # Prefer the linked user's real avatar; fall back to an initials avatar.
        if self.registration.user:
            return send_avatar(self.registration.user)
        return send_default_avatar(self.registration.full_name)
| mit | d29c7bd19882b95a5e3d80e90078d406 | 45.943396 | 120 | 0.627456 | 4.49458 | false | false | false | false |
indico/indico | indico/modules/events/contributions/models/subcontributions.py | 2 | 6375 | # This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from sqlalchemy.ext.declarative import declared_attr
from indico.core.db import db
from indico.core.db.sqlalchemy.attachments import AttachedItemsMixin
from indico.core.db.sqlalchemy.descriptions import RenderMode, SearchableDescriptionMixin
from indico.core.db.sqlalchemy.notes import AttachedNotesMixin
from indico.core.db.sqlalchemy.searchable import SearchableTitleMixin
from indico.core.db.sqlalchemy.util.models import auto_table_args
from indico.core.db.sqlalchemy.util.queries import increment_and_get
from indico.util.locators import locator_property
from indico.util.string import format_repr, slugify
def _get_next_friendly_id(context):
    """Get the next friendly id for a sub-contribution."""
    # Imported here to avoid a circular import with the contributions model.
    from indico.modules.events.contributions.models.contributions import Contribution

    contrib_id = context.current_parameters['contribution_id']
    assert contrib_id is not None
    return increment_and_get(Contribution._last_friendly_subcontribution_id,
                             Contribution.id == contrib_id)
def _get_next_position(context):
    """Get the next position for a sub-contribution of a contribution.

    Fix: the previous docstring ("next menu entry position for the event") was
    copied from unrelated code; this default computes the 1-based position of a
    new sub-contribution within its parent contribution.
    """
    contribution_id = context.current_parameters['contribution_id']
    res = (db.session.query(db.func.max(SubContribution.position))
           .filter(SubContribution.contribution_id == contribution_id)
           .one())
    # `res[0]` is None when the contribution has no sub-contributions yet.
    return (res[0] or 0) + 1
class SubContribution(SearchableTitleMixin, SearchableDescriptionMixin, AttachedItemsMixin, AttachedNotesMixin,
                      db.Model):
    """A sub-contribution nested inside a contribution."""

    __tablename__ = 'subcontributions'
    __auto_table_args = (db.Index(None, 'friendly_id', 'contribution_id', unique=True),
                         # durations are stored with minute granularity only
                         db.CheckConstraint("date_trunc('minute', duration) = duration", 'duration_no_seconds'),
                         {'schema': 'events'})

    PRELOAD_EVENT_ATTACHED_ITEMS = True
    PRELOAD_EVENT_NOTES = True
    ATTACHMENT_FOLDER_ID_COLUMN = 'subcontribution_id'
    possible_render_modes = {RenderMode.html, RenderMode.markdown}
    default_render_mode = RenderMode.markdown

    @declared_attr
    def __table_args__(cls):
        return auto_table_args(cls)

    id = db.Column(
        db.Integer,
        primary_key=True
    )
    #: The human-friendly ID for the sub-contribution
    friendly_id = db.Column(
        db.Integer,
        nullable=False,
        default=_get_next_friendly_id
    )
    contribution_id = db.Column(
        db.Integer,
        db.ForeignKey('events.contributions.id'),
        index=True,
        nullable=False
    )
    #: Position within the parent contribution (appended at the end by default)
    position = db.Column(
        db.Integer,
        nullable=False,
        default=_get_next_position
    )
    code = db.Column(
        db.String,
        nullable=False,
        default=''
    )
    duration = db.Column(
        db.Interval,
        nullable=False
    )
    is_deleted = db.Column(
        db.Boolean,
        nullable=False,
        default=False
    )
    #: External references associated with this contribution
    references = db.relationship(
        'SubContributionReference',
        lazy=True,
        cascade='all, delete-orphan',
        backref=db.backref(
            'subcontribution',
            lazy=True
        )
    )
    #: Persons associated with this contribution
    person_links = db.relationship(
        'SubContributionPersonLink',
        lazy=True,
        cascade='all, delete-orphan',
        backref=db.backref(
            'subcontribution',
            lazy=True
        )
    )

    # relationship backrefs:
    # - attachment_folders (AttachmentFolder.subcontribution)
    # - contribution (Contribution.subcontributions)
    # - legacy_mapping (LegacySubContributionMapping.subcontribution)
    # - note (EventNote.subcontribution)

    def __init__(self, **kwargs):
        # explicitly initialize this relationship with None to avoid
        # an extra query to check whether there is an object associated
        # when assigning a new one (e.g. during cloning)
        kwargs.setdefault('note', None)
        super().__init__(**kwargs)

    @property
    def event(self):
        return self.contribution.event

    @locator_property
    def locator(self):
        return dict(self.contribution.locator, subcontrib_id=self.id)

    @property
    def is_protected(self):
        # protection is inherited from the parent contribution
        return self.contribution.is_protected

    @property
    def session(self):
        """Convenience property so all event entities have it."""
        return self.contribution.session if self.contribution.session_id is not None else None

    @property
    def timetable_entry(self):
        """Convenience property so all event entities have it."""
        return self.contribution.timetable_entry

    @property
    def speakers(self):
        return self.person_links

    @speakers.setter
    def speakers(self, value):
        # NOTE(review): only the keys of `value` are kept, so the setter
        # apparently expects a mapping keyed by person links — confirm callers.
        self.person_links = list(value.keys())

    @property
    def slug(self):
        return slugify('sc', self.contribution.friendly_id, self.friendly_id, self.title, maxlen=30)

    @property
    def location_parent(self):
        # location data is inherited from the parent contribution
        return self.contribution

    @property
    def venue_name(self):
        return self.location_parent.venue_name

    @property
    def room_name(self):
        return self.location_parent.room_name

    def get_access_list(self):
        return self.contribution.get_access_list()

    def get_manager_list(self, recursive=False, include_groups=True):
        return self.contribution.get_manager_list(recursive=recursive, include_groups=include_groups)

    def __repr__(self):
        return format_repr(self, 'id', is_deleted=False, _text=self.title)

    def can_access(self, user, **kwargs):
        return self.contribution.can_access(user, **kwargs)

    def can_manage(self, user, permission=None, **kwargs):
        return self.contribution.can_manage(user, permission=permission, **kwargs)

    def can_edit(self, user):
        return self.contribution.can_edit(user)

    def is_user_associated(self, user):
        # A user is associated if they are associated with the parent
        # contribution or linked as a person on this sub-contribution.
        if user is None:
            return False
        if self.contribution.is_user_associated(user):
            return True
        return any(pl.person.user == user for pl in self.person_links if pl.person.user)
| mit | 55064fcf51355be563ba9a368995eff9 | 31.860825 | 112 | 0.667137 | 3.954715 | false | false | false | false |
indico/indico | indico/modules/events/surveys/operations.py | 2 | 1948 | # This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from flask import session
from indico.core.db import db
from indico.modules.events.surveys import logger
from indico.modules.events.surveys.models.items import SurveyQuestion, SurveySection, SurveyText
def add_survey_question(section, field_cls, data):
    """Add a question to a survey.

    :param section: The `SurveySection` to which the question will be added.
    :param field_cls: The field class of this question.
    :param data: The `FieldConfigForm.data` to populate the question with.
    :return: The added `SurveyQuestion`.
    """
    new_question = SurveyQuestion()
    field_cls(new_question).update_object(data)
    section.children.append(new_question)
    db.session.flush()
    logger.info('Survey question %s added by %s', new_question, session.user)
    return new_question
def add_survey_text(section, data):
    """Add a text item to a survey.

    :param section: The `SurveySection` to which the text item will be added.
    :param data: The `TextForm.data` to populate the item with.
    :return: The added `SurveyText`.
    """
    text_item = SurveyText()
    text_item.populate_from_dict(data)
    section.children.append(text_item)
    db.session.flush()
    logger.info('Survey text item %s added by %s', text_item, session.user)
    return text_item
def add_survey_section(survey, data):
    """Add a section to a survey.

    :param survey: The `Survey` to which the section will be added.
    :param data: Attributes of the new `SurveySection`.
    :return: The added `SurveySection`.
    """
    new_section = SurveySection(survey=survey)
    new_section.populate_from_dict(data)
    db.session.add(new_section)
    db.session.flush()
    logger.info('Survey section %s added by %s', new_section, session.user)
    return new_section
| mit | 0acb70170ba2ec004f6414c5896d3060 | 32.016949 | 96 | 0.705339 | 3.746154 | false | false | false | false |
indico/indico | indico/cli/event.py | 1 | 5596 | # This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
import sys
import click
from indico.cli.core import cli_group
from indico.core import signals
from indico.core.db import db
from indico.modules.events import Event
from indico.modules.events.export import export_event, import_event
from indico.modules.events.models.series import EventSeries
from indico.modules.users.models.users import User
@cli_group()
def cli():
    # Entry point for this click command group; subcommands attach below.
    # (No docstring on purpose — it would become the CLI help text.)
    pass
@cli.command()
@click.argument('event_id', type=int)
@click.option('-u', '--user', 'user_id', type=int, default=None, metavar='USER_ID',
              help='The user which will be shown on the log as having restored the event (default: no user).')
@click.option('-m', '--message', 'message', metavar='MESSAGE', help='An additional message for the log')
def restore(event_id, user_id, message):
    """Restore a deleted event."""
    event = Event.get(event_id)
    user = User.get(user_id) if user_id else None
    if event is None:
        click.secho('This event does not exist', fg='red')
        sys.exit(1)
    elif not event.is_deleted:
        click.secho('This event is not deleted', fg='yellow')
        sys.exit(1)
    event.restore(message, user)
    # let listeners react before committing (mirrors the normal request lifecycle)
    signals.core.after_process.send()
    db.session.commit()
    click.secho(f'Event undeleted: "{event.title}"', fg='green')
@cli.command()
@click.argument('event_id', type=int)
@click.argument('target_file', type=click.File('wb'))
def export(event_id, target_file):
    """Export all data associated with an event.

    This exports the whole event as an archive which can be imported
    on another other Indico instance. Importing an event is only
    guaranteed to work if it was exported on the same Indico version.
    """
    event = Event.get(event_id)
    if event is None:
        click.secho('This event does not exist', fg='red')
        sys.exit(1)
    elif event.is_deleted:
        # exporting a deleted event is allowed but requires confirmation
        click.secho('This event has been deleted', fg='yellow')
        click.confirm('Export it anyway?', abort=True)
    export_event(event, target_file)
@cli.command('import')
@click.argument('source_file', type=click.File('rb'))
@click.option('--create-users/--no-create-users', default=None,
              help='Whether to create missing users or skip them. By default a confirmation prompt is shown when '
                   'the archive contains such users')
@click.option('--create-affiliations/--no-create-affiliations', default=None,
              help='Whether to create missing affiliations or skip them. By default a confirmation prompt is shown '
                   'when the archive contains such affiliations')
@click.option('--force', is_flag=True, help='Ignore Indico version mismatches (DANGER)')
@click.option('-v', '--verbose', is_flag=True, help='Show verbose information on what is being imported')
@click.option('-y', '--yes', is_flag=True, help='Always commit the imported event without prompting')
@click.option('-c', '--category', 'category_id', type=int, default=0, metavar='ID',
              help='ID of the target category. Defaults to the root category.')
def import_(source_file, create_users, create_affiliations, force, verbose, yes, category_id):
    """Import an event exported from another Indico instance."""
    click.echo('Importing event...')
    event = import_event(source_file, category_id, create_users=create_users, create_affiliations=create_affiliations,
                         verbose=verbose, force=force)
    if event is None:
        click.secho('Import failed.', fg='red')
        sys.exit(1)
    # unless --yes was given, let the user inspect the result before committing
    if not yes and not click.confirm(click.style('Import finished. Commit the changes?', fg='green'), default=True):
        db.session.rollback()
        sys.exit(1)
    db.session.commit()
    click.secho(event.external_url, fg='green', bold=True)
@cli.command('create-series')
@click.option('--title-sequence/--no-title-sequence', 'show_sequence_in_title', default=True,
              help='Whether to show the series sequence in the event titles (lectures only); enabled by default')
@click.option('--links/--no-links', 'show_links', default=True,
              help='Whether to show links to other events in the series on the event page; enabled by default')
@click.argument('event_ids', nargs=-1, type=int, metavar='EVENT_ID...')
def create_series(show_sequence_in_title, show_links, event_ids):
    """Create a series from a list of events."""
    events = Event.query.filter(Event.id.in_(event_ids), ~Event.is_deleted).all()
    # bail out if any requested event is missing or already part of a series
    if missing := (set(event_ids) - {e.id for e in events}):
        click.secho('Events not found:', fg='red', bold=True)
        for event_id in missing:
            click.echo(f'- {event_id}')
        sys.exit(1)
    elif conflict := [e for e in events if e.series]:
        click.secho('Events already assigned to a series:', fg='red', bold=True)
        for event in conflict:
            click.echo(format_event(event))
        sys.exit(1)
    click.echo('Selected events:')
    for event in events:
        click.echo(format_event(event))
    click.confirm('Create series?', default=True, abort=True)
    series = EventSeries(events=events, show_sequence_in_title=show_sequence_in_title, show_links=show_links)
    db.session.commit()
    click.secho(f'Series successfully created (id={series.id}).', fg='green', bold=True)
def format_event(event):
    """Return a one-line, human-readable summary of *event* for CLI output."""
    start_date = event.start_dt_local.date()
    return f'- {event.id}: [{start_date}] {event.title} ({event.external_url})'
| mit | d588186cc83c60c614a8e7e243d78365 | 42.379845 | 118 | 0.67441 | 3.636127 | false | false | false | false |
indico/indico | indico/modules/events/editing/models/revision_files.py | 2 | 2051 | # This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from indico.core.db import db
from indico.util.fs import secure_filename
from indico.util.locators import locator_property
from indico.util.string import format_repr
from indico.web.flask.util import url_for
class EditingRevisionFile(db.Model):
    """Association between an editing revision and an uploaded file."""

    __tablename__ = 'revision_files'
    __table_args__ = {'schema': 'event_editing'}

    revision_id = db.Column(
        db.ForeignKey('event_editing.revisions.id'),
        index=True,
        primary_key=True
    )
    file_id = db.Column(
        db.ForeignKey('indico.files.id'),
        index=True,
        primary_key=True
    )
    file_type_id = db.Column(
        db.ForeignKey('event_editing.file_types.id'),
        index=True
    )

    file = db.relationship(
        'File',
        lazy=False,
        backref=db.backref(
            'editing_revision_files',
            cascade='all, delete-orphan',
            lazy=True
        )
    )
    file_type = db.relationship(
        'EditingFileType',
        lazy=False,
        backref=db.backref(
            'files',
            cascade='all, delete-orphan',
            lazy=True
        )
    )
    revision = db.relationship(
        'EditingRevision',
        lazy=True,
        backref=db.backref(
            'files',
            cascade='all, delete-orphan',
            lazy=True
        )
    )

    def __repr__(self):
        return format_repr(self, 'revision_id', 'file_id')

    @locator_property
    def locator(self):
        # include a safe filename so download URLs are human-readable
        return dict(self.revision.locator, file_id=self.file_id,
                    filename=secure_filename(self.file.filename, f'file-{self.file_id}'))

    @property
    def download_url(self):
        return url_for('event_editing.download_file', self)

    @property
    def external_download_url(self):
        # absolute URL variant, e.g. for use in emails or API payloads
        return url_for('event_editing.download_file', self, _external=True)
| mit | f579c5c50d509356646e3322c356eaff | 25.986842 | 89 | 0.59727 | 3.749543 | false | false | false | false |
indico/indico | indico/web/http_api/hooks/base.py | 2 | 11292 | # This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
"""
Base export interface
"""
import re
from datetime import datetime, time, timedelta
from types import GeneratorType
from urllib.parse import unquote
import pytz
from flask import current_app, request
from indico.core import signals
from indico.core.config import config
from indico.core.db import db
from indico.core.logger import Logger
from indico.core.notifications import flush_email_queue, init_email_queue
from indico.util.date_time import now_utc
from indico.web.http_api.exceptions import ArgumentParseError, LimitExceededException
from indico.web.http_api.metadata import Serializer
from indico.web.http_api.metadata.atom import AtomSerializer
from indico.web.http_api.metadata.html import HTML4Serializer
from indico.web.http_api.metadata.ical import ICalSerializer
from indico.web.http_api.metadata.jsonp import JSONPSerializer
from indico.web.http_api.responses import HTTPAPIError
from indico.web.http_api.util import get_query_parameter
class HTTPAPIHook:
    """This class is the hook between the query (path+params) and the generator of the results (fossil).

    It is also in charge of checking the parameters and the access rights.
    """

    HOOK_LIST = []
    TYPES = None  # abstract
    PREFIX = 'export'  # url prefix. must exist in indico.web.flask.blueprints.api, too! also used as function prefix
    RE = None  # abstract
    METHOD_NAME = None  # overrides method name derived from prefix+type
    DEFAULT_DETAIL = None  # abstract
    MAX_RECORDS = {}
    SERIALIZER_TYPE_MAP = {}  # maps fossil type names to friendly names (useful for plugins e.g. RoomCERN --> Room)
    VALID_FORMATS = None  # None = all formats except 'bin'
    GUEST_ALLOWED = True  # When False, it forces authentication
    COMMIT = False  # commit database changes
    HTTP_POST = False  # require (and allow) HTTP POST
    NO_CACHE = False

    @classmethod
    def parseRequest(cls, path, queryParams):
        """Parse a request path and return a hook and the requested data type."""
        path = unquote(path)
        hooks = cls.HOOK_LIST
        for expCls in hooks:
            Logger.get('HTTPAPIHook.parseRequest').debug(expCls)
            m = expCls._matchPath(path)
            if m:
                gd = m.groupdict()
                g = m.groups()
                type = g[0]
                format = g[-1]
                # reject unknown formats, formats not allowed by the hook, and
                # 'bin' unless the hook explicitly allows it
                if format not in DataFetcher.getAllowedFormats():
                    return None, None
                elif expCls.VALID_FORMATS and format not in expCls.VALID_FORMATS:
                    return None, None
                elif expCls.VALID_FORMATS is None and format == 'bin':
                    return None, None
                return expCls(queryParams, type, gd, format), format
        return None, None

    @staticmethod
    def register(cls):
        """Register a hook.

        To use it, simply decorate the hook class with this method."""
        assert cls.RE is not None
        HTTPAPIHook.HOOK_LIST.append(cls)
        return cls

    @classmethod
    def _matchPath(cls, path):
        # compile the full path regex once per hook class and cache it
        if not hasattr(cls, '_RE'):
            types = '|'.join(cls.TYPES)
            cls._RE = re.compile(r'/' + cls.PREFIX + '/(' + types + r')' + ('/' + cls.RE).rstrip('/') + r'\.(\w+)$')
        return cls._RE.match(path)

    def __init__(self, queryParams, type, pathParams, format):
        self._format = format
        self._queryParams = queryParams
        self._type = type
        self._pathParams = pathParams

    def _getParams(self):
        """Parse and validate the query parameters common to all hooks."""
        self._offset = get_query_parameter(self._queryParams, ['O', 'offset'], 0, integer=True)
        if self._offset < 0:
            raise HTTPAPIError('Offset must be a positive number', 400)
        self._orderBy = get_query_parameter(self._queryParams, ['o', 'order'])
        self._descending = get_query_parameter(self._queryParams, ['c', 'descending'], 'no') == 'yes'
        self._detail = get_query_parameter(self._queryParams, ['d', 'detail'], self.DEFAULT_DETAIL)
        tzName = get_query_parameter(self._queryParams, ['tz'], None)
        if tzName is None:
            tzName = config.DEFAULT_TIMEZONE
        try:
            self._tz = pytz.timezone(tzName)
        except pytz.UnknownTimeZoneError as exc:
            raise HTTPAPIError(f"Bad timezone: '{exc}'", 400)
        # per-detail-level record cap; user-specified limit must stay below it
        max = self.MAX_RECORDS.get(self._detail, 1000)
        self._userLimit = get_query_parameter(self._queryParams, ['n', 'limit'], 0, integer=True)
        if self._userLimit > max:
            raise HTTPAPIError("You can only request up to %d records per request with the detail level '%s'" %
                               (max, self._detail), 400)
        self._limit = self._userLimit if self._userLimit > 0 else max
        fromDT = get_query_parameter(self._queryParams, ['f', 'from'])
        toDT = get_query_parameter(self._queryParams, ['t', 'to'])
        dayDT = get_query_parameter(self._queryParams, ['day'])
        if (fromDT or toDT) and dayDT:
            raise HTTPAPIError("'day' can only be used without 'from' and 'to'", 400)
        elif dayDT:
            fromDT = toDT = dayDT
        self._fromDT = DataFetcher._getDateTime('from', fromDT, self._tz) if fromDT else None
        self._toDT = DataFetcher._getDateTime('to', toDT, self._tz, aux=self._fromDT) if toDT else None

    def _has_access(self, user):
        # subclasses override this to restrict access to the resource
        return True

    @property
    def serializer_args(self):
        return {}

    def _getMethodName(self):
        if self.METHOD_NAME:
            return self.METHOD_NAME
        return self.PREFIX + '_' + self._type.replace('-', '_')

    def _performCall(self, func, user):
        """Run the hook method, collecting (possibly partial) results.

        Returns ``(results, complete)`` where ``complete`` is False if the
        record limit was hit before the generator was exhausted.
        """
        resultList = []
        complete = True
        try:
            res = func(user)
            if isinstance(res, GeneratorType):
                # append one by one so results gathered before a
                # LimitExceededException are preserved
                for obj in res:
                    resultList.append(obj)
            else:
                resultList = res
        except LimitExceededException:
            complete = (self._limit == self._userLimit)
        return resultList, complete

    def _perform(self, user, func, extra_func):
        self._getParams()
        if not self._has_access(user):
            raise HTTPAPIError('Access to this resource is restricted.', 403)
        resultList, complete = self._performCall(func, user)
        if isinstance(resultList, current_app.response_class):
            # the hook returned a raw Flask response; pass it through untouched
            return True, resultList, None, None
        extra = extra_func(user, resultList) if extra_func else None
        return False, resultList, complete, extra

    def __call__(self, user):
        """Perform the actual exporting."""
        if self.HTTP_POST != (request.method == 'POST'):
            # XXX: this should never happen, since HTTP_POST is only used within /api/,
            # where the flask url rule requires POST
            raise HTTPAPIError('This action requires %s' % ('POST' if self.HTTP_POST else 'GET'), 405)
        if not self.GUEST_ALLOWED and not user:
            raise HTTPAPIError('Guest access to this resource is forbidden.', 403)

        method_name = self._getMethodName()
        func = getattr(self, method_name, None)
        extra_func = getattr(self, method_name + '_extra', None)
        if not func:
            raise NotImplementedError(method_name)

        if not self.COMMIT:
            # read-only hook: discard any accidental changes
            is_response, resultList, complete, extra = self._perform(user, func, extra_func)
            db.session.rollback()
        else:
            try:
                init_email_queue()
                is_response, resultList, complete, extra = self._perform(user, func, extra_func)
                signals.core.after_process.send()
                db.session.commit()
                # emails are only sent once the transaction committed successfully
                flush_email_queue()
            except Exception:
                db.session.rollback()
                raise
        if is_response:
            return resultList
        return resultList, extra, complete, self.SERIALIZER_TYPE_MAP
class DataFetcher:
    """Base class for objects that fetch the data served by an HTTP API hook."""

    # Date aliases resolved relative to the current day.
    _deltas = {'yesterday': timedelta(-1),
               'tomorrow': timedelta(1)}

    def __init__(self, user, hook):
        self._user = user
        self._hook = hook

    @classmethod
    def getAllowedFormats(cls):
        """Return all serialization formats known to the serializer registry."""
        return Serializer.getAllFormats()

    @classmethod
    def _parseDateTime(cls, dateTime, allowNegativeOffset):
        """
        Accepted formats:
         * ISO 8601 subset - YYYY-MM-DD[THH:MM]
         * 'today', 'yesterday', 'tomorrow' and 'now'
         * days in the future/past: '[+/-]DdHHhMMm'

        Returns a ``(kind, value)`` tuple; ``'abs'`` marks a fixed datetime,
        while ``'ctx'`` marks a value that still depends on its function
        ('from' or 'to').
        """

        # if it's an "alias", return immediately
        now = now_utc()
        if dateTime in cls._deltas:
            return ('ctx', now + cls._deltas[dateTime])
        elif dateTime == 'now':
            return ('abs', now)
        elif dateTime == 'today':
            return ('ctx', now)

        # relative offset such as '1d12h' or '-30m'
        m = re.match(r'^([+-])?(?:(\d{1,3})d)?(?:(\d{1,2})h)?(?:(\d{1,2})m)?$', dateTime)
        if m:
            mod = -1 if m.group(1) == '-' else 1
            if not allowNegativeOffset and mod == -1:
                raise ArgumentParseError('End date cannot be a negative offset')

            # days/hours/minutes, each signed by the leading +/- modifier
            atoms = list(0 if a is None else int(a) * mod for a in m.groups()[1:])
            if atoms[1] > 23 or atoms[2] > 59:
                raise ArgumentParseError('Invalid time!')
            return ('ctx', timedelta(days=atoms[0], hours=atoms[1], minutes=atoms[2]))
        else:
            # iso 8601 subset
            try:
                return ('abs', datetime.strptime(dateTime, '%Y-%m-%dT%H:%M'))
            except ValueError:
                pass
            try:
                return ('ctx', datetime.strptime(dateTime, '%Y-%m-%d'))
            except ValueError:
                raise ArgumentParseError("Impossible to parse '%s'" % dateTime)

    @classmethod
    def _getDateTime(cls, ctx, dateTime, tz, aux=None):
        """Parse *dateTime* and return a tz-aware datetime for context *ctx*
        (``'from'`` or ``'to'``), raising a 400 error on bad input.

        NOTE(review): *aux* is never used in this method's body - confirm
        whether callers still need to pass it.
        """
        try:
            rel, value = cls._parseDateTime(dateTime, ctx == 'from')
        except ArgumentParseError as exc:
            raise HTTPAPIError(str(exc), 400)

        if rel == 'abs':
            return tz.localize(value) if not value.tzinfo else value
        elif rel == 'ctx' and isinstance(value, timedelta):
            value = now_utc() + value

        # from here on, 'value' has to be a datetime
        # 'from' snaps to the start of the day, 'to' to its very end
        if ctx == 'from':
            return tz.localize(value.combine(value.date(), time(0, 0, 0)))
        else:
            return tz.localize(value.combine(value.date(), time(23, 59, 59)))
class IteratedDataFetcher(DataFetcher):
    """DataFetcher that mirrors the pagination/filter state of its hook."""

    def __init__(self, user, hook):
        super().__init__(user, hook)
        # Copy the query-related state from the hook so fetch methods can use it.
        for attr in ('_tz', '_offset', '_limit', '_detail', '_orderBy', '_descending', '_fromDT', '_toDT'):
            setattr(self, attr, getattr(hook, attr))
# Register the serializers provided by this module so the corresponding
# export formats can be requested through the HTTP API.
Serializer.register('html', HTML4Serializer)
Serializer.register('jsonp', JSONPSerializer)
Serializer.register('ics', ICalSerializer)
Serializer.register('atom', AtomSerializer)
| mit | 60ccde69c129c2438c6bb712d7f7768b | 37.671233 | 117 | 0.598742 | 3.973258 | false | false | false | false |
indico/indico | indico/util/mimetypes.py | 2 | 2740 | # This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
import mimetypes
import re
# MIME type -> icon CSS class, exact matches (checked before the regex fallbacks).
_exact_mapping = {
    'application/json': 'icon-file-css',
    'text/css': 'icon-file-css',
    'text/calendar': 'icon-calendar',
    # Word
    'application/vnd.openxmlformats-officedocument.wordprocessingml.document': 'icon-file-word',
    'application/msword': 'icon-file-word',
    # PDF
    'application/pdf': 'icon-file-pdf',
    # Excel
    'application/vnd.ms-excel': 'icon-file-excel',
    'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': 'icon-file-excel',
    # Powerpoint
    'application/vnd.ms-powerpoint': 'icon-file-presentation',
    'application/vnd.openxmlformats-officedocument.presentationml.presentation': 'icon-file-presentation',
    # Archive
    'application/x-7z-compressed': 'icon-file-zip',
    'application/x-ace-compressed': 'icon-file-zip',
    'application/x-rar-compressed': 'icon-file-zip',
    'application/x-tar': 'icon-file-zip',
    'application/zip': 'icon-file-zip',
    # Markup Languages
    'application/xml': 'icon-file-xml',
    'text/xml': 'icon-file-xml',
    'text/n3': 'icon-file-xml',
    'text/html': 'icon-file-xml',
    'text/sgml': 'icon-file-xml',
    # X-separated-values
    'text/csv': 'icon-file-spreadsheet',
    'text/tab-separated-values': 'icon-file-spreadsheet',
}

# (pattern, icon CSS class) fallbacks, checked in order when no exact match exists.
_regex_mapping = [
    # Archive
    ('^application/x-bzip', 'icon-file-zip'),  # matches bzip and bzip2
    # Audio
    ('^audio/', 'icon-file-music'),
    # Images
    ('^image/', 'icon-file-image'),
    # Text
    ('^text/', 'icon-file-text'),
    # Video
    ('^video/', 'icon-file-video'),
    # OpenOffice
    (r'application/vnd\.oasis\.opendocument\.', 'icon-file-openoffice'),
    # XML
    (r'.+/.+\+xml$', 'icon-file-xml'),
    # JSON
    (r'.+/.+\+json$', 'icon-file-css')
]
# Compile the patterns once at import time.
_regex_mapping = [(re.compile(regex), icon) for regex, icon in _regex_mapping]
def icon_from_mimetype(mimetype, default_icon='icon-file-filled'):
    """Get the most suitable icon for a MIME type.

    Exact matches take precedence; otherwise the regex fallbacks are
    scanned in order, and *default_icon* is used when nothing matches.
    """
    mimetype = mimetype.lower()
    exact = _exact_mapping.get(mimetype)
    if exact is not None:
        return exact
    return next((icon for pattern, icon in _regex_mapping if pattern.search(mimetype)), default_icon)
def register_custom_mimetypes():
    """Add extension/mimetype mappings missing from the stdlib table.

    Registers mimetypes/extensions that are not part of the official
    mapping but useful, e.g. because indico has special handling for
    files of that type.
    """
    mimetypes.add_type('text/markdown', '.md')
| mit | 3e9c27e8e30fb9f27d23b85103ae07b9 | 31.619048 | 106 | 0.64781 | 3.366093 | false | false | false | false |
indico/indico | docs/source/indico_uml_directive.py | 4 | 1164 | # This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
"""
A simple Sphinx directive which extends plantweb's `directive`.
The config setting `indico_uml_prelude` can be used to include
additional plantuml code before every `indico_uml` block.
"""
from docutils.statemachine import StringList
from plantweb.directive import UmlDirective
class IndicoUMLDirective(UmlDirective):
    """UML directive that prepends the configured prelude to every diagram."""

    def run(self):
        env = self.state_machine.document.settings.env
        prelude_lines = env.config.indico_uml_prelude.split('\n')
        merged = StringList()
        # Offsets number the prelude first, then the directive body right after it.
        offset = 0
        for line in prelude_lines:
            merged.append(line, source='diagram', offset=offset)
            offset += 1
        for line in self.content:
            merged.append(line, source='diagram', offset=offset)
            offset += 1
        self.content = merged
        return super().run()

    def _get_directive_name(self):
        return 'indico_uml'
def setup(app):
    """Sphinx extension entry point: register the directive and its prelude setting."""
    app.add_directive('indico_uml', IndicoUMLDirective)
    app.add_config_value('indico_uml_prelude', '', True)
| mit | b4659733cd5a80b15808c4970364779c | 30.459459 | 63 | 0.685567 | 3.592593 | false | true | false | false |
indico/indico | indico/modules/rb/statistics.py | 2 | 3137 | # This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from datetime import date, datetime, time
from dateutil.relativedelta import relativedelta
from indico.core.db import db
from indico.modules.rb.models.reservation_occurrences import ReservationOccurrence
from indico.modules.rb.models.reservations import Reservation
from indico.util.date_time import iterdays
# Morning and afternoon working periods used when computing bookable time.
WORKING_TIME_PERIODS = ((time(8, 30), time(12, 30)), (time(13, 30), time(17, 30)))
def calculate_rooms_bookable_time(rooms, start_date=None, end_date=None):
    """Total bookable seconds across *rooms* for the given date range.

    Defaults to the 30-day period ending yesterday; only weekdays and the
    configured working-time periods count as bookable.
    """
    if end_date is None:
        end_date = date.today() - relativedelta(days=1)
    if start_date is None:
        start_date = end_date - relativedelta(days=29)
    today = date.today()
    seconds_per_day = sum((datetime.combine(today, period_end) - datetime.combine(today, period_start)).seconds
                          for period_start, period_end in WORKING_TIME_PERIODS)
    working_days = sum(1 for __ in iterdays(start_date, end_date, skip_weekends=True))
    return working_days * seconds_per_day * len(rooms)
def calculate_rooms_booked_time(rooms, start_date=None, end_date=None):
    """Total booked seconds for *rooms*, clipped to working-time slots.

    Defaults to the 30-day period ending yesterday; only valid occurrences
    on weekdays are counted.
    """
    if end_date is None:
        end_date = date.today() - relativedelta(days=1)
    if start_date is None:
        start_date = end_date - relativedelta(days=29)
    # Reservations on working days (PostgreSQL dow 1-5 = Monday..Friday)
    reservations_query = (Reservation.query
                          .filter(Reservation.room_id.in_(r.id for r in rooms),
                                  db.extract('dow', ReservationOccurrence.start_dt).between(1, 5),
                                  db.cast(ReservationOccurrence.start_dt, db.Date) >= start_date,
                                  db.cast(ReservationOccurrence.end_dt, db.Date) <= end_date,
                                  ReservationOccurrence.is_valid)
                          .join(Reservation.occurrences))
    rsv_start = db.cast(ReservationOccurrence.start_dt, db.TIME)
    rsv_end = db.cast(ReservationOccurrence.end_dt, db.TIME)
    slots = ((db.cast(start, db.TIME), db.cast(end, db.TIME)) for start, end in WORKING_TIME_PERIODS)
    # this basically handles all possible ways an occurrence overlaps with each one of the working time slots:
    # it spans the whole slot / overlaps its start / overlaps its end / lies fully inside it
    overlaps = sum(db.case([
        ((rsv_start < start) & (rsv_end > end), db.extract('epoch', end - start)),
        ((rsv_start < start) & (rsv_end > start) & (rsv_end <= end), db.extract('epoch', rsv_end - start)),
        ((rsv_start >= start) & (rsv_start < end) & (rsv_end > end), db.extract('epoch', end - rsv_start)),
        ((rsv_start >= start) & (rsv_end <= end), db.extract('epoch', rsv_end - rsv_start))
    ], else_=0) for start, end in slots)
    return reservations_query.with_entities(db.func.sum(overlaps)).scalar() or 0
def calculate_rooms_occupancy(rooms, start=None, end=None):
    """Fraction of the bookable time of *rooms* that is actually booked (0 when nothing is bookable)."""
    bookable = calculate_rooms_bookable_time(rooms, start, end)
    booked = calculate_rooms_booked_time(rooms, start, end)
    return (booked / bookable) if bookable else 0
| mit | e482fa9ddf46d73cb49a7c29e5267f41 | 48.015625 | 116 | 0.652534 | 3.424672 | false | false | false | false |
indico/indico | indico/migrations/versions/20210219_1555_da06d8f50342_separate_authorized_scopes_from_tokens.py | 4 | 3691 | """Separate authorized scopes from tokens
Revision ID: da06d8f50342
Revises: 3782de7970da
Create Date: 2021-02-19 15:55:53.134744
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'da06d8f50342'
down_revision = '3782de7970da'
branch_labels = None
depends_on = None
def upgrade():
    # New association table that stores, per (application, user) pair, the
    # scopes the user has authorized.
    op.create_table(
        'application_user_links',
        sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
        sa.Column('application_id', sa.Integer(), nullable=False, index=True),
        sa.Column('user_id', sa.Integer(), nullable=False, index=True),
        sa.Column('scopes', postgresql.ARRAY(sa.String()), nullable=False),
        sa.ForeignKeyConstraint(['application_id'], ['oauth.applications.id']),
        sa.ForeignKeyConstraint(['user_id'], ['users.users.id']),
        sa.UniqueConstraint('application_id', 'user_id'),
        schema='oauth'
    )
    # Tokens become unique per (app, user, scopes) instead of per (app, user).
    op.create_unique_constraint(None, 'tokens', ['application_id', 'user_id', 'scopes'], schema='oauth')
    op.drop_constraint('uq_tokens_application_id_user_id', 'tokens', schema='oauth')
    # Backfill one link row per existing token (previously unique per app+user).
    op.execute('''
        INSERT INTO oauth.application_user_links (application_id, user_id, scopes)
        SELECT application_id, user_id, scopes FROM oauth.tokens;
    ''')
    # Point each token at its link row, then drop the now-redundant columns.
    op.add_column('tokens', sa.Column('app_user_link_id', sa.Integer(), nullable=True), schema='oauth')
    op.create_index(None, 'tokens', ['app_user_link_id'], unique=False, schema='oauth')
    op.create_unique_constraint(None, 'tokens', ['app_user_link_id', 'scopes'], schema='oauth')
    op.execute('''
        UPDATE oauth.tokens t SET app_user_link_id = (
            SELECT id FROM oauth.application_user_links WHERE application_id = t.application_id AND user_id = t.user_id
        );
    ''')
    op.alter_column('tokens', 'app_user_link_id', nullable=False, schema='oauth')
    op.create_foreign_key(None, 'tokens', 'application_user_links', ['app_user_link_id'], ['id'],
                          source_schema='oauth', referent_schema='oauth', ondelete='CASCADE')
    op.drop_column('tokens', 'application_id', schema='oauth')
    op.drop_column('tokens', 'user_id', schema='oauth')
def downgrade():
    op.add_column('tokens', sa.Column('application_id', sa.Integer(), nullable=True), schema='oauth')
    op.add_column('tokens', sa.Column('user_id', sa.Integer(), nullable=True), schema='oauth')
    op.create_index(None, 'tokens', ['user_id'], unique=False, schema='oauth')
    # Lossy: links holding more than one token cannot be represented in the old
    # (app, user)-unique schema, so ALL tokens of such links are deleted before
    # the remaining tokens are re-pointed at their application/user.
    op.execute('''
        DELETE FROM oauth.tokens
        WHERE app_user_link_id IN (
            SELECT app_user_link_id FROM oauth.tokens GROUP BY app_user_link_id HAVING COUNT(*) > 1
        );
        UPDATE oauth.tokens t SET application_id = (
            SELECT application_id FROM oauth.application_user_links WHERE id = t.app_user_link_id
        ), user_id = (
            SELECT user_id FROM oauth.application_user_links WHERE id = t.app_user_link_id
        );
    ''')
    op.create_foreign_key(None, 'tokens', 'applications', ['application_id'], ['id'],
                          source_schema='oauth', referent_schema='oauth')
    op.create_foreign_key(None, 'tokens', 'users', ['user_id'], ['id'],
                          source_schema='oauth', referent_schema='users')
    op.alter_column('tokens', 'application_id', nullable=False, schema='oauth')
    op.alter_column('tokens', 'user_id', nullable=False, schema='oauth')
    op.drop_column('tokens', 'app_user_link_id', schema='oauth')
    op.create_unique_constraint(None, 'tokens', ['application_id', 'user_id'], schema='oauth')
    op.drop_table('application_user_links', schema='oauth')
indico/indico | indico/core/plugins/controllers.py | 2 | 2589 | # This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from collections import defaultdict
from operator import attrgetter
from flask import flash, request
from werkzeug.exceptions import NotFound
from indico.core.plugins import PluginCategory, plugin_engine
from indico.core.plugins.views import WPPlugins
from indico.modules.admin import RHAdminBase
from indico.util.i18n import _
from indico.web.flask.util import redirect_or_jsonify, url_for
from indico.web.forms.base import FormDefaults
class RHPluginsBase(RHAdminBase):
    """Base class for plugin administration RHs (admin-only via RHAdminBase)."""

    pass
class RHPlugins(RHPluginsBase):
    """List all active plugins, grouped and sorted by category."""

    def _process(self):
        categories = defaultdict(list)
        other = []
        # Iterate the active plugins directly instead of materializing an
        # intermediate list (the old `[p for p in ...]` copy was redundant).
        for plugin in plugin_engine.get_active_plugins().values():
            if plugin.category:
                categories[plugin.category].append(plugin)
            else:
                other.append(plugin)
        # Sort the plugins of each category in alphabetic order and in a way that the internal plugins are always
        # listed in the front
        for category in categories:
            categories[category].sort(key=attrgetter('configurable', 'title'))
        ordered_categories = dict(sorted(categories.items()))
        if other:
            ordered_categories[PluginCategory.other] = sorted(other, key=attrgetter('configurable', 'title'))
        return WPPlugins.render_template('index.html', categorized_plugins=ordered_categories)
class RHPluginDetails(RHPluginsBase):
    """Show and edit the settings of a single configurable plugin."""

    back_button_endpoint = 'plugins.index'

    def _process_args(self):
        self.plugin = plugin_engine.get_plugin(request.view_args['plugin'])
        if not self.plugin or not self.plugin.configurable:
            raise NotFound

    def _process(self):
        plugin = self.plugin
        form = None
        # Run inside the plugin context so its settings/forms resolve correctly.
        with plugin.plugin_context():
            if plugin.settings_form:
                form = plugin.settings_form(obj=FormDefaults(**plugin.settings.get_all()))
                if form.validate_on_submit():
                    plugin.settings.set_multi(form.data)
                    flash(_('Settings saved ({0})').format(plugin.title), 'success')
                    return redirect_or_jsonify(request.url)
        return WPPlugins.render_template('details.html', plugin=plugin, form=form,
                                         back_url=url_for(self.back_button_endpoint))
| mit | 556c49cd4322c796924f923cd3360d09 | 37.641791 | 113 | 0.660487 | 4.223491 | false | false | false | false |
indico/indico | indico/modules/events/abstracts/__init__.py | 2 | 7783 | # This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from flask import render_template, session
from indico.core import signals
from indico.core.config import config
from indico.core.logger import Logger
from indico.core.permissions import ManagementPermission
from indico.modules.events.abstracts.clone import AbstractSettingsCloner
from indico.modules.events.abstracts.notifications import ContributionTypeCondition, StateCondition, TrackCondition
from indico.modules.events.features.base import EventFeature
from indico.modules.events.models.events import Event, EventType
from indico.modules.events.timetable.models.breaks import Break
from indico.modules.events.tracks.models.tracks import Track
from indico.util.i18n import _
from indico.util.placeholders import Placeholder
from indico.web.flask.templating import template_hook
from indico.web.flask.util import url_for
from indico.web.menu import SideMenuItem
logger = Logger.get('events.abstracts')
@signals.event.updated.connect
@signals.event.contribution_created.connect
@signals.event.contribution_updated.connect
@signals.event.contribution_deleted.connect
@signals.event.session_deleted.connect
@signals.event.session_updated.connect
@signals.event.person_updated.connect
@signals.event.times_changed.connect
def _clear_boa_cache(sender, obj=None, **kwargs):
    """Invalidate the cached Book of Abstracts whenever relevant event data changes."""
    from indico.modules.events.abstracts.util import clear_boa_cache
    if isinstance(obj, Break):
        # breaks do not show up in the BoA
        return
    clear_boa_cache((obj or sender).event)
@signals.menu.items.connect_via('event-management-sidemenu')
def _extend_event_management_menu(sender, event, **kwargs):
    """Add the Call for Abstracts entry to the event management side menu."""
    can_manage = event.can_manage(session.user, permission='abstracts')
    if can_manage and AbstractsFeature.is_allowed_for_event(event):
        return SideMenuItem('abstracts', _('Call for Abstracts'), url_for('abstracts.management', event),
                            section='workflows', weight=30)
@signals.event.get_feature_definitions.connect
def _get_feature_definitions(sender, **kwargs):
    """Expose the Call for Abstracts feature to the event feature registry."""
    return AbstractsFeature
@signals.event_management.get_cloners.connect
def _get_cloners(sender, **kwargs):
    """Register the cloner copying CfA settings when an event is cloned."""
    yield AbstractSettingsCloner
@signals.users.merged.connect
def _merge_users(target, source, **kwargs):
    """Re-point all abstract-related DB rows and ACL entries from *source* to *target*."""
    from indico.modules.events.abstracts.models.abstracts import Abstract
    from indico.modules.events.abstracts.models.comments import AbstractComment
    from indico.modules.events.abstracts.models.reviews import AbstractReview
    from indico.modules.events.abstracts.settings import abstracts_settings
    for column in ('submitter_id', 'modified_by_id', 'judge_id'):
        Abstract.query.filter_by(**{column: source.id}).update({getattr(Abstract, column): target.id})
    for column in ('user_id', 'modified_by_id'):
        AbstractComment.query.filter_by(**{column: source.id}).update({getattr(AbstractComment, column): target.id})
    AbstractReview.query.filter_by(user_id=source.id).update({AbstractReview.user_id: target.id})
    abstracts_settings.acls.merge_users(target, source)
@signals.core.get_conditions.connect_via('abstract-notifications')
def _get_abstract_notification_rules(sender, **kwargs):
    """Provide the condition types usable in abstract notification rules."""
    yield StateCondition
    yield TrackCondition
    yield ContributionTypeCondition
class AbstractsFeature(EventFeature):
    """Event feature toggling the Call for Abstracts workflow."""

    name = 'abstracts'
    friendly_name = _('Call for Abstracts')
    description = _('Gives event managers the opportunity to open a "Call for Abstracts" and use the abstract '
                    'reviewing workflow.')

    @classmethod
    def is_allowed_for_event(cls, event):
        # Abstract workflows only make sense for conferences.
        return event.type_ == EventType.conference
@signals.acl.get_management_permissions.connect_via(Event)
def _get_event_management_permissions(sender, **kwargs):
    """Register the reviewer-related management permissions on events."""
    yield AbstractReviewerPermission
    yield GlobalReviewPermission
@signals.acl.get_management_permissions.connect_via(Event)
def _get_abstract_permissions(sender, **kwargs):
    """Register the abstract management permission on events."""
    yield AbstractPermission
@signals.acl.get_management_permissions.connect_via(Track)
def _get_track_management_permissions(sender, **kwargs):
    """Register the review permission on tracks."""
    yield ReviewPermission
@signals.event_management.management_url.connect
def _get_event_management_url(event, **kwargs):
    """Return the abstracts management URL when the user may manage abstracts."""
    if not event.can_manage(session.user, permission='abstracts'):
        return None
    return url_for('abstracts.management', event)
class GlobalReviewPermission(ManagementPermission):
    """Permission to review abstracts in every track of an event."""

    name = 'review_all_abstracts'
    friendly_name = _('Review for all tracks')
    description = _('Grants abstract reviewing rights to all tracks of the event.')
class ReviewPermission(ManagementPermission):
    """Permission to review abstracts within a specific track."""

    name = 'review'
    friendly_name = _('Review')
    description = _('Grants track reviewer rights in a track.')
    user_selectable = True
    color = 'orange'
    default = True
class AbstractPermission(ManagementPermission):
    """Permission to manage the abstracts of an event."""

    name = 'abstracts'
    friendly_name = _('Abstracts')
    description = _('Grants abstract management rights on an event.')
    user_selectable = True
class AbstractReviewerPermission(ManagementPermission):
    """Permission to review abstracts on an event."""

    name = 'abstract_reviewer'
    friendly_name = _('Reviewer')
    description = _('Grants abstract reviewing rights on an event.')
@signals.core.get_placeholders.connect_via('abstract-notification-email')
def _get_notification_placeholders(sender, **kwargs):
    """Yield all Placeholder subclasses exported by the abstracts placeholders module."""
    from indico.modules.events.abstracts import placeholders
    exported = (getattr(placeholders, name) for name in placeholders.__all__)
    yield from (cls for cls in exported if issubclass(cls, Placeholder))
@signals.menu.items.connect_via('event-editing-sidemenu')
def _extend_editing_menu(sender, event, **kwargs):
    """Add a CfA link to the editing side menu when the feature is enabled."""
    if event.has_feature('abstracts'):
        yield SideMenuItem('abstracts', _('Call for Abstracts'), url_for('abstracts.call_for_abstracts', event))
@signals.event.sidemenu.connect
def _extend_event_menu(sender, **kwargs):
    """Add BoA, CfA and reviewing-area entries to the event display menu."""
    from indico.modules.events.abstracts.util import has_user_tracks
    from indico.modules.events.contributions import contribution_settings
    from indico.modules.events.layout.util import MenuEntryData

    def _boa_visible(event):
        # The BoA requires the feature + published contributions, plus either
        # LaTeX support (to generate it) or a custom uploaded BoA file.
        return (event.has_feature('abstracts') and contribution_settings.get(event, 'published')
                and (config.LATEX_ENABLED or event.has_custom_boa))

    def _reviewing_area_visible(event):
        # Only shown to logged-in users who can review at least one track.
        if not session.user or not event.has_feature('abstracts'):
            return False
        return has_user_tracks(event, session.user)

    yield MenuEntryData(title=_('Book of Abstracts'), name='abstracts_book', endpoint='abstracts.export_boa',
                        position=9, visible=_boa_visible, static_site=True)
    yield MenuEntryData(title=_('Call for Abstracts'), name='call_for_abstracts',
                        endpoint='abstracts.call_for_abstracts', position=2,
                        visible=lambda event: event.has_feature('abstracts'))
    yield MenuEntryData(title=_('Reviewing Area'), name='abstract_reviewing_area',
                        endpoint='abstracts.display_reviewable_tracks', position=0, parent='call_for_abstracts',
                        visible=_reviewing_area_visible)
@template_hook('conference-home-info')
def _inject_cfa_announcement(event, **kwargs):
    """Render the CfA announcement on the conference home page while relevant."""
    if not event.has_feature('abstracts'):
        return
    if event.cfa.is_open or (session.user and event.cfa.can_submit_abstracts(session.user)):
        return render_template('events/abstracts/display/conference_home.html', event=event)
| mit | 06ca4b0a36cbc7dd2e4fae839a51b79c | 39.536458 | 118 | 0.740203 | 3.815196 | false | false | false | false |
indico/indico | indico/modules/attachments/controllers/display/event.py | 2 | 2732 | # This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from flask import redirect, request, session
from werkzeug.exceptions import Forbidden
from indico.modules.attachments.controllers.display.base import DownloadAttachmentMixin
from indico.modules.attachments.controllers.event_package import AttachmentPackageMixin
from indico.modules.attachments.controllers.util import SpecificFolderMixin
from indico.modules.attachments.views import (WPEventFolderDisplay, WPPackageEventAttachmentsDisplay,
WPPackageEventAttachmentsDisplayConference)
from indico.modules.events.controllers.base import RHDisplayEventBase
from indico.modules.events.models.events import EventType
class RHDownloadEventAttachment(DownloadAttachmentMixin, RHDisplayEventBase):
    """Download an attachment in the context of an event."""

    def _process_args(self):
        RHDisplayEventBase._process_args(self)
        DownloadAttachmentMixin._process_args(self)

    def _check_access(self):
        try:
            DownloadAttachmentMixin._check_access(self)
        except Forbidden:
            # if we get here the user has no access to the attachment itself so we
            # trigger the event access check since it may show the access key form
            # or registration required message
            RHDisplayEventBase._check_access(self)
            # the user may have access to the event but not the material so if we
            # are here we need to re-raise the original exception
            raise
class RHListEventAttachmentFolder(SpecificFolderMixin, RHDisplayEventBase):
    """List the contents of an attachment folder within an event."""

    def _process_args(self):
        RHDisplayEventBase._process_args(self)
        SpecificFolderMixin._process_args(self)

    def _check_access(self):
        if self.folder.can_access(session.user):
            return
        # No access to the folder itself: run the event-level check first since
        # it may show the access key form or a registration-required message;
        # if that passes we still need to deny access to the folder.
        RHDisplayEventBase._check_access(self)
        raise Forbidden

    def _process(self):
        attachments = self.folder.attachments
        if request.args.get('redirect_if_single') == '1' and len(attachments) == 1:
            return redirect(attachments[0].download_url)
        return WPEventFolderDisplay.render_template('folder.html', self.event, folder=self.folder)
class RHPackageEventAttachmentsDisplay(AttachmentPackageMixin, RHDisplayEventBase):
    """Generate a material package for an event."""

    @property
    def wp(self):
        # Conferences get a dedicated WP using the conference layout.
        is_conference = self.event.type_ == EventType.conference
        return WPPackageEventAttachmentsDisplayConference if is_conference else WPPackageEventAttachmentsDisplay
indico/indico | indico/modules/events/layout/clone.py | 1 | 3312 | # This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from indico.core.db import db
from indico.modules.events.cloning import EventCloner, get_attrs_to_clone
from indico.modules.events.features.util import is_feature_enabled
from indico.modules.events.layout import layout_settings
from indico.modules.events.layout.models.menu import EventPage, MenuEntry
from indico.modules.events.models.events import EventType
from indico.util.i18n import _
class ImageCloner(EventCloner):
    """Clones the layout images of an event."""

    name = 'images'
    friendly_name = _('Images')

    @property
    def is_visible(self):
        return is_feature_enabled(self.old_event, 'images')

    @property
    def is_available(self):
        return self._find_images(self.old_event).has_rows()

    def get_conflicts(self, target_event):
        if self._find_images(target_event).has_rows():
            return [_('The target event already has images')]

    def _find_images(self, event):
        return event.layout_images

    def run(self, new_event, cloners, shared_data, event_exists=False):
        from indico.modules.events.layout.models.images import ImageFile
        for source_image in self._find_images(self.old_event):
            cloned = ImageFile(filename=source_image.filename, content_type=source_image.content_type)
            new_event.layout_images.append(cloned)
            # Copy the actual file contents into the new image's storage.
            with source_image.open() as fd:
                cloned.save(fd)
        db.session.flush()
class LayoutCloner(EventCloner):
    """Clones conference layout settings and the customized menu."""

    name = 'layout'
    friendly_name = _('Layout settings and menu customization')
    new_event_only = True

    @property
    def is_visible(self):
        # Only conferences have a customizable layout/menu.
        return self.old_event.type_ == EventType.conference

    def run(self, new_event, cloners, shared_data, event_exists=False):
        with db.session.no_autoflush:
            for col in ('logo_metadata', 'logo', 'stylesheet_metadata', 'stylesheet'):
                setattr(new_event, col, getattr(self.old_event, col))
            layout_settings.set_multi(new_event, layout_settings.get_all(self.old_event, no_defaults=True))
            if layout_settings.get(self.old_event, 'use_custom_menu'):
                # Top-level entries only; children are handled by the recursion below.
                for menu_entry in MenuEntry.get_for_event(self.old_event):
                    self._copy_menu_entry(menu_entry, new_event)
        db.session.flush()

    def _copy_menu_entry(self, menu_entry, new_event, parent=None, include_children=True):
        # Copy the plain column values; relationships (page/children) are handled below.
        base_columns = get_attrs_to_clone(MenuEntry)
        new_menu_entry = MenuEntry(**{col: getattr(menu_entry, col) for col in base_columns})
        if menu_entry.is_page:
            with db.session.no_autoflush:  # menu_entry.page is lazy-loaded
                page = EventPage(event=new_event, html=menu_entry.page.html)
            new_menu_entry.page = page
            if menu_entry.page.is_default:
                new_event.default_page = new_menu_entry.page
        new_event.menu_entries.append(new_menu_entry)
        if parent is not None:
            parent.append(new_menu_entry)
        if include_children:
            # NOTE: only one level of nesting is copied (include_children=False below).
            for child in menu_entry.children:
                self._copy_menu_entry(child, new_event, new_menu_entry.children, include_children=False)
indico/indico | indico/modules/events/persons/schemas.py | 2 | 4809 | # This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from marshmallow import fields, post_dump, post_load, pre_load
from marshmallow_enum import EnumField
from indico.core.marshmallow import mm
from indico.modules.events.models.persons import EventPerson
from indico.modules.users.models.affiliations import Affiliation
from indico.modules.users.models.users import UserTitle
from indico.modules.users.schemas import AffiliationSchema
from indico.util.marshmallow import ModelField
class PersonLinkSchema(mm.Schema):
    """(De)serializes person links (e.g. speakers/authors attached to an object)."""

    type = fields.String(dump_default='person_link')
    person_id = fields.Int()
    user_id = fields.Int(attribute='person.user_id', dump_only=True)
    user_identifier = fields.String(attribute='person.user.identifier', dump_only=True)
    name = fields.String(attribute='display_full_name', dump_only=True)
    first_name = fields.String(load_default='')
    last_name = fields.String(required=True)
    _title = EnumField(UserTitle, data_key='title')
    affiliation = fields.String(load_default='')
    affiliation_link = ModelField(Affiliation, data_key='affiliation_id', load_default=None, load_only=True)
    affiliation_id = fields.Integer(load_default=None, dump_only=True)
    affiliation_meta = fields.Nested(AffiliationSchema, attribute='affiliation_link', dump_only=True)
    phone = fields.String(load_default='')
    address = fields.String(load_default='')
    email = fields.String(load_default='')
    display_order = fields.Int(load_default=0, dump_default=0)
    avatar_url = fields.Function(lambda o: o.person.user.avatar_url if o.person.user else None, dump_only=True)
    roles = fields.List(fields.String(), load_only=True)

    @pre_load
    def load_nones(self, data, **kwargs):
        # Normalize falsy/placeholder values before field validation runs.
        if not data.get('title'):
            data['title'] = UserTitle.none.name
        if not data.get('affiliation'):
            data['affiliation'] = ''
        if data.get('affiliation_id') == -1:
            # external search results with a predefined affiliation
            del data['affiliation_id']
        return data

    @post_load
    def ensure_affiliation_text(self, data, **kwargs):
        # A linked predefined affiliation wins over free-text input.
        if data['affiliation_link']:
            data['affiliation'] = data['affiliation_link'].name
        return data

    @post_dump
    def dump_type(self, data, **kwargs):
        # Persons without a DB id get no type/id markers; the "none" title
        # is serialized as null instead of the enum member name.
        if data['person_id'] is None:
            del data['type']
            del data['person_id']
        if data['title'] == UserTitle.none.name:
            data['title'] = None
        return data
class EventPersonSchema(mm.SQLAlchemyAutoSchema):
    """(De)serializes `EventPerson` rows, including affiliation metadata."""

    class Meta:
        model = EventPerson
        # `public_fields` excludes contact data (phone/address) from `fields`.
        public_fields = ('id', 'identifier', '_title', 'email', 'affiliation', 'affiliation_link', 'affiliation_id',
                         'affiliation_meta', 'name', 'first_name', 'last_name', 'user_identifier')
        fields = public_fields + ('phone', 'address')

    type = fields.Constant('EventPerson')
    _title = EnumField(UserTitle, data_key='title')
    name = fields.String(attribute='full_name')
    user_identifier = fields.String(attribute='user.identifier')
    last_name = fields.String(required=True)
    email = fields.String(load_default='')
    affiliation_link = ModelField(Affiliation, data_key='affiliation_id', load_default=None, load_only=True)
    affiliation_id = fields.Integer(load_default=None, dump_only=True)
    affiliation_meta = fields.Nested(AffiliationSchema, attribute='affiliation_link', dump_only=True)

    @pre_load
    def handle_affiliation_link(self, data, **kwargs):
        # in some cases we get data that's already been loaded by PersonLinkSchema and thus no longer
        # has an affiliation_id but only an affiliation_link...
        data = data.copy()
        if affiliation_link := data.pop('affiliation_link', None):
            data['affiliation_id'] = affiliation_link.id
        return data

    @pre_load
    def load_title(self, data, **kwargs):
        # An empty/missing title is normalized to the explicit "none" member.
        if 'title' in data and not data.get('title'):
            data['title'] = UserTitle.none.name
        return data

    @post_load
    def ensure_affiliation_text(self, data, **kwargs):
        # Keep the plain-text affiliation in sync with the linked predefined one.
        if affiliation_link := data.get('affiliation_link'):
            data['affiliation'] = affiliation_link.name
        return data

    @post_dump
    def handle_no_title(self, data, **kwargs):
        # Serialize the "none" title as null instead of the enum member name.
        if data['title'] == UserTitle.none.name:
            data['title'] = None
        return data
class EventPersonUpdateSchema(EventPersonSchema):
    """Restricted variant of :class:`EventPersonSchema` used for updates."""

    class Meta(EventPersonSchema.Meta):
        # Only user-editable attributes are accepted when updating.
        fields = ('title', 'first_name', 'last_name', 'address', 'phone', 'affiliation', 'affiliation_link')

    # Unlike the parent, `title` maps directly (no `_title`/data_key indirection).
    title = EnumField(UserTitle)
| mit | 3b31644bd25ae6ab8d416f2a2bee8054 | 40.817391 | 116 | 0.669994 | 3.690714 | false | false | false | false |
indico/indico | indico/modules/legal/forms.py | 2 | 1314 | # This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from wtforms.fields import TextAreaField, URLField
from wtforms.validators import URL, Optional
from indico.util.i18n import _
from indico.web.forms.base import IndicoForm
from indico.web.forms.widgets import CKEditorWidget
class LegalMessagesForm(IndicoForm):
    """Admin form configuring the legal/privacy texts shown by Indico.

    For both the terms of service and the privacy policy, an external URL
    takes precedence; the inline rich-text version is only used when no URL
    is provided.
    """

    # Disclaimers shown for network-protected resp. restricted material.
    network_protected_disclaimer = TextAreaField(_('Network-protected information disclaimer'), widget=CKEditorWidget())
    restricted_disclaimer = TextAreaField(_('Restricted information disclaimer'), widget=CKEditorWidget())
    tos_url = URLField(_('URL'), [Optional(), URL()],
                       description=_('The URL to an external page with terms and conditions'))
    tos = TextAreaField(_('Text'), widget=CKEditorWidget(),
                        description=_('Only used if no URL is provided'))
    privacy_policy_url = URLField(_('URL'), [Optional(), URL()],
                                  description=_('The URL to an external page with the privacy policy'))
    privacy_policy = TextAreaField(_('Text'), widget=CKEditorWidget(),
                                   description=_('Only used if no URL is provided'))
| mit | d8ff8cc94fcfb55368b811bcc1524521 | 49.538462 | 120 | 0.679604 | 4.308197 | false | false | false | false |
indico/indico | indico/migrations/versions/20210607_1548_735dc4e8d2f3_update_merged_users_in_registrations.py | 4 | 1729 | """Update merged users in registrations
Revision ID: 735dc4e8d2f3
Revises: 178d297eae7e
Create Date: 2021-06-07 15:48:19.154975
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = '735dc4e8d2f3'
down_revision = '178d297eae7e'
branch_labels = None
depends_on = None
def upgrade():
    """Repoint registrations from merged users to their merge target.

    Builds a recursive map following ``merged_into_id`` chains down to the
    final (unmerged) user, then rewrites ``registrations.user_id`` — skipping
    any update that would collide with an existing registration of the target
    user or of another user merged into the same target.
    """
    # NOTE(review): "conflcit" in the first SQL comment is a typo in the
    # original migration; left untouched since the string was already applied.
    op.execute('''
        CREATE TEMPORARY TABLE merge_map AS
            WITH RECURSIVE merge_map (user_id, current_user_id) AS (
                SELECT id, id FROM users.users WHERE merged_into_id IS NULL
                UNION ALL
                SELECT u.id, mm.current_user_id FROM users.users u, merge_map mm WHERE u.merged_into_id = mm.user_id
            ) SELECT * FROM merge_map WHERE user_id != current_user_id;

        CREATE INDEX ix_merge_map_current_user_id ON merge_map USING btree (current_user_id);

        UPDATE event_registration.registrations r
        SET user_id = mm.current_user_id
        FROM merge_map mm
        WHERE mm.user_id = r.user_id AND mm.current_user_id != r.user_id AND NOT EXISTS (
            -- avoid conflcit with existing registration of the target user
            SELECT 1
            FROM event_registration.registrations r2
            WHERE r2.registration_form_id = r.registration_form_id AND r2.user_id = mm.current_user_id
        ) AND NOT EXISTS (
            -- avoid conflict with existing registration of another user merged into the same target user
            SELECT 1
            FROM event_registration.registrations r3
            JOIN merge_map mm2 ON (mm2.user_id = r3.user_id AND mm2.current_user_id = mm.current_user_id)
            WHERE r3.registration_form_id = r.registration_form_id AND mm2.user_id != r.user_id
        );
    ''')
def downgrade():
    """No-op: rewriting user ids of merged users cannot be reversed."""
    pass
| mit | fd269f56db74113a3bb963177b769c36 | 35.020833 | 112 | 0.655292 | 3.5 | false | false | false | false |
indico/indico | indico/modules/vc/plugins.py | 2 | 9554 | # This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
import re
from flask import render_template
from flask_pluginengine import render_plugin_template
from indico.core import signals
from indico.modules.events.contributions import Contribution
from indico.modules.events.sessions.models.blocks import SessionBlock
from indico.modules.vc.forms import VCPluginSettingsFormBase
from indico.modules.vc.models.vc_rooms import VCRoomEventAssociation, VCRoomLinkType
from indico.util.decorators import classproperty
from indico.web.flask.templating import get_overridable_template_name
from indico.web.forms.base import FormDefaults
# Every VC plugin name must start with "vc_"; this strips that prefix to
# derive the short service name (e.g. "vc_zoom" -> "zoom").
PREFIX_RE = re.compile('^vc_')
class VCPluginMixin:
    """Base mixin implemented by videoconference (VC) plugins.

    Subclasses provide the service-specific forms, templates and room
    creation logic; this mixin supplies the settings/ACL handling and the
    rendering and event-linking plumbing shared by all VC plugins.
    """

    settings_form = VCPluginSettingsFormBase
    default_settings = {'notification_emails': []}
    acl_settings = {'acl', 'managers'}
    #: the :class:`IndicoForm` to use for the videoconference room form
    vc_room_form = None
    #: the :class:`IndicoForm` to use for the videoconference room attach form
    vc_room_attach_form = None
    #: the readable name of the VC plugin
    friendly_name = None

    def init(self):
        """Enforce the ``vc_`` naming convention and connect signal handlers."""
        super().init()
        if not self.name.startswith('vc_'):
            raise Exception('Videoconference plugins must be named vc_*')
        self.connect(signals.users.merged, self._merge_users)

    @property
    def service_name(self):
        """The plugin name without the mandatory ``vc_`` prefix."""
        return PREFIX_RE.sub('', self.name)

    @property
    def logo_url(self):
        raise NotImplementedError('VC plugin must have a logo URL')

    @property
    def icon_url(self):
        raise NotImplementedError('VC plugin must have an icon URL')

    @classproperty
    @staticmethod
    def category():
        # local import, presumably to avoid a circular dependency -- confirm
        from indico.core.plugins import PluginCategory
        return PluginCategory.videoconference

    def get_vc_room_form_defaults(self, event):
        """Default values for the room creation form."""
        return {
            'name': event.title,
            'show': True,
            'linking': 'event',
            'contribution': '',
            'block': ''
        }

    def get_vc_room_attach_form_defaults(self, event):
        """Default values for the form attaching an existing room."""
        return {
            'room': None,
            'contribution': None,
            'block': None,
            'linking': 'event',
            'show': True
        }

    def get_notification_cc_list(self, action, vc_room, event):
        """Extra CC addresses for notifications; empty by default."""
        return set()

    def get_notification_bcc_list(self, action, vc_room, event):
        """BCC addresses for notifications (the configured admin emails)."""
        return set(self.settings.get('notification_emails', set()))

    def render_form(self, **kwargs):
        """Render the videoconference room form.

        :param kwargs: arguments passed to the template
        """
        return render_template('vc/manage_event_create_room.html', **kwargs)

    def render_info_box(self, vc_room, event_vc_room, event, **kwargs):
        """Render the information shown in the expandable box of a VC room row.

        :param vc_room: the VC room object
        :param event_vc_room: the association of an event and a VC room
        :param event: the event with the current VC room attached to it
        :param kwargs: arguments passed to the template
        """
        return render_plugin_template(f'{self.name}:info_box.html', plugin=self, event_vc_room=event_vc_room,
                                      event=event, vc_room=vc_room, settings=self.settings, **kwargs)

    def render_manage_event_info_box(self, vc_room, event_vc_room, event, **kwargs):
        """
        Render the information shown in the expandable box on a
        VC room in the management area.

        :param vc_room: the VC room object
        :param event_vc_room: the association of an event and a VC room
        :param event: the event with the current VC room attached to it
        :param kwargs: arguments passed to the template
        """
        return render_plugin_template(f'{self.name}:manage_event_info_box.html', plugin=self,
                                      event_vc_room=event_vc_room, event=event, vc_room=vc_room,
                                      settings=self.settings, **kwargs)

    def render_buttons(self, vc_room, event_vc_room, **kwargs):
        """
        Render a list of plugin specific buttons (eg: Join URL, etc)
        in the management area.

        :param vc_room: the VC room object
        :param event_vc_room: the association of an event and a VC room
        :param kwargs: arguments passed to the template
        """
        name = get_overridable_template_name('management_buttons.html', self, core_prefix='vc/')
        return render_template(name, plugin=self, vc_room=vc_room, event_vc_room=event_vc_room, **kwargs)

    def get_extra_delete_msg(self, vc_room, event_vc_room):
        """
        Return a custom message to show in the confirmation dialog
        when deleting a VC room.

        :param vc_room: the VC room object
        :param event_vc_room: the association of an event and a VC room
        :return: a string (may contain HTML) with the message to display
        """
        return ''

    def render_event_buttons(self, vc_room, event_vc_room, **kwargs):
        """
        Render a list of plugin specific buttons (eg: Join URL, etc)
        in the event page.

        :param vc_room: the VC room object
        :param event_vc_room: the association of an event and a VC room
        :param kwargs: arguments passed to the template
        """
        name = get_overridable_template_name('event_buttons.html', self, core_prefix='vc/')
        return render_template(name, plugin=self, vc_room=vc_room, event_vc_room=event_vc_room,
                               event=event_vc_room.event, **kwargs)

    def create_form(self, event, existing_vc_room=None, existing_event_vc_room=None):
        """Create the videoconference room form.

        :param event: the event the videoconference room is for
        :param existing_vc_room: a vc_room from which to retrieve data for the form
        :return: an instance of an :class:`IndicoForm` subclass
        """
        if existing_vc_room and existing_event_vc_room:
            kwargs = {
                'name': existing_vc_room.name,
                'linking': existing_event_vc_room.link_type.name,
                'show': existing_event_vc_room.show
            }
            if existing_event_vc_room.link_type == VCRoomLinkType.contribution:
                kwargs['contribution'] = existing_event_vc_room.contribution_id
            elif existing_event_vc_room.link_type == VCRoomLinkType.block:
                kwargs['block'] = existing_event_vc_room.session_block_id
            # Association-specific data overrides the room-wide data.
            data = existing_vc_room.data
            data.update(existing_event_vc_room.data)
            defaults = FormDefaults(data, **kwargs)
        else:
            defaults = FormDefaults(self.get_vc_room_form_defaults(event))
        with self.plugin_context():
            return self.vc_room_form(prefix='vc-', obj=defaults, event=event, vc_room=existing_vc_room)

    def update_data_association(self, event, vc_room, event_vc_room, data):
        """Apply linking-related form data to the event<->room association."""
        contribution_id = data.pop('contribution')
        block_id = data.pop('block')
        link_type = VCRoomLinkType[data.pop('linking')]
        if link_type == VCRoomLinkType.event:
            event_vc_room.link_object = event
        elif link_type == VCRoomLinkType.contribution:
            event_vc_room.link_object = Contribution.get_or_404(contribution_id)
        elif link_type == VCRoomLinkType.block:
            event_vc_room.link_object = SessionBlock.get_or_404(block_id)
        event_vc_room.vc_room = vc_room
        event_vc_room.show = data.pop('show')
        if event_vc_room.data is None:
            event_vc_room.data = {}

    def update_data_vc_room(self, vc_room, data, is_new=False):
        """Apply room-wide form data to the VC room itself."""
        if 'name' in data:
            vc_room.name = data.pop('name')
        if vc_room.data is None:
            vc_room.data = {}

    def create_room(self, vc_room, event):
        raise NotImplementedError('Plugin must implement create_room()')

    def clone_room(self, old_event_vc_room, link_object):
        """Clone the room, returning a new :class:`VCRoomEventAssociation`.

        :param old_event_vc_room: the original :class:`VCRoomEventAssociation`
        :param link_object: the new object the association will be tied to
        :return: the new :class:`VCRoomEventAssociation`
        """
        return VCRoomEventAssociation(show=old_event_vc_room.show, data=old_event_vc_room.data,
                                      link_object=link_object)

    def can_manage_vc_rooms(self, user, event):
        """Check if a user can manage vc rooms on an event."""
        if self.can_manage_vc(user):
            return True
        if not self.settings.acls.get('acl'):  # everyone has access
            return True
        return self.settings.acls.contains_user('acl', user)

    def can_manage_vc_room(self, user, room):
        """Check if a user can manage a vc room."""
        return (user.is_admin or
                self.can_manage_vc(user) or
                any(evt_assoc.event.can_manage(user) for evt_assoc in room.events))

    def can_manage_vc(self, user):
        """Check if a user has management rights on this VC system."""
        if user.is_admin:
            return True
        return self.settings.acls.contains_user('managers', user)

    def _merge_users(self, target, source, **kwargs):
        """Signal handler keeping plugin ACLs consistent after a user merge."""
        self.settings.acls.merge_users(target, source)
| mit | 5e6adee80fa1341ab9e6f2699e91b053 | 38.974895 | 109 | 0.630731 | 3.8869 | false | false | false | false |
wandb/client | wandb/sdk/internal/file_pusher.py | 1 | 5854 | import logging
import os
import queue
import tempfile
import time
from typing import Mapping, Optional, Tuple, TYPE_CHECKING
import wandb
from wandb.filesync import dir_watcher, stats, step_checksum, step_upload
import wandb.util
if TYPE_CHECKING:
from wandb.sdk.interface import artifacts
from wandb.sdk.internal import (
artifacts as internal_artifacts,
file_stream,
internal_api,
)
# Temporary directory for copies we make of some file types to
# reduce the probability that the file gets changed while we're
# uploading it.
TMP_DIR = tempfile.TemporaryDirectory("wandb")

# Module-level logger for upload lifecycle diagnostics.
logger = logging.getLogger(__name__)
class FilePusher:
    """Parallel file upload class.

    This manages uploading multiple files in parallel. It will restart a given file's
    upload job if it receives a notification that that file has been modified.
    The finish() method will block until all events have been processed and all
    uploads are complete.

    Internally this is a two-stage pipeline: incoming requests are checksummed
    (StepChecksum) and then handed to an upload stage (StepUpload) via queues.
    """

    # Upper bound on concurrent upload jobs in the upload stage.
    MAX_UPLOAD_JOBS = 64

    def __init__(
        self,
        api: "internal_api.Api",
        file_stream: "file_stream.FileStreamApi",
        silent: Optional[bool] = False,
    ) -> None:
        self._api = api
        self._tempdir = tempfile.TemporaryDirectory("wandb")
        self._stats = stats.Stats()
        # Requests enter here, get checksummed, then flow to the upload queue.
        self._incoming_queue: "queue.Queue[step_checksum.Event]" = queue.Queue()
        self._event_queue: "queue.Queue[step_upload.Event]" = queue.Queue()
        self._step_checksum = step_checksum.StepChecksum(
            self._api,
            self._tempdir,
            self._incoming_queue,
            self._event_queue,
            self._stats,
        )
        self._step_checksum.start()
        self._step_upload = step_upload.StepUpload(
            self._api,
            self._stats,
            self._event_queue,
            self.MAX_UPLOAD_JOBS,
            file_stream=file_stream,
            silent=bool(silent),
        )
        self._step_upload.start()

    def get_status(self) -> Tuple[bool, Mapping[str, int]]:
        """Return ``(still_running, stats_summary)``."""
        running = self.is_alive()
        summary = self._stats.summary()
        return running, summary

    def print_status(self, prefix: bool = True) -> None:
        """Print a live spinner/progress line until all uploads finish."""
        step = 0
        spinner_states = ["-", "\\", "|", "/"]
        stop = False
        while True:
            if not self.is_alive():
                stop = True
            summary = self._stats.summary()
            line = " {:.2f}MB of {:.2f}MB uploaded ({:.2f}MB deduped)\r".format(
                summary["uploaded_bytes"] / 1048576.0,
                summary["total_bytes"] / 1048576.0,
                summary["deduped_bytes"] / 1048576.0,
            )
            line = spinner_states[step % 4] + line
            step += 1
            wandb.termlog(line, newline=False, prefix=prefix)
            if stop:
                break
            time.sleep(0.25)
        # Report the dedupe saving once uploads are done (if noticeable).
        dedupe_fraction = (
            summary["deduped_bytes"] / float(summary["total_bytes"])
            if summary["total_bytes"] > 0
            else 0
        )
        if dedupe_fraction > 0.01:
            wandb.termlog(
                "W&B sync reduced upload amount by %.1f%% "
                % (dedupe_fraction * 100),
                prefix=prefix,
            )
        # clear progress line.
        wandb.termlog(" " * 79, prefix=prefix)

    def file_counts_by_category(self) -> Mapping[str, int]:
        """Return per-category counts of the files tracked by the stats object."""
        return self._stats.file_counts_by_category()

    def file_changed(
        self,
        save_name: dir_watcher.SaveName,
        path: str,
        artifact_id: Optional[str] = None,
        copy: bool = True,
        use_prepare_flow: bool = False,
        save_fn: Optional[step_upload.SaveFn] = None,
        digest: Optional[str] = None,
    ):
        """Tell the file pusher that a file's changed and should be uploaded.

        Arguments:
            save_name: string logical location of the file relative to the run
                directory.
            path: actual string path of the file to upload on the filesystem.
        """
        # Tests in linux were failing because wandb-events.jsonl didn't exist
        if not os.path.exists(path) or not os.path.isfile(path):
            return
        if os.path.getsize(path) == 0:
            return

        save_name = dir_watcher.SaveName(wandb.util.to_forward_slash_path(save_name))
        event = step_checksum.RequestUpload(
            path,
            dir_watcher.SaveName(save_name),
            artifact_id,
            copy,
            use_prepare_flow,
            save_fn,
            digest,
        )
        self._incoming_queue.put(event)

    def store_manifest_files(
        self,
        manifest: "artifacts.ArtifactManifest",
        artifact_id: str,
        save_fn: "internal_artifacts.SaveFn",
    ) -> None:
        """Queue the upload of every file referenced by an artifact manifest."""
        event = step_checksum.RequestStoreManifestFiles(manifest, artifact_id, save_fn)
        self._incoming_queue.put(event)

    def commit_artifact(
        self,
        artifact_id: str,
        finalize: bool = True,
        before_commit: Optional[step_upload.PreCommitFn] = None,
        on_commit: Optional[step_upload.PostCommitFn] = None,
    ):
        """Queue a commit of the given artifact once its uploads are done."""
        event = step_checksum.RequestCommitArtifact(
            artifact_id, finalize, before_commit, on_commit
        )
        self._incoming_queue.put(event)

    def finish(self, callback: Optional[step_upload.OnRequestFinishFn] = None):
        """Request an orderly shutdown; ``callback`` runs when work completes."""
        logger.info("shutting down file pusher")
        self._incoming_queue.put(step_checksum.RequestFinish(callback))

    def join(self) -> None:
        # NOTE: must have called finish before join
        logger.info("waiting for file pusher")
        while self.is_alive():
            time.sleep(0.5)

    def is_alive(self) -> bool:
        """True while either pipeline stage thread is still running."""
        return self._step_checksum.is_alive() or self._step_upload.is_alive()
| mit | 1734f696b3259fa41d31bca2564e80ce | 31.342541 | 87 | 0.580629 | 3.968814 | false | false | false | false |
def wandb_log(  # noqa: C901
    func=None,
    # /, # py38 only
    log_component_file=True,
):
    """Wrap a standard python function and log to W&B.

    Intended for Kubeflow Pipelines components: inputs are logged as run
    config / used artifacts, outputs as metrics / logged artifacts, and the
    component's KFP UI metadata is populated with an iframe linking to the
    W&B run.
    """
    import json
    import os
    from functools import wraps
    from inspect import Parameter, signature

    from kfp import components
    from kfp.components import (
        InputArtifact,
        InputBinaryFile,
        InputPath,
        InputTextFile,
        OutputArtifact,
        OutputBinaryFile,
        OutputPath,
        OutputTextFile,
    )

    import wandb
    from wandb.sdk.lib import telemetry as wb_telemetry

    output_types = (OutputArtifact, OutputBinaryFile, OutputPath, OutputTextFile)
    input_types = (InputArtifact, InputBinaryFile, InputPath, InputTextFile)

    def isinstance_namedtuple(x):
        # Heuristic: direct tuple subclass with a tuple of string _fields.
        t = type(x)
        b = t.__bases__
        if len(b) != 1 or b[0] != tuple:
            return False
        f = getattr(t, "_fields", None)
        if not isinstance(f, tuple):
            return False
        return all(type(n) == str for n in f)

    def get_iframe_html(run):
        return f'<iframe src="{run.url}?kfp=true" style="border:none;width:100%;height:100%;min-width:900px;min-height:600px;"></iframe>'

    def get_link_back_to_kubeflow():
        # {workflow.uid} is substituted by Argo/KFP at pipeline runtime.
        wandb_kubeflow_url = os.getenv("WANDB_KUBEFLOW_URL")
        return f"{wandb_kubeflow_url}/#/runs/details/{{workflow.uid}}"

    def log_input_scalar(name, data, run=None):
        run.config[name] = data
        wandb.termlog(f"Setting config: {name} to {data}")

    def log_input_artifact(name, data, type, run=None):
        artifact = wandb.Artifact(name, type=type)
        artifact.add_file(data)
        run.use_artifact(artifact)
        wandb.termlog(f"Using artifact: {name}")

    def log_output_scalar(name, data, run=None):
        # NamedTuple outputs are logged field by field, prefixed with the
        # component function's name.
        if isinstance_namedtuple(data):
            for k, v in zip(data._fields, data):
                run.log({f"{func.__name__}.{k}": v})
        else:
            run.log({name: data})

    def log_output_artifact(name, data, type, run=None):
        artifact = wandb.Artifact(name, type=type)
        artifact.add_file(data)
        run.log_artifact(artifact)
        wandb.termlog(f"Logging artifact: {name}")

    def _log_component_file(func, run=None):
        # Export the KFP component spec and attach it to the run as an artifact.
        name = func.__name__
        output_component_file = f"{name}.yml"
        components._python_op.func_to_component_file(func, output_component_file)
        artifact = wandb.Artifact(name, type="kubeflow_component_file")
        artifact.add_file(output_component_file)
        run.log_artifact(artifact)
        wandb.termlog(f"Logging component file: {output_component_file}")

    # Add `mlpipeline_ui_metadata_path` to signature to show W&B run in "ML Visualizations tab"
    # NOTE(review): this runs before the `func is None` check below, so using
    # the decorator with keyword arguments only (func=None) would make
    # signature(None) raise TypeError — presumably this should live inside
    # `decorator`; confirm intended usage.
    sig = signature(func)
    no_default = []
    has_default = []
    for param in sig.parameters.values():
        if param.default is param.empty:
            no_default.append(param)
        else:
            has_default.append(param)

    # The injected parameter must come before any defaulted parameters.
    new_params = tuple(
        (
            *no_default,
            Parameter(
                "mlpipeline_ui_metadata_path",
                annotation=OutputPath(),
                kind=Parameter.POSITIONAL_OR_KEYWORD,
            ),
            *has_default,
        )
    )
    new_sig = sig.replace(parameters=new_params)
    new_anns = {param.name: param.annotation for param in new_params}
    if "return" in func.__annotations__:
        new_anns["return"] = func.__annotations__["return"]

    def decorator(func):
        # Partition the component's annotations into the four logging buckets.
        input_scalars = {}
        input_artifacts = {}
        output_scalars = {}
        output_artifacts = {}

        for name, ann in func.__annotations__.items():
            if name == "return":
                output_scalars[name] = ann
            elif isinstance(ann, output_types):
                output_artifacts[name] = ann
            elif isinstance(ann, input_types):
                input_artifacts[name] = ann
            else:
                input_scalars[name] = ann

        @wraps(func)
        def wrapper(*args, **kwargs):
            bound = new_sig.bind(*args, **kwargs)
            bound.apply_defaults()
            # Pull out the injected metadata path before calling the real func.
            mlpipeline_ui_metadata_path = bound.arguments["mlpipeline_ui_metadata_path"]
            del bound.arguments["mlpipeline_ui_metadata_path"]

            with wandb.init(
                job_type=func.__name__,
                group="{{workflow.annotations.pipelines.kubeflow.org/run_name}}",
            ) as run:
                # Link back to the kfp UI
                kubeflow_url = get_link_back_to_kubeflow()
                run.notes = kubeflow_url
                run.config["LINK_TO_KUBEFLOW_RUN"] = kubeflow_url

                # Embed the W&B run page in KFP's "ML Visualizations" tab.
                iframe_html = get_iframe_html(run)
                metadata = {
                    "outputs": [
                        {
                            "type": "markdown",
                            "storage": "inline",
                            "source": iframe_html,
                        }
                    ]
                }

                with open(mlpipeline_ui_metadata_path, "w") as metadata_file:
                    json.dump(metadata, metadata_file)

                if log_component_file:
                    _log_component_file(func, run=run)

                for name, _ in input_scalars.items():
                    log_input_scalar(name, kwargs[name], run)

                for name, ann in input_artifacts.items():
                    log_input_artifact(name, kwargs[name], ann.type, run)

                with wb_telemetry.context(run=run) as tel:
                    tel.feature.kfp_wandb_log = True

                result = func(*bound.args, **bound.kwargs)

                for name, _ in output_scalars.items():
                    log_output_scalar(name, result, run)

                for name, ann in output_artifacts.items():
                    log_output_artifact(name, kwargs[name], ann.type, run)

            return result

        # Expose the augmented signature/annotations so KFP sees the injected
        # metadata parameter.
        wrapper.__signature__ = new_sig
        wrapper.__annotations__ = new_anns
        return wrapper

    # Support both @wandb_log and wandb_log(func) usage.
    if func is None:
        return decorator
    else:
        return decorator(func)
| mit | e4525e2043b29d71164f4cbde5c047d3 | 32.655738 | 137 | 0.550414 | 3.93044 | false | false | false | false |
wandb/client | wandb/vendor/pygments/lexers/dalvik.py | 29 | 4420 | # -*- coding: utf-8 -*-
"""
pygments.lexers.dalvik
~~~~~~~~~~~~~~~~~~~~~~
Pygments lexers for Dalvik VM-related languages.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, bygroups
from pygments.token import Keyword, Text, Comment, Name, String, Number, \
Punctuation
__all__ = ['SmaliLexer']
class SmaliLexer(RegexLexer):
    """
    For `Smali <http://code.google.com/p/smali/>`_ (Android/Dalvik) assembly
    code.

    .. versionadded:: 1.6
    """
    name = 'Smali'
    aliases = ['smali']
    filenames = ['*.smali']
    mimetypes = ['text/smali']

    tokens = {
        # Order matters: more specific states are tried before the generic
        # instruction/type fallbacks.
        'root': [
            include('comment'),
            include('label'),
            include('field'),
            include('method'),
            include('class'),
            include('directive'),
            include('access-modifier'),
            include('instruction'),
            include('literal'),
            include('punctuation'),
            include('type'),
            include('whitespace')
        ],
        'directive': [
            (r'^[ \t]*\.(class|super|implements|field|subannotation|annotation|'
             r'enum|method|registers|locals|array-data|packed-switch|'
             r'sparse-switch|catchall|catch|line|parameter|local|prologue|'
             r'epilogue|source)', Keyword),
            (r'^[ \t]*\.end (field|subannotation|annotation|method|array-data|'
             'packed-switch|sparse-switch|parameter|local)', Keyword),
            (r'^[ \t]*\.restart local', Keyword),
        ],
        'access-modifier': [
            (r'(public|private|protected|static|final|synchronized|bridge|'
             r'varargs|native|abstract|strictfp|synthetic|constructor|'
             r'declared-synchronized|interface|enum|annotation|volatile|'
             r'transient)', Keyword),
        ],
        'whitespace': [
            (r'\n', Text),
            (r'\s+', Text),
        ],
        'instruction': [
            (r'\b[vp]\d+\b', Name.Builtin),  # registers
            (r'\b[a-z][A-Za-z0-9/-]+\s+', Text),  # instructions
        ],
        'literal': [
            (r'".*"', String),
            (r'0x[0-9A-Fa-f]+t?', Number.Hex),
            (r'[0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
            (r'[0-9]+L?', Number.Integer),
        ],
        'field': [
            (r'(\$?\b)([\w$]*)(:)',
             bygroups(Punctuation, Name.Variable, Punctuation)),
        ],
        'method': [
            (r'<(?:cl)?init>', Name.Function),  # constructor
            (r'(\$?\b)([\w$]*)(\()',
             bygroups(Punctuation, Name.Function, Punctuation)),
        ],
        'label': [
            (r':\w+', Name.Label),
        ],
        'class': [
            # class names in the form Lcom/namespace/ClassName;
            # I only want to color the ClassName part, so the namespace part is
            # treated as 'Text'
            (r'(L)((?:[\w$]+/)*)([\w$]+)(;)',
             bygroups(Keyword.Type, Text, Name.Class, Text)),
        ],
        'punctuation': [
            (r'->', Punctuation),
            (r'[{},():=.-]', Punctuation),
        ],
        'type': [
            (r'[ZBSCIJFDV\[]+', Keyword.Type),
        ],
        'comment': [
            (r'#.*?\n', Comment),
        ],
    }

    def analyse_text(text):
        # Weighted heuristics: the `.class` directive and characteristic
        # Dalvik opcodes/verification errors strongly suggest smali.
        score = 0
        if re.search(r'^\s*\.class\s', text, re.MULTILINE):
            score += 0.5
        if re.search(r'\b((check-cast|instance-of|throw-verification-error'
                     r')\b|(-to|add|[ais]get|[ais]put|and|cmpl|const|div|'
                     r'if|invoke|move|mul|neg|not|or|rem|return|rsub|shl|'
                     r'shr|sub|ushr)[-/])|{|}', text, re.MULTILINE):
            score += 0.3
        if re.search(r'(\.(catchall|epilogue|restart local|prologue)|'
                     r'\b(array-data|class-change-error|declared-synchronized|'
                     r'(field|inline|vtable)@0x[0-9a-fA-F]|generic-error|'
                     r'illegal-class-access|illegal-field-access|'
                     r'illegal-method-access|instantiation-error|no-error|'
                     r'no-such-class|no-such-field|no-such-method|'
                     r'packed-switch|sparse-switch))\b', text, re.MULTILINE):
            score += 0.6
        return score
| mit | ba01f3177284a376215c6690a895be64 | 34.36 | 80 | 0.49095 | 3.733108 | false | false | false | false |
wandb/client | wandb/vendor/pygments/lexers/nix.py | 29 | 4031 | # -*- coding: utf-8 -*-
"""
pygments.lexers.nix
~~~~~~~~~~~~~~~~~~~
Lexers for the NixOS Nix language.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Literal
__all__ = ['NixLexer']
class NixLexer(RegexLexer):
    """
    For the `Nix language <http://nixos.org/nix/>`_.

    .. versionadded:: 2.0
    """

    name = 'Nix'
    aliases = ['nixos', 'nix']
    filenames = ['*.nix']
    mimetypes = ['text/x-nix']

    flags = re.MULTILINE | re.UNICODE

    keywords = ['rec', 'with', 'let', 'in', 'inherit', 'assert', 'if',
                'else', 'then', '...']
    builtins = ['import', 'abort', 'baseNameOf', 'dirOf', 'isNull', 'builtins',
                'map', 'removeAttrs', 'throw', 'toString', 'derivation']
    operators = ['++', '+', '?', '.', '!', '//', '==',
                 '!=', '&&', '||', '->', '=']

    punctuations = ["(", ")", "[", "]", ";", "{", "}", ":", ",", "@"]

    tokens = {
        'root': [
            # comments starting with #
            (r'#.*$', Comment.Single),

            # multiline comments
            (r'/\*', Comment.Multiline, 'comment'),

            # whitespace
            (r'\s+', Text),

            # keywords
            ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in keywords), Keyword),

            # highlight the builtins
            ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in builtins),
             Name.Builtin),

            (r'\b(true|false|null)\b', Name.Constant),

            # operators
            ('(%s)' % '|'.join(re.escape(entry) for entry in operators),
             Operator),

            # word operators
            (r'\b(or|and)\b', Operator.Word),

            # punctuations
            ('(%s)' % '|'.join(re.escape(entry) for entry in punctuations), Punctuation),

            # integers
            (r'[0-9]+', Number.Integer),

            # strings
            (r'"', String.Double, 'doublequote'),
            (r"''", String.Single, 'singlequote'),

            # paths
            (r'[\w.+-]*(\/[\w.+-]+)+', Literal),
            (r'\<[\w.+-]+(\/[\w.+-]+)*\>', Literal),

            # urls
            (r'[a-zA-Z][a-zA-Z0-9\+\-\.]*\:[\w%/?:@&=+$,\\.!~*\'-]+', Literal),

            # names of variables
            (r'[\w-]+\s*=', String.Symbol),
            (r'[a-zA-Z_][\w\'-]*', Text),

        ],
        'comment': [
            (r'[^/*]+', Comment.Multiline),
            (r'/\*', Comment.Multiline, '#push'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline),
        ],
        'singlequote': [
            (r"'''", String.Escape),
            (r"''\$\{", String.Escape),
            (r"''\n", String.Escape),
            (r"''\r", String.Escape),
            (r"''\t", String.Escape),
            (r"''", String.Single, '#pop'),
            (r'\$\{', String.Interpol, 'antiquote'),
            (r"[^']", String.Single),
        ],
        'doublequote': [
            # Escape sequences must be matched longest-first: previously the
            # bare backslash rule (r'\\') came first and shadowed the
            # two-character escapes, so an escaped quote (\") terminated the
            # string early.  Also, the dollar in \${ needs escaping in the
            # pattern (an unescaped $ is an end-of-line anchor and could
            # never match here).
            (r'\\\\', String.Escape),
            (r'\\"', String.Escape),
            (r'\\\$\{', String.Escape),
            (r'"', String.Double, '#pop'),
            (r'\$\{', String.Interpol, 'antiquote'),
            (r'[^"]', String.Double),
        ],
        'antiquote': [
            (r"\}", String.Interpol, '#pop'),
            # TODO: we should probably escape also here ''${ \${
            (r"\$\{", String.Interpol, '#push'),
            include('root'),
        ],
    }

    def analyse_text(text):
        rv = 0.0
        # TODO: let/in
        if re.search(r'import.+?<[^>]+>', text):
            rv += 0.4
        if re.search(r'mkDerivation\s+(\(|\{|rec)', text):
            rv += 0.4
        if re.search(r'=\s+mkIf\s+', text):
            rv += 0.4
        if re.search(r'\{[a-zA-Z,\s]+\}:', text):
            rv += 0.1
        return rv
| mit | 9c33576b74e14d95f0c86f781243991a | 28.639706 | 89 | 0.420987 | 3.621743 | false | false | false | false |
wandb/client | tests/functional_tests/t0_main/lightning/train_tpu_ddp.py | 1 | 1149 | #!/usr/bin/env python
import os
from pl_base import BoringModel, RandomDataset
from pytorch_lightning import Trainer
from pytorch_lightning.loggers import WandbLogger
from torch.utils.data import DataLoader
import wandb
def main():
    """Train and test a BoringModel on 8 TPU cores, logging to W&B."""
    # Opt in to the wandb service (concurrency) experiment.
    wandb.require(experiment="service")
    print("PIDPID", os.getpid())

    # Build identical random-data loaders for each split.
    num_samples = 100000
    loaders = {
        split: DataLoader(RandomDataset(32, num_samples), batch_size=32)
        for split in ("train", "val", "test")
    }

    model = BoringModel()

    # Hyperparameters are logged before the trainer spawns its workers.
    wandb_logger = WandbLogger(
        log_model=True,
        config={"some_hparam": "Logged Before Trainer starts DDP"},
        save_code=True,
    )

    trainer = Trainer(max_epochs=1, logger=wandb_logger, tpu_cores=8)

    trainer.fit(model, loaders["train"], loaders["val"])
    trainer.test(test_dataloaders=loaders["test"])
if __name__ == "__main__":
    # Required TPU environment before launching, e.g.:
    # export TPU_IP_ADDRESS=your-tpu-ip-address
    # export XRT_TPU_CONFIG="tpu_worker;0;$TPU_IP_ADDRESS:8470"
    main()
| mit | d1d4a7d4ac9064ae70c1cdb76b4c385d | 27.02439 | 77 | 0.70148 | 3.209497 | false | true | false | false |
wandb/client | wandb/vendor/graphql-core-1.1/wandb_graphql/language/visitor.py | 3 | 6276 | from copy import copy
import six
from . import ast
from .visitor_meta import QUERY_DOCUMENT_KEYS, VisitorMeta
class Falsey(object):
    """An object that is always falsy in boolean context.

    Used for the REMOVE sentinel so handlers can detect it by identity
    while conditions treat it like "no value".
    """

    def __bool__(self):
        return False

    # Python 2 truth-testing protocol shares the same implementation.
    __nonzero__ = __bool__
# Sentinel returned by a visitor handler to abort the whole traversal.
BREAK = object()
# Sentinel returned by a visitor handler to delete the current node; it is
# falsy so handlers can treat it like "no node" in boolean checks.
REMOVE = Falsey()
class Stack(object):
    """A linked-list frame recording traversal state inside ``visit``."""

    __slots__ = 'in_array', 'index', 'keys', 'edits', 'prev'

    def __init__(self, in_array, index, keys, edits, prev):
        for slot, value in zip(self.__slots__, (in_array, index, keys, edits, prev)):
            setattr(self, slot, value)
def visit(root, visitor, key_map=None):
    """Iteratively walk an AST, invoking ``visitor`` on enter/leave.

    Handlers may return BREAK (stop), False (skip subtree on enter),
    REMOVE (delete the node) or a replacement node; edits are applied
    copy-on-write while unwinding, and the (possibly new) root is returned.
    """
    visitor_keys = key_map or QUERY_DOCUMENT_KEYS

    stack = None
    in_array = isinstance(root, list)
    keys = [root]
    index = -1
    edits = []
    parent = None
    path = []
    ancestors = []
    new_root = root

    # Bind hot lookups to locals for the traversal loop.
    leave = visitor.leave
    enter = visitor.enter
    path_pop = path.pop
    ancestors_pop = ancestors.pop
    path_append = path.append
    ancestors_append = ancestors.append

    while True:
        index += 1
        is_leaving = index == len(keys)
        is_edited = is_leaving and edits

        if is_leaving:
            # Finished all children of `parent`: pop back one frame.
            key = path_pop() if ancestors else None
            node = parent
            parent = ancestors_pop() if ancestors else None

            if is_edited:
                # Copy-on-write: clone the node before applying child edits.
                if in_array:
                    node = list(node)
                else:
                    node = copy(node)

                edit_offset = 0
                for edit_key, edit_value in edits:
                    if in_array:
                        # Earlier removals shift later list indices left.
                        edit_key -= edit_offset

                    if in_array and edit_value is REMOVE:
                        node.pop(edit_key)
                        edit_offset += 1
                    else:
                        if isinstance(node, list):
                            node[edit_key] = edit_value
                        else:
                            setattr(node, edit_key, edit_value)

            # Restore the enclosing frame's traversal state.
            index = stack.index
            keys = stack.keys
            edits = stack.edits
            in_array = stack.in_array
            stack = stack.prev

        else:
            # Descend to the next child of `parent` (or start at the root).
            if parent:
                key = index if in_array else keys[index]
                if isinstance(parent, list):
                    node = parent[key]
                else:
                    node = getattr(parent, key, None)
            else:
                key = None
                node = new_root

            if node is REMOVE or node is None:
                continue

            if parent:
                path_append(key)

        result = None
        if not isinstance(node, list):
            assert isinstance(node, ast.Node), 'Invalid AST Node: ' + repr(node)

            if is_leaving:
                result = leave(node, key, parent, path, ancestors)
            else:
                result = enter(node, key, parent, path, ancestors)

            if result is BREAK:
                break

            if result is False:
                # On enter, False means: skip this node's subtree.
                if not is_leaving:
                    path_pop()
                    continue

            elif result is not None:
                # Record the replacement (or REMOVE) for the parent's edits.
                edits.append((key, result))
                if not is_leaving:
                    if isinstance(result, ast.Node):
                        # Continue the traversal into the replacement node.
                        node = result
                    else:
                        path_pop()
                        continue

        if result is None and is_edited:
            # Node unchanged by the handler but its children were edited:
            # propagate the rebuilt node upwards.
            edits.append((key, node))

        if not is_leaving:
            # Push the current frame and descend into `node`'s children.
            stack = Stack(in_array, index, keys, edits, stack)
            in_array = isinstance(node, list)
            keys = node if in_array else visitor_keys.get(type(node), None) or []
            index = -1
            edits = []
            if parent:
                ancestors_append(parent)
            parent = node

        if not stack:
            break

    if edits:
        # The final edit entry holds the (possibly replaced) root.
        new_root = edits[-1][1]

    return new_root
@six.add_metaclass(VisitorMeta)
class Visitor(object):
    """Base AST visitor that dispatches enter/leave calls per node type.

    The handler lookup methods (``_get_enter_handler`` /
    ``_get_leave_handler``) are not defined here — presumably they are
    supplied by the ``VisitorMeta`` metaclass; confirm against its source.
    """

    __slots__ = ()

    def enter(self, node, key, parent, path, ancestors):
        """Invoke the registered enter handler for ``type(node)``, if any."""
        handler = self._get_enter_handler(type(node))
        return handler(self, node, key, parent, path, ancestors) if handler else None

    def leave(self, node, key, parent, path, ancestors):
        """Invoke the registered leave handler for ``type(node)``, if any."""
        handler = self._get_leave_handler(type(node))
        return handler(self, node, key, parent, path, ancestors) if handler else None
class ParallelVisitor(Visitor):
    """Runs several visitors during a single traversal, tracking per-visitor
    skip state so each visitor can independently skip subtrees or stop."""

    __slots__ = 'skipping', 'visitors'

    def __init__(self, visitors):
        self.visitors = visitors
        # skipping[i] is None while visitor i is active, the node whose
        # subtree it asked to skip (its enter() returned False), or BREAK
        # once it asked to stop visiting entirely.
        self.skipping = [None] * len(visitors)

    def enter(self, node, key, parent, path, ancestors):
        for i, visitor in enumerate(self.visitors):
            if not self.skipping[i]:
                result = visitor.enter(node, key, parent, path, ancestors)
                if result is False:
                    # Remember the skipped node so we can detect its matching
                    # leave() below.
                    self.skipping[i] = node
                elif result is BREAK:
                    self.skipping[i] = BREAK
                elif result is not None:
                    # First visitor returning an edit wins for this step.
                    return result

    def leave(self, node, key, parent, path, ancestors):
        for i, visitor in enumerate(self.visitors):
            if not self.skipping[i]:
                result = visitor.leave(node, key, parent, path, ancestors)
                if result is BREAK:
                    self.skipping[i] = BREAK
                elif result is not None and result is not False:
                    return result
            elif self.skipping[i] == node:
                # NOTE(review): resetting to REMOVE (a truthy sentinel) keeps
                # this visitor skipped for the rest of the traversal, whereas
                # graphql-js resets the slot to null here — confirm intended.
                self.skipping[i] = REMOVE
class TypeInfoVisitor(Visitor):
    """Wraps another visitor so a TypeInfo instance is kept in sync with the
    traversal: type_info.enter/leave bracket every delegated call."""

    __slots__ = 'visitor', 'type_info'

    def __init__(self, type_info, visitor):
        self.visitor = visitor
        self.type_info = type_info

    def enter(self, node, key, parent, path, ancestors):
        self.type_info.enter(node)
        outcome = self.visitor.enter(node, key, parent, path, ancestors)
        if outcome is None:
            return None
        # The wrapped visitor edited this node: pop the now-stale type-info
        # frame and, if it produced a replacement node, push that instead.
        self.type_info.leave(node)
        if isinstance(outcome, ast.Node):
            self.type_info.enter(outcome)
        return outcome

    def leave(self, node, key, parent, path, ancestors):
        outcome = self.visitor.leave(node, key, parent, path, ancestors)
        self.type_info.leave(node)
        return outcome
| mit | 80e47a655d9a4da4f4f32d5be227822d | 26.893333 | 81 | 0.50733 | 4.313402 | false | false | false | false |
wandb/client | wandb/sklearn/calculate/learning_curve.py | 1 | 1703 | from warnings import simplefilter
import numpy as np
from sklearn import model_selection
import wandb
from wandb.sklearn import utils
# ignore all future warnings
simplefilter(action="ignore", category=FutureWarning)
def learning_curve(
    model,
    X,
    y,
    cv=None,
    shuffle=False,
    random_state=None,
    train_sizes=None,
    n_jobs=1,
    scoring=None,
):
    """Trains model on datasets of varying size and generates plot of score vs size.

    Called by plot_learning_curve to visualize learning curve. Please use the
    function plot_learning_curve() if you wish to visualize your learning curves.
    """
    sizes, train_scores, test_scores = model_selection.learning_curve(
        model,
        X,
        y,
        cv=cv,
        n_jobs=n_jobs,
        train_sizes=train_sizes,
        scoring=scoring,
        shuffle=shuffle,
        random_state=random_state,
    )
    # Average the cross-validation folds for each training-set size.
    mean_train = np.mean(train_scores, axis=1)
    mean_test = np.mean(test_scores, axis=1)
    return wandb.visualize(
        "wandb/learning_curve/v1", make_table(mean_train, mean_test, sizes)
    )
def make_table(train, test, train_sizes):
    """Builds a wandb.Table of train/test scores against training-set size.

    Args:
        train: mean training scores, one per entry in ``train_sizes``.
        test: mean test scores, one per entry in ``train_sizes``.
        train_sizes: number of training examples behind each score pair.

    Returns:
        wandb.Table with columns ``dataset`` ("train"/"test"), ``score`` and
        ``train_size``, truncated once half the chart limit is consumed
        (each step contributes two rows: one train, one test).
    """
    data = []
    # Iterate the three sequences in lockstep instead of indexing with
    # range(len(...)); zip also stops safely at the shortest sequence.
    for i, (train_score, test_score, size) in enumerate(
        zip(train, test, train_sizes)
    ):
        if utils.check_against_limit(i, "learning_curve", utils.chart_limit / 2):
            break
        data.append(["train", utils.round_2(train_score), size])
        data.append(["test", utils.round_2(test_score), size])
    return wandb.Table(columns=["dataset", "score", "train_size"], data=data)
| mit | 8d4e15b74d970bb056aa60cf2762c46f | 25.609375 | 86 | 0.630652 | 3.547917 | false | true | false | false |
wandb/client | wandb/sdk/internal/meta.py | 1 | 10015 | """
meta.
"""
from datetime import datetime
import glob
import json
import logging
import multiprocessing
import os
from shutil import copyfile
import subprocess
import sys
from urllib.parse import unquote
from wandb import util
from wandb.vendor.pynvml import pynvml
from ..lib.filenames import (
CONDA_ENVIRONMENTS_FNAME,
DIFF_FNAME,
METADATA_FNAME,
REQUIREMENTS_FNAME,
)
from ..lib.git import GitRepo
logger = logging.getLogger(__name__)
class Meta:
    """Used to store metadata during and after a run.

    Collects system, git, code, and environment information into ``self.data``,
    writes it to the run's metadata file, and publishes the saved files
    through the backend interface.
    """

    def __init__(self, settings=None, interface=None):
        # settings: run settings object (paths, git info, run flags).
        # interface: backend interface used by write() to publish files.
        logger.debug("meta init")
        self._settings = settings
        self.data = {}
        self.fname = os.path.join(self._settings.files_dir, METADATA_FNAME)
        self._interface = interface
        self._git = GitRepo(
            root=self._settings.git_root,
            remote=self._settings.git_remote,
            remote_url=self._settings.git_remote_url,
            commit=self._settings.git_commit,
        )
        # Location under "code" directory in files where program was saved.
        self._saved_program = None
        # Locations under files directory where diff patches were saved.
        self._saved_patches = []
        logger.debug("meta init done")

    def _save_pip(self):
        """Saves the current working set of pip packages to {REQUIREMENTS_FNAME}"""
        logger.debug("save pip")
        try:
            import pkg_resources

            installed_packages = [d for d in iter(pkg_resources.working_set)]
            installed_packages_list = sorted(
                f"{i.key}=={i.version}" for i in installed_packages
            )
            with open(
                os.path.join(self._settings.files_dir, REQUIREMENTS_FNAME), "w"
            ) as f:
                f.write("\n".join(installed_packages_list))
        except Exception:
            # Best-effort: failure to snapshot requirements must not abort the run.
            logger.exception("Error saving pip packages")
        logger.debug("save pip done")

    def _save_conda(self):
        """Exports the active conda environment, if the interpreter runs in one."""
        # A conda prefix always contains a conda-meta directory.
        current_shell_is_conda = os.path.exists(os.path.join(sys.prefix, "conda-meta"))
        if not current_shell_is_conda:
            return False
        logger.debug("save conda")
        try:
            with open(
                os.path.join(self._settings.files_dir, CONDA_ENVIRONMENTS_FNAME), "w"
            ) as f:
                subprocess.call(
                    ["conda", "env", "export"], stdout=f, stderr=subprocess.DEVNULL
                )
        except Exception:
            logger.exception("Error saving conda packages")
        logger.debug("save conda done")

    def _save_code(self):
        """Copies the entry-point program into the run's files/code directory."""
        logger.debug("save code")
        if self._settings.program_relpath is None:
            logger.warning("unable to save code -- program entry not found")
            return
        root: str = self._git.root or os.getcwd()
        program_relative: str = self._settings.program_relpath
        util.mkdir_exists_ok(
            os.path.join(
                self._settings.files_dir, "code", os.path.dirname(program_relative)
            )
        )
        program_absolute = os.path.join(root, program_relative)
        if not os.path.exists(program_absolute):
            logger.warning("unable to save code -- can't find %s" % program_absolute)
            return
        saved_program = os.path.join(self._settings.files_dir, "code", program_relative)
        # Remember the relative location so write() can publish it later.
        self._saved_program = program_relative
        if not os.path.exists(saved_program):
            copyfile(program_absolute, saved_program)
        logger.debug("save code done")

    def _save_patches(self):
        """Save the current state of this repository to one or more patches.

        Makes one patch against HEAD and another one against the most recent
        commit that occurs in an upstream branch. This way we can be robust
        to history editing as long as the user never does "push -f" to break
        history on an upstream branch.

        Writes the first patch to <files_dir>/<DIFF_FNAME> and the second to
        <files_dir>/upstream_diff_<commit_id>.patch.
        """
        if not self._git.enabled:
            return False
        logger.debug("save patches")
        try:
            root = self._git.root
            diff_args = ["git", "diff"]
            if self._git.has_submodule_diff:
                diff_args.append("--submodule=diff")
            if self._git.dirty:
                patch_path = os.path.join(self._settings.files_dir, DIFF_FNAME)
                with open(patch_path, "wb") as patch:
                    # we diff against HEAD to ensure we get changes in the index
                    subprocess.check_call(
                        diff_args + ["HEAD"], stdout=patch, cwd=root, timeout=5
                    )
                self._saved_patches.append(
                    os.path.relpath(patch_path, start=self._settings.files_dir)
                )
            upstream_commit = self._git.get_upstream_fork_point()
            if upstream_commit and upstream_commit != self._git.repo.head.commit:
                sha = upstream_commit.hexsha
                upstream_patch_path = os.path.join(
                    self._settings.files_dir, f"upstream_diff_{sha}.patch"
                )
                with open(upstream_patch_path, "wb") as upstream_patch:
                    subprocess.check_call(
                        diff_args + [sha], stdout=upstream_patch, cwd=root, timeout=5
                    )
                self._saved_patches.append(
                    os.path.relpath(
                        upstream_patch_path, start=self._settings.files_dir
                    )
                )
        # TODO: A customer saw `ValueError: Reference at 'refs/remotes/origin/foo'
        # does not exist` so we now catch ValueError. Catching this error feels
        # too generic.
        except (
            ValueError,
            subprocess.CalledProcessError,
            subprocess.TimeoutExpired,
        ) as e:
            logger.error("Error generating diff: %s" % e)
        logger.debug("save patches done")

    def _setup_sys(self):
        """Populates OS/python/GPU/CPU and start-time fields of self.data."""
        self.data["os"] = self._settings._os
        self.data["python"] = self._settings._python
        self.data["heartbeatAt"] = datetime.utcnow().isoformat()
        self.data["startedAt"] = datetime.utcfromtimestamp(
            self._settings._start_time
        ).isoformat()
        self.data["docker"] = self._settings.docker
        try:
            pynvml.nvmlInit()
            # Only the first GPU's name is recorded, plus the total count.
            self.data["gpu"] = pynvml.nvmlDeviceGetName(
                pynvml.nvmlDeviceGetHandleByIndex(0)
            ).decode("utf8")
            self.data["gpu_count"] = pynvml.nvmlDeviceGetCount()
        except pynvml.NVMLError:
            # No NVIDIA driver / GPU available; GPU fields simply stay absent.
            pass
        try:
            self.data["cpu_count"] = multiprocessing.cpu_count()
        except NotImplementedError:
            pass
        self.data["cuda"] = self._settings._cuda
        self.data["args"] = self._settings._args
        self.data["state"] = "running"

    def _setup_git(self):
        """Populates git remote/commit/email/root fields of self.data."""
        if self._settings.disable_git:
            return
        # in case of manually passing the git repo info, `enabled` would be False
        # but we still want to save the git repo info
        if not self._git.enabled and self._git.auto:
            return
        logger.debug("setup git")
        self.data["git"] = {
            "remote": self._git.remote_url,
            "commit": self._git.last_commit,
        }
        self.data["email"] = self._git.email
        self.data["root"] = self._git.root or self.data.get("root") or os.getcwd()
        logger.debug("setup git done")

    def probe(self):
        """Gathers all metadata (system, program/notebook, git, code, deps)."""
        logger.debug("probe")
        self._setup_sys()
        if self._settings.program is not None:
            self.data["program"] = self._settings.program
        if not self._settings.disable_code:
            if self._settings.program_relpath is not None:
                self.data["codePath"] = self._settings.program_relpath
            elif self._settings._jupyter:
                if self._settings.notebook_name:
                    self.data["program"] = self._settings.notebook_name
                elif self._settings._jupyter_path:
                    if self._settings._jupyter_path.startswith("fileId="):
                        # Colab encodes the notebook reference as a fileId URL.
                        unescaped = unquote(self._settings._jupyter_path)
                        self.data["colab"] = (
                            "https://colab.research.google.com/notebook#"
                            + unescaped  # noqa
                        )
                        self.data["program"] = self._settings._jupyter_name
                    else:
                        self.data["program"] = self._settings._jupyter_path
                        self.data["root"] = self._settings._jupyter_root
            self._setup_git()
        if self._settings.anonymous != "true":
            self.data["host"] = self._settings.host
            self.data["username"] = self._settings.username
            self.data["executable"] = sys.executable
        else:
            # Anonymous mode: strip personally identifying fields.
            self.data.pop("email", None)
            self.data.pop("root", None)
        if self._settings.save_code:
            self._save_code()
            self._save_patches()
        if self._settings._save_requirements:
            self._save_pip()
            self._save_conda()
        logger.debug("probe done")

    def write(self):
        """Writes self.data to the metadata file and publishes saved files."""
        with open(self.fname, "w") as f:
            s = json.dumps(self.data, indent=4)
            f.write(s)
            f.write("\n")
        base_name = os.path.basename(self.fname)
        files = dict(files=[(base_name, "now")])
        if self._saved_program:
            saved_program = os.path.join("code", self._saved_program)
            # glob.escape: file paths are treated as glob patterns downstream.
            files["files"].append((glob.escape(saved_program), "now"))
        for patch in self._saved_patches:
            files["files"].append((glob.escape(patch), "now"))
        self._interface.publish_files(files)
| mit | 3c7f7be65a5aabac383faef2c01136fe | 36.092593 | 88 | 0.557963 | 4.193886 | false | false | false | false |
wandb/client | tests/unit_tests/tests_launch/test_launch_jobs.py | 1 | 7412 | import json
import os
import tempfile
from unittest import mock
import pytest
import wandb
from wandb.errors import CommError, LaunchError
from wandb.sdk.data_types._dtypes import TypeRegistry
import wandb.sdk.launch.launch as launch
from wandb.sdk.launch.launch_add import launch_add
import wandb.sdk.launch._project_spec as _project_spec
from .test_launch import (
check_mock_run_info,
code_download_func,
EMPTY_BACKEND_CONFIG,
mock_load_backend,
mocked_fetchable_git_repo,
)
from tests import utils
# Serialized wandb type signatures embedded in the fake job artifacts that the
# tests below write into source_info.json.
INPUT_TYPES = TypeRegistry.type_of(
    {"epochs": 2, "heavy": False, "sleep_every": 0}
).to_json()
OUTPUT_TYPES = TypeRegistry.type_of({"loss": 0.2, "cool": True}).to_json()
@pytest.fixture
def mocked_public_artifact(monkeypatch):
    """Fixture returning an installer for a fake public-API artifact fetcher.

    The returned callable takes a ``job_download_func``; after it is called,
    ``Api.artifact`` returns a MagicMock job artifact (whose ``download`` is
    ``job_download_func``) for type "job", and a mock code artifact otherwise.
    """

    def mock_artifact_fetcher(job_download_func):
        def artifact_fetcher(client, name, type):
            if type == "job":
                job_artifact = mock.MagicMock()
                job_artifact.type = "job"
                job_artifact.download = job_download_func
                job_artifact.digest = "job123"
                return job_artifact
            else:
                # Any non-job request gets a mock code artifact backed by the
                # shared code_download_func helper.
                code_artifact = mock.MagicMock()
                code_artifact.type = "code"
                code_artifact.download = code_download_func
                code_artifact.digest = "code123"
                return code_artifact

        # Patch both module references to the public Api so either import
        # path observed by the code under test sees the fake fetcher.
        monkeypatch.setattr(
            wandb.apis.public.Api,
            "artifact",
            lambda *arg, **kwargs: artifact_fetcher(*arg, **kwargs),
        )
        monkeypatch.setattr(
            wandb.sdk.launch._project_spec.wandb.apis.public.Api,
            "artifact",
            lambda *arg, **kwargs: artifact_fetcher(*arg, **kwargs),
        )

    return mock_artifact_fetcher
def test_fetch_job_fail(api):
    """_fetch_job on a nonexistent job raises a LaunchError naming the job."""
    launch_project = _project_spec.LaunchProject(
        uri=None,
        job="test:v0",
        api=api,
        launch_spec={},
        target_entity="live_mock_server_entity",
        target_project="Test_project",
        name=None,
        docker_config={},
        git_info={},
        overrides={},
        resource="local",
        resource_args={},
        cuda=None,
        run_id=None,
    )
    with pytest.raises(LaunchError) as e_info:
        launch_project._fetch_job()
    assert "Job test:v0 not found" in str(e_info.value)
def test_launch_job_artifact(
    live_mock_server,
    test_settings,
    mock_load_backend,
    mocked_public_artifact,
    monkeypatch,
):
    """Launching a job sourced from a wandb artifact runs via the mock backend."""

    def job_download_func(root):
        # Write the minimal job artifact contents: source metadata plus a
        # frozen requirements file.
        with open(os.path.join(root, "source_info.json"), "w") as f:
            source = {
                "_version": "v0",
                "source_type": "artifact",
                "source": {
                    "artifact": "wandb-artifact://mock_server_entity/test/runs/1/artifacts/test-artifact",
                    "entrypoint": ["python", "train.py"],
                },
                "input_types": INPUT_TYPES,
                "output_types": OUTPUT_TYPES,
            }
            f.write(json.dumps(source))
        with open(os.path.join(root, "requirements.frozen.txt"), "w") as f:
            f.write(utils.fixture_open("requirements.txt").read())

    mocked_public_artifact(job_download_func)
    # Fix: the api object was previously constructed twice with identical
    # arguments; the first instance was dead code shadowed by this one.
    api = wandb.sdk.internal.internal_api.Api(
        default_settings=test_settings, load_settings=False
    )
    kwargs = {
        "job": "test-job:v0",
        "api": api,
        "entity": "mock_server_entity",
        "project": "test",
    }
    mock_with_run_info = launch.run(**kwargs)
    check_mock_run_info(mock_with_run_info, EMPTY_BACKEND_CONFIG, kwargs)
def test_launch_job_repo(
    live_mock_server,
    test_settings,
    mocked_fetchable_git_repo,
    mock_load_backend,
    monkeypatch,
    mocked_public_artifact,
):
    """Launching a job sourced from a git repo runs via the mock backend."""

    def job_download_func(root):
        # Write the minimal job artifact contents: git source metadata plus a
        # frozen requirements file.
        with open(os.path.join(root, "source_info.json"), "w") as f:
            source = {
                "_version": "v0",
                "source_type": "repo",
                "source": {
                    "git": {
                        "remote": "https://github.com/test/remote",
                        "commit": "asdasdasdasd",
                    },
                    "entrypoint": ["python", "train.py"],
                },
                "input_types": INPUT_TYPES,
                "output_types": OUTPUT_TYPES,
            }
            f.write(json.dumps(source))
        with open(os.path.join(root, "requirements.frozen.txt"), "w") as f:
            f.write(utils.fixture_open("requirements.txt").read())

    mocked_public_artifact(job_download_func)
    # Fix: the api object was previously constructed twice with identical
    # arguments; the first instance was dead code shadowed by this one.
    api = wandb.sdk.internal.internal_api.Api(
        default_settings=test_settings, load_settings=False
    )
    kwargs = {
        "job": "test-job:v0",
        "api": api,
        "entity": "mock_server_entity",
        "project": "test",
    }
    mock_with_run_info = launch.run(**kwargs)
    check_mock_run_info(mock_with_run_info, EMPTY_BACKEND_CONFIG, kwargs)
def test_launch_job_container(
    live_mock_server,
    test_settings,
    mocked_fetchable_git_repo,
    mock_load_backend,
    monkeypatch,
    mocked_public_artifact,
):
    """Launching a job sourced from a container image runs via the mock backend."""

    def job_download_func(root):
        # Write the minimal job artifact contents: image source metadata plus
        # a frozen requirements file.
        with open(os.path.join(root, "source_info.json"), "w") as f:
            source = {
                "_version": "v0",
                "source_type": "image",
                "source": {"image": "my-test-image:latest"},
                "input_types": INPUT_TYPES,
                "output_types": OUTPUT_TYPES,
            }
            f.write(json.dumps(source))
        with open(os.path.join(root, "requirements.frozen.txt"), "w") as f:
            f.write(utils.fixture_open("requirements.txt").read())

    mocked_public_artifact(job_download_func)
    # Fix: the api object was previously constructed twice with identical
    # arguments; the first instance was dead code shadowed by this one.
    api = wandb.sdk.internal.internal_api.Api(
        default_settings=test_settings, load_settings=False
    )
    kwargs = {
        "job": "test-job:v0",
        "api": api,
        "entity": "mock_server_entity",
        "project": "test",
    }
    mock_with_run_info = launch.run(**kwargs)
    check_mock_run_info(mock_with_run_info, EMPTY_BACKEND_CONFIG, kwargs)
def test_launch_add_container_queued_run(live_mock_server, mocked_public_artifact):
    """launch_add of a container-sourced job enqueues a run; waiting on it
    against the mock server raises CommError."""

    def job_download_func(root=None):
        # Unlike the other tests this may be called without a root directory;
        # create a temporary one and return it.
        if root is None:
            root = tempfile.mkdtemp()
        with open(os.path.join(root, "source_info.json"), "w") as f:
            source = {
                "_version": "v0",
                "source_type": "image",
                "source": {"image": "my-test-image:latest"},
                "input_types": INPUT_TYPES,
                "output_types": OUTPUT_TYPES,
            }
            f.write(json.dumps(source))
        with open(os.path.join(root, "requirements.frozen.txt"), "w") as f:
            f.write(utils.fixture_open("requirements.txt").read())
        return root

    mocked_public_artifact(job_download_func)
    queued_run = launch_add(job="test-job:v0")
    with pytest.raises(CommError):
        queued_run.wait_until_finished()
| mit | e738b26bd750b56436945addc3baaafc | 30.675214 | 106 | 0.565704 | 3.644051 | false | true | false | false |
wandb/client | wandb/vendor/pygments/formatters/html.py | 25 | 31759 | # -*- coding: utf-8 -*-
"""
pygments.formatters.html
~~~~~~~~~~~~~~~~~~~~~~~~
Formatter for HTML output.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import print_function
import os
import sys
import os.path
from pygments.formatter import Formatter
from pygments.token import Token, Text, STANDARD_TYPES
from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
StringIO, string_types, iteritems
try:
import ctags
except ImportError:
ctags = None
__all__ = ['HtmlFormatter']
# Translation table for str.translate: maps each HTML-significant character's
# code point to its entity reference.
_escape_html_table = {
    ord(char): replacement
    for char, replacement in [
        ('&', u'&amp;'),
        ('<', u'&lt;'),
        ('>', u'&gt;'),
        ('"', u'&quot;'),
        ("'", u'&#39;'),
    ]
}


def escape_html(text, table=_escape_html_table):
    """Escape &, < and > as well as single and double quotes for HTML."""
    return text.translate(table)
def _get_ttype_class(ttype):
    """Return the short CSS class for *ttype*, deriving a suffixed name from
    the nearest standard ancestor when the type itself has no mapping."""
    mapped = STANDARD_TYPES.get(ttype)
    if mapped:
        return mapped
    # Walk up the token hierarchy, collecting each unmapped level's last
    # component; the result is ancestor class plus "-level" suffixes.
    suffix_parts = []
    while mapped is None:
        suffix_parts.insert(0, ttype[-1])
        ttype = ttype.parent
        mapped = STANDARD_TYPES.get(ttype)
    return mapped + ''.join('-' + part for part in suffix_parts)
# CSS boilerplate prepended to generated stylesheets; filled in later with
# %-style formatting (hence the doubled %% in "125%%").
CSSFILE_TEMPLATE = '''\
td.linenos { background-color: #f0f0f0; padding-right: 10px; }
span.lineno { background-color: #f0f0f0; padding: 0 5px 0 5px; }
pre { line-height: 125%%; }
%(styledefs)s
'''

# Full-document header used when the `full` option is set and styles are
# embedded inline in a <style> element.
DOC_HEADER = '''\
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN"
"http://www.w3.org/TR/html4/strict.dtd">
<html>
<head>
<title>%(title)s</title>
<meta http-equiv="content-type" content="text/html; charset=%(encoding)s">
<style type="text/css">
''' + CSSFILE_TEMPLATE + '''
</style>
</head>
<body>
<h2>%(title)s</h2>
'''

# Full-document header used when the `cssfile` option points styles at an
# external stylesheet instead of embedding them.
DOC_HEADER_EXTERNALCSS = '''\
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN"
"http://www.w3.org/TR/html4/strict.dtd">
<html>
<head>
<title>%(title)s</title>
<meta http-equiv="content-type" content="text/html; charset=%(encoding)s">
<link rel="stylesheet" href="%(cssfile)s" type="text/css">
</head>
<body>
<h2>%(title)s</h2>
'''

# Closing markup matching either document header.
DOC_FOOTER = '''\
</body>
</html>
'''
class HtmlFormatter(Formatter):
r"""
Format tokens as HTML 4 ``<span>`` tags within a ``<pre>`` tag, wrapped
in a ``<div>`` tag. The ``<div>``'s CSS class can be set by the `cssclass`
option.
If the `linenos` option is set to ``"table"``, the ``<pre>`` is
additionally wrapped inside a ``<table>`` which has one row and two
cells: one containing the line numbers and one containing the code.
Example:
.. sourcecode:: html
<div class="highlight" >
<table><tr>
<td class="linenos" title="click to toggle"
onclick="with (this.firstChild.style)
{ display = (display == '') ? 'none' : '' }">
<pre>1
2</pre>
</td>
<td class="code">
<pre><span class="Ke">def </span><span class="NaFu">foo</span>(bar):
<span class="Ke">pass</span>
</pre>
</td>
</tr></table></div>
(whitespace added to improve clarity).
Wrapping can be disabled using the `nowrap` option.
A list of lines can be specified using the `hl_lines` option to make these
lines highlighted (as of Pygments 0.11).
With the `full` option, a complete HTML 4 document is output, including
the style definitions inside a ``<style>`` tag, or in a separate file if
the `cssfile` option is given.
When `tagsfile` is set to the path of a ctags index file, it is used to
generate hyperlinks from names to their definition. You must enable
`lineanchors` and run ctags with the `-n` option for this to work. The
`python-ctags` module from PyPI must be installed to use this feature;
otherwise a `RuntimeError` will be raised.
The `get_style_defs(arg='')` method of a `HtmlFormatter` returns a string
containing CSS rules for the CSS classes used by the formatter. The
argument `arg` can be used to specify additional CSS selectors that
are prepended to the classes. A call `fmter.get_style_defs('td .code')`
would result in the following CSS classes:
.. sourcecode:: css
td .code .kw { font-weight: bold; color: #00FF00 }
td .code .cm { color: #999999 }
...
If you have Pygments 0.6 or higher, you can also pass a list or tuple to the
`get_style_defs()` method to request multiple prefixes for the tokens:
.. sourcecode:: python
formatter.get_style_defs(['div.syntax pre', 'pre.syntax'])
The output would then look like this:
.. sourcecode:: css
div.syntax pre .kw,
pre.syntax .kw { font-weight: bold; color: #00FF00 }
div.syntax pre .cm,
pre.syntax .cm { color: #999999 }
...
Additional options accepted:
`nowrap`
If set to ``True``, don't wrap the tokens at all, not even inside a ``<pre>``
tag. This disables most other options (default: ``False``).
`full`
Tells the formatter to output a "full" document, i.e. a complete
self-contained document (default: ``False``).
`title`
If `full` is true, the title that should be used to caption the
document (default: ``''``).
`style`
The style to use, can be a string or a Style subclass (default:
``'default'``). This option has no effect if the `cssfile`
and `noclobber_cssfile` option are given and the file specified in
`cssfile` exists.
`noclasses`
If set to true, token ``<span>`` tags will not use CSS classes, but
inline styles. This is not recommended for larger pieces of code since
it increases output size by quite a bit (default: ``False``).
`classprefix`
Since the token types use relatively short class names, they may clash
with some of your own class names. In this case you can use the
`classprefix` option to give a string to prepend to all Pygments-generated
CSS class names for token types.
Note that this option also affects the output of `get_style_defs()`.
`cssclass`
CSS class for the wrapping ``<div>`` tag (default: ``'highlight'``).
If you set this option, the default selector for `get_style_defs()`
will be this class.
.. versionadded:: 0.9
If you select the ``'table'`` line numbers, the wrapping table will
have a CSS class of this string plus ``'table'``, the default is
accordingly ``'highlighttable'``.
`cssstyles`
Inline CSS styles for the wrapping ``<div>`` tag (default: ``''``).
`prestyles`
Inline CSS styles for the ``<pre>`` tag (default: ``''``).
.. versionadded:: 0.11
`cssfile`
If the `full` option is true and this option is given, it must be the
name of an external file. If the filename does not include an absolute
path, the file's path will be assumed to be relative to the main output
file's path, if the latter can be found. The stylesheet is then written
to this file instead of the HTML file.
.. versionadded:: 0.6
`noclobber_cssfile`
If `cssfile` is given and the specified file exists, the css file will
not be overwritten. This allows the use of the `full` option in
combination with a user specified css file. Default is ``False``.
.. versionadded:: 1.1
`linenos`
If set to ``'table'``, output line numbers as a table with two cells,
one containing the line numbers, the other the whole code. This is
copy-and-paste-friendly, but may cause alignment problems with some
browsers or fonts. If set to ``'inline'``, the line numbers will be
integrated in the ``<pre>`` tag that contains the code (that setting
is *new in Pygments 0.8*).
For compatibility with Pygments 0.7 and earlier, every true value
except ``'inline'`` means the same as ``'table'`` (in particular, that
means also ``True``).
The default value is ``False``, which means no line numbers at all.
**Note:** with the default ("table") line number mechanism, the line
numbers and code can have different line heights in Internet Explorer
unless you give the enclosing ``<pre>`` tags an explicit ``line-height``
CSS property (you get the default line spacing with ``line-height:
125%``).
`hl_lines`
Specify a list of lines to be highlighted.
.. versionadded:: 0.11
`linenostart`
The line number for the first line (default: ``1``).
`linenostep`
If set to a number n > 1, only every nth line number is printed.
`linenospecial`
If set to a number n > 0, every nth line number is given the CSS
class ``"special"`` (default: ``0``).
`nobackground`
If set to ``True``, the formatter won't output the background color
for the wrapping element (this automatically defaults to ``False``
when there is no wrapping element [eg: no argument for the
`get_syntax_defs` method given]) (default: ``False``).
.. versionadded:: 0.6
`lineseparator`
This string is output between lines of code. It defaults to ``"\n"``,
which is enough to break a line inside ``<pre>`` tags, but you can
e.g. set it to ``"<br>"`` to get HTML line breaks.
.. versionadded:: 0.7
`lineanchors`
If set to a nonempty string, e.g. ``foo``, the formatter will wrap each
output line in an anchor tag with a ``name`` of ``foo-linenumber``.
This allows easy linking to certain lines.
.. versionadded:: 0.9
`linespans`
If set to a nonempty string, e.g. ``foo``, the formatter will wrap each
output line in a span tag with an ``id`` of ``foo-linenumber``.
This allows easy access to lines via javascript.
.. versionadded:: 1.6
`anchorlinenos`
If set to `True`, will wrap line numbers in <a> tags. Used in
combination with `linenos` and `lineanchors`.
`tagsfile`
If set to the path of a ctags file, wrap names in anchor tags that
link to their definitions. `lineanchors` should be used, and the
tags file should specify line numbers (see the `-n` option to ctags).
.. versionadded:: 1.6
`tagurlformat`
A string formatting pattern used to generate links to ctags definitions.
Available variables are `%(path)s`, `%(fname)s` and `%(fext)s`.
Defaults to an empty string, resulting in just `#prefix-number` links.
.. versionadded:: 1.6
`filename`
A string used to generate a filename when rendering <pre> blocks,
for example if displaying source code.
.. versionadded:: 2.1
**Subclassing the HTML formatter**
.. versionadded:: 0.7
The HTML formatter is now built in a way that allows easy subclassing, thus
customizing the output HTML code. The `format()` method calls
`self._format_lines()` which returns a generator that yields tuples of ``(1,
line)``, where the ``1`` indicates that the ``line`` is a line of the
formatted source code.
If the `nowrap` option is set, the generator is the iterated over and the
resulting HTML is output.
Otherwise, `format()` calls `self.wrap()`, which wraps the generator with
other generators. These may add some HTML code to the one generated by
`_format_lines()`, either by modifying the lines generated by the latter,
then yielding them again with ``(1, line)``, and/or by yielding other HTML
code before or after the lines, with ``(0, html)``. The distinction between
source lines and other code makes it possible to wrap the generator multiple
times.
The default `wrap()` implementation adds a ``<div>`` and a ``<pre>`` tag.
A custom `HtmlFormatter` subclass could look like this:
.. sourcecode:: python
class CodeHtmlFormatter(HtmlFormatter):
def wrap(self, source, outfile):
return self._wrap_code(source)
def _wrap_code(self, source):
yield 0, '<code>'
for i, t in source:
if i == 1:
# it's a line of formatted code
t += '<br>'
yield i, t
yield 0, '</code>'
This results in wrapping the formatted lines with a ``<code>`` tag, where the
source lines are broken using ``<br>`` tags.
After calling `wrap()`, the `format()` method also adds the "line numbers"
and/or "full document" wrappers if the respective options are set. Then, all
HTML yielded by the wrapped generator is output.
"""
name = 'HTML'
aliases = ['html']
filenames = ['*.html', '*.htm']
    def __init__(self, **options):
        """Parse formatter options (see the class docstring for their meaning)."""
        Formatter.__init__(self, **options)
        self.title = self._decodeifneeded(self.title)
        self.nowrap = get_bool_opt(options, 'nowrap', False)
        self.noclasses = get_bool_opt(options, 'noclasses', False)
        self.classprefix = options.get('classprefix', '')
        self.cssclass = self._decodeifneeded(options.get('cssclass', 'highlight'))
        self.cssstyles = self._decodeifneeded(options.get('cssstyles', ''))
        self.prestyles = self._decodeifneeded(options.get('prestyles', ''))
        self.cssfile = self._decodeifneeded(options.get('cssfile', ''))
        self.noclobber_cssfile = get_bool_opt(options, 'noclobber_cssfile', False)
        self.tagsfile = self._decodeifneeded(options.get('tagsfile', ''))
        self.tagurlformat = self._decodeifneeded(options.get('tagurlformat', ''))
        self.filename = self._decodeifneeded(options.get('filename', ''))
        # The tagsfile feature needs the optional python-ctags package.
        if self.tagsfile:
            if not ctags:
                raise RuntimeError('The "ctags" package must to be installed '
                                   'to be able to use the "tagsfile" feature.')
            self._ctags = ctags.CTags(self.tagsfile)
        # self.linenos: 0 = none, 1 = table style, 2 = inline style.
        linenos = options.get('linenos', False)
        if linenos == 'inline':
            self.linenos = 2
        elif linenos:
            # compatibility with <= 0.7
            self.linenos = 1
        else:
            self.linenos = 0
        self.linenostart = abs(get_int_opt(options, 'linenostart', 1))
        self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
        self.linenospecial = abs(get_int_opt(options, 'linenospecial', 0))
        self.nobackground = get_bool_opt(options, 'nobackground', False)
        self.lineseparator = options.get('lineseparator', '\n')
        self.lineanchors = options.get('lineanchors', '')
        self.linespans = options.get('linespans', '')
        self.anchorlinenos = options.get('anchorlinenos', False)
        self.hl_lines = set()
        # Non-integer entries in hl_lines are silently ignored.
        for lineno in get_list_opt(options, 'hl_lines', []):
            try:
                self.hl_lines.add(int(lineno))
            except ValueError:
                pass
        self._create_stylesheet()
def _get_css_class(self, ttype):
"""Return the css class of this token type prefixed with
the classprefix option."""
ttypeclass = _get_ttype_class(ttype)
if ttypeclass:
return self.classprefix + ttypeclass
return ''
def _get_css_classes(self, ttype):
"""Return the css classes of this token type prefixed with
the classprefix option."""
cls = self._get_css_class(ttype)
while ttype not in STANDARD_TYPES:
ttype = ttype.parent
cls = self._get_css_class(ttype) + ' ' + cls
return cls
    def _create_stylesheet(self):
        """Build the token-type -> class and class -> style lookup tables
        from the selected style definition."""
        # ttype2class maps token types to CSS class names; the root Token
        # type intentionally maps to the empty class.
        t2c = self.ttype2class = {Token: ''}
        # class2style maps a class name to (css text, token type, depth).
        c2s = self.class2style = {}
        for ttype, ndef in self.style:
            name = self._get_css_class(ttype)
            style = ''
            if ndef['color']:
                style += 'color: #%s; ' % ndef['color']
            if ndef['bold']:
                style += 'font-weight: bold; '
            if ndef['italic']:
                style += 'font-style: italic; '
            if ndef['underline']:
                style += 'text-decoration: underline; '
            if ndef['bgcolor']:
                style += 'background-color: #%s; ' % ndef['bgcolor']
            if ndef['border']:
                style += 'border: 1px solid #%s; ' % ndef['border']
            if style:
                t2c[ttype] = name
                # save len(ttype) to enable ordering the styles by
                # hierarchy (necessary for CSS cascading rules!)
                c2s[name] = (style[:-2], ttype, len(ttype))
    def get_style_defs(self, arg=None):
        """
        Return CSS style definitions for the classes produced by the current
        highlighting style. ``arg`` can be a string or list of selectors to
        insert before the token type classes.
        """
        if arg is None:
            # Default prefix: the cssclass option as a class selector, if set.
            arg = ('cssclass' in self.options and '.'+self.cssclass or '')
        if isinstance(arg, string_types):
            args = [arg]
        else:
            args = list(arg)

        def prefix(cls):
            # Combine every requested selector prefix with this class name.
            if cls:
                cls = '.' + cls
            tmp = []
            for arg in args:
                tmp.append((arg and arg + ' ' or '') + cls)
            return ', '.join(tmp)

        # Sort by hierarchy depth first so more specific rules come later
        # (required for correct CSS cascading).
        styles = [(level, ttype, cls, style)
                  for cls, (style, ttype, level) in iteritems(self.class2style)
                  if cls and style]
        styles.sort()
        lines = ['%s { %s } /* %s */' % (prefix(cls), style, repr(ttype)[6:])
                 for (level, ttype, cls, style) in styles]
        if arg and not self.nobackground and \
           self.style.background_color is not None:
            # Emit a background rule for the wrapper, inheriting the plain
            # Text style when one is defined.
            text_style = ''
            if Text in self.ttype2class:
                text_style = ' ' + self.class2style[self.ttype2class[Text]][0]
            lines.insert(0, '%s { background: %s;%s }' %
                         (prefix(''), self.style.background_color, text_style))
        if self.style.highlight_color is not None:
            # Rule for highlighted lines (the hl_lines option).
            lines.insert(0, '%s.hll { background-color: %s }' %
                         (prefix(''), self.style.highlight_color))
        return '\n'.join(lines)
def _decodeifneeded(self, value):
if isinstance(value, bytes):
if self.encoding:
return value.decode(self.encoding)
return value.decode()
return value
def _wrap_full(self, inner, outfile):
if self.cssfile:
if os.path.isabs(self.cssfile):
# it's an absolute filename
cssfilename = self.cssfile
else:
try:
filename = outfile.name
if not filename or filename[0] == '<':
# pseudo files, e.g. name == '<fdopen>'
raise AttributeError
cssfilename = os.path.join(os.path.dirname(filename),
self.cssfile)
except AttributeError:
print('Note: Cannot determine output file name, '
'using current directory as base for the CSS file name',
file=sys.stderr)
cssfilename = self.cssfile
# write CSS file only if noclobber_cssfile isn't given as an option.
try:
if not os.path.exists(cssfilename) or not self.noclobber_cssfile:
cf = open(cssfilename, "w")
cf.write(CSSFILE_TEMPLATE %
{'styledefs': self.get_style_defs('body')})
cf.close()
except IOError as err:
err.strerror = 'Error writing CSS file: ' + err.strerror
raise
yield 0, (DOC_HEADER_EXTERNALCSS %
dict(title=self.title,
cssfile=self.cssfile,
encoding=self.encoding))
else:
yield 0, (DOC_HEADER %
dict(title=self.title,
styledefs=self.get_style_defs('body'),
encoding=self.encoding))
for t, line in inner:
yield t, line
yield 0, DOC_FOOTER
def _wrap_tablelinenos(self, inner):
dummyoutfile = StringIO()
lncount = 0
for t, line in inner:
if t:
lncount += 1
dummyoutfile.write(line)
fl = self.linenostart
mw = len(str(lncount + fl - 1))
sp = self.linenospecial
st = self.linenostep
la = self.lineanchors
aln = self.anchorlinenos
nocls = self.noclasses
if sp:
lines = []
for i in range(fl, fl+lncount):
if i % st == 0:
if i % sp == 0:
if aln:
lines.append('<a href="#%s-%d" class="special">%*d</a>' %
(la, i, mw, i))
else:
lines.append('<span class="special">%*d</span>' % (mw, i))
else:
if aln:
lines.append('<a href="#%s-%d">%*d</a>' % (la, i, mw, i))
else:
lines.append('%*d' % (mw, i))
else:
lines.append('')
ls = '\n'.join(lines)
else:
lines = []
for i in range(fl, fl+lncount):
if i % st == 0:
if aln:
lines.append('<a href="#%s-%d">%*d</a>' % (la, i, mw, i))
else:
lines.append('%*d' % (mw, i))
else:
lines.append('')
ls = '\n'.join(lines)
# in case you wonder about the seemingly redundant <div> here: since the
# content in the other cell also is wrapped in a div, some browsers in
# some configurations seem to mess up the formatting...
if nocls:
yield 0, ('<table class="%stable">' % self.cssclass +
'<tr><td><div class="linenodiv" '
'style="background-color: #f0f0f0; padding-right: 10px">'
'<pre style="line-height: 125%">' +
ls + '</pre></div></td><td class="code">')
else:
yield 0, ('<table class="%stable">' % self.cssclass +
'<tr><td class="linenos"><div class="linenodiv"><pre>' +
ls + '</pre></div></td><td class="code">')
yield 0, dummyoutfile.getvalue()
yield 0, '</td></tr></table>'
    def _wrap_inlinelinenos(self, inner):
        """Prepend an inline ``<span>`` holding the line number to every
        source line.  Numbers are printed only every ``linenostep`` lines
        (blank padding otherwise); when ``linenospecial`` is set, every
        sp-th number gets the "special" styling."""
        # need a list of lines since we need the width of a single number :(
        lines = list(inner)
        sp = self.linenospecial
        st = self.linenostep
        num = self.linenostart
        # width of the widest line number, used for %*s right-alignment
        mw = len(str(len(lines) + num - 1))
        # `(num % st and ' ' or num)` prints the number only when num is a
        # multiple of linenostep, and a single space placeholder otherwise
        if self.noclasses:
            if sp:
                for t, line in lines:
                    if num % sp == 0:
                        style = 'background-color: #ffffc0; padding: 0 5px 0 5px'
                    else:
                        style = 'background-color: #f0f0f0; padding: 0 5px 0 5px'
                    yield 1, '<span style="%s">%*s </span>' % (
                        style, mw, (num % st and ' ' or num)) + line
                    num += 1
            else:
                for t, line in lines:
                    yield 1, ('<span style="background-color: #f0f0f0; '
                              'padding: 0 5px 0 5px">%*s </span>' % (
                                  mw, (num % st and ' ' or num)) + line)
                    num += 1
        elif sp:
            for t, line in lines:
                yield 1, '<span class="lineno%s">%*s </span>' % (
                    num % sp == 0 and ' special' or '', mw,
                    (num % st and ' ' or num)) + line
                num += 1
        else:
            for t, line in lines:
                yield 1, '<span class="lineno">%*s </span>' % (
                    mw, (num % st and ' ' or num)) + line
                num += 1
def _wrap_lineanchors(self, inner):
s = self.lineanchors
# subtract 1 since we have to increment i *before* yielding
i = self.linenostart - 1
for t, line in inner:
if t:
i += 1
yield 1, '<a name="%s-%d"></a>' % (s, i) + line
else:
yield 0, line
def _wrap_linespans(self, inner):
s = self.linespans
i = self.linenostart - 1
for t, line in inner:
if t:
i += 1
yield 1, '<span id="%s-%d">%s</span>' % (s, i, line)
else:
yield 0, line
def _wrap_div(self, inner):
style = []
if (self.noclasses and not self.nobackground and
self.style.background_color is not None):
style.append('background: %s' % (self.style.background_color,))
if self.cssstyles:
style.append(self.cssstyles)
style = '; '.join(style)
yield 0, ('<div' + (self.cssclass and ' class="%s"' % self.cssclass) +
(style and (' style="%s"' % style)) + '>')
for tup in inner:
yield tup
yield 0, '</div>\n'
def _wrap_pre(self, inner):
style = []
if self.prestyles:
style.append(self.prestyles)
if self.noclasses:
style.append('line-height: 125%')
style = '; '.join(style)
if self.filename:
yield 0, ('<span class="filename">' + self.filename + '</span>')
# the empty span here is to keep leading empty lines from being
# ignored by HTML parsers
yield 0, ('<pre' + (style and ' style="%s"' % style) + '><span></span>')
for tup in inner:
yield tup
yield 0, '</pre>'
    def _format_lines(self, tokensource):
        """
        Just format the tokens, without any wrapping tags.
        Yield individual lines.
        """
        nocls = self.noclasses
        lsep = self.lineseparator
        # for <span style=""> lookup only
        getcls = self.ttype2class.get
        c2s = self.class2style
        escape_table = _escape_html_table
        tagsfile = self.tagsfile
        # `lspan` is the opening <span ...> tag of the span currently left
        # open across the pending (unfinished) `line` buffer; '' = none open
        lspan = ''
        line = []
        for ttype, value in tokensource:
            if nocls:
                # inline-style mode: walk up the type hierarchy until a
                # type with a known class (and thus a style) is found
                cclass = getcls(ttype)
                while cclass is None:
                    ttype = ttype.parent
                    cclass = getcls(ttype)
                cspan = cclass and '<span style="%s">' % c2s[cclass][0] or ''
            else:
                cls = self._get_css_classes(ttype)
                cspan = cls and '<span class="%s">' % cls or ''
            # HTML-escape, then split on newlines: a token value may span
            # several output lines
            parts = value.translate(escape_table).split('\n')
            if tagsfile and ttype in Token.Name:
                # link name tokens to their ctags definition site
                filename, linenumber = self._lookup_ctag(value)
                if linenumber:
                    base, filename = os.path.split(filename)
                    if base:
                        base += '/'
                    filename, extension = os.path.splitext(filename)
                    url = self.tagurlformat % {'path': base, 'fname': filename,
                                               'fext': extension}
                    parts[0] = "<a href=\"%s#%s-%d\">%s" % \
                        (url, self.lineanchors, linenumber, parts[0])
                    parts[-1] = parts[-1] + "</a>"
            # for all but the last line
            for part in parts[:-1]:
                if line:
                    if lspan != cspan:
                        line.extend(((lspan and '</span>'), cspan, part,
                                     (cspan and '</span>'), lsep))
                    else: # both are the same
                        line.extend((part, (lspan and '</span>'), lsep))
                    yield 1, ''.join(line)
                    line = []
                elif part:
                    yield 1, ''.join((cspan, part, (cspan and '</span>'), lsep))
                else:
                    yield 1, lsep
            # for the last line
            if line and parts[-1]:
                if lspan != cspan:
                    line.extend(((lspan and '</span>'), cspan, parts[-1]))
                    lspan = cspan
                else:
                    line.append(parts[-1])
            elif parts[-1]:
                line = [cspan, parts[-1]]
                lspan = cspan
            # else we neither have to open a new span nor set lspan
        # flush whatever is still buffered after the last token
        if line:
            line.extend(((lspan and '</span>'), lsep))
            yield 1, ''.join(line)
def _lookup_ctag(self, token):
entry = ctags.TagEntry()
if self._ctags.find(entry, token, 0):
return entry['file'], entry['lineNumber']
else:
return None, None
    def _highlight_lines(self, tokensource):
        """
        Highlighted the lines specified in the `hl_lines` option by
        post-processing the token stream coming from `_format_lines`.
        """
        hls = self.hl_lines
        for i, (t, value) in enumerate(tokensource):
            if t != 1:
                yield t, value
            # NOTE(review): a non-source item (t != 1) is yielded above and
            # then yielded *again* by the if/else below.  In this file
            # _format_lines only ever yields t == 1 items, so the branch is
            # dead here — confirm before feeding this generator any other
            # stream.
            if i + 1 in hls: # i + 1 because Python indexes start at 0
                if self.noclasses:
                    style = ''
                    if self.style.highlight_color is not None:
                        style = (' style="background-color: %s"' %
                                 (self.style.highlight_color,))
                    yield 1, '<span%s>%s</span>' % (style, value)
                else:
                    yield 1, '<span class="hll">%s</span>' % value
            else:
                yield 1, value
def wrap(self, source, outfile):
"""
Wrap the ``source``, which is a generator yielding
individual lines, in custom generators. See docstring
for `format`. Can be overridden.
"""
return self._wrap_div(self._wrap_pre(source))
    def format_unencoded(self, tokensource, outfile):
        """
        The formatting process uses several nested generators; which of
        them are used is determined by the user's options.
        Each generator should take at least one argument, ``inner``,
        and wrap the pieces of text generated by this.
        Always yield 2-tuples: (code, text). If "code" is 1, the text
        is part of the original tokensource being highlighted, if it's
        0, the text is some piece of wrapping. This makes it possible to
        use several different wrappers that process the original source
        linewise, e.g. line number generators.
        """
        source = self._format_lines(tokensource)
        if self.hl_lines:
            source = self._highlight_lines(source)
        if not self.nowrap:
            # linenos == 2 selects inline line numbers, == 1 the table form
            if self.linenos == 2:
                source = self._wrap_inlinelinenos(source)
            if self.lineanchors:
                source = self._wrap_lineanchors(source)
            if self.linespans:
                source = self._wrap_linespans(source)
            source = self.wrap(source, outfile)
            # the table wrapper must go outside div/pre, hence after wrap()
            if self.linenos == 1:
                source = self._wrap_tablelinenos(source)
            if self.full:
                source = self._wrap_full(source, outfile)
        # drain the generator chain, writing only the text parts
        for t, piece in source:
            outfile.write(piece)
| mit | aaf0925505cd5b976b9cf881fa606972 | 36.319624 | 86 | 0.537328 | 4.155849 | false | false | false | false |
wandb/client | wandb/sdk/service/service_sock.py | 1 | 2369 | """socket service.
Implement ServiceInterface for socket transport.
"""
from typing import TYPE_CHECKING
from wandb.proto import wandb_server_pb2 as spb
from .service_base import _pbmap_apply_dict
from .service_base import ServiceInterface
from ..lib.sock_client import SockClient
if TYPE_CHECKING:
from wandb.sdk.wandb_settings import Settings
class ServiceSockInterface(ServiceInterface):
    """Socket-transport implementation of ``ServiceInterface``.
    Each ``_svc_inform_*`` method builds the corresponding protobuf
    request message and sends it to the wandb service process over the
    shared ``SockClient``.
    """
    _sock_client: SockClient
    def __init__(self) -> None:
        self._sock_client = SockClient()
    def get_transport(self) -> str:
        # transport identifier reported to the service layer
        return "tcp"
    def _get_sock_client(self) -> SockClient:
        return self._sock_client
    def _svc_connect(self, port: int) -> None:
        """Connect the underlying socket client to the service on *port*."""
        self._sock_client.connect(port=port)
    def _svc_inform_init(self, settings: "Settings", run_id: str) -> None:
        """Tell the service to initialize a stream for *run_id* with a
        static snapshot of *settings*."""
        inform_init = spb.ServerInformInitRequest()
        settings_dict = settings.make_static()
        _pbmap_apply_dict(inform_init._settings_map, settings_dict)
        inform_init._info.stream_id = run_id
        assert self._sock_client
        self._sock_client.send(inform_init=inform_init)
    def _svc_inform_start(self, settings: "Settings", run_id: str) -> None:
        """Tell the service to start the stream for *run_id*, sending the
        current static settings snapshot."""
        inform_start = spb.ServerInformStartRequest()
        settings_dict = settings.make_static()
        _pbmap_apply_dict(inform_start._settings_map, settings_dict)
        inform_start._info.stream_id = run_id
        assert self._sock_client
        self._sock_client.send(inform_start=inform_start)
    def _svc_inform_finish(self, run_id: str = None) -> None:
        """Tell the service the run identified by *run_id* has finished."""
        # NOTE(review): the annotation says ``str`` but the default is
        # None; the assert below makes a missing run_id fail fast.  An
        # Optional[str] annotation would match the default — confirm intent.
        assert run_id
        inform_finish = spb.ServerInformFinishRequest()
        inform_finish._info.stream_id = run_id
        assert self._sock_client
        self._sock_client.send(inform_finish=inform_finish)
    def _svc_inform_attach(self, attach_id: str) -> spb.ServerInformAttachResponse:
        """Ask the service for attach information for *attach_id* and
        return the service's response message (blocking round-trip)."""
        inform_attach = spb.ServerInformAttachRequest()
        inform_attach._info.stream_id = attach_id
        assert self._sock_client
        response = self._sock_client.send_and_recv(inform_attach=inform_attach)
        return response.inform_attach_response
    def _svc_inform_teardown(self, exit_code: int) -> None:
        """Tell the service to shut down, reporting *exit_code*."""
        inform_teardown = spb.ServerInformTeardownRequest(exit_code=exit_code)
        assert self._sock_client
        self._sock_client.send(inform_teardown=inform_teardown)
| mit | ad83da79f352dad7c478e489c57d5ef6 | 33.333333 | 83 | 0.678767 | 3.393983 | false | false | false | false |
wandb/client | wandb/vendor/watchdog/observers/api.py | 16 | 11720 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <yesudeep@gmail.com>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import with_statement
import threading
from watchdog.utils import BaseThread
from watchdog.utils.compat import queue
from watchdog.utils.bricks import SkipRepeatsQueue
# Default blocking intervals used by EventEmitter and EventDispatcher below.
DEFAULT_EMITTER_TIMEOUT = 1 # in seconds.
DEFAULT_OBSERVER_TIMEOUT = 1 # in seconds.
# Collection classes
class EventQueue(SkipRepeatsQueue):
    """Thread-safe event queue that skips adding the same event
    (:class:`FileSystemEvent`) multiple times consecutively, avoiding
    redundant dispatches when identical events are produced faster than
    an observer can consume them.
    """
class ObservedWatch(object):
    """A single scheduled watch: a filesystem path plus a recursion flag.

    Two watches compare (and hash) equal iff both their path and their
    recursion flag match.

    :param path:
        Path string.
    :param recursive:
        ``True`` if watch is recursive; ``False`` otherwise.
    """
    def __init__(self, path, recursive):
        self._path = path
        self._is_recursive = recursive
    @property
    def path(self):
        """The path that this watch monitors."""
        return self._path
    @property
    def is_recursive(self):
        """Determines whether subdirectories are watched for the path."""
        return self._is_recursive
    @property
    def key(self):
        """Hashable identity of the watch: ``(path, is_recursive)``."""
        return self.path, self.is_recursive
    def __eq__(self, watch):
        return self.key == watch.key
    def __ne__(self, watch):
        return not self.__eq__(watch)
    def __hash__(self):
        return hash(self.key)
    def __repr__(self):
        return "<ObservedWatch: path=%s, is_recursive=%s>" % self.key
# Observer classes
class EventEmitter(BaseThread):
    """
    Producer thread base class subclassed by event emitters
    that generate events and populate a queue with them.
    :param event_queue:
        The event queue to populate with generated events.
    :type event_queue:
        :class:`watchdog.events.EventQueue`
    :param watch:
        The watch to observe and produce events for.
    :type watch:
        :class:`ObservedWatch`
    :param timeout:
        Timeout (in seconds) between successive attempts at reading events.
    :type timeout:
        ``float``
    """
    def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT):
        BaseThread.__init__(self)
        self._event_queue = event_queue
        self._watch = watch
        self._timeout = timeout
    @property
    def timeout(self):
        """
        Blocking timeout for reading events.
        """
        return self._timeout
    @property
    def watch(self):
        """
        The watch associated with this emitter.
        """
        return self._watch
    def queue_event(self, event):
        """
        Queues a single event.
        :param event:
            Event to be queued.
        :type event:
            An instance of :class:`watchdog.events.FileSystemEvent`
            or a subclass.
        """
        # events are enqueued together with their originating watch so the
        # dispatcher can route them to the right handlers
        self._event_queue.put((event, self.watch))
    def queue_events(self, timeout):
        """Override this method to populate the event queue with events
        per interval period.
        :param timeout:
            Timeout (in seconds) between successive attempts at
            reading events.
        :type timeout:
            ``float``
        """
    def run(self):
        # poll for events until the thread is asked to stop
        # NOTE(review): the finally clause is empty, so the try/finally is
        # currently a no-op — confirm whether subclasses rely on it.
        try:
            while self.should_keep_running():
                self.queue_events(self.timeout)
        finally:
            pass
class EventDispatcher(BaseThread):
    """
    Consumer thread base class subclassed by event observer threads
    that dispatch events from an event queue to appropriate event handlers.
    :param timeout:
        Event queue blocking timeout (in seconds).
    :type timeout:
        ``float``
    """
    def __init__(self, timeout=DEFAULT_OBSERVER_TIMEOUT):
        BaseThread.__init__(self)
        self._event_queue = EventQueue()
        self._timeout = timeout
    @property
    def timeout(self):
        """Event queue block timeout."""
        return self._timeout
    @property
    def event_queue(self):
        """The event queue which is populated with file system events
        by emitters and from which events are dispatched by a dispatcher
        thread."""
        return self._event_queue
    def dispatch_events(self, event_queue, timeout):
        """Override this method to consume events from an event queue, blocking
        on the queue for the specified timeout before raising :class:`queue.Empty`.
        :param event_queue:
            Event queue to populate with one set of events.
        :type event_queue:
            :class:`EventQueue`
        :param timeout:
            Interval period (in seconds) to wait before timing out on the
            event queue.
        :type timeout:
            ``float``
        :raises:
            :class:`queue.Empty`
        """
    def run(self):
        # keep draining the queue; a timeout (queue.Empty) just loops so the
        # should_keep_running flag is re-checked periodically
        while self.should_keep_running():
            try:
                self.dispatch_events(self.event_queue, self.timeout)
            except queue.Empty:
                continue
class BaseObserver(EventDispatcher):
    """Base observer.
    Owns the set of scheduled watches, the emitters that produce events
    for them, and the handlers the events are dispatched to.  All mutation
    of that shared state happens under ``self._lock``.
    """
    def __init__(self, emitter_class, timeout=DEFAULT_OBSERVER_TIMEOUT):
        EventDispatcher.__init__(self, timeout)
        self._emitter_class = emitter_class
        self._lock = threading.RLock()
        self._watches = set()
        self._handlers = dict()
        self._emitters = set()
        self._emitter_for_watch = dict()
    def _add_emitter(self, emitter):
        # register the emitter under its watch; callers hold self._lock
        self._emitter_for_watch[emitter.watch] = emitter
        self._emitters.add(emitter)
    def _remove_emitter(self, emitter):
        # deregister, stop and join the emitter thread; RuntimeError from
        # join() (thread never started / joining current thread) is ignored
        del self._emitter_for_watch[emitter.watch]
        self._emitters.remove(emitter)
        emitter.stop()
        try:
            emitter.join()
        except RuntimeError:
            pass
    def _clear_emitters(self):
        # stop all emitters first, then join, so they shut down in parallel
        for emitter in self._emitters:
            emitter.stop()
        for emitter in self._emitters:
            try:
                emitter.join()
            except RuntimeError:
                pass
        self._emitters.clear()
        self._emitter_for_watch.clear()
    def _add_handler_for_watch(self, event_handler, watch):
        if watch not in self._handlers:
            self._handlers[watch] = set()
        self._handlers[watch].add(event_handler)
    def _remove_handlers_for_watch(self, watch):
        del self._handlers[watch]
    @property
    def emitters(self):
        """Returns event emitter created by this observer."""
        return self._emitters
    def start(self):
        # start any emitters scheduled before the observer itself started
        for emitter in self._emitters:
            emitter.start()
        super(BaseObserver, self).start()
    def schedule(self, event_handler, path, recursive=False):
        """
        Schedules watching a path and calls appropriate methods specified
        in the given event handler in response to file system events.
        :param event_handler:
            An event handler instance that has appropriate event handling
            methods which will be called by the observer in response to
            file system events.
        :type event_handler:
            :class:`watchdog.events.FileSystemEventHandler` or a subclass
        :param path:
            Directory path that will be monitored.
        :type path:
            ``str``
        :param recursive:
            ``True`` if events will be emitted for sub-directories
            traversed recursively; ``False`` otherwise.
        :type recursive:
            ``bool``
        :return:
            An :class:`ObservedWatch` object instance representing
            a watch.
        """
        with self._lock:
            watch = ObservedWatch(path, recursive)
            self._add_handler_for_watch(event_handler, watch)
            # If we don't have an emitter for this watch already, create it.
            if self._emitter_for_watch.get(watch) is None:
                emitter = self._emitter_class(event_queue=self.event_queue,
                                              watch=watch,
                                              timeout=self.timeout)
                self._add_emitter(emitter)
                if self.is_alive():
                    emitter.start()
            self._watches.add(watch)
        return watch
    def add_handler_for_watch(self, event_handler, watch):
        """Adds a handler for the given watch.
        :param event_handler:
            An event handler instance that has appropriate event handling
            methods which will be called by the observer in response to
            file system events.
        :type event_handler:
            :class:`watchdog.events.FileSystemEventHandler` or a subclass
        :param watch:
            The watch to add a handler for.
        :type watch:
            An instance of :class:`ObservedWatch` or a subclass of
            :class:`ObservedWatch`
        """
        with self._lock:
            self._add_handler_for_watch(event_handler, watch)
    def remove_handler_for_watch(self, event_handler, watch):
        """Removes a handler for the given watch.
        :param event_handler:
            An event handler instance that has appropriate event handling
            methods which will be called by the observer in response to
            file system events.
        :type event_handler:
            :class:`watchdog.events.FileSystemEventHandler` or a subclass
        :param watch:
            The watch to remove a handler for.
        :type watch:
            An instance of :class:`ObservedWatch` or a subclass of
            :class:`ObservedWatch`
        """
        with self._lock:
            self._handlers[watch].remove(event_handler)
    def unschedule(self, watch):
        """Unschedules a watch.
        :param watch:
            The watch to unschedule.
        :type watch:
            An instance of :class:`ObservedWatch` or a subclass of
            :class:`ObservedWatch`
        """
        with self._lock:
            emitter = self._emitter_for_watch[watch]
            del self._handlers[watch]
            self._remove_emitter(emitter)
            self._watches.remove(watch)
    def unschedule_all(self):
        """Unschedules all watches and detaches all associated event
        handlers."""
        with self._lock:
            self._handlers.clear()
            self._clear_emitters()
            self._watches.clear()
    def on_thread_stop(self):
        self.unschedule_all()
    def dispatch_events(self, event_queue, timeout):
        event, watch = event_queue.get(block=True, timeout=timeout)
        with self._lock:
            # To allow unschedule/stop and safe removal of event handlers
            # within event handlers itself, check if the handler is still
            # registered after every dispatch.
            for handler in list(self._handlers.get(watch, [])):
                if handler in self._handlers.get(watch, []):
                    handler.dispatch(event)
        event_queue.task_done()
| mit | 7c9bb4a6b6dec9ac27133b515412f346 | 30.761518 | 83 | 0.601195 | 4.451196 | false | false | false | false |
wandb/client | wandb/vendor/graphql-core-1.1/wandb_graphql/utils/extend_schema.py | 3 | 14143 | from collections import defaultdict
from ..error import GraphQLError
from ..language import ast
from ..pyutils.ordereddict import OrderedDict
from ..type.definition import (GraphQLArgument, GraphQLEnumType,
GraphQLEnumValue, GraphQLField,
GraphQLInputObjectField, GraphQLInputObjectType,
GraphQLInterfaceType, GraphQLList,
GraphQLNonNull, GraphQLObjectType,
GraphQLScalarType, GraphQLUnionType)
from ..type.introspection import (__Directive, __DirectiveLocation,
__EnumValue, __Field, __InputValue, __Schema,
__Type, __TypeKind)
from ..type.scalars import (GraphQLBoolean, GraphQLFloat, GraphQLID,
GraphQLInt, GraphQLString)
from ..type.schema import GraphQLSchema
from .value_from_ast import value_from_ast
def extend_schema(schema, documentAST=None):
    """Produces a new schema given an existing schema and a document which may
    contain GraphQL type extensions and definitions. The original schema will
    remain unaltered.
    Because a schema represents a graph of references, a schema cannot be
    extended without effectively making an entire copy. We do not know until it's
    too late if subgraphs remain unchanged.
    This algorithm copies the provided schema, applying extensions while
    producing the copy. The original schema remains unaltered."""
    assert isinstance(
        schema, GraphQLSchema), 'Must provide valid GraphQLSchema'
    assert documentAST and isinstance(
        documentAST, ast.Document), 'Must provide valid Document AST'
    # Collect the type definitions and extensions found in the document.
    type_definition_map = {}
    type_extensions_map = defaultdict(list)
    for _def in documentAST.definitions:
        if isinstance(_def, (
            ast.ObjectTypeDefinition,
            ast.InterfaceTypeDefinition,
            ast.EnumTypeDefinition,
            ast.UnionTypeDefinition,
            ast.ScalarTypeDefinition,
            ast.InputObjectTypeDefinition,
        )):
            # Sanity check that none of the defined types conflict with the
            # schema's existing types.
            type_name = _def.name.value
            if schema.get_type(type_name):
                raise GraphQLError(
                    ('Type "{}" already exists in the schema. It cannot also ' +
                     'be defined in this type definition.').format(type_name),
                    [_def]
                )
            type_definition_map[type_name] = _def
        elif isinstance(_def, ast.TypeExtensionDefinition):
            # Sanity check that this type extension exists within the
            # schema's existing types.
            extended_type_name = _def.definition.name.value
            existing_type = schema.get_type(extended_type_name)
            if not existing_type:
                raise GraphQLError(
                    ('Cannot extend type "{}" because it does not ' +
                     'exist in the existing schema.').format(extended_type_name),
                    [_def.definition]
                )
            if not isinstance(existing_type, GraphQLObjectType):
                raise GraphQLError(
                    'Cannot extend non-object type "{}".'.format(
                        extended_type_name),
                    [_def.definition]
                )
            type_extensions_map[extended_type_name].append(_def)
    # Below are functions used for producing this schema that have closed over
    # this scope and have access to the schema, cache, and newly defined types.
    def get_type_from_def(type_def):
        # resolve a type that is known to exist in the original schema
        type = _get_named_type(type_def.name)
        assert type, 'Invalid schema'
        return type
    def get_type_from_AST(astNode):
        # resolve a type referenced from the document; unknown names are an
        # error (unlike get_type_from_def, which asserts)
        type = _get_named_type(astNode.name.value)
        if not type:
            raise GraphQLError(
                ('Unknown type: "{}". Ensure that this type exists ' +
                 'either in the original schema, or is added in a type definition.').format(
                    astNode.name.value),
                [astNode]
            )
        return type
    # Given a name, returns a type from either the existing schema or an
    # added type.
    def _get_named_type(typeName):
        cached_type_def = type_def_cache.get(typeName)
        if cached_type_def:
            return cached_type_def
        existing_type = schema.get_type(typeName)
        if existing_type:
            type_def = extend_type(existing_type)
            type_def_cache[typeName] = type_def
            return type_def
        type_ast = type_definition_map.get(typeName)
        if type_ast:
            type_def = build_type(type_ast)
            type_def_cache[typeName] = type_def
            return type_def
        # implicitly returns None for unknown names; callers handle that
    # Given a type's introspection result, construct the correct
    # GraphQLType instance.
    def extend_type(type):
        if isinstance(type, GraphQLObjectType):
            return extend_object_type(type)
        if isinstance(type, GraphQLInterfaceType):
            return extend_interface_type(type)
        if isinstance(type, GraphQLUnionType):
            return extend_union_type(type)
        # scalars, enums and input objects are reused as-is
        return type
    def extend_object_type(type):
        return GraphQLObjectType(
            name=type.name,
            description=type.description,
            interfaces=lambda: extend_implemented_interfaces(type),
            fields=lambda: extend_field_map(type),
        )
    def extend_interface_type(type):
        return GraphQLInterfaceType(
            name=type.name,
            description=type.description,
            fields=lambda: extend_field_map(type),
            resolve_type=cannot_execute_client_schema,
        )
    def extend_union_type(type):
        return GraphQLUnionType(
            name=type.name,
            description=type.description,
            types=list(map(get_type_from_def, type.types)),
            resolve_type=cannot_execute_client_schema,
        )
    def extend_implemented_interfaces(type):
        interfaces = list(map(get_type_from_def, type.interfaces))
        # If there are any extensions to the interfaces, apply those here.
        extensions = type_extensions_map[type.name]
        for extension in extensions:
            for namedType in extension.definition.interfaces:
                interface_name = namedType.name.value
                if any([_def.name == interface_name for _def in interfaces]):
                    raise GraphQLError(
                        ('Type "{}" already implements "{}". ' +
                         'It cannot also be implemented in this type extension.').format(
                            type.name, interface_name),
                        [namedType]
                    )
                interfaces.append(get_type_from_AST(namedType))
        return interfaces
    def extend_field_map(type):
        new_field_map = OrderedDict()
        old_field_map = type.fields
        for field_name, field in old_field_map.items():
            new_field_map[field_name] = GraphQLField(
                extend_field_type(field.type),
                description=field.description,
                deprecation_reason=field.deprecation_reason,
                args=field.args,
                resolver=cannot_execute_client_schema,
            )
        # If there are any extensions to the fields, apply those here.
        extensions = type_extensions_map[type.name]
        for extension in extensions:
            for field in extension.definition.fields:
                field_name = field.name.value
                if field_name in old_field_map:
                    raise GraphQLError(
                        ('Field "{}.{}" already exists in the ' +
                         'schema. It cannot also be defined in this type extension.').format(
                            type.name, field_name),
                        [field]
                    )
                new_field_map[field_name] = GraphQLField(
                    build_field_type(field.type),
                    args=build_input_values(field.arguments),
                    resolver=cannot_execute_client_schema,
                )
        return new_field_map
    def extend_field_type(type):
        # recurse through wrapper types so the named type inside is rebuilt
        if isinstance(type, GraphQLList):
            return GraphQLList(extend_field_type(type.of_type))
        if isinstance(type, GraphQLNonNull):
            return GraphQLNonNull(extend_field_type(type.of_type))
        return get_type_from_def(type)
    def build_type(type_ast):
        # dispatch on the AST node class; returns None for unhandled kinds
        _type_build = {
            ast.ObjectTypeDefinition: build_object_type,
            ast.InterfaceTypeDefinition: build_interface_type,
            ast.UnionTypeDefinition: build_union_type,
            ast.ScalarTypeDefinition: build_scalar_type,
            ast.EnumTypeDefinition: build_enum_type,
            ast.InputObjectTypeDefinition: build_input_object_type
        }
        func = _type_build.get(type(type_ast))
        if func:
            return func(type_ast)
    def build_object_type(type_ast):
        return GraphQLObjectType(
            type_ast.name.value,
            interfaces=lambda: build_implemented_interfaces(type_ast),
            fields=lambda: build_field_map(type_ast),
        )
    def build_interface_type(type_ast):
        return GraphQLInterfaceType(
            type_ast.name.value,
            fields=lambda: build_field_map(type_ast),
            resolve_type=cannot_execute_client_schema,
        )
    def build_union_type(type_ast):
        return GraphQLUnionType(
            type_ast.name.value,
            types=list(map(get_type_from_AST, type_ast.types)),
            resolve_type=cannot_execute_client_schema,
        )
    def build_scalar_type(type_ast):
        return GraphQLScalarType(
            type_ast.name.value,
            serialize=lambda *args, **kwargs: None,
            # Note: validation calls the parse functions to determine if a
            # literal value is correct. Returning null would cause use of custom
            # scalars to always fail validation. Returning false causes them to
            # always pass validation.
            parse_value=lambda *args, **kwargs: False,
            parse_literal=lambda *args, **kwargs: False,
        )
    def build_enum_type(type_ast):
        return GraphQLEnumType(
            type_ast.name.value,
            values={v.name.value: GraphQLEnumValue() for v in type_ast.values},
        )
    def build_input_object_type(type_ast):
        return GraphQLInputObjectType(
            type_ast.name.value,
            fields=lambda: build_input_values(
                type_ast.fields, GraphQLInputObjectField),
        )
    def build_implemented_interfaces(type_ast):
        return list(map(get_type_from_AST, type_ast.interfaces))
    def build_field_map(type_ast):
        return {
            field.name.value: GraphQLField(
                build_field_type(field.type),
                args=build_input_values(field.arguments),
                resolver=cannot_execute_client_schema,
            ) for field in type_ast.fields
        }
    def build_input_values(values, input_type=GraphQLArgument):
        input_values = OrderedDict()
        for value in values:
            type = build_field_type(value.type)
            input_values[value.name.value] = input_type(
                type,
                default_value=value_from_ast(value.default_value, type)
            )
        return input_values
    def build_field_type(type_ast):
        if isinstance(type_ast, ast.ListType):
            return GraphQLList(build_field_type(type_ast.type))
        if isinstance(type_ast, ast.NonNullType):
            return GraphQLNonNull(build_field_type(type_ast.type))
        return get_type_from_AST(type_ast)
    # If this document contains no new types, then return the same unmodified
    # GraphQLSchema instance.
    if not type_extensions_map and not type_definition_map:
        return schema
    # A cache to use to store the actual GraphQLType definition objects by name.
    # Initialize to the GraphQL built in scalars and introspection types. All
    # functions below are inline so that this type def cache is within the scope
    # of the closure.
    type_def_cache = {
        'String': GraphQLString,
        'Int': GraphQLInt,
        'Float': GraphQLFloat,
        'Boolean': GraphQLBoolean,
        'ID': GraphQLID,
        '__Schema': __Schema,
        '__Directive': __Directive,
        '__DirectiveLocation': __DirectiveLocation,
        '__Type': __Type,
        '__Field': __Field,
        '__InputValue': __InputValue,
        '__EnumValue': __EnumValue,
        '__TypeKind': __TypeKind,
    }
    # Get the root Query, Mutation, and Subscription types.
    query_type = get_type_from_def(schema.get_query_type())
    existing_mutation_type = schema.get_mutation_type()
    mutationType = existing_mutation_type and get_type_from_def(
        existing_mutation_type) or None
    existing_subscription_type = schema.get_subscription_type()
    subscription_type = existing_subscription_type and get_type_from_def(
        existing_subscription_type) or None
    # Iterate through all types, getting the type definition for each, ensuring
    # that any type not directly referenced by a field will get created.
    types = [get_type_from_def(_def) for _def in schema.get_type_map().values()]
    # Do the same with new types, appending to the list of defined types.
    types += [get_type_from_AST(_def) for _def in type_definition_map.values()]
    # Then produce and return a Schema with these types.
    return GraphQLSchema(
        query=query_type,
        mutation=mutationType,
        subscription=subscription_type,
        # Copy directives.
        directives=schema.get_directives(),
        types=types
    )
def cannot_execute_client_schema(*args, **kwargs):
    """Placeholder resolver installed throughout the extended schema:
    client schemas only describe types and must never be executed."""
    message = 'Client Schema cannot be used for execution.'
    raise Exception(message)
| mit | 6d60940d66cc993c88e5ac4aa98a1de3 | 38.616246 | 93 | 0.601216 | 4.404547 | false | false | false | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.