repository_name stringclasses 316 values | func_path_in_repository stringlengths 6 223 | func_name stringlengths 1 134 | language stringclasses 1 value | func_code_string stringlengths 57 65.5k | func_documentation_string stringlengths 1 46.3k | split_name stringclasses 1 value | func_code_url stringlengths 91 315 | called_functions listlengths 1 156 ⌀ | enclosing_scope stringlengths 2 1.48M |
|---|---|---|---|---|---|---|---|---|---|
sashahart/vex | vex/options.py | get_options | python | def get_options(argv):
arg_parser = make_arg_parser()
options, unknown = arg_parser.parse_known_args(argv)
if unknown:
arg_parser.print_help()
raise exceptions.UnknownArguments(
"unknown args: {0!r}".format(unknown))
options.print_help = arg_parser.print_help
return options | Called to parse the given list as command-line arguments.
:returns:
an options object as returned by argparse. | train | https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/options.py#L94-L107 | [
"def make_arg_parser():\n \"\"\"Return a standard ArgumentParser object.\n \"\"\"\n parser = argparse.ArgumentParser(\n formatter_class=argparse.RawTextHelpFormatter,\n usage=\"vex [OPTIONS] VIRTUALENV_NAME COMMAND_TO_RUN ...\",\n )\n\n make = parser.add_argument_group(title='To make a new virtualenv')\n make.add_argument(\n '-m', '--make',\n action=\"store_true\",\n help=\"make named virtualenv before running command\"\n )\n make.add_argument(\n '--python',\n help=\"specify which python for virtualenv to be made\",\n action=\"store\",\n default=None,\n )\n make.add_argument(\n '--site-packages',\n help=\"allow site package imports from new virtualenv\",\n action=\"store_true\",\n )\n make.add_argument(\n '--always-copy',\n help=\"use copies instead of symlinks in new virtualenv\",\n action=\"store_true\",\n )\n\n remove = parser.add_argument_group(title='To remove a virtualenv')\n remove.add_argument(\n '-r', '--remove',\n action=\"store_true\",\n help=\"remove the named virtualenv after running command\"\n )\n\n parser.add_argument(\n \"--path\",\n metavar=\"DIR\",\n help=\"absolute path to virtualenv to use\",\n action=\"store\"\n )\n parser.add_argument(\n '--cwd',\n metavar=\"DIR\",\n action=\"store\",\n default='.',\n help=\"path to run command in (default: '.' 
aka $PWD)\",\n )\n parser.add_argument(\n \"--config\",\n metavar=\"FILE\",\n default=None,\n action=\"store\",\n help=\"path to config file to read (default: '~/.vexrc')\"\n )\n parser.add_argument(\n '--shell-config',\n metavar=\"SHELL\",\n dest=\"shell_to_configure\",\n action=\"store\",\n default=None,\n help=\"print optional config for the specified shell\"\n )\n parser.add_argument(\n '--list',\n metavar=\"PREFIX\",\n nargs=\"?\",\n const=\"\",\n default=None,\n help=\"print a list of available virtualenvs [matching PREFIX]\",\n action=\"store\"\n )\n parser.add_argument(\n '--version',\n help=\"print the version of vex that is being run\",\n action=\"store_true\"\n )\n parser.add_argument(\n \"rest\",\n nargs=argparse.REMAINDER,\n help=argparse.SUPPRESS)\n\n return parser\n"
] | import argparse
from vex import exceptions
def make_arg_parser():
"""Return a standard ArgumentParser object.
"""
parser = argparse.ArgumentParser(
formatter_class=argparse.RawTextHelpFormatter,
usage="vex [OPTIONS] VIRTUALENV_NAME COMMAND_TO_RUN ...",
)
make = parser.add_argument_group(title='To make a new virtualenv')
make.add_argument(
'-m', '--make',
action="store_true",
help="make named virtualenv before running command"
)
make.add_argument(
'--python',
help="specify which python for virtualenv to be made",
action="store",
default=None,
)
make.add_argument(
'--site-packages',
help="allow site package imports from new virtualenv",
action="store_true",
)
make.add_argument(
'--always-copy',
help="use copies instead of symlinks in new virtualenv",
action="store_true",
)
remove = parser.add_argument_group(title='To remove a virtualenv')
remove.add_argument(
'-r', '--remove',
action="store_true",
help="remove the named virtualenv after running command"
)
parser.add_argument(
"--path",
metavar="DIR",
help="absolute path to virtualenv to use",
action="store"
)
parser.add_argument(
'--cwd',
metavar="DIR",
action="store",
default='.',
help="path to run command in (default: '.' aka $PWD)",
)
parser.add_argument(
"--config",
metavar="FILE",
default=None,
action="store",
help="path to config file to read (default: '~/.vexrc')"
)
parser.add_argument(
'--shell-config',
metavar="SHELL",
dest="shell_to_configure",
action="store",
default=None,
help="print optional config for the specified shell"
)
parser.add_argument(
'--list',
metavar="PREFIX",
nargs="?",
const="",
default=None,
help="print a list of available virtualenvs [matching PREFIX]",
action="store"
)
parser.add_argument(
'--version',
help="print the version of vex that is being run",
action="store_true"
)
parser.add_argument(
"rest",
nargs=argparse.REMAINDER,
help=argparse.SUPPRESS)
return parser
|
sashahart/vex | vex/main.py | get_vexrc | python | def get_vexrc(options, environ):
# Complain if user specified nonexistent file with --config.
# But we don't want to complain just because ~/.vexrc doesn't exist.
if options.config and not os.path.exists(options.config):
raise exceptions.InvalidVexrc("nonexistent config: {0!r}".format(options.config))
filename = options.config or os.path.expanduser('~/.vexrc')
vexrc = config.Vexrc.from_file(filename, environ)
return vexrc | Get a representation of the contents of the config file.
:returns:
a Vexrc instance. | train | https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/main.py#L15-L27 | [
"def from_file(cls, path, environ):\n \"\"\"Make a Vexrc instance from given file in given environ.\n \"\"\"\n instance = cls()\n instance.read(path, environ)\n return instance\n"
] | """Main command-line entry-point and any code tightly coupled to it.
"""
import sys
import os
from vex import config
from vex.options import get_options
from vex.run import get_environ, run
from vex.shell_config import handle_shell_config
from vex.make import handle_make
from vex.remove import handle_remove
from vex import exceptions
from vex._version import VERSION
def get_cwd(options):
"""Discover what directory the command should run in.
"""
if not options.cwd:
return None
if not os.path.exists(options.cwd):
raise exceptions.InvalidCwd(
"can't --cwd to invalid path {0!r}".format(options.cwd))
return options.cwd
def get_virtualenv_name(options):
if options.path:
return os.path.dirname(options.path)
else:
ve_name = options.rest.pop(0) if options.rest else ''
if not ve_name:
raise exceptions.NoVirtualenvName(
"could not find a virtualenv name in the command line."
)
return ve_name
def get_virtualenv_path(ve_base, ve_name):
"""Check a virtualenv path, raising exceptions to explain problems.
"""
if not ve_base:
raise exceptions.NoVirtualenvsDirectory(
"could not figure out a virtualenvs directory. "
"make sure $HOME is set, or $WORKON_HOME,"
" or set virtualenvs=something in your .vexrc")
# Using this requires get_ve_base to pass through nonexistent dirs
if not os.path.exists(ve_base):
message = (
"virtualenvs directory {0!r} not found. "
"Create it or use vex --make to get started."
).format(ve_base)
raise exceptions.NoVirtualenvsDirectory(message)
if not ve_name:
raise exceptions.InvalidVirtualenv("no virtualenv name")
# n.b.: if ve_name is absolute, ve_base is discarded by os.path.join,
# and an absolute path will be accepted as first arg.
# So we check if they gave an absolute path as ve_name.
# But we don't want this error if $PWD == $WORKON_HOME,
# in which case 'foo' is a valid relative path to virtualenv foo.
ve_path = os.path.join(ve_base, ve_name)
if ve_path == ve_name and os.path.basename(ve_name) != ve_name:
raise exceptions.InvalidVirtualenv(
'To run in a virtualenv by its path, '
'use "vex --path {0}"'.format(ve_path))
ve_path = os.path.abspath(ve_path)
if not os.path.exists(ve_path):
raise exceptions.InvalidVirtualenv(
"no virtualenv found at {0!r}.".format(ve_path))
return ve_path
def get_command(options, vexrc, environ):
"""Get a command to run.
:returns:
a list of strings representing a command to be passed to Popen.
"""
command = options.rest
if not command:
command = vexrc.get_shell(environ)
if command and command[0].startswith('--'):
raise exceptions.InvalidCommand(
"don't put flags like '%s' after the virtualenv name."
% command[0])
if not command:
raise exceptions.InvalidCommand("no command given")
return command
def handle_version():
sys.stdout.write(VERSION + "\n")
return 0
def handle_list(ve_base, prefix=""):
if not os.path.isdir(ve_base):
sys.stderr.write("no virtualenvs directory at {0!r}\n".format(ve_base))
return 1
text = "\n".join(
sorted(
relative_path for relative_path in os.listdir(ve_base)
if (not relative_path.startswith("-"))
and relative_path.startswith(prefix)
and os.path.isdir(os.path.join(ve_base, relative_path))
)
)
sys.stdout.write(text + "\n")
return 0
def _main(environ, argv):
"""Logic for main(), with less direct system interaction.
Routines called here raise InvalidArgument with messages that
should be delivered on stderr, to be caught by main.
"""
options = get_options(argv)
if options.version:
return handle_version()
vexrc = get_vexrc(options, environ)
# Handle --shell-config as soon as its arguments are available.
if options.shell_to_configure:
return handle_shell_config(options.shell_to_configure, vexrc, environ)
if options.list is not None:
return handle_list(vexrc.get_ve_base(environ), options.list)
# Do as much as possible before a possible make, so errors can raise
# without leaving behind an unused virtualenv.
# get_virtualenv_name is destructive and must happen before get_command
cwd = get_cwd(options)
ve_base = vexrc.get_ve_base(environ)
ve_name = get_virtualenv_name(options)
command = get_command(options, vexrc, environ)
# Either we create ve_path, get it from options.path or find it
# in ve_base.
if options.make:
if options.path:
make_path = os.path.abspath(options.path)
else:
make_path = os.path.abspath(os.path.join(ve_base, ve_name))
handle_make(environ, options, make_path)
ve_path = make_path
elif options.path:
ve_path = os.path.abspath(options.path)
if not os.path.exists(ve_path) or not os.path.isdir(ve_path):
raise exceptions.InvalidVirtualenv(
"argument for --path is not a directory")
else:
try:
ve_path = get_virtualenv_path(ve_base, ve_name)
except exceptions.NoVirtualenvName:
options.print_help()
raise
# get_environ has to wait until ve_path is defined, which might
# be after a make; of course we can't run until we have env.
env = get_environ(environ, vexrc['env'], ve_path)
returncode = run(command, env=env, cwd=cwd)
if options.remove:
handle_remove(ve_path)
if returncode is None:
raise exceptions.InvalidCommand(
"command not found: {0!r}".format(command[0]))
return returncode
def main():
"""The main command-line entry point, with system interactions.
"""
argv = sys.argv[1:]
returncode = 1
try:
returncode = _main(os.environ, argv)
except exceptions.InvalidArgument as error:
if error.message:
sys.stderr.write("Error: " + error.message + '\n')
else:
raise
sys.exit(returncode)
|
sashahart/vex | vex/main.py | get_cwd | python | def get_cwd(options):
if not options.cwd:
return None
if not os.path.exists(options.cwd):
raise exceptions.InvalidCwd(
"can't --cwd to invalid path {0!r}".format(options.cwd))
return options.cwd | Discover what directory the command should run in. | train | https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/main.py#L30-L38 | null | """Main command-line entry-point and any code tightly coupled to it.
"""
import sys
import os
from vex import config
from vex.options import get_options
from vex.run import get_environ, run
from vex.shell_config import handle_shell_config
from vex.make import handle_make
from vex.remove import handle_remove
from vex import exceptions
from vex._version import VERSION
def get_vexrc(options, environ):
"""Get a representation of the contents of the config file.
:returns:
a Vexrc instance.
"""
# Complain if user specified nonexistent file with --config.
# But we don't want to complain just because ~/.vexrc doesn't exist.
if options.config and not os.path.exists(options.config):
raise exceptions.InvalidVexrc("nonexistent config: {0!r}".format(options.config))
filename = options.config or os.path.expanduser('~/.vexrc')
vexrc = config.Vexrc.from_file(filename, environ)
return vexrc
def get_virtualenv_name(options):
if options.path:
return os.path.dirname(options.path)
else:
ve_name = options.rest.pop(0) if options.rest else ''
if not ve_name:
raise exceptions.NoVirtualenvName(
"could not find a virtualenv name in the command line."
)
return ve_name
def get_virtualenv_path(ve_base, ve_name):
"""Check a virtualenv path, raising exceptions to explain problems.
"""
if not ve_base:
raise exceptions.NoVirtualenvsDirectory(
"could not figure out a virtualenvs directory. "
"make sure $HOME is set, or $WORKON_HOME,"
" or set virtualenvs=something in your .vexrc")
# Using this requires get_ve_base to pass through nonexistent dirs
if not os.path.exists(ve_base):
message = (
"virtualenvs directory {0!r} not found. "
"Create it or use vex --make to get started."
).format(ve_base)
raise exceptions.NoVirtualenvsDirectory(message)
if not ve_name:
raise exceptions.InvalidVirtualenv("no virtualenv name")
# n.b.: if ve_name is absolute, ve_base is discarded by os.path.join,
# and an absolute path will be accepted as first arg.
# So we check if they gave an absolute path as ve_name.
# But we don't want this error if $PWD == $WORKON_HOME,
# in which case 'foo' is a valid relative path to virtualenv foo.
ve_path = os.path.join(ve_base, ve_name)
if ve_path == ve_name and os.path.basename(ve_name) != ve_name:
raise exceptions.InvalidVirtualenv(
'To run in a virtualenv by its path, '
'use "vex --path {0}"'.format(ve_path))
ve_path = os.path.abspath(ve_path)
if not os.path.exists(ve_path):
raise exceptions.InvalidVirtualenv(
"no virtualenv found at {0!r}.".format(ve_path))
return ve_path
def get_command(options, vexrc, environ):
"""Get a command to run.
:returns:
a list of strings representing a command to be passed to Popen.
"""
command = options.rest
if not command:
command = vexrc.get_shell(environ)
if command and command[0].startswith('--'):
raise exceptions.InvalidCommand(
"don't put flags like '%s' after the virtualenv name."
% command[0])
if not command:
raise exceptions.InvalidCommand("no command given")
return command
def handle_version():
sys.stdout.write(VERSION + "\n")
return 0
def handle_list(ve_base, prefix=""):
if not os.path.isdir(ve_base):
sys.stderr.write("no virtualenvs directory at {0!r}\n".format(ve_base))
return 1
text = "\n".join(
sorted(
relative_path for relative_path in os.listdir(ve_base)
if (not relative_path.startswith("-"))
and relative_path.startswith(prefix)
and os.path.isdir(os.path.join(ve_base, relative_path))
)
)
sys.stdout.write(text + "\n")
return 0
def _main(environ, argv):
"""Logic for main(), with less direct system interaction.
Routines called here raise InvalidArgument with messages that
should be delivered on stderr, to be caught by main.
"""
options = get_options(argv)
if options.version:
return handle_version()
vexrc = get_vexrc(options, environ)
# Handle --shell-config as soon as its arguments are available.
if options.shell_to_configure:
return handle_shell_config(options.shell_to_configure, vexrc, environ)
if options.list is not None:
return handle_list(vexrc.get_ve_base(environ), options.list)
# Do as much as possible before a possible make, so errors can raise
# without leaving behind an unused virtualenv.
# get_virtualenv_name is destructive and must happen before get_command
cwd = get_cwd(options)
ve_base = vexrc.get_ve_base(environ)
ve_name = get_virtualenv_name(options)
command = get_command(options, vexrc, environ)
# Either we create ve_path, get it from options.path or find it
# in ve_base.
if options.make:
if options.path:
make_path = os.path.abspath(options.path)
else:
make_path = os.path.abspath(os.path.join(ve_base, ve_name))
handle_make(environ, options, make_path)
ve_path = make_path
elif options.path:
ve_path = os.path.abspath(options.path)
if not os.path.exists(ve_path) or not os.path.isdir(ve_path):
raise exceptions.InvalidVirtualenv(
"argument for --path is not a directory")
else:
try:
ve_path = get_virtualenv_path(ve_base, ve_name)
except exceptions.NoVirtualenvName:
options.print_help()
raise
# get_environ has to wait until ve_path is defined, which might
# be after a make; of course we can't run until we have env.
env = get_environ(environ, vexrc['env'], ve_path)
returncode = run(command, env=env, cwd=cwd)
if options.remove:
handle_remove(ve_path)
if returncode is None:
raise exceptions.InvalidCommand(
"command not found: {0!r}".format(command[0]))
return returncode
def main():
"""The main command-line entry point, with system interactions.
"""
argv = sys.argv[1:]
returncode = 1
try:
returncode = _main(os.environ, argv)
except exceptions.InvalidArgument as error:
if error.message:
sys.stderr.write("Error: " + error.message + '\n')
else:
raise
sys.exit(returncode)
|
sashahart/vex | vex/main.py | get_virtualenv_path | python | def get_virtualenv_path(ve_base, ve_name):
if not ve_base:
raise exceptions.NoVirtualenvsDirectory(
"could not figure out a virtualenvs directory. "
"make sure $HOME is set, or $WORKON_HOME,"
" or set virtualenvs=something in your .vexrc")
# Using this requires get_ve_base to pass through nonexistent dirs
if not os.path.exists(ve_base):
message = (
"virtualenvs directory {0!r} not found. "
"Create it or use vex --make to get started."
).format(ve_base)
raise exceptions.NoVirtualenvsDirectory(message)
if not ve_name:
raise exceptions.InvalidVirtualenv("no virtualenv name")
# n.b.: if ve_name is absolute, ve_base is discarded by os.path.join,
# and an absolute path will be accepted as first arg.
# So we check if they gave an absolute path as ve_name.
# But we don't want this error if $PWD == $WORKON_HOME,
# in which case 'foo' is a valid relative path to virtualenv foo.
ve_path = os.path.join(ve_base, ve_name)
if ve_path == ve_name and os.path.basename(ve_name) != ve_name:
raise exceptions.InvalidVirtualenv(
'To run in a virtualenv by its path, '
'use "vex --path {0}"'.format(ve_path))
ve_path = os.path.abspath(ve_path)
if not os.path.exists(ve_path):
raise exceptions.InvalidVirtualenv(
"no virtualenv found at {0!r}.".format(ve_path))
return ve_path | Check a virtualenv path, raising exceptions to explain problems. | train | https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/main.py#L53-L88 | null | """Main command-line entry-point and any code tightly coupled to it.
"""
import sys
import os
from vex import config
from vex.options import get_options
from vex.run import get_environ, run
from vex.shell_config import handle_shell_config
from vex.make import handle_make
from vex.remove import handle_remove
from vex import exceptions
from vex._version import VERSION
def get_vexrc(options, environ):
"""Get a representation of the contents of the config file.
:returns:
a Vexrc instance.
"""
# Complain if user specified nonexistent file with --config.
# But we don't want to complain just because ~/.vexrc doesn't exist.
if options.config and not os.path.exists(options.config):
raise exceptions.InvalidVexrc("nonexistent config: {0!r}".format(options.config))
filename = options.config or os.path.expanduser('~/.vexrc')
vexrc = config.Vexrc.from_file(filename, environ)
return vexrc
def get_cwd(options):
"""Discover what directory the command should run in.
"""
if not options.cwd:
return None
if not os.path.exists(options.cwd):
raise exceptions.InvalidCwd(
"can't --cwd to invalid path {0!r}".format(options.cwd))
return options.cwd
def get_virtualenv_name(options):
if options.path:
return os.path.dirname(options.path)
else:
ve_name = options.rest.pop(0) if options.rest else ''
if not ve_name:
raise exceptions.NoVirtualenvName(
"could not find a virtualenv name in the command line."
)
return ve_name
def get_command(options, vexrc, environ):
"""Get a command to run.
:returns:
a list of strings representing a command to be passed to Popen.
"""
command = options.rest
if not command:
command = vexrc.get_shell(environ)
if command and command[0].startswith('--'):
raise exceptions.InvalidCommand(
"don't put flags like '%s' after the virtualenv name."
% command[0])
if not command:
raise exceptions.InvalidCommand("no command given")
return command
def handle_version():
sys.stdout.write(VERSION + "\n")
return 0
def handle_list(ve_base, prefix=""):
if not os.path.isdir(ve_base):
sys.stderr.write("no virtualenvs directory at {0!r}\n".format(ve_base))
return 1
text = "\n".join(
sorted(
relative_path for relative_path in os.listdir(ve_base)
if (not relative_path.startswith("-"))
and relative_path.startswith(prefix)
and os.path.isdir(os.path.join(ve_base, relative_path))
)
)
sys.stdout.write(text + "\n")
return 0
def _main(environ, argv):
"""Logic for main(), with less direct system interaction.
Routines called here raise InvalidArgument with messages that
should be delivered on stderr, to be caught by main.
"""
options = get_options(argv)
if options.version:
return handle_version()
vexrc = get_vexrc(options, environ)
# Handle --shell-config as soon as its arguments are available.
if options.shell_to_configure:
return handle_shell_config(options.shell_to_configure, vexrc, environ)
if options.list is not None:
return handle_list(vexrc.get_ve_base(environ), options.list)
# Do as much as possible before a possible make, so errors can raise
# without leaving behind an unused virtualenv.
# get_virtualenv_name is destructive and must happen before get_command
cwd = get_cwd(options)
ve_base = vexrc.get_ve_base(environ)
ve_name = get_virtualenv_name(options)
command = get_command(options, vexrc, environ)
# Either we create ve_path, get it from options.path or find it
# in ve_base.
if options.make:
if options.path:
make_path = os.path.abspath(options.path)
else:
make_path = os.path.abspath(os.path.join(ve_base, ve_name))
handle_make(environ, options, make_path)
ve_path = make_path
elif options.path:
ve_path = os.path.abspath(options.path)
if not os.path.exists(ve_path) or not os.path.isdir(ve_path):
raise exceptions.InvalidVirtualenv(
"argument for --path is not a directory")
else:
try:
ve_path = get_virtualenv_path(ve_base, ve_name)
except exceptions.NoVirtualenvName:
options.print_help()
raise
# get_environ has to wait until ve_path is defined, which might
# be after a make; of course we can't run until we have env.
env = get_environ(environ, vexrc['env'], ve_path)
returncode = run(command, env=env, cwd=cwd)
if options.remove:
handle_remove(ve_path)
if returncode is None:
raise exceptions.InvalidCommand(
"command not found: {0!r}".format(command[0]))
return returncode
def main():
"""The main command-line entry point, with system interactions.
"""
argv = sys.argv[1:]
returncode = 1
try:
returncode = _main(os.environ, argv)
except exceptions.InvalidArgument as error:
if error.message:
sys.stderr.write("Error: " + error.message + '\n')
else:
raise
sys.exit(returncode)
|
sashahart/vex | vex/main.py | get_command | python | def get_command(options, vexrc, environ):
command = options.rest
if not command:
command = vexrc.get_shell(environ)
if command and command[0].startswith('--'):
raise exceptions.InvalidCommand(
"don't put flags like '%s' after the virtualenv name."
% command[0])
if not command:
raise exceptions.InvalidCommand("no command given")
return command | Get a command to run.
:returns:
a list of strings representing a command to be passed to Popen. | train | https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/main.py#L91-L106 | [
"def get_shell(self, environ):\n \"\"\"Find a command to run.\n \"\"\"\n command = self.headings[self.default_heading].get('shell')\n if not command and os.name != 'nt':\n command = environ.get('SHELL', '')\n command = shlex.split(command) if command else None\n return command\n"
] | """Main command-line entry-point and any code tightly coupled to it.
"""
import sys
import os
from vex import config
from vex.options import get_options
from vex.run import get_environ, run
from vex.shell_config import handle_shell_config
from vex.make import handle_make
from vex.remove import handle_remove
from vex import exceptions
from vex._version import VERSION
def get_vexrc(options, environ):
"""Get a representation of the contents of the config file.
:returns:
a Vexrc instance.
"""
# Complain if user specified nonexistent file with --config.
# But we don't want to complain just because ~/.vexrc doesn't exist.
if options.config and not os.path.exists(options.config):
raise exceptions.InvalidVexrc("nonexistent config: {0!r}".format(options.config))
filename = options.config or os.path.expanduser('~/.vexrc')
vexrc = config.Vexrc.from_file(filename, environ)
return vexrc
def get_cwd(options):
"""Discover what directory the command should run in.
"""
if not options.cwd:
return None
if not os.path.exists(options.cwd):
raise exceptions.InvalidCwd(
"can't --cwd to invalid path {0!r}".format(options.cwd))
return options.cwd
def get_virtualenv_name(options):
if options.path:
return os.path.dirname(options.path)
else:
ve_name = options.rest.pop(0) if options.rest else ''
if not ve_name:
raise exceptions.NoVirtualenvName(
"could not find a virtualenv name in the command line."
)
return ve_name
def get_virtualenv_path(ve_base, ve_name):
"""Check a virtualenv path, raising exceptions to explain problems.
"""
if not ve_base:
raise exceptions.NoVirtualenvsDirectory(
"could not figure out a virtualenvs directory. "
"make sure $HOME is set, or $WORKON_HOME,"
" or set virtualenvs=something in your .vexrc")
# Using this requires get_ve_base to pass through nonexistent dirs
if not os.path.exists(ve_base):
message = (
"virtualenvs directory {0!r} not found. "
"Create it or use vex --make to get started."
).format(ve_base)
raise exceptions.NoVirtualenvsDirectory(message)
if not ve_name:
raise exceptions.InvalidVirtualenv("no virtualenv name")
# n.b.: if ve_name is absolute, ve_base is discarded by os.path.join,
# and an absolute path will be accepted as first arg.
# So we check if they gave an absolute path as ve_name.
# But we don't want this error if $PWD == $WORKON_HOME,
# in which case 'foo' is a valid relative path to virtualenv foo.
ve_path = os.path.join(ve_base, ve_name)
if ve_path == ve_name and os.path.basename(ve_name) != ve_name:
raise exceptions.InvalidVirtualenv(
'To run in a virtualenv by its path, '
'use "vex --path {0}"'.format(ve_path))
ve_path = os.path.abspath(ve_path)
if not os.path.exists(ve_path):
raise exceptions.InvalidVirtualenv(
"no virtualenv found at {0!r}.".format(ve_path))
return ve_path
def handle_version():
sys.stdout.write(VERSION + "\n")
return 0
def handle_list(ve_base, prefix=""):
if not os.path.isdir(ve_base):
sys.stderr.write("no virtualenvs directory at {0!r}\n".format(ve_base))
return 1
text = "\n".join(
sorted(
relative_path for relative_path in os.listdir(ve_base)
if (not relative_path.startswith("-"))
and relative_path.startswith(prefix)
and os.path.isdir(os.path.join(ve_base, relative_path))
)
)
sys.stdout.write(text + "\n")
return 0
def _main(environ, argv):
"""Logic for main(), with less direct system interaction.
Routines called here raise InvalidArgument with messages that
should be delivered on stderr, to be caught by main.
"""
options = get_options(argv)
if options.version:
return handle_version()
vexrc = get_vexrc(options, environ)
# Handle --shell-config as soon as its arguments are available.
if options.shell_to_configure:
return handle_shell_config(options.shell_to_configure, vexrc, environ)
if options.list is not None:
return handle_list(vexrc.get_ve_base(environ), options.list)
# Do as much as possible before a possible make, so errors can raise
# without leaving behind an unused virtualenv.
# get_virtualenv_name is destructive and must happen before get_command
cwd = get_cwd(options)
ve_base = vexrc.get_ve_base(environ)
ve_name = get_virtualenv_name(options)
command = get_command(options, vexrc, environ)
# Either we create ve_path, get it from options.path or find it
# in ve_base.
if options.make:
if options.path:
make_path = os.path.abspath(options.path)
else:
make_path = os.path.abspath(os.path.join(ve_base, ve_name))
handle_make(environ, options, make_path)
ve_path = make_path
elif options.path:
ve_path = os.path.abspath(options.path)
if not os.path.exists(ve_path) or not os.path.isdir(ve_path):
raise exceptions.InvalidVirtualenv(
"argument for --path is not a directory")
else:
try:
ve_path = get_virtualenv_path(ve_base, ve_name)
except exceptions.NoVirtualenvName:
options.print_help()
raise
# get_environ has to wait until ve_path is defined, which might
# be after a make; of course we can't run until we have env.
env = get_environ(environ, vexrc['env'], ve_path)
returncode = run(command, env=env, cwd=cwd)
if options.remove:
handle_remove(ve_path)
if returncode is None:
raise exceptions.InvalidCommand(
"command not found: {0!r}".format(command[0]))
return returncode
def main():
"""The main command-line entry point, with system interactions.
"""
argv = sys.argv[1:]
returncode = 1
try:
returncode = _main(os.environ, argv)
except exceptions.InvalidArgument as error:
if error.message:
sys.stderr.write("Error: " + error.message + '\n')
else:
raise
sys.exit(returncode)
|
sashahart/vex | vex/main.py | _main | python | def _main(environ, argv):
options = get_options(argv)
if options.version:
return handle_version()
vexrc = get_vexrc(options, environ)
# Handle --shell-config as soon as its arguments are available.
if options.shell_to_configure:
return handle_shell_config(options.shell_to_configure, vexrc, environ)
if options.list is not None:
return handle_list(vexrc.get_ve_base(environ), options.list)
# Do as much as possible before a possible make, so errors can raise
# without leaving behind an unused virtualenv.
# get_virtualenv_name is destructive and must happen before get_command
cwd = get_cwd(options)
ve_base = vexrc.get_ve_base(environ)
ve_name = get_virtualenv_name(options)
command = get_command(options, vexrc, environ)
# Either we create ve_path, get it from options.path or find it
# in ve_base.
if options.make:
if options.path:
make_path = os.path.abspath(options.path)
else:
make_path = os.path.abspath(os.path.join(ve_base, ve_name))
handle_make(environ, options, make_path)
ve_path = make_path
elif options.path:
ve_path = os.path.abspath(options.path)
if not os.path.exists(ve_path) or not os.path.isdir(ve_path):
raise exceptions.InvalidVirtualenv(
"argument for --path is not a directory")
else:
try:
ve_path = get_virtualenv_path(ve_base, ve_name)
except exceptions.NoVirtualenvName:
options.print_help()
raise
# get_environ has to wait until ve_path is defined, which might
# be after a make; of course we can't run until we have env.
env = get_environ(environ, vexrc['env'], ve_path)
returncode = run(command, env=env, cwd=cwd)
if options.remove:
handle_remove(ve_path)
if returncode is None:
raise exceptions.InvalidCommand(
"command not found: {0!r}".format(command[0]))
return returncode | Logic for main(), with less direct system interaction.
Routines called here raise InvalidArgument with messages that
should be delivered on stderr, to be caught by main. | train | https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/main.py#L130-L182 | [
"def run(command, env, cwd):\n \"\"\"Run the given command.\n \"\"\"\n assert command\n if cwd:\n assert os.path.exists(cwd)\n if platform.system() == \"Windows\":\n exe = distutils.spawn.find_executable(command[0], path=env['PATH'])\n if exe:\n command[0] = exe\n _, command_name = os.path.split(command[0])\n if (command_name in ('bash', 'zsh')\n and 'VIRTUALENVWRAPPER_PYTHON' not in env):\n env['VIRTUALENVWRAPPER_PYTHON'] = ':'\n try:\n process = subprocess.Popen(command, env=env, cwd=cwd)\n process.wait()\n except exceptions.CommandNotFoundError as error:\n if error.errno != 2:\n raise\n return None\n return process.returncode\n",
"def get_options(argv):\n \"\"\"Called to parse the given list as command-line arguments.\n\n :returns:\n an options object as returned by argparse.\n \"\"\"\n arg_parser = make_arg_parser()\n options, unknown = arg_parser.parse_known_args(argv)\n if unknown:\n arg_parser.print_help()\n raise exceptions.UnknownArguments(\n \"unknown args: {0!r}\".format(unknown))\n options.print_help = arg_parser.print_help\n return options\n",
"def get_environ(environ, defaults, ve_path):\n \"\"\"Make an environment to run with.\n \"\"\"\n # Copy the parent environment, add in defaults from .vexrc.\n env = environ.copy()\n env.update(defaults)\n\n # Leaving in existing PYTHONHOME can cause some errors\n if 'PYTHONHOME' in env:\n del env['PYTHONHOME']\n\n # Now we have to adjust PATH to find scripts for the virtualenv...\n # PATH being unset/empty is OK, but ve_path must be set\n # or there is nothing for us to do here and it's bad.\n if not ve_path:\n raise exceptions.BadConfig('ve_path must be set')\n if platform.system() == 'Windows':\n ve_bin = os.path.join(ve_path, 'Scripts')\n else:\n ve_bin = os.path.join(ve_path, 'bin')\n\n # If user is currently in a virtualenv, DON'T just prepend\n # to its path (vex foo; echo $PATH -> \" /foo/bin:/bar/bin\")\n # but don't incur this cost unless we're already in one.\n # activate handles this by running 'deactivate' first, we don't\n # have that so we have to use other ways.\n # This would not be necessary and things would be simpler if vex\n # did not have to interoperate with a ubiquitous existing tool.\n # virtualenv doesn't...\n current_ve = env.get('VIRTUAL_ENV', '')\n system_path = environ.get('PATH', '')\n segments = system_path.split(os.pathsep)\n if current_ve:\n # Since activate doesn't export _OLD_VIRTUAL_PATH, we are going to\n # manually remove the virtualenv's bin.\n # A virtualenv's bin should not normally be on PATH except\n # via activate or similar, so I'm OK with this solution.\n current_ve_bin = os.path.join(current_ve, 'bin')\n try:\n segments.remove(current_ve_bin)\n except ValueError:\n raise exceptions.BadConfig(\n \"something set VIRTUAL_ENV prior to this vex execution, \"\n \"implying that a virtualenv is already activated \"\n \"and PATH should contain the virtualenv's bin directory. \"\n \"Unfortunately, it doesn't: it's {0!r}. \"\n \"You might want to check that PATH is not \"\n \"getting clobbered somewhere, e.g. 
in your shell's configs.\"\n .format(system_path)\n )\n\n segments.insert(0, ve_bin)\n env['PATH'] = os.pathsep.join(segments)\n env['VIRTUAL_ENV'] = ve_path\n return env\n",
"def handle_shell_config(shell, vexrc, environ):\n \"\"\"Carry out the logic of the --shell-config option.\n \"\"\"\n from vex import shell_config\n data = shell_config.shell_config_for(shell, vexrc, environ)\n if not data:\n raise exceptions.OtherShell(\"unknown shell: {0!r}\".format(shell))\n if hasattr(sys.stdout, 'buffer'):\n sys.stdout.buffer.write(data)\n else:\n sys.stdout.write(data)\n return 0\n",
"def handle_make(environ, options, make_path):\n if os.path.exists(make_path):\n # Can't ignore existing virtualenv happily because existing one\n # might have different parameters and --make implies nonexistent\n raise exceptions.VirtualenvAlreadyMade(\n \"virtualenv already exists: {0!r}\".format(make_path)\n )\n ve_base = os.path.dirname(make_path)\n if not os.path.exists(ve_base):\n os.mkdir(ve_base)\n elif not os.path.isdir(ve_base):\n raise exceptions.VirtualenvNotMade(\n \"could not make virtualenv: \"\n \"{0!r} already exists but is not a directory. \"\n \"Choose a different virtualenvs path using ~/.vexrc \"\n \"or $WORKON_HOME, or remove the existing file; \"\n \"then rerun your vex --make command.\".format(ve_base)\n )\n # TODO: virtualenv is usually not on PATH for Windows,\n # but finding it is a terrible issue.\n if os.name == 'nt' and not os.environ.get('VIRTUAL_ENV', ''):\n ve = os.path.join(\n os.path.dirname(sys.executable),\n 'Scripts',\n 'virtualenv'\n )\n else:\n ve = 'virtualenv'\n args = [ve, make_path]\n if options.python:\n if os.name == 'nt':\n python = distutils.spawn.find_executable(options.python)\n if python:\n options.python = python\n args += ['--python', options.python]\n if options.site_packages:\n args += ['--system-site-packages']\n if options.always_copy:\n args+= ['--always-copy']\n returncode = run(args, env=environ, cwd=ve_base)\n if returncode != 0:\n raise exceptions.VirtualenvNotMade(\"error creating virtualenv\")\n if os.name != 'nt':\n pydoc_path = os.path.join(make_path, 'bin', 'pydoc')\n with open(pydoc_path, 'wb') as out:\n out.write(PYDOC_SCRIPT)\n perms = os.stat(pydoc_path).st_mode\n os.chmod(pydoc_path, perms | 0o0111)\n else:\n pydoc_path = os.path.join(make_path, 'Scripts', 'pydoc.bat')\n with open(pydoc_path, 'wb') as out:\n out.write(PYDOC_BATCH)\n",
"def handle_remove(ve_path):\n if not os.path.exists(ve_path):\n return\n if hasattr(os, \"geteuid\"):\n if os.geteuid() == 0 or os.environ.get('USER', '') == 'root':\n raise exceptions.VirtualenvNotRemoved(\n \"not removing any directory as root user\")\n if ve_path in (\"/\", \"\\\\\"):\n raise exceptions.VirtualenvNotRemoved(\n \"not removing possible root directory {0!r}\".format(ve_path))\n if ve_path == os.path.expanduser(\"~\"):\n raise exceptions.VirtualenvNotRemoved(\n \"not removing possible home directory {0!r}\".format(ve_path))\n # last-minute checks\n if obviously_not_a_virtualenv(ve_path):\n raise exceptions.VirtualenvNotRemoved(\n \"path {0!r} did not look like a virtualenv\".format(ve_path))\n print(\"Removing {0!r}\".format(ve_path))\n shutil.rmtree(ve_path)\n",
"def get_vexrc(options, environ):\n \"\"\"Get a representation of the contents of the config file.\n\n :returns:\n a Vexrc instance.\n \"\"\"\n # Complain if user specified nonexistent file with --config.\n # But we don't want to complain just because ~/.vexrc doesn't exist.\n if options.config and not os.path.exists(options.config):\n raise exceptions.InvalidVexrc(\"nonexistent config: {0!r}\".format(options.config))\n filename = options.config or os.path.expanduser('~/.vexrc')\n vexrc = config.Vexrc.from_file(filename, environ)\n return vexrc\n",
"def get_cwd(options):\n \"\"\"Discover what directory the command should run in.\n \"\"\"\n if not options.cwd:\n return None\n if not os.path.exists(options.cwd):\n raise exceptions.InvalidCwd(\n \"can't --cwd to invalid path {0!r}\".format(options.cwd))\n return options.cwd\n",
"def get_virtualenv_name(options):\n if options.path:\n return os.path.dirname(options.path)\n else:\n ve_name = options.rest.pop(0) if options.rest else ''\n if not ve_name:\n raise exceptions.NoVirtualenvName(\n \"could not find a virtualenv name in the command line.\"\n )\n return ve_name\n",
"def get_virtualenv_path(ve_base, ve_name):\n \"\"\"Check a virtualenv path, raising exceptions to explain problems.\n \"\"\"\n if not ve_base:\n raise exceptions.NoVirtualenvsDirectory(\n \"could not figure out a virtualenvs directory. \"\n \"make sure $HOME is set, or $WORKON_HOME,\"\n \" or set virtualenvs=something in your .vexrc\")\n\n # Using this requires get_ve_base to pass through nonexistent dirs\n if not os.path.exists(ve_base):\n message = (\n \"virtualenvs directory {0!r} not found. \"\n \"Create it or use vex --make to get started.\"\n ).format(ve_base)\n raise exceptions.NoVirtualenvsDirectory(message)\n\n if not ve_name:\n raise exceptions.InvalidVirtualenv(\"no virtualenv name\")\n\n # n.b.: if ve_name is absolute, ve_base is discarded by os.path.join,\n # and an absolute path will be accepted as first arg.\n # So we check if they gave an absolute path as ve_name.\n # But we don't want this error if $PWD == $WORKON_HOME,\n # in which case 'foo' is a valid relative path to virtualenv foo.\n ve_path = os.path.join(ve_base, ve_name)\n if ve_path == ve_name and os.path.basename(ve_name) != ve_name:\n raise exceptions.InvalidVirtualenv(\n 'To run in a virtualenv by its path, '\n 'use \"vex --path {0}\"'.format(ve_path))\n\n ve_path = os.path.abspath(ve_path)\n if not os.path.exists(ve_path):\n raise exceptions.InvalidVirtualenv(\n \"no virtualenv found at {0!r}.\".format(ve_path))\n return ve_path\n",
"def get_command(options, vexrc, environ):\n \"\"\"Get a command to run.\n\n :returns:\n a list of strings representing a command to be passed to Popen.\n \"\"\"\n command = options.rest\n if not command:\n command = vexrc.get_shell(environ)\n if command and command[0].startswith('--'):\n raise exceptions.InvalidCommand(\n \"don't put flags like '%s' after the virtualenv name.\"\n % command[0])\n if not command:\n raise exceptions.InvalidCommand(\"no command given\")\n return command\n",
"def handle_version():\n sys.stdout.write(VERSION + \"\\n\")\n return 0\n",
"def handle_list(ve_base, prefix=\"\"):\n if not os.path.isdir(ve_base):\n sys.stderr.write(\"no virtualenvs directory at {0!r}\\n\".format(ve_base))\n return 1\n text = \"\\n\".join(\n sorted(\n relative_path for relative_path in os.listdir(ve_base)\n if (not relative_path.startswith(\"-\"))\n and relative_path.startswith(prefix)\n and os.path.isdir(os.path.join(ve_base, relative_path))\n )\n )\n sys.stdout.write(text + \"\\n\")\n return 0\n"
] | """Main command-line entry-point and any code tightly coupled to it.
"""
import sys
import os
from vex import config
from vex.options import get_options
from vex.run import get_environ, run
from vex.shell_config import handle_shell_config
from vex.make import handle_make
from vex.remove import handle_remove
from vex import exceptions
from vex._version import VERSION
def get_vexrc(options, environ):
"""Get a representation of the contents of the config file.
:returns:
a Vexrc instance.
"""
# Complain if user specified nonexistent file with --config.
# But we don't want to complain just because ~/.vexrc doesn't exist.
if options.config and not os.path.exists(options.config):
raise exceptions.InvalidVexrc("nonexistent config: {0!r}".format(options.config))
filename = options.config or os.path.expanduser('~/.vexrc')
vexrc = config.Vexrc.from_file(filename, environ)
return vexrc
def get_cwd(options):
"""Discover what directory the command should run in.
"""
if not options.cwd:
return None
if not os.path.exists(options.cwd):
raise exceptions.InvalidCwd(
"can't --cwd to invalid path {0!r}".format(options.cwd))
return options.cwd
def get_virtualenv_name(options):
if options.path:
return os.path.dirname(options.path)
else:
ve_name = options.rest.pop(0) if options.rest else ''
if not ve_name:
raise exceptions.NoVirtualenvName(
"could not find a virtualenv name in the command line."
)
return ve_name
def get_virtualenv_path(ve_base, ve_name):
"""Check a virtualenv path, raising exceptions to explain problems.
"""
if not ve_base:
raise exceptions.NoVirtualenvsDirectory(
"could not figure out a virtualenvs directory. "
"make sure $HOME is set, or $WORKON_HOME,"
" or set virtualenvs=something in your .vexrc")
# Using this requires get_ve_base to pass through nonexistent dirs
if not os.path.exists(ve_base):
message = (
"virtualenvs directory {0!r} not found. "
"Create it or use vex --make to get started."
).format(ve_base)
raise exceptions.NoVirtualenvsDirectory(message)
if not ve_name:
raise exceptions.InvalidVirtualenv("no virtualenv name")
# n.b.: if ve_name is absolute, ve_base is discarded by os.path.join,
# and an absolute path will be accepted as first arg.
# So we check if they gave an absolute path as ve_name.
# But we don't want this error if $PWD == $WORKON_HOME,
# in which case 'foo' is a valid relative path to virtualenv foo.
ve_path = os.path.join(ve_base, ve_name)
if ve_path == ve_name and os.path.basename(ve_name) != ve_name:
raise exceptions.InvalidVirtualenv(
'To run in a virtualenv by its path, '
'use "vex --path {0}"'.format(ve_path))
ve_path = os.path.abspath(ve_path)
if not os.path.exists(ve_path):
raise exceptions.InvalidVirtualenv(
"no virtualenv found at {0!r}.".format(ve_path))
return ve_path
def get_command(options, vexrc, environ):
"""Get a command to run.
:returns:
a list of strings representing a command to be passed to Popen.
"""
command = options.rest
if not command:
command = vexrc.get_shell(environ)
if command and command[0].startswith('--'):
raise exceptions.InvalidCommand(
"don't put flags like '%s' after the virtualenv name."
% command[0])
if not command:
raise exceptions.InvalidCommand("no command given")
return command
def handle_version():
sys.stdout.write(VERSION + "\n")
return 0
def handle_list(ve_base, prefix=""):
if not os.path.isdir(ve_base):
sys.stderr.write("no virtualenvs directory at {0!r}\n".format(ve_base))
return 1
text = "\n".join(
sorted(
relative_path for relative_path in os.listdir(ve_base)
if (not relative_path.startswith("-"))
and relative_path.startswith(prefix)
and os.path.isdir(os.path.join(ve_base, relative_path))
)
)
sys.stdout.write(text + "\n")
return 0
def main():
"""The main command-line entry point, with system interactions.
"""
argv = sys.argv[1:]
returncode = 1
try:
returncode = _main(os.environ, argv)
except exceptions.InvalidArgument as error:
if error.message:
sys.stderr.write("Error: " + error.message + '\n')
else:
raise
sys.exit(returncode)
|
sashahart/vex | vex/main.py | main | python | def main():
argv = sys.argv[1:]
returncode = 1
try:
returncode = _main(os.environ, argv)
except exceptions.InvalidArgument as error:
if error.message:
sys.stderr.write("Error: " + error.message + '\n')
else:
raise
sys.exit(returncode) | The main command-line entry point, with system interactions. | train | https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/main.py#L185-L197 | [
"def _main(environ, argv):\n \"\"\"Logic for main(), with less direct system interaction.\n\n Routines called here raise InvalidArgument with messages that\n should be delivered on stderr, to be caught by main.\n \"\"\"\n options = get_options(argv)\n if options.version:\n return handle_version()\n vexrc = get_vexrc(options, environ)\n # Handle --shell-config as soon as its arguments are available.\n if options.shell_to_configure:\n return handle_shell_config(options.shell_to_configure, vexrc, environ)\n if options.list is not None:\n return handle_list(vexrc.get_ve_base(environ), options.list)\n\n # Do as much as possible before a possible make, so errors can raise\n # without leaving behind an unused virtualenv.\n # get_virtualenv_name is destructive and must happen before get_command\n cwd = get_cwd(options)\n ve_base = vexrc.get_ve_base(environ)\n ve_name = get_virtualenv_name(options)\n command = get_command(options, vexrc, environ)\n # Either we create ve_path, get it from options.path or find it\n # in ve_base.\n if options.make:\n if options.path:\n make_path = os.path.abspath(options.path)\n else:\n make_path = os.path.abspath(os.path.join(ve_base, ve_name))\n handle_make(environ, options, make_path)\n ve_path = make_path\n elif options.path:\n ve_path = os.path.abspath(options.path)\n if not os.path.exists(ve_path) or not os.path.isdir(ve_path):\n raise exceptions.InvalidVirtualenv(\n \"argument for --path is not a directory\")\n else:\n try:\n ve_path = get_virtualenv_path(ve_base, ve_name)\n except exceptions.NoVirtualenvName:\n options.print_help()\n raise\n # get_environ has to wait until ve_path is defined, which might\n # be after a make; of course we can't run until we have env.\n env = get_environ(environ, vexrc['env'], ve_path)\n returncode = run(command, env=env, cwd=cwd)\n if options.remove:\n handle_remove(ve_path)\n if returncode is None:\n raise exceptions.InvalidCommand(\n \"command not found: {0!r}\".format(command[0]))\n return 
returncode\n"
] | """Main command-line entry-point and any code tightly coupled to it.
"""
import sys
import os
from vex import config
from vex.options import get_options
from vex.run import get_environ, run
from vex.shell_config import handle_shell_config
from vex.make import handle_make
from vex.remove import handle_remove
from vex import exceptions
from vex._version import VERSION
def get_vexrc(options, environ):
"""Get a representation of the contents of the config file.
:returns:
a Vexrc instance.
"""
# Complain if user specified nonexistent file with --config.
# But we don't want to complain just because ~/.vexrc doesn't exist.
if options.config and not os.path.exists(options.config):
raise exceptions.InvalidVexrc("nonexistent config: {0!r}".format(options.config))
filename = options.config or os.path.expanduser('~/.vexrc')
vexrc = config.Vexrc.from_file(filename, environ)
return vexrc
def get_cwd(options):
"""Discover what directory the command should run in.
"""
if not options.cwd:
return None
if not os.path.exists(options.cwd):
raise exceptions.InvalidCwd(
"can't --cwd to invalid path {0!r}".format(options.cwd))
return options.cwd
def get_virtualenv_name(options):
if options.path:
return os.path.dirname(options.path)
else:
ve_name = options.rest.pop(0) if options.rest else ''
if not ve_name:
raise exceptions.NoVirtualenvName(
"could not find a virtualenv name in the command line."
)
return ve_name
def get_virtualenv_path(ve_base, ve_name):
"""Check a virtualenv path, raising exceptions to explain problems.
"""
if not ve_base:
raise exceptions.NoVirtualenvsDirectory(
"could not figure out a virtualenvs directory. "
"make sure $HOME is set, or $WORKON_HOME,"
" or set virtualenvs=something in your .vexrc")
# Using this requires get_ve_base to pass through nonexistent dirs
if not os.path.exists(ve_base):
message = (
"virtualenvs directory {0!r} not found. "
"Create it or use vex --make to get started."
).format(ve_base)
raise exceptions.NoVirtualenvsDirectory(message)
if not ve_name:
raise exceptions.InvalidVirtualenv("no virtualenv name")
# n.b.: if ve_name is absolute, ve_base is discarded by os.path.join,
# and an absolute path will be accepted as first arg.
# So we check if they gave an absolute path as ve_name.
# But we don't want this error if $PWD == $WORKON_HOME,
# in which case 'foo' is a valid relative path to virtualenv foo.
ve_path = os.path.join(ve_base, ve_name)
if ve_path == ve_name and os.path.basename(ve_name) != ve_name:
raise exceptions.InvalidVirtualenv(
'To run in a virtualenv by its path, '
'use "vex --path {0}"'.format(ve_path))
ve_path = os.path.abspath(ve_path)
if not os.path.exists(ve_path):
raise exceptions.InvalidVirtualenv(
"no virtualenv found at {0!r}.".format(ve_path))
return ve_path
def get_command(options, vexrc, environ):
"""Get a command to run.
:returns:
a list of strings representing a command to be passed to Popen.
"""
command = options.rest
if not command:
command = vexrc.get_shell(environ)
if command and command[0].startswith('--'):
raise exceptions.InvalidCommand(
"don't put flags like '%s' after the virtualenv name."
% command[0])
if not command:
raise exceptions.InvalidCommand("no command given")
return command
def handle_version():
sys.stdout.write(VERSION + "\n")
return 0
def handle_list(ve_base, prefix=""):
if not os.path.isdir(ve_base):
sys.stderr.write("no virtualenvs directory at {0!r}\n".format(ve_base))
return 1
text = "\n".join(
sorted(
relative_path for relative_path in os.listdir(ve_base)
if (not relative_path.startswith("-"))
and relative_path.startswith(prefix)
and os.path.isdir(os.path.join(ve_base, relative_path))
)
)
sys.stdout.write(text + "\n")
return 0
def _main(environ, argv):
"""Logic for main(), with less direct system interaction.
Routines called here raise InvalidArgument with messages that
should be delivered on stderr, to be caught by main.
"""
options = get_options(argv)
if options.version:
return handle_version()
vexrc = get_vexrc(options, environ)
# Handle --shell-config as soon as its arguments are available.
if options.shell_to_configure:
return handle_shell_config(options.shell_to_configure, vexrc, environ)
if options.list is not None:
return handle_list(vexrc.get_ve_base(environ), options.list)
# Do as much as possible before a possible make, so errors can raise
# without leaving behind an unused virtualenv.
# get_virtualenv_name is destructive and must happen before get_command
cwd = get_cwd(options)
ve_base = vexrc.get_ve_base(environ)
ve_name = get_virtualenv_name(options)
command = get_command(options, vexrc, environ)
# Either we create ve_path, get it from options.path or find it
# in ve_base.
if options.make:
if options.path:
make_path = os.path.abspath(options.path)
else:
make_path = os.path.abspath(os.path.join(ve_base, ve_name))
handle_make(environ, options, make_path)
ve_path = make_path
elif options.path:
ve_path = os.path.abspath(options.path)
if not os.path.exists(ve_path) or not os.path.isdir(ve_path):
raise exceptions.InvalidVirtualenv(
"argument for --path is not a directory")
else:
try:
ve_path = get_virtualenv_path(ve_base, ve_name)
except exceptions.NoVirtualenvName:
options.print_help()
raise
# get_environ has to wait until ve_path is defined, which might
# be after a make; of course we can't run until we have env.
env = get_environ(environ, vexrc['env'], ve_path)
returncode = run(command, env=env, cwd=cwd)
if options.remove:
handle_remove(ve_path)
if returncode is None:
raise exceptions.InvalidCommand(
"command not found: {0!r}".format(command[0]))
return returncode
|
sashahart/vex | vex/shell_config.py | scary_path | python | def scary_path(path):
if not path:
return True
assert isinstance(path, bytes)
return not NOT_SCARY.match(path) | Whitelist the WORKON_HOME strings we're willing to substitute in
to strings that we provide for user's shell to evaluate.
If it smells at all bad, return True. | train | https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/shell_config.py#L22-L31 | null | """
This is not needed to use vex.
It just lets us provide a convenient mechanism for people
with popular shells to set up autocompletion.
"""
import os
import sys
import re
from vex import exceptions
try:
FileNotFoundError
except NameError:
FileNotFoundError = IOError
# (OSError, IOError)
NOT_SCARY = re.compile(br'[~]?(?:[/]+[\w _,.][\w _\-,.]+)*\Z')
def shell_config_for(shell, vexrc, environ):
"""return completion config for the named shell.
"""
here = os.path.dirname(os.path.abspath(__file__))
path = os.path.join(here, 'shell_configs', shell)
try:
with open(path, 'rb') as inp:
data = inp.read()
except FileNotFoundError as error:
if error.errno != 2:
raise
return b''
ve_base = vexrc.get_ve_base(environ).encode('ascii')
if ve_base and not scary_path(ve_base) and os.path.exists(ve_base):
data = data.replace(b'$WORKON_HOME', ve_base)
return data
def handle_shell_config(shell, vexrc, environ):
"""Carry out the logic of the --shell-config option.
"""
from vex import shell_config
data = shell_config.shell_config_for(shell, vexrc, environ)
if not data:
raise exceptions.OtherShell("unknown shell: {0!r}".format(shell))
if hasattr(sys.stdout, 'buffer'):
sys.stdout.buffer.write(data)
else:
sys.stdout.write(data)
return 0
|
sashahart/vex | vex/shell_config.py | shell_config_for | python | def shell_config_for(shell, vexrc, environ):
here = os.path.dirname(os.path.abspath(__file__))
path = os.path.join(here, 'shell_configs', shell)
try:
with open(path, 'rb') as inp:
data = inp.read()
except FileNotFoundError as error:
if error.errno != 2:
raise
return b''
ve_base = vexrc.get_ve_base(environ).encode('ascii')
if ve_base and not scary_path(ve_base) and os.path.exists(ve_base):
data = data.replace(b'$WORKON_HOME', ve_base)
return data | return completion config for the named shell. | train | https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/shell_config.py#L34-L49 | [
"def scary_path(path):\n \"\"\"Whitelist the WORKON_HOME strings we're willing to substitute in\n to strings that we provide for user's shell to evaluate.\n\n If it smells at all bad, return True.\n \"\"\"\n if not path:\n return True\n assert isinstance(path, bytes)\n return not NOT_SCARY.match(path)\n",
"def get_ve_base(self, environ):\n \"\"\"Find a directory to look for virtualenvs in.\n \"\"\"\n # set ve_base to a path we can look for virtualenvs:\n # 1. .vexrc\n # 2. WORKON_HOME (as defined for virtualenvwrapper's benefit)\n # 3. $HOME/.virtualenvs\n # (unless we got --path, then we don't need it)\n ve_base_value = self.headings[self.default_heading].get('virtualenvs')\n if ve_base_value:\n ve_base = os.path.expanduser(ve_base_value)\n else:\n ve_base = environ.get('WORKON_HOME', '')\n if not ve_base:\n # On Cygwin os.name == 'posix' and we want $HOME.\n if platform.system() == 'Windows' and os.name == 'nt':\n _win_drive = environ.get('HOMEDRIVE')\n home = environ.get('HOMEPATH', '')\n if home:\n home = os.path.join(_win_drive, home)\n else:\n home = environ.get('HOME', '')\n if not home:\n home = os.path.expanduser('~')\n if not home:\n return ''\n ve_base = os.path.join(home, '.virtualenvs')\n # pass through invalid paths so messages can be generated\n # if not os.path.exists(ve_base) or os.path.isfile(ve_base):\n # return ''\n return ve_base or ''\n"
] | """
This is not needed to use vex.
It just lets us provide a convenient mechanism for people
with popular shells to set up autocompletion.
"""
import os
import sys
import re
from vex import exceptions
try:
FileNotFoundError
except NameError:
FileNotFoundError = IOError
# (OSError, IOError)
NOT_SCARY = re.compile(br'[~]?(?:[/]+[\w _,.][\w _\-,.]+)*\Z')
def scary_path(path):
"""Whitelist the WORKON_HOME strings we're willing to substitute in
to strings that we provide for user's shell to evaluate.
If it smells at all bad, return True.
"""
if not path:
return True
assert isinstance(path, bytes)
return not NOT_SCARY.match(path)
def handle_shell_config(shell, vexrc, environ):
"""Carry out the logic of the --shell-config option.
"""
from vex import shell_config
data = shell_config.shell_config_for(shell, vexrc, environ)
if not data:
raise exceptions.OtherShell("unknown shell: {0!r}".format(shell))
if hasattr(sys.stdout, 'buffer'):
sys.stdout.buffer.write(data)
else:
sys.stdout.write(data)
return 0
|
sashahart/vex | vex/shell_config.py | handle_shell_config | python | def handle_shell_config(shell, vexrc, environ):
from vex import shell_config
data = shell_config.shell_config_for(shell, vexrc, environ)
if not data:
raise exceptions.OtherShell("unknown shell: {0!r}".format(shell))
if hasattr(sys.stdout, 'buffer'):
sys.stdout.buffer.write(data)
else:
sys.stdout.write(data)
return 0 | Carry out the logic of the --shell-config option. | train | https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/shell_config.py#L52-L63 | [
"def shell_config_for(shell, vexrc, environ):\n \"\"\"return completion config for the named shell.\n \"\"\"\n here = os.path.dirname(os.path.abspath(__file__))\n path = os.path.join(here, 'shell_configs', shell)\n try:\n with open(path, 'rb') as inp:\n data = inp.read()\n except FileNotFoundError as error:\n if error.errno != 2:\n raise\n return b''\n ve_base = vexrc.get_ve_base(environ).encode('ascii')\n if ve_base and not scary_path(ve_base) and os.path.exists(ve_base):\n data = data.replace(b'$WORKON_HOME', ve_base)\n return data\n"
] | """
This is not needed to use vex.
It just lets us provide a convenient mechanism for people
with popular shells to set up autocompletion.
"""
import os
import sys
import re
from vex import exceptions
try:
FileNotFoundError
except NameError:
FileNotFoundError = IOError
# (OSError, IOError)
NOT_SCARY = re.compile(br'[~]?(?:[/]+[\w _,.][\w _\-,.]+)*\Z')
def scary_path(path):
"""Whitelist the WORKON_HOME strings we're willing to substitute in
to strings that we provide for user's shell to evaluate.
If it smells at all bad, return True.
"""
if not path:
return True
assert isinstance(path, bytes)
return not NOT_SCARY.match(path)
def shell_config_for(shell, vexrc, environ):
"""return completion config for the named shell.
"""
here = os.path.dirname(os.path.abspath(__file__))
path = os.path.join(here, 'shell_configs', shell)
try:
with open(path, 'rb') as inp:
data = inp.read()
except FileNotFoundError as error:
if error.errno != 2:
raise
return b''
ve_base = vexrc.get_ve_base(environ).encode('ascii')
if ve_base and not scary_path(ve_base) and os.path.exists(ve_base):
data = data.replace(b'$WORKON_HOME', ve_base)
return data
|
sashahart/vex | vex/run.py | get_environ | python | def get_environ(environ, defaults, ve_path):
# Copy the parent environment, add in defaults from .vexrc.
env = environ.copy()
env.update(defaults)
# Leaving in existing PYTHONHOME can cause some errors
if 'PYTHONHOME' in env:
del env['PYTHONHOME']
# Now we have to adjust PATH to find scripts for the virtualenv...
# PATH being unset/empty is OK, but ve_path must be set
# or there is nothing for us to do here and it's bad.
if not ve_path:
raise exceptions.BadConfig('ve_path must be set')
if platform.system() == 'Windows':
ve_bin = os.path.join(ve_path, 'Scripts')
else:
ve_bin = os.path.join(ve_path, 'bin')
# If user is currently in a virtualenv, DON'T just prepend
# to its path (vex foo; echo $PATH -> " /foo/bin:/bar/bin")
# but don't incur this cost unless we're already in one.
# activate handles this by running 'deactivate' first, we don't
# have that so we have to use other ways.
# This would not be necessary and things would be simpler if vex
# did not have to interoperate with a ubiquitous existing tool.
# virtualenv doesn't...
current_ve = env.get('VIRTUAL_ENV', '')
system_path = environ.get('PATH', '')
segments = system_path.split(os.pathsep)
if current_ve:
# Since activate doesn't export _OLD_VIRTUAL_PATH, we are going to
# manually remove the virtualenv's bin.
# A virtualenv's bin should not normally be on PATH except
# via activate or similar, so I'm OK with this solution.
current_ve_bin = os.path.join(current_ve, 'bin')
try:
segments.remove(current_ve_bin)
except ValueError:
raise exceptions.BadConfig(
"something set VIRTUAL_ENV prior to this vex execution, "
"implying that a virtualenv is already activated "
"and PATH should contain the virtualenv's bin directory. "
"Unfortunately, it doesn't: it's {0!r}. "
"You might want to check that PATH is not "
"getting clobbered somewhere, e.g. in your shell's configs."
.format(system_path)
)
segments.insert(0, ve_bin)
env['PATH'] = os.pathsep.join(segments)
env['VIRTUAL_ENV'] = ve_path
return env | Make an environment to run with. | train | https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/run.py#L10-L64 | null | """Run subprocess.
"""
import os
import platform
import subprocess
import distutils.spawn
from vex import exceptions
def run(command, env, cwd):
"""Run the given command.
"""
assert command
if cwd:
assert os.path.exists(cwd)
if platform.system() == "Windows":
exe = distutils.spawn.find_executable(command[0], path=env['PATH'])
if exe:
command[0] = exe
_, command_name = os.path.split(command[0])
if (command_name in ('bash', 'zsh')
and 'VIRTUALENVWRAPPER_PYTHON' not in env):
env['VIRTUALENVWRAPPER_PYTHON'] = ':'
try:
process = subprocess.Popen(command, env=env, cwd=cwd)
process.wait()
except exceptions.CommandNotFoundError as error:
if error.errno != 2:
raise
return None
return process.returncode
|
sashahart/vex | vex/run.py | run | python | def run(command, env, cwd):
assert command
if cwd:
assert os.path.exists(cwd)
if platform.system() == "Windows":
exe = distutils.spawn.find_executable(command[0], path=env['PATH'])
if exe:
command[0] = exe
_, command_name = os.path.split(command[0])
if (command_name in ('bash', 'zsh')
and 'VIRTUALENVWRAPPER_PYTHON' not in env):
env['VIRTUALENVWRAPPER_PYTHON'] = ':'
try:
process = subprocess.Popen(command, env=env, cwd=cwd)
process.wait()
except exceptions.CommandNotFoundError as error:
if error.errno != 2:
raise
return None
return process.returncode | Run the given command. | train | https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/run.py#L67-L88 | null | """Run subprocess.
"""
import os
import platform
import subprocess
import distutils.spawn
from vex import exceptions
def get_environ(environ, defaults, ve_path):
"""Make an environment to run with.
"""
# Copy the parent environment, add in defaults from .vexrc.
env = environ.copy()
env.update(defaults)
# Leaving in existing PYTHONHOME can cause some errors
if 'PYTHONHOME' in env:
del env['PYTHONHOME']
# Now we have to adjust PATH to find scripts for the virtualenv...
# PATH being unset/empty is OK, but ve_path must be set
# or there is nothing for us to do here and it's bad.
if not ve_path:
raise exceptions.BadConfig('ve_path must be set')
if platform.system() == 'Windows':
ve_bin = os.path.join(ve_path, 'Scripts')
else:
ve_bin = os.path.join(ve_path, 'bin')
# If user is currently in a virtualenv, DON'T just prepend
# to its path (vex foo; echo $PATH -> " /foo/bin:/bar/bin")
# but don't incur this cost unless we're already in one.
# activate handles this by running 'deactivate' first, we don't
# have that so we have to use other ways.
# This would not be necessary and things would be simpler if vex
# did not have to interoperate with a ubiquitous existing tool.
# virtualenv doesn't...
current_ve = env.get('VIRTUAL_ENV', '')
system_path = environ.get('PATH', '')
segments = system_path.split(os.pathsep)
if current_ve:
# Since activate doesn't export _OLD_VIRTUAL_PATH, we are going to
# manually remove the virtualenv's bin.
# A virtualenv's bin should not normally be on PATH except
# via activate or similar, so I'm OK with this solution.
current_ve_bin = os.path.join(current_ve, 'bin')
try:
segments.remove(current_ve_bin)
except ValueError:
raise exceptions.BadConfig(
"something set VIRTUAL_ENV prior to this vex execution, "
"implying that a virtualenv is already activated "
"and PATH should contain the virtualenv's bin directory. "
"Unfortunately, it doesn't: it's {0!r}. "
"You might want to check that PATH is not "
"getting clobbered somewhere, e.g. in your shell's configs."
.format(system_path)
)
segments.insert(0, ve_bin)
env['PATH'] = os.pathsep.join(segments)
env['VIRTUAL_ENV'] = ve_path
return env
|
sashahart/vex | vex/config.py | extract_key_value | python | def extract_key_value(line, environ):
segments = line.split("=", 1)
if len(segments) < 2:
return None
key, value = segments
# foo passes through as-is (with spaces stripped)
# '{foo}' passes through literally
# "{foo}" substitutes from environ's foo
value = value.strip()
if value[0] == "'" and _SQUOTE_RE.match(value):
value = value[1:-1]
elif value[0] == '"' and _DQUOTE_RE.match(value):
template = value[1:-1]
value = template.format(**environ)
key = key.strip()
value = value.strip()
return key, value | Return key, value from given line if present, else return None. | train | https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/config.py#L129-L147 | null | """Config file processing (.vexrc).
"""
import os
import sys
import re
import shlex
import platform
from collections import OrderedDict
_IDENTIFIER_PATTERN = '[a-zA-Z][_a-zA-Z0-9]*'
_SQUOTE_RE = re.compile(r"'([^']*)'\Z") # NO squotes inside
_DQUOTE_RE = re.compile(r'"([^"]*)"\Z') # NO dquotes inside
_HEADING_RE = re.compile(
r'^(' + _IDENTIFIER_PATTERN + r'):[ \t\n\r]*\Z')
_VAR_RE = re.compile(
r'[ \t]*(' + _IDENTIFIER_PATTERN + r') *= *(.*)[ \t\n\r]*$')
if sys.version_info < (3, 3):
FileNotFoundError = IOError
class InvalidConfigError(Exception):
"""Raised when there is an error during a .vexrc file parse.
"""
def __init__(self, filename, errors):
Exception.__init__(self)
self.filename = filename
self.errors = errors
def __str__(self):
return "errors in {0!r}, lines {1!r}".format(
self.filename,
list(tup[0] for tup in self.errors)
)
class Vexrc(object):
"""Parsed representation of a .vexrc config file.
"""
default_heading = "root"
default_encoding = "utf-8"
def __init__(self):
self.encoding = self.default_encoding
self.headings = OrderedDict()
self.headings[self.default_heading] = OrderedDict()
self.headings['env'] = OrderedDict()
def __getitem__(self, key):
return self.headings.get(key)
@classmethod
def from_file(cls, path, environ):
"""Make a Vexrc instance from given file in given environ.
"""
instance = cls()
instance.read(path, environ)
return instance
def read(self, path, environ):
"""Read data from file into this vexrc instance.
"""
try:
inp = open(path, 'rb')
except FileNotFoundError as error:
if error.errno != 2:
raise
return None
parsing = parse_vexrc(inp, environ)
for heading, key, value in parsing:
heading = self.default_heading if heading is None else heading
if heading not in self.headings:
self.headings[heading] = OrderedDict()
self.headings[heading][key] = value
parsing.close()
def get_ve_base(self, environ):
"""Find a directory to look for virtualenvs in.
"""
# set ve_base to a path we can look for virtualenvs:
# 1. .vexrc
# 2. WORKON_HOME (as defined for virtualenvwrapper's benefit)
# 3. $HOME/.virtualenvs
# (unless we got --path, then we don't need it)
ve_base_value = self.headings[self.default_heading].get('virtualenvs')
if ve_base_value:
ve_base = os.path.expanduser(ve_base_value)
else:
ve_base = environ.get('WORKON_HOME', '')
if not ve_base:
# On Cygwin os.name == 'posix' and we want $HOME.
if platform.system() == 'Windows' and os.name == 'nt':
_win_drive = environ.get('HOMEDRIVE')
home = environ.get('HOMEPATH', '')
if home:
home = os.path.join(_win_drive, home)
else:
home = environ.get('HOME', '')
if not home:
home = os.path.expanduser('~')
if not home:
return ''
ve_base = os.path.join(home, '.virtualenvs')
# pass through invalid paths so messages can be generated
# if not os.path.exists(ve_base) or os.path.isfile(ve_base):
# return ''
return ve_base or ''
def get_shell(self, environ):
"""Find a command to run.
"""
command = self.headings[self.default_heading].get('shell')
if not command and os.name != 'nt':
command = environ.get('SHELL', '')
command = shlex.split(command) if command else None
return command
def extract_heading(line):
"""Return heading in given line or None if it's not a heading.
"""
match = _HEADING_RE.match(line)
if match:
return match.group(1)
return None
def parse_vexrc(inp, environ):
"""Iterator yielding key/value pairs from given stream.
yields tuples of heading, key, value.
"""
heading = None
errors = []
with inp:
for line_number, line in enumerate(inp):
line = line.decode("utf-8")
if not line.strip():
continue
extracted_heading = extract_heading(line)
if extracted_heading is not None:
heading = extracted_heading
continue
kv_tuple = extract_key_value(line, environ)
if kv_tuple is None:
errors.append((line_number, line))
continue
try:
yield heading, kv_tuple[0], kv_tuple[1]
except GeneratorExit:
break
if errors:
raise InvalidConfigError(inp.name, errors)
|
sashahart/vex | vex/config.py | parse_vexrc | python | def parse_vexrc(inp, environ):
heading = None
errors = []
with inp:
for line_number, line in enumerate(inp):
line = line.decode("utf-8")
if not line.strip():
continue
extracted_heading = extract_heading(line)
if extracted_heading is not None:
heading = extracted_heading
continue
kv_tuple = extract_key_value(line, environ)
if kv_tuple is None:
errors.append((line_number, line))
continue
try:
yield heading, kv_tuple[0], kv_tuple[1]
except GeneratorExit:
break
if errors:
raise InvalidConfigError(inp.name, errors) | Iterator yielding key/value pairs from given stream.
yields tuples of heading, key, value. | train | https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/config.py#L150-L175 | [
"def extract_heading(line):\n \"\"\"Return heading in given line or None if it's not a heading.\n \"\"\"\n match = _HEADING_RE.match(line)\n if match:\n return match.group(1)\n return None\n",
"def extract_key_value(line, environ):\n \"\"\"Return key, value from given line if present, else return None.\n \"\"\"\n segments = line.split(\"=\", 1)\n if len(segments) < 2:\n return None\n key, value = segments\n # foo passes through as-is (with spaces stripped)\n # '{foo}' passes through literally\n # \"{foo}\" substitutes from environ's foo\n value = value.strip()\n if value[0] == \"'\" and _SQUOTE_RE.match(value):\n value = value[1:-1]\n elif value[0] == '\"' and _DQUOTE_RE.match(value):\n template = value[1:-1]\n value = template.format(**environ)\n key = key.strip()\n value = value.strip()\n return key, value\n"
] | """Config file processing (.vexrc).
"""
import os
import sys
import re
import shlex
import platform
from collections import OrderedDict
_IDENTIFIER_PATTERN = '[a-zA-Z][_a-zA-Z0-9]*'
_SQUOTE_RE = re.compile(r"'([^']*)'\Z") # NO squotes inside
_DQUOTE_RE = re.compile(r'"([^"]*)"\Z') # NO dquotes inside
_HEADING_RE = re.compile(
r'^(' + _IDENTIFIER_PATTERN + r'):[ \t\n\r]*\Z')
_VAR_RE = re.compile(
r'[ \t]*(' + _IDENTIFIER_PATTERN + r') *= *(.*)[ \t\n\r]*$')
if sys.version_info < (3, 3):
FileNotFoundError = IOError
class InvalidConfigError(Exception):
"""Raised when there is an error during a .vexrc file parse.
"""
def __init__(self, filename, errors):
Exception.__init__(self)
self.filename = filename
self.errors = errors
def __str__(self):
return "errors in {0!r}, lines {1!r}".format(
self.filename,
list(tup[0] for tup in self.errors)
)
class Vexrc(object):
"""Parsed representation of a .vexrc config file.
"""
default_heading = "root"
default_encoding = "utf-8"
def __init__(self):
self.encoding = self.default_encoding
self.headings = OrderedDict()
self.headings[self.default_heading] = OrderedDict()
self.headings['env'] = OrderedDict()
def __getitem__(self, key):
return self.headings.get(key)
@classmethod
def from_file(cls, path, environ):
"""Make a Vexrc instance from given file in given environ.
"""
instance = cls()
instance.read(path, environ)
return instance
def read(self, path, environ):
"""Read data from file into this vexrc instance.
"""
try:
inp = open(path, 'rb')
except FileNotFoundError as error:
if error.errno != 2:
raise
return None
parsing = parse_vexrc(inp, environ)
for heading, key, value in parsing:
heading = self.default_heading if heading is None else heading
if heading not in self.headings:
self.headings[heading] = OrderedDict()
self.headings[heading][key] = value
parsing.close()
def get_ve_base(self, environ):
"""Find a directory to look for virtualenvs in.
"""
# set ve_base to a path we can look for virtualenvs:
# 1. .vexrc
# 2. WORKON_HOME (as defined for virtualenvwrapper's benefit)
# 3. $HOME/.virtualenvs
# (unless we got --path, then we don't need it)
ve_base_value = self.headings[self.default_heading].get('virtualenvs')
if ve_base_value:
ve_base = os.path.expanduser(ve_base_value)
else:
ve_base = environ.get('WORKON_HOME', '')
if not ve_base:
# On Cygwin os.name == 'posix' and we want $HOME.
if platform.system() == 'Windows' and os.name == 'nt':
_win_drive = environ.get('HOMEDRIVE')
home = environ.get('HOMEPATH', '')
if home:
home = os.path.join(_win_drive, home)
else:
home = environ.get('HOME', '')
if not home:
home = os.path.expanduser('~')
if not home:
return ''
ve_base = os.path.join(home, '.virtualenvs')
# pass through invalid paths so messages can be generated
# if not os.path.exists(ve_base) or os.path.isfile(ve_base):
# return ''
return ve_base or ''
def get_shell(self, environ):
"""Find a command to run.
"""
command = self.headings[self.default_heading].get('shell')
if not command and os.name != 'nt':
command = environ.get('SHELL', '')
command = shlex.split(command) if command else None
return command
def extract_heading(line):
"""Return heading in given line or None if it's not a heading.
"""
match = _HEADING_RE.match(line)
if match:
return match.group(1)
return None
def extract_key_value(line, environ):
"""Return key, value from given line if present, else return None.
"""
segments = line.split("=", 1)
if len(segments) < 2:
return None
key, value = segments
# foo passes through as-is (with spaces stripped)
# '{foo}' passes through literally
# "{foo}" substitutes from environ's foo
value = value.strip()
if value[0] == "'" and _SQUOTE_RE.match(value):
value = value[1:-1]
elif value[0] == '"' and _DQUOTE_RE.match(value):
template = value[1:-1]
value = template.format(**environ)
key = key.strip()
value = value.strip()
return key, value
|
sashahart/vex | vex/config.py | Vexrc.from_file | python | def from_file(cls, path, environ):
instance = cls()
instance.read(path, environ)
return instance | Make a Vexrc instance from given file in given environ. | train | https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/config.py#L54-L59 | [
"def read(self, path, environ):\n \"\"\"Read data from file into this vexrc instance.\n \"\"\"\n try:\n inp = open(path, 'rb')\n except FileNotFoundError as error:\n if error.errno != 2:\n raise\n return None\n parsing = parse_vexrc(inp, environ)\n for heading, key, value in parsing:\n heading = self.default_heading if heading is None else heading\n if heading not in self.headings:\n self.headings[heading] = OrderedDict()\n self.headings[heading][key] = value\n parsing.close()\n"
] | class Vexrc(object):
"""Parsed representation of a .vexrc config file.
"""
default_heading = "root"
default_encoding = "utf-8"
def __init__(self):
self.encoding = self.default_encoding
self.headings = OrderedDict()
self.headings[self.default_heading] = OrderedDict()
self.headings['env'] = OrderedDict()
def __getitem__(self, key):
return self.headings.get(key)
@classmethod
def read(self, path, environ):
"""Read data from file into this vexrc instance.
"""
try:
inp = open(path, 'rb')
except FileNotFoundError as error:
if error.errno != 2:
raise
return None
parsing = parse_vexrc(inp, environ)
for heading, key, value in parsing:
heading = self.default_heading if heading is None else heading
if heading not in self.headings:
self.headings[heading] = OrderedDict()
self.headings[heading][key] = value
parsing.close()
def get_ve_base(self, environ):
"""Find a directory to look for virtualenvs in.
"""
# set ve_base to a path we can look for virtualenvs:
# 1. .vexrc
# 2. WORKON_HOME (as defined for virtualenvwrapper's benefit)
# 3. $HOME/.virtualenvs
# (unless we got --path, then we don't need it)
ve_base_value = self.headings[self.default_heading].get('virtualenvs')
if ve_base_value:
ve_base = os.path.expanduser(ve_base_value)
else:
ve_base = environ.get('WORKON_HOME', '')
if not ve_base:
# On Cygwin os.name == 'posix' and we want $HOME.
if platform.system() == 'Windows' and os.name == 'nt':
_win_drive = environ.get('HOMEDRIVE')
home = environ.get('HOMEPATH', '')
if home:
home = os.path.join(_win_drive, home)
else:
home = environ.get('HOME', '')
if not home:
home = os.path.expanduser('~')
if not home:
return ''
ve_base = os.path.join(home, '.virtualenvs')
# pass through invalid paths so messages can be generated
# if not os.path.exists(ve_base) or os.path.isfile(ve_base):
# return ''
return ve_base or ''
def get_shell(self, environ):
"""Find a command to run.
"""
command = self.headings[self.default_heading].get('shell')
if not command and os.name != 'nt':
command = environ.get('SHELL', '')
command = shlex.split(command) if command else None
return command
|
sashahart/vex | vex/config.py | Vexrc.read | python | def read(self, path, environ):
try:
inp = open(path, 'rb')
except FileNotFoundError as error:
if error.errno != 2:
raise
return None
parsing = parse_vexrc(inp, environ)
for heading, key, value in parsing:
heading = self.default_heading if heading is None else heading
if heading not in self.headings:
self.headings[heading] = OrderedDict()
self.headings[heading][key] = value
parsing.close() | Read data from file into this vexrc instance. | train | https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/config.py#L61-L76 | [
"def parse_vexrc(inp, environ):\n \"\"\"Iterator yielding key/value pairs from given stream.\n\n yields tuples of heading, key, value.\n \"\"\"\n heading = None\n errors = []\n with inp:\n for line_number, line in enumerate(inp):\n line = line.decode(\"utf-8\")\n if not line.strip():\n continue\n extracted_heading = extract_heading(line)\n if extracted_heading is not None:\n heading = extracted_heading\n continue\n kv_tuple = extract_key_value(line, environ)\n if kv_tuple is None:\n errors.append((line_number, line))\n continue\n try:\n yield heading, kv_tuple[0], kv_tuple[1]\n except GeneratorExit:\n break\n if errors:\n raise InvalidConfigError(inp.name, errors)\n"
] | class Vexrc(object):
"""Parsed representation of a .vexrc config file.
"""
default_heading = "root"
default_encoding = "utf-8"
def __init__(self):
self.encoding = self.default_encoding
self.headings = OrderedDict()
self.headings[self.default_heading] = OrderedDict()
self.headings['env'] = OrderedDict()
def __getitem__(self, key):
return self.headings.get(key)
@classmethod
def from_file(cls, path, environ):
"""Make a Vexrc instance from given file in given environ.
"""
instance = cls()
instance.read(path, environ)
return instance
def get_ve_base(self, environ):
"""Find a directory to look for virtualenvs in.
"""
# set ve_base to a path we can look for virtualenvs:
# 1. .vexrc
# 2. WORKON_HOME (as defined for virtualenvwrapper's benefit)
# 3. $HOME/.virtualenvs
# (unless we got --path, then we don't need it)
ve_base_value = self.headings[self.default_heading].get('virtualenvs')
if ve_base_value:
ve_base = os.path.expanduser(ve_base_value)
else:
ve_base = environ.get('WORKON_HOME', '')
if not ve_base:
# On Cygwin os.name == 'posix' and we want $HOME.
if platform.system() == 'Windows' and os.name == 'nt':
_win_drive = environ.get('HOMEDRIVE')
home = environ.get('HOMEPATH', '')
if home:
home = os.path.join(_win_drive, home)
else:
home = environ.get('HOME', '')
if not home:
home = os.path.expanduser('~')
if not home:
return ''
ve_base = os.path.join(home, '.virtualenvs')
# pass through invalid paths so messages can be generated
# if not os.path.exists(ve_base) or os.path.isfile(ve_base):
# return ''
return ve_base or ''
def get_shell(self, environ):
"""Find a command to run.
"""
command = self.headings[self.default_heading].get('shell')
if not command and os.name != 'nt':
command = environ.get('SHELL', '')
command = shlex.split(command) if command else None
return command
|
sashahart/vex | vex/config.py | Vexrc.get_ve_base | python | def get_ve_base(self, environ):
# set ve_base to a path we can look for virtualenvs:
# 1. .vexrc
# 2. WORKON_HOME (as defined for virtualenvwrapper's benefit)
# 3. $HOME/.virtualenvs
# (unless we got --path, then we don't need it)
ve_base_value = self.headings[self.default_heading].get('virtualenvs')
if ve_base_value:
ve_base = os.path.expanduser(ve_base_value)
else:
ve_base = environ.get('WORKON_HOME', '')
if not ve_base:
# On Cygwin os.name == 'posix' and we want $HOME.
if platform.system() == 'Windows' and os.name == 'nt':
_win_drive = environ.get('HOMEDRIVE')
home = environ.get('HOMEPATH', '')
if home:
home = os.path.join(_win_drive, home)
else:
home = environ.get('HOME', '')
if not home:
home = os.path.expanduser('~')
if not home:
return ''
ve_base = os.path.join(home, '.virtualenvs')
# pass through invalid paths so messages can be generated
# if not os.path.exists(ve_base) or os.path.isfile(ve_base):
# return ''
return ve_base or '' | Find a directory to look for virtualenvs in. | train | https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/config.py#L78-L108 | null | class Vexrc(object):
"""Parsed representation of a .vexrc config file.
"""
default_heading = "root"
default_encoding = "utf-8"
def __init__(self):
self.encoding = self.default_encoding
self.headings = OrderedDict()
self.headings[self.default_heading] = OrderedDict()
self.headings['env'] = OrderedDict()
def __getitem__(self, key):
return self.headings.get(key)
@classmethod
def from_file(cls, path, environ):
"""Make a Vexrc instance from given file in given environ.
"""
instance = cls()
instance.read(path, environ)
return instance
def read(self, path, environ):
"""Read data from file into this vexrc instance.
"""
try:
inp = open(path, 'rb')
except FileNotFoundError as error:
if error.errno != 2:
raise
return None
parsing = parse_vexrc(inp, environ)
for heading, key, value in parsing:
heading = self.default_heading if heading is None else heading
if heading not in self.headings:
self.headings[heading] = OrderedDict()
self.headings[heading][key] = value
parsing.close()
def get_shell(self, environ):
"""Find a command to run.
"""
command = self.headings[self.default_heading].get('shell')
if not command and os.name != 'nt':
command = environ.get('SHELL', '')
command = shlex.split(command) if command else None
return command
|
sashahart/vex | vex/config.py | Vexrc.get_shell | python | def get_shell(self, environ):
command = self.headings[self.default_heading].get('shell')
if not command and os.name != 'nt':
command = environ.get('SHELL', '')
command = shlex.split(command) if command else None
return command | Find a command to run. | train | https://github.com/sashahart/vex/blob/b7680c40897b8cbe6aae55ec9812b4fb11738192/vex/config.py#L110-L117 | null | class Vexrc(object):
"""Parsed representation of a .vexrc config file.
"""
default_heading = "root"
default_encoding = "utf-8"
def __init__(self):
self.encoding = self.default_encoding
self.headings = OrderedDict()
self.headings[self.default_heading] = OrderedDict()
self.headings['env'] = OrderedDict()
def __getitem__(self, key):
return self.headings.get(key)
@classmethod
def from_file(cls, path, environ):
"""Make a Vexrc instance from given file in given environ.
"""
instance = cls()
instance.read(path, environ)
return instance
def read(self, path, environ):
"""Read data from file into this vexrc instance.
"""
try:
inp = open(path, 'rb')
except FileNotFoundError as error:
if error.errno != 2:
raise
return None
parsing = parse_vexrc(inp, environ)
for heading, key, value in parsing:
heading = self.default_heading if heading is None else heading
if heading not in self.headings:
self.headings[heading] = OrderedDict()
self.headings[heading][key] = value
parsing.close()
def get_ve_base(self, environ):
"""Find a directory to look for virtualenvs in.
"""
# set ve_base to a path we can look for virtualenvs:
# 1. .vexrc
# 2. WORKON_HOME (as defined for virtualenvwrapper's benefit)
# 3. $HOME/.virtualenvs
# (unless we got --path, then we don't need it)
ve_base_value = self.headings[self.default_heading].get('virtualenvs')
if ve_base_value:
ve_base = os.path.expanduser(ve_base_value)
else:
ve_base = environ.get('WORKON_HOME', '')
if not ve_base:
# On Cygwin os.name == 'posix' and we want $HOME.
if platform.system() == 'Windows' and os.name == 'nt':
_win_drive = environ.get('HOMEDRIVE')
home = environ.get('HOMEPATH', '')
if home:
home = os.path.join(_win_drive, home)
else:
home = environ.get('HOME', '')
if not home:
home = os.path.expanduser('~')
if not home:
return ''
ve_base = os.path.join(home, '.virtualenvs')
# pass through invalid paths so messages can be generated
# if not os.path.exists(ve_base) or os.path.isfile(ve_base):
# return ''
return ve_base or ''
|
sensu-plugins/sensu-plugin-python | sensu_plugin/plugin.py | SensuPlugin.output | python | def output(self, args):
'''
Print the output message.
'''
print("SensuPlugin: {}".format(' '.join(str(a) for a in args))) | Print the output message. | train | https://github.com/sensu-plugins/sensu-plugin-python/blob/bd43a5ea4d191e5e63494c8679aab02ac072d9ed/sensu_plugin/plugin.py#L51-L55 | null | class SensuPlugin(object):
'''
Base class used by both checks and metrics plugins.
'''
def __init__(self, autorun=True):
self.plugin_info = {
'check_name': None,
'message': None,
'status': None
}
# create a method for each of the exit codes
# and register as exiy functions
self._hook = ExitHook()
self._hook.hook()
self.exit_code = ExitCode(0, 1, 2, 3)
for field in self.exit_code._fields:
self.__make_dynamic(field)
atexit.register(self.__exitfunction)
# Prepare command line arguments
self.parser = argparse.ArgumentParser()
if hasattr(self, 'setup'):
self.setup()
(self.options, self.remain) = self.parser.parse_known_args()
if autorun:
self.run()
def __make_dynamic(self, method):
'''
Create a method for each of the exit codes.
'''
def dynamic(*args):
self.plugin_info['status'] = method
if not args:
args = None
self.output(args)
sys.exit(getattr(self.exit_code, method))
method_lc = method.lower()
dynamic.__doc__ = "%s method" % method_lc
dynamic.__name__ = method_lc
setattr(self, dynamic.__name__, dynamic)
def run(self):
'''
Method should be overwritten by inherited classes.
'''
self.warning("Not implemented! You should override SensuPlugin.run()")
def __exitfunction(self):
'''
Method called by exit hook, ensures that both an exit code and
output is supplied, also catches errors.
'''
if self._hook.exit_code is None and self._hook.exception is None:
print("Check did not exit! You should call an exit code method.")
sys.stdout.flush()
os._exit(1)
elif self._hook.exception:
print("Check failed to run: %s, %s" %
(sys.last_type, traceback.format_tb(sys.last_traceback)))
sys.stdout.flush()
os._exit(2)
|
sensu-plugins/sensu-plugin-python | sensu_plugin/plugin.py | SensuPlugin.__make_dynamic | python | def __make_dynamic(self, method):
'''
Create a method for each of the exit codes.
'''
def dynamic(*args):
self.plugin_info['status'] = method
if not args:
args = None
self.output(args)
sys.exit(getattr(self.exit_code, method))
method_lc = method.lower()
dynamic.__doc__ = "%s method" % method_lc
dynamic.__name__ = method_lc
setattr(self, dynamic.__name__, dynamic) | Create a method for each of the exit codes. | train | https://github.com/sensu-plugins/sensu-plugin-python/blob/bd43a5ea4d191e5e63494c8679aab02ac072d9ed/sensu_plugin/plugin.py#L57-L71 | null | class SensuPlugin(object):
'''
Base class used by both checks and metrics plugins.
'''
def __init__(self, autorun=True):
self.plugin_info = {
'check_name': None,
'message': None,
'status': None
}
# create a method for each of the exit codes
# and register as exiy functions
self._hook = ExitHook()
self._hook.hook()
self.exit_code = ExitCode(0, 1, 2, 3)
for field in self.exit_code._fields:
self.__make_dynamic(field)
atexit.register(self.__exitfunction)
# Prepare command line arguments
self.parser = argparse.ArgumentParser()
if hasattr(self, 'setup'):
self.setup()
(self.options, self.remain) = self.parser.parse_known_args()
if autorun:
self.run()
def output(self, args):
'''
Print the output message.
'''
print("SensuPlugin: {}".format(' '.join(str(a) for a in args)))
def run(self):
'''
Method should be overwritten by inherited classes.
'''
self.warning("Not implemented! You should override SensuPlugin.run()")
def __exitfunction(self):
'''
Method called by exit hook, ensures that both an exit code and
output is supplied, also catches errors.
'''
if self._hook.exit_code is None and self._hook.exception is None:
print("Check did not exit! You should call an exit code method.")
sys.stdout.flush()
os._exit(1)
elif self._hook.exception:
print("Check failed to run: %s, %s" %
(sys.last_type, traceback.format_tb(sys.last_traceback)))
sys.stdout.flush()
os._exit(2)
|
sensu-plugins/sensu-plugin-python | sensu_plugin/plugin.py | SensuPlugin.__exitfunction | python | def __exitfunction(self):
'''
Method called by exit hook, ensures that both an exit code and
output is supplied, also catches errors.
'''
if self._hook.exit_code is None and self._hook.exception is None:
print("Check did not exit! You should call an exit code method.")
sys.stdout.flush()
os._exit(1)
elif self._hook.exception:
print("Check failed to run: %s, %s" %
(sys.last_type, traceback.format_tb(sys.last_traceback)))
sys.stdout.flush()
os._exit(2) | Method called by exit hook, ensures that both an exit code and
output is supplied, also catches errors. | train | https://github.com/sensu-plugins/sensu-plugin-python/blob/bd43a5ea4d191e5e63494c8679aab02ac072d9ed/sensu_plugin/plugin.py#L79-L92 | null | class SensuPlugin(object):
'''
Base class used by both checks and metrics plugins.
'''
def __init__(self, autorun=True):
self.plugin_info = {
'check_name': None,
'message': None,
'status': None
}
# create a method for each of the exit codes
# and register as exiy functions
self._hook = ExitHook()
self._hook.hook()
self.exit_code = ExitCode(0, 1, 2, 3)
for field in self.exit_code._fields:
self.__make_dynamic(field)
atexit.register(self.__exitfunction)
# Prepare command line arguments
self.parser = argparse.ArgumentParser()
if hasattr(self, 'setup'):
self.setup()
(self.options, self.remain) = self.parser.parse_known_args()
if autorun:
self.run()
def output(self, args):
'''
Print the output message.
'''
print("SensuPlugin: {}".format(' '.join(str(a) for a in args)))
def __make_dynamic(self, method):
'''
Create a method for each of the exit codes.
'''
def dynamic(*args):
self.plugin_info['status'] = method
if not args:
args = None
self.output(args)
sys.exit(getattr(self.exit_code, method))
method_lc = method.lower()
dynamic.__doc__ = "%s method" % method_lc
dynamic.__name__ = method_lc
setattr(self, dynamic.__name__, dynamic)
def run(self):
'''
Method should be overwritten by inherited classes.
'''
self.warning("Not implemented! You should override SensuPlugin.run()")
|
sensu-plugins/sensu-plugin-python | sensu_plugin/handler.py | SensuHandler.run | python | def run(self):
'''
Set up the event object, global settings and command line
arguments.
'''
# Parse the stdin into a global event object
stdin = self.read_stdin()
self.event = self.read_event(stdin)
# Prepare global settings
self.settings = get_settings()
self.api_settings = self.get_api_settings()
# Prepare command line arguments and
self.parser = argparse.ArgumentParser()
# set up the 2.x to 1.x event mapping argument
self.parser.add_argument("--map-v2-event-into-v1",
action="store_true",
default=False,
dest="v2event")
if hasattr(self, 'setup'):
self.setup()
(self.options, self.remain) = self.parser.parse_known_args()
# map the event if required
if (self.options.v2event or
os.environ.get("SENSU_MAP_V2_EVENT_INTO_V1")):
self.event = map_v2_event_into_v1(self.event)
# Filter (deprecated) and handle
self.filter()
self.handle() | Set up the event object, global settings and command line
arguments. | train | https://github.com/sensu-plugins/sensu-plugin-python/blob/bd43a5ea4d191e5e63494c8679aab02ac072d9ed/sensu_plugin/handler.py#L31-L65 | [
"def get_settings():\n '''\n Get all currently loaded settings.\n '''\n settings = {}\n for config_file in config_files():\n config_contents = load_config(config_file)\n if config_contents is not None:\n settings = deep_merge(settings, config_contents)\n return settings\n",
"def map_v2_event_into_v1(event):\n '''\n Helper method to convert Sensu 2.x event into Sensu 1.x event.\n '''\n\n # return the event if it has already been mapped\n if \"v2_event_mapped_into_v1\" in event:\n return event\n\n # Trigger mapping code if enity exists and client does not\n if not bool(event.get('client')) and \"entity\" in event:\n event['client'] = event['entity']\n\n # Fill in missing client attributes\n if \"name\" not in event['client']:\n event['client']['name'] = event['entity']['id']\n\n if \"subscribers\" not in event['client']:\n event['client']['subscribers'] = event['entity']['subscriptions']\n\n # Fill in renamed check attributes expected in 1.4 event\n if \"subscribers\" not in event['check']:\n event['check']['subscribers'] = event['check']['subscriptions']\n\n if \"source\" not in event['check']:\n event['check']['source'] = event['check']['proxy_entity_id']\n\n # Mimic 1.4 event action based on 2.0 event state\n # action used in logs and fluentd plugins handlers\n action_state_mapping = {'flapping': 'flapping', 'passing': 'resolve',\n 'failing': 'create'}\n\n if \"state\" in event['check']:\n state = event['check']['state']\n else:\n state = \"unknown::2.0_event\"\n\n if \"action\" not in event and state.lower() in action_state_mapping:\n event['action'] = action_state_mapping[state.lower()]\n else:\n event['action'] = state\n\n # Mimic 1.4 event history based on 2.0 event history\n if \"history\" in event['check']:\n # save the original history\n event['check']['history_v2'] = deepcopy(event['check']['history'])\n legacy_history = []\n for history in event['check']['history']:\n if isinstance(history['status'], int):\n legacy_history.append(str(history['status']))\n else:\n legacy_history.append(\"3\")\n\n event['check']['history'] = legacy_history\n\n # Setting flag indicating this function has already been called\n event['v2_event_mapped_into_v1'] = True\n\n # return the updated event\n return event\n",
"def read_stdin(self):\n '''\n Read data piped from stdin.\n '''\n try:\n return sys.stdin.read()\n except Exception:\n raise ValueError('Nothing read from stdin')\n",
"def read_event(self, check_result):\n '''\n Convert the piped check result (json) into a global 'event' dict\n '''\n try:\n event = json.loads(check_result)\n event['occurrences'] = event.get('occurrences', 1)\n event['check'] = event.get('check', {})\n event['client'] = event.get('client', {})\n return event\n except Exception:\n raise ValueError('error reading event: ' + check_result)\n",
"def handle(self):\n '''\n Method that should be overwritten to provide handler logic.\n '''\n print(\"ignoring event -- no handler defined.\")\n",
"def filter(self):\n '''\n Filters exit the proccess if the event should not be handled.\n Filtering events is deprecated and will be removed in a future release.\n '''\n\n if self.deprecated_filtering_enabled():\n print('warning: event filtering in sensu-plugin is deprecated,' +\n 'see http://bit.ly/sensu-plugin')\n self.filter_disabled()\n self.filter_silenced()\n self.filter_dependencies()\n\n if self.deprecated_occurrence_filtering():\n print('warning: occurrence filtering in sensu-plugin is' +\n 'deprecated, see http://bit.ly/sensu-plugin')\n self.filter_repeated()\n",
"def get_api_settings(self):\n '''\n Return a dict of API settings derived first from ENV['SENSU_API_URL']\n if set, then Sensu config `api` scope if configured, and finally\n falling back to to ipv4 localhost address on default API port.\n\n return dict\n '''\n\n sensu_api_url = os.environ.get('SENSU_API_URL')\n if sensu_api_url:\n uri = urlparse(sensu_api_url)\n api_settings = {\n 'host': '{0}://{1}'.format(uri.scheme, uri.hostname),\n 'port': uri.port,\n 'user': uri.username,\n 'password': uri.password\n }\n else:\n api_settings = self.settings.get('api', {})\n api_settings['host'] = api_settings.get(\n 'host', '127.0.0.1')\n api_settings['port'] = api_settings.get(\n 'port', 4567)\n\n return api_settings\n"
] | class SensuHandler(object):
'''
Base class for Sensu Handlers.
'''
def __init__(self, autorun=True):
if autorun:
self.run()
def read_stdin(self):
'''
Read data piped from stdin.
'''
try:
return sys.stdin.read()
except Exception:
raise ValueError('Nothing read from stdin')
def read_event(self, check_result):
'''
Convert the piped check result (json) into a global 'event' dict
'''
try:
event = json.loads(check_result)
event['occurrences'] = event.get('occurrences', 1)
event['check'] = event.get('check', {})
event['client'] = event.get('client', {})
return event
except Exception:
raise ValueError('error reading event: ' + check_result)
def handle(self):
'''
Method that should be overwritten to provide handler logic.
'''
print("ignoring event -- no handler defined.")
def filter(self):
'''
Filters exit the proccess if the event should not be handled.
Filtering events is deprecated and will be removed in a future release.
'''
if self.deprecated_filtering_enabled():
print('warning: event filtering in sensu-plugin is deprecated,' +
'see http://bit.ly/sensu-plugin')
self.filter_disabled()
self.filter_silenced()
self.filter_dependencies()
if self.deprecated_occurrence_filtering():
print('warning: occurrence filtering in sensu-plugin is' +
'deprecated, see http://bit.ly/sensu-plugin')
self.filter_repeated()
def deprecated_filtering_enabled(self):
'''
Evaluates whether the event should be processed by any of the
filter methods in this library. Defaults to true,
i.e. deprecated filters are run by default.
returns bool
'''
return self.event['check'].get('enable_deprecated_filtering', False)
def deprecated_occurrence_filtering(self):
'''
Evaluates whether the event should be processed by the
filter_repeated method. Defaults to true, i.e. filter_repeated
will filter events by default.
returns bool
'''
return self.event['check'].get(
'enable_deprecated_occurrence_filtering', False)
def bail(self, msg):
'''
Gracefully terminate with message
'''
client_name = self.event['client'].get('name', 'error:no-client-name')
check_name = self.event['check'].get('name', 'error:no-check-name')
print('{}: {}/{}'.format(msg, client_name, check_name))
sys.exit(0)
def get_api_settings(self):
'''
Return a dict of API settings derived first from ENV['SENSU_API_URL']
if set, then Sensu config `api` scope if configured, and finally
falling back to to ipv4 localhost address on default API port.
return dict
'''
sensu_api_url = os.environ.get('SENSU_API_URL')
if sensu_api_url:
uri = urlparse(sensu_api_url)
api_settings = {
'host': '{0}://{1}'.format(uri.scheme, uri.hostname),
'port': uri.port,
'user': uri.username,
'password': uri.password
}
else:
api_settings = self.settings.get('api', {})
api_settings['host'] = api_settings.get(
'host', '127.0.0.1')
api_settings['port'] = api_settings.get(
'port', 4567)
return api_settings
# API requests
def api_request(self, method, path):
'''
Query Sensu api for information.
'''
if not hasattr(self, 'api_settings'):
ValueError('api.json settings not found')
if method.lower() == 'get':
_request = requests.get
elif method.lower() == 'post':
_request = requests.post
domain = self.api_settings['host']
uri = '{}:{}/{}'.format(domain, self.api_settings['port'], path)
if self.api_settings.get('user') and self.api_settings.get('password'):
auth = (self.api_settings['user'], self.api_settings['password'])
else:
auth = ()
req = _request(uri, auth=auth)
return req
def stash_exists(self, path):
'''
Query Sensu API for stash data.
'''
return self.api_request('get', '/stash' + path).status_code == 200
def event_exists(self, client, check):
'''
Query Sensu API for event.
'''
return self.api_request(
'get',
'events/{}/{}'.format(client, check)
).status_code == 200
# Filters
def filter_disabled(self):
'''
Determine whether a check is disabled and shouldn't handle.
'''
if self.event['check']['alert'] is False:
self.bail('alert disabled')
def filter_silenced(self):
'''
Determine whether a check is silenced and shouldn't handle.
'''
stashes = [
('client', '/silence/{}'.format(self.event['client']['name'])),
('check', '/silence/{}/{}'.format(
self.event['client']['name'],
self.event['check']['name'])),
('check', '/silence/all/{}'.format(self.event['check']['name']))
]
for scope, path in stashes:
if self.stash_exists(path):
self.bail(scope + ' alerts silenced')
def filter_dependencies(self):
'''
Determine whether a check has dependencies.
'''
dependencies = self.event['check'].get('dependencies', None)
if dependencies is None or not isinstance(dependencies, list):
return
for dependency in self.event['check']['dependencies']:
if not str(dependency):
continue
dependency_split = tuple(dependency.split('/'))
# If there's a dependency on a check from another client, then use
# that client name, otherwise assume same client.
if len(dependency_split) == 2:
client, check = dependency_split
else:
client = self.event['client']['name']
check = dependency_split[0]
if self.event_exists(client, check):
self.bail('check dependency event exists')
def filter_repeated(self):
'''
Determine whether a check is repeating.
'''
defaults = {
'occurrences': 1,
'interval': 30,
'refresh': 1800
}
# Override defaults with anything defined in the settings
if isinstance(self.settings['sensu_plugin'], dict):
defaults.update(self.settings['sensu_plugin'])
occurrences = int(self.event['check'].get(
'occurrences', defaults['occurrences']))
interval = int(self.event['check'].get(
'interval', defaults['interval']))
refresh = int(self.event['check'].get(
'refresh', defaults['refresh']))
if self.event['occurrences'] < occurrences:
self.bail('not enough occurrences')
if (self.event['occurrences'] > occurrences and
self.event['action'] == 'create'):
return
number = int(refresh / interval)
if (number == 0 or
(self.event['occurrences'] - occurrences) % number == 0):
return
self.bail('only handling every ' + str(number) + ' occurrences')
|
sensu-plugins/sensu-plugin-python | sensu_plugin/handler.py | SensuHandler.read_event | python | def read_event(self, check_result):
'''
Convert the piped check result (json) into a global 'event' dict
'''
try:
event = json.loads(check_result)
event['occurrences'] = event.get('occurrences', 1)
event['check'] = event.get('check', {})
event['client'] = event.get('client', {})
return event
except Exception:
raise ValueError('error reading event: ' + check_result) | Convert the piped check result (json) into a global 'event' dict | train | https://github.com/sensu-plugins/sensu-plugin-python/blob/bd43a5ea4d191e5e63494c8679aab02ac072d9ed/sensu_plugin/handler.py#L76-L87 | null | class SensuHandler(object):
    '''
    Base class for Sensu Handlers.
    '''
    def __init__(self, autorun=True):
        # Run the full handler pipeline immediately unless the caller
        # opts out with autorun=False (useful for testing).
        if autorun:
            self.run()
    def run(self):
        '''
        Set up the event object, global settings and command line
        arguments.
        '''
        # Parse the stdin into a global event object
        stdin = self.read_stdin()
        self.event = self.read_event(stdin)
        # Prepare global settings
        self.settings = get_settings()
        self.api_settings = self.get_api_settings()
        # Prepare command line arguments and parse them
        self.parser = argparse.ArgumentParser()
        # set up the 2.x to 1.x event mapping argument
        self.parser.add_argument("--map-v2-event-into-v1",
                                 action="store_true",
                                 default=False,
                                 dest="v2event")
        # Let subclasses register extra arguments before parsing.
        if hasattr(self, 'setup'):
            self.setup()
        (self.options, self.remain) = self.parser.parse_known_args()
        # map the event if required
        if (self.options.v2event or
                os.environ.get("SENSU_MAP_V2_EVENT_INTO_V1")):
            self.event = map_v2_event_into_v1(self.event)
        # Filter (deprecated) and handle
        self.filter()
        self.handle()
    def read_stdin(self):
        '''
        Read data piped from stdin.

        :raises ValueError: if stdin cannot be read.
        '''
        try:
            return sys.stdin.read()
        except Exception:
            raise ValueError('Nothing read from stdin')
    def handle(self):
        '''
        Method that should be overwritten to provide handler logic.
        '''
        print("ignoring event -- no handler defined.")
    def filter(self):
        '''
        Filters exit the process if the event should not be handled.
        Filtering events is deprecated and will be removed in a future release.
        '''
        if self.deprecated_filtering_enabled():
            print('warning: event filtering in sensu-plugin is deprecated,' +
                  'see http://bit.ly/sensu-plugin')
            self.filter_disabled()
            self.filter_silenced()
            self.filter_dependencies()
        if self.deprecated_occurrence_filtering():
            print('warning: occurrence filtering in sensu-plugin is' +
                  'deprecated, see http://bit.ly/sensu-plugin')
            self.filter_repeated()
    def deprecated_filtering_enabled(self):
        '''
        Evaluates whether the event should be processed by any of the
        filter methods in this library.

        NOTE(review): the code defaults to False, i.e. the deprecated
        filters only run when the check sets
        'enable_deprecated_filtering'.

        returns bool
        '''
        return self.event['check'].get('enable_deprecated_filtering', False)
    def deprecated_occurrence_filtering(self):
        '''
        Evaluates whether the event should be processed by the
        filter_repeated method.

        NOTE(review): the code defaults to False, i.e. filter_repeated
        only runs when the check sets
        'enable_deprecated_occurrence_filtering'.

        returns bool
        '''
        return self.event['check'].get(
            'enable_deprecated_occurrence_filtering', False)
    def bail(self, msg):
        '''
        Gracefully terminate with message
        '''
        client_name = self.event['client'].get('name', 'error:no-client-name')
        check_name = self.event['check'].get('name', 'error:no-check-name')
        print('{}: {}/{}'.format(msg, client_name, check_name))
        # Exit status 0: a filtered event is not a handler failure.
        sys.exit(0)
    def get_api_settings(self):
        '''
        Return a dict of API settings derived first from ENV['SENSU_API_URL']
        if set, then Sensu config `api` scope if configured, and finally
        falling back to ipv4 localhost address on default API port.

        return dict
        '''
        sensu_api_url = os.environ.get('SENSU_API_URL')
        if sensu_api_url:
            uri = urlparse(sensu_api_url)
            api_settings = {
                'host': '{0}://{1}'.format(uri.scheme, uri.hostname),
                'port': uri.port,
                'user': uri.username,
                'password': uri.password
            }
        else:
            api_settings = self.settings.get('api', {})
            api_settings['host'] = api_settings.get(
                'host', '127.0.0.1')
            api_settings['port'] = api_settings.get(
                'port', 4567)
        return api_settings
    # API requests
    def api_request(self, method, path):
        '''
        Query Sensu api for information.
        '''
        if not hasattr(self, 'api_settings'):
            # NOTE(review): this ValueError is constructed but never
            # raised -- a ``raise`` appears to be missing here.
            ValueError('api.json settings not found')
        if method.lower() == 'get':
            _request = requests.get
        elif method.lower() == 'post':
            _request = requests.post
        # NOTE(review): any other method leaves ``_request`` unbound
        # and fails below with UnboundLocalError.
        domain = self.api_settings['host']
        uri = '{}:{}/{}'.format(domain, self.api_settings['port'], path)
        if self.api_settings.get('user') and self.api_settings.get('password'):
            auth = (self.api_settings['user'], self.api_settings['password'])
        else:
            auth = ()
        req = _request(uri, auth=auth)
        return req
    def stash_exists(self, path):
        '''
        Query Sensu API for stash data.
        '''
        return self.api_request('get', '/stash' + path).status_code == 200
    def event_exists(self, client, check):
        '''
        Query Sensu API for event.
        '''
        return self.api_request(
            'get',
            'events/{}/{}'.format(client, check)
        ).status_code == 200
    # Filters
    def filter_disabled(self):
        '''
        Determine whether a check is disabled and shouldn't handle.
        '''
        if self.event['check']['alert'] is False:
            self.bail('alert disabled')
    def filter_silenced(self):
        '''
        Determine whether a check is silenced and shouldn't handle.
        '''
        stashes = [
            ('client', '/silence/{}'.format(self.event['client']['name'])),
            ('check', '/silence/{}/{}'.format(
                self.event['client']['name'],
                self.event['check']['name'])),
            ('check', '/silence/all/{}'.format(self.event['check']['name']))
        ]
        for scope, path in stashes:
            if self.stash_exists(path):
                self.bail(scope + ' alerts silenced')
    def filter_dependencies(self):
        '''
        Determine whether a check has dependencies.
        '''
        dependencies = self.event['check'].get('dependencies', None)
        if dependencies is None or not isinstance(dependencies, list):
            return
        for dependency in self.event['check']['dependencies']:
            if not str(dependency):
                continue
            dependency_split = tuple(dependency.split('/'))
            # If there's a dependency on a check from another client, then use
            # that client name, otherwise assume same client.
            if len(dependency_split) == 2:
                client, check = dependency_split
            else:
                client = self.event['client']['name']
                check = dependency_split[0]
            if self.event_exists(client, check):
                self.bail('check dependency event exists')
    def filter_repeated(self):
        '''
        Determine whether a check is repeating.
        '''
        defaults = {
            'occurrences': 1,
            'interval': 30,
            'refresh': 1800
        }
        # Override defaults with anything defined in the settings
        # NOTE(review): raises KeyError when settings has no
        # 'sensu_plugin' scope -- consider .get('sensu_plugin').
        if isinstance(self.settings['sensu_plugin'], dict):
            defaults.update(self.settings['sensu_plugin'])
        occurrences = int(self.event['check'].get(
            'occurrences', defaults['occurrences']))
        interval = int(self.event['check'].get(
            'interval', defaults['interval']))
        refresh = int(self.event['check'].get(
            'refresh', defaults['refresh']))
        if self.event['occurrences'] < occurrences:
            self.bail('not enough occurrences')
        if (self.event['occurrences'] > occurrences and
                self.event['action'] == 'create'):
            return
        # NOTE(review): interval == 0 raises ZeroDivisionError here.
        number = int(refresh / interval)
        if (number == 0 or
                (self.event['occurrences'] - occurrences) % number == 0):
            return
        self.bail('only handling every ' + str(number) + ' occurrences')
|
sensu-plugins/sensu-plugin-python | sensu_plugin/handler.py | SensuHandler.filter | python | def filter(self):
'''
        Filters exit the process if the event should not be handled.
Filtering events is deprecated and will be removed in a future release.
'''
if self.deprecated_filtering_enabled():
print('warning: event filtering in sensu-plugin is deprecated,' +
'see http://bit.ly/sensu-plugin')
self.filter_disabled()
self.filter_silenced()
self.filter_dependencies()
if self.deprecated_occurrence_filtering():
print('warning: occurrence filtering in sensu-plugin is' +
'deprecated, see http://bit.ly/sensu-plugin')
self.filter_repeated() | Filters exit the proccess if the event should not be handled.
Filtering events is deprecated and will be removed in a future release. | train | https://github.com/sensu-plugins/sensu-plugin-python/blob/bd43a5ea4d191e5e63494c8679aab02ac072d9ed/sensu_plugin/handler.py#L95-L111 | [
"def deprecated_filtering_enabled(self):\n '''\n Evaluates whether the event should be processed by any of the\n filter methods in this library. Defaults to true,\n i.e. deprecated filters are run by default.\n\n returns bool\n '''\n return self.event['check'].get('enable_deprecated_filtering', False)\n"
] | class SensuHandler(object):
    '''
    Base class for Sensu Handlers.
    '''
    def __init__(self, autorun=True):
        # Run the full handler pipeline immediately unless the caller
        # opts out with autorun=False (useful for testing).
        if autorun:
            self.run()
    def run(self):
        '''
        Set up the event object, global settings and command line
        arguments.
        '''
        # Parse the stdin into a global event object
        stdin = self.read_stdin()
        self.event = self.read_event(stdin)
        # Prepare global settings
        self.settings = get_settings()
        self.api_settings = self.get_api_settings()
        # Prepare command line arguments and parse them
        self.parser = argparse.ArgumentParser()
        # set up the 2.x to 1.x event mapping argument
        self.parser.add_argument("--map-v2-event-into-v1",
                                 action="store_true",
                                 default=False,
                                 dest="v2event")
        # Let subclasses register extra arguments before parsing.
        if hasattr(self, 'setup'):
            self.setup()
        (self.options, self.remain) = self.parser.parse_known_args()
        # map the event if required
        if (self.options.v2event or
                os.environ.get("SENSU_MAP_V2_EVENT_INTO_V1")):
            self.event = map_v2_event_into_v1(self.event)
        # Filter (deprecated) and handle
        self.filter()
        self.handle()
    def read_stdin(self):
        '''
        Read data piped from stdin.

        :raises ValueError: if stdin cannot be read.
        '''
        try:
            return sys.stdin.read()
        except Exception:
            raise ValueError('Nothing read from stdin')
    def read_event(self, check_result):
        '''
        Convert the piped check result (json) into a global 'event'
        dict, guaranteeing the 'occurrences', 'check' and 'client'
        keys are present.

        :raises ValueError: if the document cannot be parsed.
        '''
        try:
            event = json.loads(check_result)
            event['occurrences'] = event.get('occurrences', 1)
            event['check'] = event.get('check', {})
            event['client'] = event.get('client', {})
            return event
        except Exception:
            raise ValueError('error reading event: ' + check_result)
    def handle(self):
        '''
        Method that should be overwritten to provide handler logic.
        '''
        print("ignoring event -- no handler defined.")
    def deprecated_filtering_enabled(self):
        '''
        Evaluates whether the event should be processed by any of the
        filter methods in this library.

        NOTE(review): the code defaults to False, i.e. the deprecated
        filters only run when the check sets
        'enable_deprecated_filtering'.

        returns bool
        '''
        return self.event['check'].get('enable_deprecated_filtering', False)
    def deprecated_occurrence_filtering(self):
        '''
        Evaluates whether the event should be processed by the
        filter_repeated method.

        NOTE(review): the code defaults to False, i.e. filter_repeated
        only runs when the check sets
        'enable_deprecated_occurrence_filtering'.

        returns bool
        '''
        return self.event['check'].get(
            'enable_deprecated_occurrence_filtering', False)
    def bail(self, msg):
        '''
        Gracefully terminate with message
        '''
        client_name = self.event['client'].get('name', 'error:no-client-name')
        check_name = self.event['check'].get('name', 'error:no-check-name')
        print('{}: {}/{}'.format(msg, client_name, check_name))
        # Exit status 0: a filtered event is not a handler failure.
        sys.exit(0)
    def get_api_settings(self):
        '''
        Return a dict of API settings derived first from ENV['SENSU_API_URL']
        if set, then Sensu config `api` scope if configured, and finally
        falling back to ipv4 localhost address on default API port.

        return dict
        '''
        sensu_api_url = os.environ.get('SENSU_API_URL')
        if sensu_api_url:
            uri = urlparse(sensu_api_url)
            api_settings = {
                'host': '{0}://{1}'.format(uri.scheme, uri.hostname),
                'port': uri.port,
                'user': uri.username,
                'password': uri.password
            }
        else:
            api_settings = self.settings.get('api', {})
            api_settings['host'] = api_settings.get(
                'host', '127.0.0.1')
            api_settings['port'] = api_settings.get(
                'port', 4567)
        return api_settings
    # API requests
    def api_request(self, method, path):
        '''
        Query Sensu api for information.
        '''
        if not hasattr(self, 'api_settings'):
            # NOTE(review): this ValueError is constructed but never
            # raised -- a ``raise`` appears to be missing here.
            ValueError('api.json settings not found')
        if method.lower() == 'get':
            _request = requests.get
        elif method.lower() == 'post':
            _request = requests.post
        # NOTE(review): any other method leaves ``_request`` unbound
        # and fails below with UnboundLocalError.
        domain = self.api_settings['host']
        uri = '{}:{}/{}'.format(domain, self.api_settings['port'], path)
        if self.api_settings.get('user') and self.api_settings.get('password'):
            auth = (self.api_settings['user'], self.api_settings['password'])
        else:
            auth = ()
        req = _request(uri, auth=auth)
        return req
    def stash_exists(self, path):
        '''
        Query Sensu API for stash data.
        '''
        return self.api_request('get', '/stash' + path).status_code == 200
    def event_exists(self, client, check):
        '''
        Query Sensu API for event.
        '''
        return self.api_request(
            'get',
            'events/{}/{}'.format(client, check)
        ).status_code == 200
    # Filters
    def filter_disabled(self):
        '''
        Determine whether a check is disabled and shouldn't handle.
        '''
        if self.event['check']['alert'] is False:
            self.bail('alert disabled')
    def filter_silenced(self):
        '''
        Determine whether a check is silenced and shouldn't handle.
        '''
        stashes = [
            ('client', '/silence/{}'.format(self.event['client']['name'])),
            ('check', '/silence/{}/{}'.format(
                self.event['client']['name'],
                self.event['check']['name'])),
            ('check', '/silence/all/{}'.format(self.event['check']['name']))
        ]
        for scope, path in stashes:
            if self.stash_exists(path):
                self.bail(scope + ' alerts silenced')
    def filter_dependencies(self):
        '''
        Determine whether a check has dependencies.
        '''
        dependencies = self.event['check'].get('dependencies', None)
        if dependencies is None or not isinstance(dependencies, list):
            return
        for dependency in self.event['check']['dependencies']:
            if not str(dependency):
                continue
            dependency_split = tuple(dependency.split('/'))
            # If there's a dependency on a check from another client, then use
            # that client name, otherwise assume same client.
            if len(dependency_split) == 2:
                client, check = dependency_split
            else:
                client = self.event['client']['name']
                check = dependency_split[0]
            if self.event_exists(client, check):
                self.bail('check dependency event exists')
    def filter_repeated(self):
        '''
        Determine whether a check is repeating.
        '''
        defaults = {
            'occurrences': 1,
            'interval': 30,
            'refresh': 1800
        }
        # Override defaults with anything defined in the settings
        # NOTE(review): raises KeyError when settings has no
        # 'sensu_plugin' scope -- consider .get('sensu_plugin').
        if isinstance(self.settings['sensu_plugin'], dict):
            defaults.update(self.settings['sensu_plugin'])
        occurrences = int(self.event['check'].get(
            'occurrences', defaults['occurrences']))
        interval = int(self.event['check'].get(
            'interval', defaults['interval']))
        refresh = int(self.event['check'].get(
            'refresh', defaults['refresh']))
        if self.event['occurrences'] < occurrences:
            self.bail('not enough occurrences')
        if (self.event['occurrences'] > occurrences and
                self.event['action'] == 'create'):
            return
        # NOTE(review): interval == 0 raises ZeroDivisionError here.
        number = int(refresh / interval)
        if (number == 0 or
                (self.event['occurrences'] - occurrences) % number == 0):
            return
        self.bail('only handling every ' + str(number) + ' occurrences')
|
sensu-plugins/sensu-plugin-python | sensu_plugin/handler.py | SensuHandler.bail | python | def bail(self, msg):
'''
Gracefully terminate with message
'''
client_name = self.event['client'].get('name', 'error:no-client-name')
check_name = self.event['check'].get('name', 'error:no-check-name')
print('{}: {}/{}'.format(msg, client_name, check_name))
sys.exit(0) | Gracefully terminate with message | train | https://github.com/sensu-plugins/sensu-plugin-python/blob/bd43a5ea4d191e5e63494c8679aab02ac072d9ed/sensu_plugin/handler.py#L135-L142 | null | class SensuHandler(object):
    '''
    Base class for Sensu Handlers.
    '''
    def __init__(self, autorun=True):
        # Run the full handler pipeline immediately unless the caller
        # opts out with autorun=False (useful for testing).
        if autorun:
            self.run()
    def run(self):
        '''
        Set up the event object, global settings and command line
        arguments.
        '''
        # Parse the stdin into a global event object
        stdin = self.read_stdin()
        self.event = self.read_event(stdin)
        # Prepare global settings
        self.settings = get_settings()
        self.api_settings = self.get_api_settings()
        # Prepare command line arguments and parse them
        self.parser = argparse.ArgumentParser()
        # set up the 2.x to 1.x event mapping argument
        self.parser.add_argument("--map-v2-event-into-v1",
                                 action="store_true",
                                 default=False,
                                 dest="v2event")
        # Let subclasses register extra arguments before parsing.
        if hasattr(self, 'setup'):
            self.setup()
        (self.options, self.remain) = self.parser.parse_known_args()
        # map the event if required
        if (self.options.v2event or
                os.environ.get("SENSU_MAP_V2_EVENT_INTO_V1")):
            self.event = map_v2_event_into_v1(self.event)
        # Filter (deprecated) and handle
        self.filter()
        self.handle()
    def read_stdin(self):
        '''
        Read data piped from stdin.

        :raises ValueError: if stdin cannot be read.
        '''
        try:
            return sys.stdin.read()
        except Exception:
            raise ValueError('Nothing read from stdin')
    def read_event(self, check_result):
        '''
        Convert the piped check result (json) into a global 'event'
        dict, guaranteeing the 'occurrences', 'check' and 'client'
        keys are present.

        :raises ValueError: if the document cannot be parsed.
        '''
        try:
            event = json.loads(check_result)
            event['occurrences'] = event.get('occurrences', 1)
            event['check'] = event.get('check', {})
            event['client'] = event.get('client', {})
            return event
        except Exception:
            raise ValueError('error reading event: ' + check_result)
    def handle(self):
        '''
        Method that should be overwritten to provide handler logic.
        '''
        print("ignoring event -- no handler defined.")
    def filter(self):
        '''
        Filters exit the process if the event should not be handled.
        Filtering events is deprecated and will be removed in a future release.
        '''
        if self.deprecated_filtering_enabled():
            print('warning: event filtering in sensu-plugin is deprecated,' +
                  'see http://bit.ly/sensu-plugin')
            self.filter_disabled()
            self.filter_silenced()
            self.filter_dependencies()
        if self.deprecated_occurrence_filtering():
            print('warning: occurrence filtering in sensu-plugin is' +
                  'deprecated, see http://bit.ly/sensu-plugin')
            self.filter_repeated()
    def deprecated_filtering_enabled(self):
        '''
        Evaluates whether the event should be processed by any of the
        filter methods in this library.

        NOTE(review): the code defaults to False, i.e. the deprecated
        filters only run when the check sets
        'enable_deprecated_filtering'.

        returns bool
        '''
        return self.event['check'].get('enable_deprecated_filtering', False)
    def deprecated_occurrence_filtering(self):
        '''
        Evaluates whether the event should be processed by the
        filter_repeated method.

        NOTE(review): the code defaults to False, i.e. filter_repeated
        only runs when the check sets
        'enable_deprecated_occurrence_filtering'.

        returns bool
        '''
        return self.event['check'].get(
            'enable_deprecated_occurrence_filtering', False)
    def get_api_settings(self):
        '''
        Return a dict of API settings derived first from ENV['SENSU_API_URL']
        if set, then Sensu config `api` scope if configured, and finally
        falling back to ipv4 localhost address on default API port.

        return dict
        '''
        sensu_api_url = os.environ.get('SENSU_API_URL')
        if sensu_api_url:
            uri = urlparse(sensu_api_url)
            api_settings = {
                'host': '{0}://{1}'.format(uri.scheme, uri.hostname),
                'port': uri.port,
                'user': uri.username,
                'password': uri.password
            }
        else:
            api_settings = self.settings.get('api', {})
            api_settings['host'] = api_settings.get(
                'host', '127.0.0.1')
            api_settings['port'] = api_settings.get(
                'port', 4567)
        return api_settings
    # API requests
    def api_request(self, method, path):
        '''
        Query Sensu api for information.
        '''
        if not hasattr(self, 'api_settings'):
            # NOTE(review): this ValueError is constructed but never
            # raised -- a ``raise`` appears to be missing here.
            ValueError('api.json settings not found')
        if method.lower() == 'get':
            _request = requests.get
        elif method.lower() == 'post':
            _request = requests.post
        # NOTE(review): any other method leaves ``_request`` unbound
        # and fails below with UnboundLocalError.
        domain = self.api_settings['host']
        uri = '{}:{}/{}'.format(domain, self.api_settings['port'], path)
        if self.api_settings.get('user') and self.api_settings.get('password'):
            auth = (self.api_settings['user'], self.api_settings['password'])
        else:
            auth = ()
        req = _request(uri, auth=auth)
        return req
    def stash_exists(self, path):
        '''
        Query Sensu API for stash data.
        '''
        return self.api_request('get', '/stash' + path).status_code == 200
    def event_exists(self, client, check):
        '''
        Query Sensu API for event.
        '''
        return self.api_request(
            'get',
            'events/{}/{}'.format(client, check)
        ).status_code == 200
    # Filters
    def filter_disabled(self):
        '''
        Determine whether a check is disabled and shouldn't handle.
        '''
        if self.event['check']['alert'] is False:
            self.bail('alert disabled')
    def filter_silenced(self):
        '''
        Determine whether a check is silenced and shouldn't handle.
        '''
        stashes = [
            ('client', '/silence/{}'.format(self.event['client']['name'])),
            ('check', '/silence/{}/{}'.format(
                self.event['client']['name'],
                self.event['check']['name'])),
            ('check', '/silence/all/{}'.format(self.event['check']['name']))
        ]
        for scope, path in stashes:
            if self.stash_exists(path):
                self.bail(scope + ' alerts silenced')
    def filter_dependencies(self):
        '''
        Determine whether a check has dependencies.
        '''
        dependencies = self.event['check'].get('dependencies', None)
        if dependencies is None or not isinstance(dependencies, list):
            return
        for dependency in self.event['check']['dependencies']:
            if not str(dependency):
                continue
            dependency_split = tuple(dependency.split('/'))
            # If there's a dependency on a check from another client, then use
            # that client name, otherwise assume same client.
            if len(dependency_split) == 2:
                client, check = dependency_split
            else:
                client = self.event['client']['name']
                check = dependency_split[0]
            if self.event_exists(client, check):
                self.bail('check dependency event exists')
    def filter_repeated(self):
        '''
        Determine whether a check is repeating.
        '''
        defaults = {
            'occurrences': 1,
            'interval': 30,
            'refresh': 1800
        }
        # Override defaults with anything defined in the settings
        # NOTE(review): raises KeyError when settings has no
        # 'sensu_plugin' scope -- consider .get('sensu_plugin').
        if isinstance(self.settings['sensu_plugin'], dict):
            defaults.update(self.settings['sensu_plugin'])
        occurrences = int(self.event['check'].get(
            'occurrences', defaults['occurrences']))
        interval = int(self.event['check'].get(
            'interval', defaults['interval']))
        refresh = int(self.event['check'].get(
            'refresh', defaults['refresh']))
        if self.event['occurrences'] < occurrences:
            self.bail('not enough occurrences')
        if (self.event['occurrences'] > occurrences and
                self.event['action'] == 'create'):
            return
        # NOTE(review): interval == 0 raises ZeroDivisionError here.
        number = int(refresh / interval)
        if (number == 0 or
                (self.event['occurrences'] - occurrences) % number == 0):
            return
        self.bail('only handling every ' + str(number) + ' occurrences')
|
sensu-plugins/sensu-plugin-python | sensu_plugin/handler.py | SensuHandler.get_api_settings | python | def get_api_settings(self):
'''
Return a dict of API settings derived first from ENV['SENSU_API_URL']
if set, then Sensu config `api` scope if configured, and finally
        falling back to ipv4 localhost address on default API port.
return dict
'''
sensu_api_url = os.environ.get('SENSU_API_URL')
if sensu_api_url:
uri = urlparse(sensu_api_url)
api_settings = {
'host': '{0}://{1}'.format(uri.scheme, uri.hostname),
'port': uri.port,
'user': uri.username,
'password': uri.password
}
else:
api_settings = self.settings.get('api', {})
api_settings['host'] = api_settings.get(
'host', '127.0.0.1')
api_settings['port'] = api_settings.get(
'port', 4567)
return api_settings | Return a dict of API settings derived first from ENV['SENSU_API_URL']
if set, then Sensu config `api` scope if configured, and finally
falling back to to ipv4 localhost address on default API port.
return dict | train | https://github.com/sensu-plugins/sensu-plugin-python/blob/bd43a5ea4d191e5e63494c8679aab02ac072d9ed/sensu_plugin/handler.py#L144-L169 | null | class SensuHandler(object):
'''
Base class for Sensu Handlers.
'''
def __init__(self, autorun=True):
if autorun:
self.run()
def run(self):
'''
Set up the event object, global settings and command line
arguments.
'''
# Parse the stdin into a global event object
stdin = self.read_stdin()
self.event = self.read_event(stdin)
# Prepare global settings
self.settings = get_settings()
self.api_settings = self.get_api_settings()
# Prepare command line arguments and
self.parser = argparse.ArgumentParser()
# set up the 2.x to 1.x event mapping argument
self.parser.add_argument("--map-v2-event-into-v1",
action="store_true",
default=False,
dest="v2event")
if hasattr(self, 'setup'):
self.setup()
(self.options, self.remain) = self.parser.parse_known_args()
# map the event if required
if (self.options.v2event or
os.environ.get("SENSU_MAP_V2_EVENT_INTO_V1")):
self.event = map_v2_event_into_v1(self.event)
# Filter (deprecated) and handle
self.filter()
self.handle()
def read_stdin(self):
'''
Read data piped from stdin.
'''
try:
return sys.stdin.read()
except Exception:
raise ValueError('Nothing read from stdin')
def read_event(self, check_result):
'''
Convert the piped check result (json) into a global 'event' dict
'''
try:
event = json.loads(check_result)
event['occurrences'] = event.get('occurrences', 1)
event['check'] = event.get('check', {})
event['client'] = event.get('client', {})
return event
except Exception:
raise ValueError('error reading event: ' + check_result)
def handle(self):
'''
Method that should be overwritten to provide handler logic.
'''
print("ignoring event -- no handler defined.")
def filter(self):
'''
Filters exit the proccess if the event should not be handled.
Filtering events is deprecated and will be removed in a future release.
'''
if self.deprecated_filtering_enabled():
print('warning: event filtering in sensu-plugin is deprecated,' +
'see http://bit.ly/sensu-plugin')
self.filter_disabled()
self.filter_silenced()
self.filter_dependencies()
if self.deprecated_occurrence_filtering():
print('warning: occurrence filtering in sensu-plugin is' +
'deprecated, see http://bit.ly/sensu-plugin')
self.filter_repeated()
def deprecated_filtering_enabled(self):
'''
Evaluates whether the event should be processed by any of the
filter methods in this library. Defaults to true,
i.e. deprecated filters are run by default.
returns bool
'''
return self.event['check'].get('enable_deprecated_filtering', False)
def deprecated_occurrence_filtering(self):
'''
Evaluates whether the event should be processed by the
filter_repeated method. Defaults to true, i.e. filter_repeated
will filter events by default.
returns bool
'''
return self.event['check'].get(
'enable_deprecated_occurrence_filtering', False)
def bail(self, msg):
'''
Gracefully terminate with message
'''
client_name = self.event['client'].get('name', 'error:no-client-name')
check_name = self.event['check'].get('name', 'error:no-check-name')
print('{}: {}/{}'.format(msg, client_name, check_name))
sys.exit(0)
# API requests
def api_request(self, method, path):
'''
Query Sensu api for information.
'''
if not hasattr(self, 'api_settings'):
ValueError('api.json settings not found')
if method.lower() == 'get':
_request = requests.get
elif method.lower() == 'post':
_request = requests.post
domain = self.api_settings['host']
uri = '{}:{}/{}'.format(domain, self.api_settings['port'], path)
if self.api_settings.get('user') and self.api_settings.get('password'):
auth = (self.api_settings['user'], self.api_settings['password'])
else:
auth = ()
req = _request(uri, auth=auth)
return req
def stash_exists(self, path):
'''
Query Sensu API for stash data.
'''
return self.api_request('get', '/stash' + path).status_code == 200
def event_exists(self, client, check):
'''
Query Sensu API for event.
'''
return self.api_request(
'get',
'events/{}/{}'.format(client, check)
).status_code == 200
# Filters
def filter_disabled(self):
'''
Determine whether a check is disabled and shouldn't handle.
'''
if self.event['check']['alert'] is False:
self.bail('alert disabled')
def filter_silenced(self):
'''
Determine whether a check is silenced and shouldn't handle.
'''
stashes = [
('client', '/silence/{}'.format(self.event['client']['name'])),
('check', '/silence/{}/{}'.format(
self.event['client']['name'],
self.event['check']['name'])),
('check', '/silence/all/{}'.format(self.event['check']['name']))
]
for scope, path in stashes:
if self.stash_exists(path):
self.bail(scope + ' alerts silenced')
def filter_dependencies(self):
'''
Determine whether a check has dependencies.
'''
dependencies = self.event['check'].get('dependencies', None)
if dependencies is None or not isinstance(dependencies, list):
return
for dependency in self.event['check']['dependencies']:
if not str(dependency):
continue
dependency_split = tuple(dependency.split('/'))
# If there's a dependency on a check from another client, then use
# that client name, otherwise assume same client.
if len(dependency_split) == 2:
client, check = dependency_split
else:
client = self.event['client']['name']
check = dependency_split[0]
if self.event_exists(client, check):
self.bail('check dependency event exists')
def filter_repeated(self):
'''
Determine whether a check is repeating.
'''
defaults = {
'occurrences': 1,
'interval': 30,
'refresh': 1800
}
# Override defaults with anything defined in the settings
if isinstance(self.settings['sensu_plugin'], dict):
defaults.update(self.settings['sensu_plugin'])
occurrences = int(self.event['check'].get(
'occurrences', defaults['occurrences']))
interval = int(self.event['check'].get(
'interval', defaults['interval']))
refresh = int(self.event['check'].get(
'refresh', defaults['refresh']))
if self.event['occurrences'] < occurrences:
self.bail('not enough occurrences')
if (self.event['occurrences'] > occurrences and
self.event['action'] == 'create'):
return
number = int(refresh / interval)
if (number == 0 or
(self.event['occurrences'] - occurrences) % number == 0):
return
self.bail('only handling every ' + str(number) + ' occurrences')
|
sensu-plugins/sensu-plugin-python | sensu_plugin/handler.py | SensuHandler.api_request | python | def api_request(self, method, path):
'''
Query Sensu api for information.
'''
if not hasattr(self, 'api_settings'):
ValueError('api.json settings not found')
if method.lower() == 'get':
_request = requests.get
elif method.lower() == 'post':
_request = requests.post
domain = self.api_settings['host']
uri = '{}:{}/{}'.format(domain, self.api_settings['port'], path)
if self.api_settings.get('user') and self.api_settings.get('password'):
auth = (self.api_settings['user'], self.api_settings['password'])
else:
auth = ()
req = _request(uri, auth=auth)
return req | Query Sensu api for information. | train | https://github.com/sensu-plugins/sensu-plugin-python/blob/bd43a5ea4d191e5e63494c8679aab02ac072d9ed/sensu_plugin/handler.py#L172-L191 | null | class SensuHandler(object):
'''
Base class for Sensu Handlers.
'''
def __init__(self, autorun=True):
if autorun:
self.run()
def run(self):
'''
Set up the event object, global settings and command line
arguments.
'''
# Parse the stdin into a global event object
stdin = self.read_stdin()
self.event = self.read_event(stdin)
# Prepare global settings
self.settings = get_settings()
self.api_settings = self.get_api_settings()
# Prepare command line arguments and
self.parser = argparse.ArgumentParser()
# set up the 2.x to 1.x event mapping argument
self.parser.add_argument("--map-v2-event-into-v1",
action="store_true",
default=False,
dest="v2event")
if hasattr(self, 'setup'):
self.setup()
(self.options, self.remain) = self.parser.parse_known_args()
# map the event if required
if (self.options.v2event or
os.environ.get("SENSU_MAP_V2_EVENT_INTO_V1")):
self.event = map_v2_event_into_v1(self.event)
# Filter (deprecated) and handle
self.filter()
self.handle()
def read_stdin(self):
'''
Read data piped from stdin.
'''
try:
return sys.stdin.read()
except Exception:
raise ValueError('Nothing read from stdin')
def read_event(self, check_result):
'''
Convert the piped check result (json) into a global 'event' dict
'''
try:
event = json.loads(check_result)
event['occurrences'] = event.get('occurrences', 1)
event['check'] = event.get('check', {})
event['client'] = event.get('client', {})
return event
except Exception:
raise ValueError('error reading event: ' + check_result)
def handle(self):
'''
Method that should be overwritten to provide handler logic.
'''
print("ignoring event -- no handler defined.")
def filter(self):
'''
Filters exit the proccess if the event should not be handled.
Filtering events is deprecated and will be removed in a future release.
'''
if self.deprecated_filtering_enabled():
print('warning: event filtering in sensu-plugin is deprecated,' +
'see http://bit.ly/sensu-plugin')
self.filter_disabled()
self.filter_silenced()
self.filter_dependencies()
if self.deprecated_occurrence_filtering():
print('warning: occurrence filtering in sensu-plugin is' +
'deprecated, see http://bit.ly/sensu-plugin')
self.filter_repeated()
def deprecated_filtering_enabled(self):
'''
Evaluates whether the event should be processed by any of the
filter methods in this library. Defaults to true,
i.e. deprecated filters are run by default.
returns bool
'''
return self.event['check'].get('enable_deprecated_filtering', False)
def deprecated_occurrence_filtering(self):
'''
Evaluates whether the event should be processed by the
filter_repeated method. Defaults to true, i.e. filter_repeated
will filter events by default.
returns bool
'''
return self.event['check'].get(
'enable_deprecated_occurrence_filtering', False)
def bail(self, msg):
'''
Gracefully terminate with message
'''
client_name = self.event['client'].get('name', 'error:no-client-name')
check_name = self.event['check'].get('name', 'error:no-check-name')
print('{}: {}/{}'.format(msg, client_name, check_name))
sys.exit(0)
def get_api_settings(self):
'''
Return a dict of API settings derived first from ENV['SENSU_API_URL']
if set, then Sensu config `api` scope if configured, and finally
falling back to to ipv4 localhost address on default API port.
return dict
'''
sensu_api_url = os.environ.get('SENSU_API_URL')
if sensu_api_url:
uri = urlparse(sensu_api_url)
api_settings = {
'host': '{0}://{1}'.format(uri.scheme, uri.hostname),
'port': uri.port,
'user': uri.username,
'password': uri.password
}
else:
api_settings = self.settings.get('api', {})
api_settings['host'] = api_settings.get(
'host', '127.0.0.1')
api_settings['port'] = api_settings.get(
'port', 4567)
return api_settings
# API requests
def stash_exists(self, path):
'''
Query Sensu API for stash data.
'''
return self.api_request('get', '/stash' + path).status_code == 200
def event_exists(self, client, check):
'''
Query Sensu API for event.
'''
return self.api_request(
'get',
'events/{}/{}'.format(client, check)
).status_code == 200
# Filters
def filter_disabled(self):
'''
Determine whether a check is disabled and shouldn't handle.
'''
if self.event['check']['alert'] is False:
self.bail('alert disabled')
def filter_silenced(self):
'''
Determine whether a check is silenced and shouldn't handle.
'''
stashes = [
('client', '/silence/{}'.format(self.event['client']['name'])),
('check', '/silence/{}/{}'.format(
self.event['client']['name'],
self.event['check']['name'])),
('check', '/silence/all/{}'.format(self.event['check']['name']))
]
for scope, path in stashes:
if self.stash_exists(path):
self.bail(scope + ' alerts silenced')
def filter_dependencies(self):
'''
Determine whether a check has dependencies.
'''
dependencies = self.event['check'].get('dependencies', None)
if dependencies is None or not isinstance(dependencies, list):
return
for dependency in self.event['check']['dependencies']:
if not str(dependency):
continue
dependency_split = tuple(dependency.split('/'))
# If there's a dependency on a check from another client, then use
# that client name, otherwise assume same client.
if len(dependency_split) == 2:
client, check = dependency_split
else:
client = self.event['client']['name']
check = dependency_split[0]
if self.event_exists(client, check):
self.bail('check dependency event exists')
def filter_repeated(self):
'''
Determine whether a check is repeating.
'''
defaults = {
'occurrences': 1,
'interval': 30,
'refresh': 1800
}
# Override defaults with anything defined in the settings
if isinstance(self.settings['sensu_plugin'], dict):
defaults.update(self.settings['sensu_plugin'])
occurrences = int(self.event['check'].get(
'occurrences', defaults['occurrences']))
interval = int(self.event['check'].get(
'interval', defaults['interval']))
refresh = int(self.event['check'].get(
'refresh', defaults['refresh']))
if self.event['occurrences'] < occurrences:
self.bail('not enough occurrences')
if (self.event['occurrences'] > occurrences and
self.event['action'] == 'create'):
return
number = int(refresh / interval)
if (number == 0 or
(self.event['occurrences'] - occurrences) % number == 0):
return
self.bail('only handling every ' + str(number) + ' occurrences')
|
sensu-plugins/sensu-plugin-python | sensu_plugin/handler.py | SensuHandler.event_exists | python | def event_exists(self, client, check):
'''
Query Sensu API for event.
'''
return self.api_request(
'get',
'events/{}/{}'.format(client, check)
).status_code == 200 | Query Sensu API for event. | train | https://github.com/sensu-plugins/sensu-plugin-python/blob/bd43a5ea4d191e5e63494c8679aab02ac072d9ed/sensu_plugin/handler.py#L199-L206 | [
"def api_request(self, method, path):\n '''\n Query Sensu api for information.\n '''\n if not hasattr(self, 'api_settings'):\n ValueError('api.json settings not found')\n\n if method.lower() == 'get':\n _request = requests.get\n elif method.lower() == 'post':\n _request = requests.post\n\n domain = self.api_settings['host']\n uri = '{}:{}/{}'.format(domain, self.api_settings['port'], path)\n if self.api_settings.get('user') and self.api_settings.get('password'):\n auth = (self.api_settings['user'], self.api_settings['password'])\n else:\n auth = ()\n req = _request(uri, auth=auth)\n return req\n"
] | class SensuHandler(object):
'''
Base class for Sensu Handlers.
'''
def __init__(self, autorun=True):
if autorun:
self.run()
def run(self):
'''
Set up the event object, global settings and command line
arguments.
'''
# Parse the stdin into a global event object
stdin = self.read_stdin()
self.event = self.read_event(stdin)
# Prepare global settings
self.settings = get_settings()
self.api_settings = self.get_api_settings()
# Prepare command line arguments and
self.parser = argparse.ArgumentParser()
# set up the 2.x to 1.x event mapping argument
self.parser.add_argument("--map-v2-event-into-v1",
action="store_true",
default=False,
dest="v2event")
if hasattr(self, 'setup'):
self.setup()
(self.options, self.remain) = self.parser.parse_known_args()
# map the event if required
if (self.options.v2event or
os.environ.get("SENSU_MAP_V2_EVENT_INTO_V1")):
self.event = map_v2_event_into_v1(self.event)
# Filter (deprecated) and handle
self.filter()
self.handle()
def read_stdin(self):
'''
Read data piped from stdin.
'''
try:
return sys.stdin.read()
except Exception:
raise ValueError('Nothing read from stdin')
def read_event(self, check_result):
'''
Convert the piped check result (json) into a global 'event' dict
'''
try:
event = json.loads(check_result)
event['occurrences'] = event.get('occurrences', 1)
event['check'] = event.get('check', {})
event['client'] = event.get('client', {})
return event
except Exception:
raise ValueError('error reading event: ' + check_result)
def handle(self):
'''
Method that should be overwritten to provide handler logic.
'''
print("ignoring event -- no handler defined.")
def filter(self):
'''
Filters exit the proccess if the event should not be handled.
Filtering events is deprecated and will be removed in a future release.
'''
if self.deprecated_filtering_enabled():
print('warning: event filtering in sensu-plugin is deprecated,' +
'see http://bit.ly/sensu-plugin')
self.filter_disabled()
self.filter_silenced()
self.filter_dependencies()
if self.deprecated_occurrence_filtering():
print('warning: occurrence filtering in sensu-plugin is' +
'deprecated, see http://bit.ly/sensu-plugin')
self.filter_repeated()
def deprecated_filtering_enabled(self):
'''
Evaluates whether the event should be processed by any of the
filter methods in this library. Defaults to true,
i.e. deprecated filters are run by default.
returns bool
'''
return self.event['check'].get('enable_deprecated_filtering', False)
def deprecated_occurrence_filtering(self):
'''
Evaluates whether the event should be processed by the
filter_repeated method. Defaults to true, i.e. filter_repeated
will filter events by default.
returns bool
'''
return self.event['check'].get(
'enable_deprecated_occurrence_filtering', False)
def bail(self, msg):
'''
Gracefully terminate with message
'''
client_name = self.event['client'].get('name', 'error:no-client-name')
check_name = self.event['check'].get('name', 'error:no-check-name')
print('{}: {}/{}'.format(msg, client_name, check_name))
sys.exit(0)
def get_api_settings(self):
'''
Return a dict of API settings derived first from ENV['SENSU_API_URL']
if set, then Sensu config `api` scope if configured, and finally
falling back to to ipv4 localhost address on default API port.
return dict
'''
sensu_api_url = os.environ.get('SENSU_API_URL')
if sensu_api_url:
uri = urlparse(sensu_api_url)
api_settings = {
'host': '{0}://{1}'.format(uri.scheme, uri.hostname),
'port': uri.port,
'user': uri.username,
'password': uri.password
}
else:
api_settings = self.settings.get('api', {})
api_settings['host'] = api_settings.get(
'host', '127.0.0.1')
api_settings['port'] = api_settings.get(
'port', 4567)
return api_settings
# API requests
def api_request(self, method, path):
'''
Query Sensu api for information.
'''
if not hasattr(self, 'api_settings'):
ValueError('api.json settings not found')
if method.lower() == 'get':
_request = requests.get
elif method.lower() == 'post':
_request = requests.post
domain = self.api_settings['host']
uri = '{}:{}/{}'.format(domain, self.api_settings['port'], path)
if self.api_settings.get('user') and self.api_settings.get('password'):
auth = (self.api_settings['user'], self.api_settings['password'])
else:
auth = ()
req = _request(uri, auth=auth)
return req
def stash_exists(self, path):
'''
Query Sensu API for stash data.
'''
return self.api_request('get', '/stash' + path).status_code == 200
# Filters
def filter_disabled(self):
'''
Determine whether a check is disabled and shouldn't handle.
'''
if self.event['check']['alert'] is False:
self.bail('alert disabled')
def filter_silenced(self):
'''
Determine whether a check is silenced and shouldn't handle.
'''
stashes = [
('client', '/silence/{}'.format(self.event['client']['name'])),
('check', '/silence/{}/{}'.format(
self.event['client']['name'],
self.event['check']['name'])),
('check', '/silence/all/{}'.format(self.event['check']['name']))
]
for scope, path in stashes:
if self.stash_exists(path):
self.bail(scope + ' alerts silenced')
def filter_dependencies(self):
'''
Determine whether a check has dependencies.
'''
dependencies = self.event['check'].get('dependencies', None)
if dependencies is None or not isinstance(dependencies, list):
return
for dependency in self.event['check']['dependencies']:
if not str(dependency):
continue
dependency_split = tuple(dependency.split('/'))
# If there's a dependency on a check from another client, then use
# that client name, otherwise assume same client.
if len(dependency_split) == 2:
client, check = dependency_split
else:
client = self.event['client']['name']
check = dependency_split[0]
if self.event_exists(client, check):
self.bail('check dependency event exists')
def filter_repeated(self):
'''
Determine whether a check is repeating.
'''
defaults = {
'occurrences': 1,
'interval': 30,
'refresh': 1800
}
# Override defaults with anything defined in the settings
if isinstance(self.settings['sensu_plugin'], dict):
defaults.update(self.settings['sensu_plugin'])
occurrences = int(self.event['check'].get(
'occurrences', defaults['occurrences']))
interval = int(self.event['check'].get(
'interval', defaults['interval']))
refresh = int(self.event['check'].get(
'refresh', defaults['refresh']))
if self.event['occurrences'] < occurrences:
self.bail('not enough occurrences')
if (self.event['occurrences'] > occurrences and
self.event['action'] == 'create'):
return
number = int(refresh / interval)
if (number == 0 or
(self.event['occurrences'] - occurrences) % number == 0):
return
self.bail('only handling every ' + str(number) + ' occurrences')
|
sensu-plugins/sensu-plugin-python | sensu_plugin/handler.py | SensuHandler.filter_silenced | python | def filter_silenced(self):
'''
Determine whether a check is silenced and shouldn't handle.
'''
stashes = [
('client', '/silence/{}'.format(self.event['client']['name'])),
('check', '/silence/{}/{}'.format(
self.event['client']['name'],
self.event['check']['name'])),
('check', '/silence/all/{}'.format(self.event['check']['name']))
]
for scope, path in stashes:
if self.stash_exists(path):
self.bail(scope + ' alerts silenced') | Determine whether a check is silenced and shouldn't handle. | train | https://github.com/sensu-plugins/sensu-plugin-python/blob/bd43a5ea4d191e5e63494c8679aab02ac072d9ed/sensu_plugin/handler.py#L216-L229 | [
"def stash_exists(self, path):\n '''\n Query Sensu API for stash data.\n '''\n return self.api_request('get', '/stash' + path).status_code == 200\n"
] | class SensuHandler(object):
'''
Base class for Sensu Handlers.
'''
def __init__(self, autorun=True):
if autorun:
self.run()
def run(self):
'''
Set up the event object, global settings and command line
arguments.
'''
# Parse the stdin into a global event object
stdin = self.read_stdin()
self.event = self.read_event(stdin)
# Prepare global settings
self.settings = get_settings()
self.api_settings = self.get_api_settings()
# Prepare command line arguments and
self.parser = argparse.ArgumentParser()
# set up the 2.x to 1.x event mapping argument
self.parser.add_argument("--map-v2-event-into-v1",
action="store_true",
default=False,
dest="v2event")
if hasattr(self, 'setup'):
self.setup()
(self.options, self.remain) = self.parser.parse_known_args()
# map the event if required
if (self.options.v2event or
os.environ.get("SENSU_MAP_V2_EVENT_INTO_V1")):
self.event = map_v2_event_into_v1(self.event)
# Filter (deprecated) and handle
self.filter()
self.handle()
def read_stdin(self):
'''
Read data piped from stdin.
'''
try:
return sys.stdin.read()
except Exception:
raise ValueError('Nothing read from stdin')
def read_event(self, check_result):
'''
Convert the piped check result (json) into a global 'event' dict
'''
try:
event = json.loads(check_result)
event['occurrences'] = event.get('occurrences', 1)
event['check'] = event.get('check', {})
event['client'] = event.get('client', {})
return event
except Exception:
raise ValueError('error reading event: ' + check_result)
def handle(self):
'''
Method that should be overwritten to provide handler logic.
'''
print("ignoring event -- no handler defined.")
def filter(self):
'''
Filters exit the proccess if the event should not be handled.
Filtering events is deprecated and will be removed in a future release.
'''
if self.deprecated_filtering_enabled():
print('warning: event filtering in sensu-plugin is deprecated,' +
'see http://bit.ly/sensu-plugin')
self.filter_disabled()
self.filter_silenced()
self.filter_dependencies()
if self.deprecated_occurrence_filtering():
print('warning: occurrence filtering in sensu-plugin is' +
'deprecated, see http://bit.ly/sensu-plugin')
self.filter_repeated()
def deprecated_filtering_enabled(self):
'''
Evaluates whether the event should be processed by any of the
filter methods in this library. Defaults to true,
i.e. deprecated filters are run by default.
returns bool
'''
return self.event['check'].get('enable_deprecated_filtering', False)
def deprecated_occurrence_filtering(self):
'''
Evaluates whether the event should be processed by the
filter_repeated method. Defaults to true, i.e. filter_repeated
will filter events by default.
returns bool
'''
return self.event['check'].get(
'enable_deprecated_occurrence_filtering', False)
def bail(self, msg):
'''
Gracefully terminate with message
'''
client_name = self.event['client'].get('name', 'error:no-client-name')
check_name = self.event['check'].get('name', 'error:no-check-name')
print('{}: {}/{}'.format(msg, client_name, check_name))
sys.exit(0)
def get_api_settings(self):
'''
Return a dict of API settings derived first from ENV['SENSU_API_URL']
if set, then Sensu config `api` scope if configured, and finally
falling back to to ipv4 localhost address on default API port.
return dict
'''
sensu_api_url = os.environ.get('SENSU_API_URL')
if sensu_api_url:
uri = urlparse(sensu_api_url)
api_settings = {
'host': '{0}://{1}'.format(uri.scheme, uri.hostname),
'port': uri.port,
'user': uri.username,
'password': uri.password
}
else:
api_settings = self.settings.get('api', {})
api_settings['host'] = api_settings.get(
'host', '127.0.0.1')
api_settings['port'] = api_settings.get(
'port', 4567)
return api_settings
# API requests
def api_request(self, method, path):
'''
Query Sensu api for information.
'''
if not hasattr(self, 'api_settings'):
ValueError('api.json settings not found')
if method.lower() == 'get':
_request = requests.get
elif method.lower() == 'post':
_request = requests.post
domain = self.api_settings['host']
uri = '{}:{}/{}'.format(domain, self.api_settings['port'], path)
if self.api_settings.get('user') and self.api_settings.get('password'):
auth = (self.api_settings['user'], self.api_settings['password'])
else:
auth = ()
req = _request(uri, auth=auth)
return req
def stash_exists(self, path):
'''
Query Sensu API for stash data.
'''
return self.api_request('get', '/stash' + path).status_code == 200
def event_exists(self, client, check):
'''
Query Sensu API for event.
'''
return self.api_request(
'get',
'events/{}/{}'.format(client, check)
).status_code == 200
# Filters
def filter_disabled(self):
'''
Determine whether a check is disabled and shouldn't handle.
'''
if self.event['check']['alert'] is False:
self.bail('alert disabled')
def filter_dependencies(self):
'''
Determine whether a check has dependencies.
'''
dependencies = self.event['check'].get('dependencies', None)
if dependencies is None or not isinstance(dependencies, list):
return
for dependency in self.event['check']['dependencies']:
if not str(dependency):
continue
dependency_split = tuple(dependency.split('/'))
# If there's a dependency on a check from another client, then use
# that client name, otherwise assume same client.
if len(dependency_split) == 2:
client, check = dependency_split
else:
client = self.event['client']['name']
check = dependency_split[0]
if self.event_exists(client, check):
self.bail('check dependency event exists')
def filter_repeated(self):
'''
Determine whether a check is repeating.
'''
defaults = {
'occurrences': 1,
'interval': 30,
'refresh': 1800
}
# Override defaults with anything defined in the settings
if isinstance(self.settings['sensu_plugin'], dict):
defaults.update(self.settings['sensu_plugin'])
occurrences = int(self.event['check'].get(
'occurrences', defaults['occurrences']))
interval = int(self.event['check'].get(
'interval', defaults['interval']))
refresh = int(self.event['check'].get(
'refresh', defaults['refresh']))
if self.event['occurrences'] < occurrences:
self.bail('not enough occurrences')
if (self.event['occurrences'] > occurrences and
self.event['action'] == 'create'):
return
number = int(refresh / interval)
if (number == 0 or
(self.event['occurrences'] - occurrences) % number == 0):
return
self.bail('only handling every ' + str(number) + ' occurrences')
|
sensu-plugins/sensu-plugin-python | sensu_plugin/handler.py | SensuHandler.filter_dependencies | python | def filter_dependencies(self):
'''
Determine whether a check has dependencies.
'''
dependencies = self.event['check'].get('dependencies', None)
if dependencies is None or not isinstance(dependencies, list):
return
for dependency in self.event['check']['dependencies']:
if not str(dependency):
continue
dependency_split = tuple(dependency.split('/'))
# If there's a dependency on a check from another client, then use
# that client name, otherwise assume same client.
if len(dependency_split) == 2:
client, check = dependency_split
else:
client = self.event['client']['name']
check = dependency_split[0]
if self.event_exists(client, check):
self.bail('check dependency event exists') | Determine whether a check has dependencies. | train | https://github.com/sensu-plugins/sensu-plugin-python/blob/bd43a5ea4d191e5e63494c8679aab02ac072d9ed/sensu_plugin/handler.py#L231-L250 | [
"def event_exists(self, client, check):\n '''\n Query Sensu API for event.\n '''\n return self.api_request(\n 'get',\n 'events/{}/{}'.format(client, check)\n ).status_code == 200\n"
] | class SensuHandler(object):
'''
Base class for Sensu Handlers.
'''
def __init__(self, autorun=True):
if autorun:
self.run()
def run(self):
'''
Set up the event object, global settings and command line
arguments.
'''
# Parse the stdin into a global event object
stdin = self.read_stdin()
self.event = self.read_event(stdin)
# Prepare global settings
self.settings = get_settings()
self.api_settings = self.get_api_settings()
# Prepare command line arguments and
self.parser = argparse.ArgumentParser()
# set up the 2.x to 1.x event mapping argument
self.parser.add_argument("--map-v2-event-into-v1",
action="store_true",
default=False,
dest="v2event")
if hasattr(self, 'setup'):
self.setup()
(self.options, self.remain) = self.parser.parse_known_args()
# map the event if required
if (self.options.v2event or
os.environ.get("SENSU_MAP_V2_EVENT_INTO_V1")):
self.event = map_v2_event_into_v1(self.event)
# Filter (deprecated) and handle
self.filter()
self.handle()
def read_stdin(self):
'''
Read data piped from stdin.
'''
try:
return sys.stdin.read()
except Exception:
raise ValueError('Nothing read from stdin')
def read_event(self, check_result):
'''
Convert the piped check result (json) into a global 'event' dict
'''
try:
event = json.loads(check_result)
event['occurrences'] = event.get('occurrences', 1)
event['check'] = event.get('check', {})
event['client'] = event.get('client', {})
return event
except Exception:
raise ValueError('error reading event: ' + check_result)
def handle(self):
'''
Method that should be overwritten to provide handler logic.
'''
print("ignoring event -- no handler defined.")
def filter(self):
'''
Filters exit the proccess if the event should not be handled.
Filtering events is deprecated and will be removed in a future release.
'''
if self.deprecated_filtering_enabled():
print('warning: event filtering in sensu-plugin is deprecated,' +
'see http://bit.ly/sensu-plugin')
self.filter_disabled()
self.filter_silenced()
self.filter_dependencies()
if self.deprecated_occurrence_filtering():
print('warning: occurrence filtering in sensu-plugin is' +
'deprecated, see http://bit.ly/sensu-plugin')
self.filter_repeated()
def deprecated_filtering_enabled(self):
'''
Evaluates whether the event should be processed by any of the
filter methods in this library. Defaults to true,
i.e. deprecated filters are run by default.
returns bool
'''
return self.event['check'].get('enable_deprecated_filtering', False)
def deprecated_occurrence_filtering(self):
'''
Evaluates whether the event should be processed by the
filter_repeated method. Defaults to true, i.e. filter_repeated
will filter events by default.
returns bool
'''
return self.event['check'].get(
'enable_deprecated_occurrence_filtering', False)
def bail(self, msg):
'''
Gracefully terminate with message
'''
client_name = self.event['client'].get('name', 'error:no-client-name')
check_name = self.event['check'].get('name', 'error:no-check-name')
print('{}: {}/{}'.format(msg, client_name, check_name))
sys.exit(0)
def get_api_settings(self):
'''
Return a dict of API settings derived first from ENV['SENSU_API_URL']
if set, then Sensu config `api` scope if configured, and finally
falling back to to ipv4 localhost address on default API port.
return dict
'''
sensu_api_url = os.environ.get('SENSU_API_URL')
if sensu_api_url:
uri = urlparse(sensu_api_url)
api_settings = {
'host': '{0}://{1}'.format(uri.scheme, uri.hostname),
'port': uri.port,
'user': uri.username,
'password': uri.password
}
else:
api_settings = self.settings.get('api', {})
api_settings['host'] = api_settings.get(
'host', '127.0.0.1')
api_settings['port'] = api_settings.get(
'port', 4567)
return api_settings
# API requests
def api_request(self, method, path):
'''
Query Sensu api for information.
'''
if not hasattr(self, 'api_settings'):
ValueError('api.json settings not found')
if method.lower() == 'get':
_request = requests.get
elif method.lower() == 'post':
_request = requests.post
domain = self.api_settings['host']
uri = '{}:{}/{}'.format(domain, self.api_settings['port'], path)
if self.api_settings.get('user') and self.api_settings.get('password'):
auth = (self.api_settings['user'], self.api_settings['password'])
else:
auth = ()
req = _request(uri, auth=auth)
return req
def stash_exists(self, path):
'''
Query Sensu API for stash data.
'''
return self.api_request('get', '/stash' + path).status_code == 200
def event_exists(self, client, check):
'''
Query Sensu API for event.
'''
return self.api_request(
'get',
'events/{}/{}'.format(client, check)
).status_code == 200
# Filters
def filter_disabled(self):
'''
Determine whether a check is disabled and shouldn't handle.
'''
if self.event['check']['alert'] is False:
self.bail('alert disabled')
def filter_silenced(self):
'''
Determine whether a check is silenced and shouldn't handle.
'''
stashes = [
('client', '/silence/{}'.format(self.event['client']['name'])),
('check', '/silence/{}/{}'.format(
self.event['client']['name'],
self.event['check']['name'])),
('check', '/silence/all/{}'.format(self.event['check']['name']))
]
for scope, path in stashes:
if self.stash_exists(path):
self.bail(scope + ' alerts silenced')
def filter_repeated(self):
'''
Determine whether a check is repeating.
'''
defaults = {
'occurrences': 1,
'interval': 30,
'refresh': 1800
}
# Override defaults with anything defined in the settings
if isinstance(self.settings['sensu_plugin'], dict):
defaults.update(self.settings['sensu_plugin'])
occurrences = int(self.event['check'].get(
'occurrences', defaults['occurrences']))
interval = int(self.event['check'].get(
'interval', defaults['interval']))
refresh = int(self.event['check'].get(
'refresh', defaults['refresh']))
if self.event['occurrences'] < occurrences:
self.bail('not enough occurrences')
if (self.event['occurrences'] > occurrences and
self.event['action'] == 'create'):
return
number = int(refresh / interval)
if (number == 0 or
(self.event['occurrences'] - occurrences) % number == 0):
return
self.bail('only handling every ' + str(number) + ' occurrences')
|
sensu-plugins/sensu-plugin-python | sensu_plugin/handler.py | SensuHandler.filter_repeated | python | def filter_repeated(self):
'''
Determine whether a check is repeating.
'''
defaults = {
'occurrences': 1,
'interval': 30,
'refresh': 1800
}
# Override defaults with anything defined in the settings
if isinstance(self.settings['sensu_plugin'], dict):
defaults.update(self.settings['sensu_plugin'])
occurrences = int(self.event['check'].get(
'occurrences', defaults['occurrences']))
interval = int(self.event['check'].get(
'interval', defaults['interval']))
refresh = int(self.event['check'].get(
'refresh', defaults['refresh']))
if self.event['occurrences'] < occurrences:
self.bail('not enough occurrences')
if (self.event['occurrences'] > occurrences and
self.event['action'] == 'create'):
return
number = int(refresh / interval)
if (number == 0 or
(self.event['occurrences'] - occurrences) % number == 0):
return
self.bail('only handling every ' + str(number) + ' occurrences') | Determine whether a check is repeating. | train | https://github.com/sensu-plugins/sensu-plugin-python/blob/bd43a5ea4d191e5e63494c8679aab02ac072d9ed/sensu_plugin/handler.py#L252-L285 | [
"def bail(self, msg):\n '''\n Gracefully terminate with message\n '''\n client_name = self.event['client'].get('name', 'error:no-client-name')\n check_name = self.event['check'].get('name', 'error:no-check-name')\n print('{}: {}/{}'.format(msg, client_name, check_name))\n sys.exit(0)\n"
] | class SensuHandler(object):
'''
Base class for Sensu Handlers.
'''
def __init__(self, autorun=True):
if autorun:
self.run()
def run(self):
'''
Set up the event object, global settings and command line
arguments.
'''
# Parse the stdin into a global event object
stdin = self.read_stdin()
self.event = self.read_event(stdin)
# Prepare global settings
self.settings = get_settings()
self.api_settings = self.get_api_settings()
# Prepare command line arguments and
self.parser = argparse.ArgumentParser()
# set up the 2.x to 1.x event mapping argument
self.parser.add_argument("--map-v2-event-into-v1",
action="store_true",
default=False,
dest="v2event")
if hasattr(self, 'setup'):
self.setup()
(self.options, self.remain) = self.parser.parse_known_args()
# map the event if required
if (self.options.v2event or
os.environ.get("SENSU_MAP_V2_EVENT_INTO_V1")):
self.event = map_v2_event_into_v1(self.event)
# Filter (deprecated) and handle
self.filter()
self.handle()
def read_stdin(self):
'''
Read data piped from stdin.
'''
try:
return sys.stdin.read()
except Exception:
raise ValueError('Nothing read from stdin')
def read_event(self, check_result):
'''
Convert the piped check result (json) into a global 'event' dict
'''
try:
event = json.loads(check_result)
event['occurrences'] = event.get('occurrences', 1)
event['check'] = event.get('check', {})
event['client'] = event.get('client', {})
return event
except Exception:
raise ValueError('error reading event: ' + check_result)
def handle(self):
'''
Method that should be overwritten to provide handler logic.
'''
print("ignoring event -- no handler defined.")
def filter(self):
'''
Filters exit the process if the event should not be handled.
Filtering events is deprecated and will be removed in a future release.
'''
if self.deprecated_filtering_enabled():
print('warning: event filtering in sensu-plugin is deprecated,' +
'see http://bit.ly/sensu-plugin')
self.filter_disabled()
self.filter_silenced()
self.filter_dependencies()
if self.deprecated_occurrence_filtering():
print('warning: occurrence filtering in sensu-plugin is' +
'deprecated, see http://bit.ly/sensu-plugin')
self.filter_repeated()
def deprecated_filtering_enabled(self):
'''
Evaluates whether the event should be processed by any of the
filter methods in this library. Defaults to true,
i.e. deprecated filters are run by default.
returns bool
'''
return self.event['check'].get('enable_deprecated_filtering', False)
def deprecated_occurrence_filtering(self):
'''
Evaluates whether the event should be processed by the
filter_repeated method. Defaults to true, i.e. filter_repeated
will filter events by default.
returns bool
'''
return self.event['check'].get(
'enable_deprecated_occurrence_filtering', False)
def bail(self, msg):
'''
Gracefully terminate with message
'''
client_name = self.event['client'].get('name', 'error:no-client-name')
check_name = self.event['check'].get('name', 'error:no-check-name')
print('{}: {}/{}'.format(msg, client_name, check_name))
sys.exit(0)
def get_api_settings(self):
'''
Return a dict of API settings derived first from ENV['SENSU_API_URL']
if set, then Sensu config `api` scope if configured, and finally
falling back to to ipv4 localhost address on default API port.
return dict
'''
sensu_api_url = os.environ.get('SENSU_API_URL')
if sensu_api_url:
uri = urlparse(sensu_api_url)
api_settings = {
'host': '{0}://{1}'.format(uri.scheme, uri.hostname),
'port': uri.port,
'user': uri.username,
'password': uri.password
}
else:
api_settings = self.settings.get('api', {})
api_settings['host'] = api_settings.get(
'host', '127.0.0.1')
api_settings['port'] = api_settings.get(
'port', 4567)
return api_settings
# API requests
def api_request(self, method, path):
'''
Query Sensu api for information.
'''
if not hasattr(self, 'api_settings'):
ValueError('api.json settings not found')
if method.lower() == 'get':
_request = requests.get
elif method.lower() == 'post':
_request = requests.post
domain = self.api_settings['host']
uri = '{}:{}/{}'.format(domain, self.api_settings['port'], path)
if self.api_settings.get('user') and self.api_settings.get('password'):
auth = (self.api_settings['user'], self.api_settings['password'])
else:
auth = ()
req = _request(uri, auth=auth)
return req
def stash_exists(self, path):
'''
Query Sensu API for stash data.
'''
return self.api_request('get', '/stash' + path).status_code == 200
def event_exists(self, client, check):
'''
Query Sensu API for event.
'''
return self.api_request(
'get',
'events/{}/{}'.format(client, check)
).status_code == 200
# Filters
def filter_disabled(self):
'''
Determine whether a check is disabled and shouldn't handle.
'''
if self.event['check']['alert'] is False:
self.bail('alert disabled')
def filter_silenced(self):
'''
Determine whether a check is silenced and shouldn't handle.
'''
stashes = [
('client', '/silence/{}'.format(self.event['client']['name'])),
('check', '/silence/{}/{}'.format(
self.event['client']['name'],
self.event['check']['name'])),
('check', '/silence/all/{}'.format(self.event['check']['name']))
]
for scope, path in stashes:
if self.stash_exists(path):
self.bail(scope + ' alerts silenced')
def filter_dependencies(self):
'''
Determine whether a check has dependencies.
'''
dependencies = self.event['check'].get('dependencies', None)
if dependencies is None or not isinstance(dependencies, list):
return
for dependency in self.event['check']['dependencies']:
if not str(dependency):
continue
dependency_split = tuple(dependency.split('/'))
# If there's a dependency on a check from another client, then use
# that client name, otherwise assume same client.
if len(dependency_split) == 2:
client, check = dependency_split
else:
client = self.event['client']['name']
check = dependency_split[0]
if self.event_exists(client, check):
self.bail('check dependency event exists')
|
sensu-plugins/sensu-plugin-python | sensu_plugin/utils.py | config_files | python | def config_files():
'''
Get list of currently used config files.

Resolution order: the file named by SENSU_LOADED_TEMPFILE (whose
contents are a colon-separated path list), then the
SENSU_CONFIG_FILES env var (colon-separated), then the legacy
Sensu 1.x locations /etc/sensu/config.json plus /etc/sensu/conf.d/*.json.
'''
sensu_loaded_tempfile = os.environ.get('SENSU_LOADED_TEMPFILE')
sensu_config_files = os.environ.get('SENSU_CONFIG_FILES')
sensu_v1_config = '/etc/sensu/config.json'
sensu_v1_confd = '/etc/sensu/conf.d'
# The tempfile, when present and readable, holds a colon-separated list.
if sensu_loaded_tempfile and os.path.isfile(sensu_loaded_tempfile):
with open(sensu_loaded_tempfile, 'r') as tempfile:
contents = tempfile.read()
return contents.split(':')
elif sensu_config_files:
return sensu_config_files.split(':')
else:
# Fall back to the default v1 config file plus every *.json in conf.d.
files = []
filenames = []
if os.path.isfile(sensu_v1_config):
files = [sensu_v1_config]
if os.path.isdir(sensu_v1_confd):
filenames = [f for f in os.listdir(sensu_v1_confd)
if os.path.splitext(f)[1] == '.json']
for filename in filenames:
files.append('{}/{}'.format(sensu_v1_confd, filename))
return files | Get list of currently used config files. | train | https://github.com/sensu-plugins/sensu-plugin-python/blob/bd43a5ea4d191e5e63494c8679aab02ac072d9ed/sensu_plugin/utils.py#L10-L34
Utilities for loading config files, etc.
'''
import os
import json
from copy import deepcopy
def get_settings():
'''
Get all currently loaded settings.
'''
settings = {}
for config_file in config_files():
config_contents = load_config(config_file)
if config_contents is not None:
settings = deep_merge(settings, config_contents)
return settings
def load_config(filename):
'''
Read contents of config file.
'''
try:
with open(filename, 'r') as config_file:
return json.loads(config_file.read())
except IOError:
pass
def deep_merge(dict_one, dict_two):
'''
Deep merge two dicts.
'''
merged = dict_one.copy()
for key, value in dict_two.items():
# value is equivalent to dict_two[key]
if (key in dict_one and
isinstance(dict_one[key], dict) and
isinstance(value, dict)):
merged[key] = deep_merge(dict_one[key], value)
elif (key in dict_one and
isinstance(dict_one[key], list) and
isinstance(value, list)):
merged[key] = list(set(dict_one[key] + value))
else:
merged[key] = value
return merged
def map_v2_event_into_v1(event):
'''
Helper method to convert Sensu 2.x event into Sensu 1.x event.
'''
# return the event if it has already been mapped
if "v2_event_mapped_into_v1" in event:
return event
# Trigger mapping code if entity exists and client does not
if not bool(event.get('client')) and "entity" in event:
event['client'] = event['entity']
# Fill in missing client attributes
if "name" not in event['client']:
event['client']['name'] = event['entity']['id']
if "subscribers" not in event['client']:
event['client']['subscribers'] = event['entity']['subscriptions']
# Fill in renamed check attributes expected in 1.4 event
if "subscribers" not in event['check']:
event['check']['subscribers'] = event['check']['subscriptions']
if "source" not in event['check']:
event['check']['source'] = event['check']['proxy_entity_id']
# Mimic 1.4 event action based on 2.0 event state
# action used in logs and fluentd plugins handlers
action_state_mapping = {'flapping': 'flapping', 'passing': 'resolve',
'failing': 'create'}
if "state" in event['check']:
state = event['check']['state']
else:
state = "unknown::2.0_event"
if "action" not in event and state.lower() in action_state_mapping:
event['action'] = action_state_mapping[state.lower()]
else:
event['action'] = state
# Mimic 1.4 event history based on 2.0 event history
if "history" in event['check']:
# save the original history
event['check']['history_v2'] = deepcopy(event['check']['history'])
legacy_history = []
for history in event['check']['history']:
if isinstance(history['status'], int):
legacy_history.append(str(history['status']))
else:
legacy_history.append("3")
event['check']['history'] = legacy_history
# Setting flag indicating this function has already been called
event['v2_event_mapped_into_v1'] = True
# return the updated event
return event
|
sensu-plugins/sensu-plugin-python | sensu_plugin/utils.py | get_settings | python | def get_settings():
'''
Get all currently loaded settings.

Deep-merges the parsed JSON of every file reported by config_files();
later files win on conflicting keys (see deep_merge).
'''
settings = {}
for config_file in config_files():
config_contents = load_config(config_file)
# load_config() yields None for unreadable files; skip those.
if config_contents is not None:
settings = deep_merge(settings, config_contents)
return settings | Get all currently loaded settings. | train | https://github.com/sensu-plugins/sensu-plugin-python/blob/bd43a5ea4d191e5e63494c8679aab02ac072d9ed/sensu_plugin/utils.py#L37-L46
"def config_files():\n '''\n Get list of currently used config files.\n '''\n sensu_loaded_tempfile = os.environ.get('SENSU_LOADED_TEMPFILE')\n sensu_config_files = os.environ.get('SENSU_CONFIG_FILES')\n sensu_v1_config = '/etc/sensu/config.json'\n sensu_v1_confd = '/etc/sensu/conf.d'\n if sensu_loaded_tempfile and os.path.isfile(sensu_loaded_tempfile):\n with open(sensu_loaded_tempfile, 'r') as tempfile:\n contents = tempfile.read()\n return contents.split(':')\n elif sensu_config_files:\n return sensu_config_files.split(':')\n else:\n files = []\n filenames = []\n if os.path.isfile(sensu_v1_config):\n files = [sensu_v1_config]\n if os.path.isdir(sensu_v1_confd):\n filenames = [f for f in os.listdir(sensu_v1_confd)\n if os.path.splitext(f)[1] == '.json']\n for filename in filenames:\n files.append('{}/{}'.format(sensu_v1_confd, filename))\n return files\n",
"def load_config(filename):\n '''\n Read contents of config file.\n '''\n try:\n with open(filename, 'r') as config_file:\n return json.loads(config_file.read())\n except IOError:\n pass\n",
"def deep_merge(dict_one, dict_two):\n '''\n Deep merge two dicts.\n '''\n merged = dict_one.copy()\n for key, value in dict_two.items():\n # value is equivalent to dict_two[key]\n if (key in dict_one and\n isinstance(dict_one[key], dict) and\n isinstance(value, dict)):\n merged[key] = deep_merge(dict_one[key], value)\n elif (key in dict_one and\n isinstance(dict_one[key], list) and\n isinstance(value, list)):\n merged[key] = list(set(dict_one[key] + value))\n else:\n merged[key] = value\n return merged\n"
] | '''
Utilities for loading config files, etc.
'''
import os
import json
from copy import deepcopy
def config_files():
'''
Get list of currently used config files.
'''
sensu_loaded_tempfile = os.environ.get('SENSU_LOADED_TEMPFILE')
sensu_config_files = os.environ.get('SENSU_CONFIG_FILES')
sensu_v1_config = '/etc/sensu/config.json'
sensu_v1_confd = '/etc/sensu/conf.d'
if sensu_loaded_tempfile and os.path.isfile(sensu_loaded_tempfile):
with open(sensu_loaded_tempfile, 'r') as tempfile:
contents = tempfile.read()
return contents.split(':')
elif sensu_config_files:
return sensu_config_files.split(':')
else:
files = []
filenames = []
if os.path.isfile(sensu_v1_config):
files = [sensu_v1_config]
if os.path.isdir(sensu_v1_confd):
filenames = [f for f in os.listdir(sensu_v1_confd)
if os.path.splitext(f)[1] == '.json']
for filename in filenames:
files.append('{}/{}'.format(sensu_v1_confd, filename))
return files
def load_config(filename):
'''
Read contents of config file.
'''
try:
with open(filename, 'r') as config_file:
return json.loads(config_file.read())
except IOError:
pass
def deep_merge(dict_one, dict_two):
'''
Deep merge two dicts.
'''
merged = dict_one.copy()
for key, value in dict_two.items():
# value is equivalent to dict_two[key]
if (key in dict_one and
isinstance(dict_one[key], dict) and
isinstance(value, dict)):
merged[key] = deep_merge(dict_one[key], value)
elif (key in dict_one and
isinstance(dict_one[key], list) and
isinstance(value, list)):
merged[key] = list(set(dict_one[key] + value))
else:
merged[key] = value
return merged
def map_v2_event_into_v1(event):
'''
Helper method to convert Sensu 2.x event into Sensu 1.x event.
'''
# return the event if it has already been mapped
if "v2_event_mapped_into_v1" in event:
return event
# Trigger mapping code if entity exists and client does not
if not bool(event.get('client')) and "entity" in event:
event['client'] = event['entity']
# Fill in missing client attributes
if "name" not in event['client']:
event['client']['name'] = event['entity']['id']
if "subscribers" not in event['client']:
event['client']['subscribers'] = event['entity']['subscriptions']
# Fill in renamed check attributes expected in 1.4 event
if "subscribers" not in event['check']:
event['check']['subscribers'] = event['check']['subscriptions']
if "source" not in event['check']:
event['check']['source'] = event['check']['proxy_entity_id']
# Mimic 1.4 event action based on 2.0 event state
# action used in logs and fluentd plugins handlers
action_state_mapping = {'flapping': 'flapping', 'passing': 'resolve',
'failing': 'create'}
if "state" in event['check']:
state = event['check']['state']
else:
state = "unknown::2.0_event"
if "action" not in event and state.lower() in action_state_mapping:
event['action'] = action_state_mapping[state.lower()]
else:
event['action'] = state
# Mimic 1.4 event history based on 2.0 event history
if "history" in event['check']:
# save the original history
event['check']['history_v2'] = deepcopy(event['check']['history'])
legacy_history = []
for history in event['check']['history']:
if isinstance(history['status'], int):
legacy_history.append(str(history['status']))
else:
legacy_history.append("3")
event['check']['history'] = legacy_history
# Setting flag indicating this function has already been called
event['v2_event_mapped_into_v1'] = True
# return the updated event
return event
|
sensu-plugins/sensu-plugin-python | sensu_plugin/utils.py | load_config | python | def load_config(filename):
'''
Read contents of config file.

Returns the parsed JSON, or None (implicitly) when the file cannot
be opened; JSON parse errors are NOT caught and still propagate.
'''
try:
with open(filename, 'r') as config_file:
return json.loads(config_file.read())
except IOError:
# Missing/unreadable file -> fall through to implicit None;
# callers (get_settings) filter these out.
pass | Read contents of config file. | train | https://github.com/sensu-plugins/sensu-plugin-python/blob/bd43a5ea4d191e5e63494c8679aab02ac072d9ed/sensu_plugin/utils.py#L49-L57
Utilities for loading config files, etc.
'''
import os
import json
from copy import deepcopy
def config_files():
'''
Get list of currently used config files.
'''
sensu_loaded_tempfile = os.environ.get('SENSU_LOADED_TEMPFILE')
sensu_config_files = os.environ.get('SENSU_CONFIG_FILES')
sensu_v1_config = '/etc/sensu/config.json'
sensu_v1_confd = '/etc/sensu/conf.d'
if sensu_loaded_tempfile and os.path.isfile(sensu_loaded_tempfile):
with open(sensu_loaded_tempfile, 'r') as tempfile:
contents = tempfile.read()
return contents.split(':')
elif sensu_config_files:
return sensu_config_files.split(':')
else:
files = []
filenames = []
if os.path.isfile(sensu_v1_config):
files = [sensu_v1_config]
if os.path.isdir(sensu_v1_confd):
filenames = [f for f in os.listdir(sensu_v1_confd)
if os.path.splitext(f)[1] == '.json']
for filename in filenames:
files.append('{}/{}'.format(sensu_v1_confd, filename))
return files
def get_settings():
'''
Get all currently loaded settings.
'''
settings = {}
for config_file in config_files():
config_contents = load_config(config_file)
if config_contents is not None:
settings = deep_merge(settings, config_contents)
return settings
def deep_merge(dict_one, dict_two):
'''
Deep merge two dicts.
'''
merged = dict_one.copy()
for key, value in dict_two.items():
# value is equivalent to dict_two[key]
if (key in dict_one and
isinstance(dict_one[key], dict) and
isinstance(value, dict)):
merged[key] = deep_merge(dict_one[key], value)
elif (key in dict_one and
isinstance(dict_one[key], list) and
isinstance(value, list)):
merged[key] = list(set(dict_one[key] + value))
else:
merged[key] = value
return merged
def map_v2_event_into_v1(event):
'''
Helper method to convert Sensu 2.x event into Sensu 1.x event.
'''
# return the event if it has already been mapped
if "v2_event_mapped_into_v1" in event:
return event
# Trigger mapping code if entity exists and client does not
if not bool(event.get('client')) and "entity" in event:
event['client'] = event['entity']
# Fill in missing client attributes
if "name" not in event['client']:
event['client']['name'] = event['entity']['id']
if "subscribers" not in event['client']:
event['client']['subscribers'] = event['entity']['subscriptions']
# Fill in renamed check attributes expected in 1.4 event
if "subscribers" not in event['check']:
event['check']['subscribers'] = event['check']['subscriptions']
if "source" not in event['check']:
event['check']['source'] = event['check']['proxy_entity_id']
# Mimic 1.4 event action based on 2.0 event state
# action used in logs and fluentd plugins handlers
action_state_mapping = {'flapping': 'flapping', 'passing': 'resolve',
'failing': 'create'}
if "state" in event['check']:
state = event['check']['state']
else:
state = "unknown::2.0_event"
if "action" not in event and state.lower() in action_state_mapping:
event['action'] = action_state_mapping[state.lower()]
else:
event['action'] = state
# Mimic 1.4 event history based on 2.0 event history
if "history" in event['check']:
# save the original history
event['check']['history_v2'] = deepcopy(event['check']['history'])
legacy_history = []
for history in event['check']['history']:
if isinstance(history['status'], int):
legacy_history.append(str(history['status']))
else:
legacy_history.append("3")
event['check']['history'] = legacy_history
# Setting flag indicating this function has already been called
event['v2_event_mapped_into_v1'] = True
# return the updated event
return event
|
sensu-plugins/sensu-plugin-python | sensu_plugin/utils.py | deep_merge | python | def deep_merge(dict_one, dict_two):
'''
Deep merge two dicts.

dict_two wins for conflicting scalar values; nested dicts are merged
recursively and lists are combined via set-union (ordering is lost
and list items must be hashable). Neither input dict is mutated.
'''
merged = dict_one.copy()
for key, value in dict_two.items():
# value is equivalent to dict_two[key]
if (key in dict_one and
isinstance(dict_one[key], dict) and
isinstance(value, dict)):
# Both sides are dicts: merge recursively.
merged[key] = deep_merge(dict_one[key], value)
elif (key in dict_one and
isinstance(dict_one[key], list) and
isinstance(value, list)):
# Both sides are lists: deduplicated union (loses ordering).
merged[key] = list(set(dict_one[key] + value))
else:
# Scalar, type mismatch, or key only in dict_two: dict_two wins.
merged[key] = value
return merged | Deep merge two dicts. | train | https://github.com/sensu-plugins/sensu-plugin-python/blob/bd43a5ea4d191e5e63494c8679aab02ac072d9ed/sensu_plugin/utils.py#L60-L77
"def deep_merge(dict_one, dict_two):\n '''\n Deep merge two dicts.\n '''\n merged = dict_one.copy()\n for key, value in dict_two.items():\n # value is equivalent to dict_two[key]\n if (key in dict_one and\n isinstance(dict_one[key], dict) and\n isinstance(value, dict)):\n merged[key] = deep_merge(dict_one[key], value)\n elif (key in dict_one and\n isinstance(dict_one[key], list) and\n isinstance(value, list)):\n merged[key] = list(set(dict_one[key] + value))\n else:\n merged[key] = value\n return merged\n"
] | '''
Utilities for loading config files, etc.
'''
import os
import json
from copy import deepcopy
def config_files():
'''
Get list of currently used config files.
'''
sensu_loaded_tempfile = os.environ.get('SENSU_LOADED_TEMPFILE')
sensu_config_files = os.environ.get('SENSU_CONFIG_FILES')
sensu_v1_config = '/etc/sensu/config.json'
sensu_v1_confd = '/etc/sensu/conf.d'
if sensu_loaded_tempfile and os.path.isfile(sensu_loaded_tempfile):
with open(sensu_loaded_tempfile, 'r') as tempfile:
contents = tempfile.read()
return contents.split(':')
elif sensu_config_files:
return sensu_config_files.split(':')
else:
files = []
filenames = []
if os.path.isfile(sensu_v1_config):
files = [sensu_v1_config]
if os.path.isdir(sensu_v1_confd):
filenames = [f for f in os.listdir(sensu_v1_confd)
if os.path.splitext(f)[1] == '.json']
for filename in filenames:
files.append('{}/{}'.format(sensu_v1_confd, filename))
return files
def get_settings():
'''
Get all currently loaded settings.
'''
settings = {}
for config_file in config_files():
config_contents = load_config(config_file)
if config_contents is not None:
settings = deep_merge(settings, config_contents)
return settings
def load_config(filename):
'''
Read contents of config file.
'''
try:
with open(filename, 'r') as config_file:
return json.loads(config_file.read())
except IOError:
pass
def map_v2_event_into_v1(event):
'''
Helper method to convert Sensu 2.x event into Sensu 1.x event.
'''
# return the event if it has already been mapped
if "v2_event_mapped_into_v1" in event:
return event
# Trigger mapping code if entity exists and client does not
if not bool(event.get('client')) and "entity" in event:
event['client'] = event['entity']
# Fill in missing client attributes
if "name" not in event['client']:
event['client']['name'] = event['entity']['id']
if "subscribers" not in event['client']:
event['client']['subscribers'] = event['entity']['subscriptions']
# Fill in renamed check attributes expected in 1.4 event
if "subscribers" not in event['check']:
event['check']['subscribers'] = event['check']['subscriptions']
if "source" not in event['check']:
event['check']['source'] = event['check']['proxy_entity_id']
# Mimic 1.4 event action based on 2.0 event state
# action used in logs and fluentd plugins handlers
action_state_mapping = {'flapping': 'flapping', 'passing': 'resolve',
'failing': 'create'}
if "state" in event['check']:
state = event['check']['state']
else:
state = "unknown::2.0_event"
if "action" not in event and state.lower() in action_state_mapping:
event['action'] = action_state_mapping[state.lower()]
else:
event['action'] = state
# Mimic 1.4 event history based on 2.0 event history
if "history" in event['check']:
# save the original history
event['check']['history_v2'] = deepcopy(event['check']['history'])
legacy_history = []
for history in event['check']['history']:
if isinstance(history['status'], int):
legacy_history.append(str(history['status']))
else:
legacy_history.append("3")
event['check']['history'] = legacy_history
# Setting flag indicating this function has already been called
event['v2_event_mapped_into_v1'] = True
# return the updated event
return event
|
sensu-plugins/sensu-plugin-python | sensu_plugin/utils.py | map_v2_event_into_v1 | python | def map_v2_event_into_v1(event):
'''
Helper method to convert Sensu 2.x event into Sensu 1.x event.

Mutates and returns the given event dict: copies the 2.x 'entity'
into a 1.x-style 'client', restores renamed check attributes,
synthesizes the 1.x 'action' from the 2.x 'state', and flattens the
structured history into a list of status strings. Idempotent via the
'v2_event_mapped_into_v1' marker.
'''
# return the event if it has already been mapped
if "v2_event_mapped_into_v1" in event:
return event
# Trigger mapping code if entity exists and client does not
if not bool(event.get('client')) and "entity" in event:
event['client'] = event['entity']
# Fill in missing client attributes
if "name" not in event['client']:
event['client']['name'] = event['entity']['id']
if "subscribers" not in event['client']:
event['client']['subscribers'] = event['entity']['subscriptions']
# Fill in renamed check attributes expected in 1.4 event
if "subscribers" not in event['check']:
event['check']['subscribers'] = event['check']['subscriptions']
if "source" not in event['check']:
event['check']['source'] = event['check']['proxy_entity_id']
# Mimic 1.4 event action based on 2.0 event state
# action used in logs and fluentd plugins handlers
action_state_mapping = {'flapping': 'flapping', 'passing': 'resolve',
'failing': 'create'}
if "state" in event['check']:
state = event['check']['state']
else:
state = "unknown::2.0_event"
# Unrecognized states (or a pre-set action) pass the raw state through.
if "action" not in event and state.lower() in action_state_mapping:
event['action'] = action_state_mapping[state.lower()]
else:
event['action'] = state
# Mimic 1.4 event history based on 2.0 event history
if "history" in event['check']:
# save the original history
event['check']['history_v2'] = deepcopy(event['check']['history'])
legacy_history = []
for history in event['check']['history']:
# Non-integer statuses fall back to "3".
if isinstance(history['status'], int):
legacy_history.append(str(history['status']))
else:
legacy_history.append("3")
event['check']['history'] = legacy_history
# Setting flag indicating this function has already been called
event['v2_event_mapped_into_v1'] = True
# return the updated event
return event | Helper method to convert Sensu 2.x event into Sensu 1.x event. | train | https://github.com/sensu-plugins/sensu-plugin-python/blob/bd43a5ea4d191e5e63494c8679aab02ac072d9ed/sensu_plugin/utils.py#L80-L139
Utilities for loading config files, etc.
'''
import os
import json
from copy import deepcopy
def config_files():
'''
Get list of currently used config files.
'''
sensu_loaded_tempfile = os.environ.get('SENSU_LOADED_TEMPFILE')
sensu_config_files = os.environ.get('SENSU_CONFIG_FILES')
sensu_v1_config = '/etc/sensu/config.json'
sensu_v1_confd = '/etc/sensu/conf.d'
if sensu_loaded_tempfile and os.path.isfile(sensu_loaded_tempfile):
with open(sensu_loaded_tempfile, 'r') as tempfile:
contents = tempfile.read()
return contents.split(':')
elif sensu_config_files:
return sensu_config_files.split(':')
else:
files = []
filenames = []
if os.path.isfile(sensu_v1_config):
files = [sensu_v1_config]
if os.path.isdir(sensu_v1_confd):
filenames = [f for f in os.listdir(sensu_v1_confd)
if os.path.splitext(f)[1] == '.json']
for filename in filenames:
files.append('{}/{}'.format(sensu_v1_confd, filename))
return files
def get_settings():
'''
Get all currently loaded settings.
'''
settings = {}
for config_file in config_files():
config_contents = load_config(config_file)
if config_contents is not None:
settings = deep_merge(settings, config_contents)
return settings
def load_config(filename):
'''
Read contents of config file.
'''
try:
with open(filename, 'r') as config_file:
return json.loads(config_file.read())
except IOError:
pass
def deep_merge(dict_one, dict_two):
'''
Deep merge two dicts.
'''
merged = dict_one.copy()
for key, value in dict_two.items():
# value is equivalent to dict_two[key]
if (key in dict_one and
isinstance(dict_one[key], dict) and
isinstance(value, dict)):
merged[key] = deep_merge(dict_one[key], value)
elif (key in dict_one and
isinstance(dict_one[key], list) and
isinstance(value, list)):
merged[key] = list(set(dict_one[key] + value))
else:
merged[key] = value
return merged
|
sensu-plugins/sensu-plugin-python | sensu_plugin/check.py | SensuPluginCheck.check_name | python | def check_name(self, name=None):
'''
Checks the plugin name and sets it accordingly.
Uses name if specified, class name if not set.
'''
# An explicit name wins and is remembered in plugin_info for later calls.
if name:
self.plugin_info['check_name'] = name
if self.plugin_info['check_name'] is not None:
return self.plugin_info['check_name']
# Nothing set anywhere: fall back to the subclass name.
return self.__class__.__name__ | Checks the plugin name and sets it accordingly.
Uses name if specified, class name if not set. | train | https://github.com/sensu-plugins/sensu-plugin-python/blob/bd43a5ea4d191e5e63494c8679aab02ac072d9ed/sensu_plugin/check.py#L11-L22
'''
Class that inherits from SensuPlugin.
'''
def message(self, *m):
self.plugin_info['message'] = m
def output(self, args):
msg = ''
if args is None or (args[0] is None and len(args) == 1):
args = self.plugin_info['message']
if args is not None and not (args[0] is None and len(args) == 1):
msg = ": {0}".format(' '.join(str(message) for message in args))
print("{0} {1}{2}".format(self.check_name(),
self.plugin_info['status'], msg))
|
aio-libs/aiomcache | aiomcache/pool.py | MemcachePool.clear | python | def clear(self):
"""Clear pool connections.

Drains the idle-connection queue, closing each connection;
connections currently checked out (self._in_use) are not touched.
"""
while not self._pool.empty():
conn = yield from self._pool.get()
self._do_close(conn) | Clear pool connections. | train | https://github.com/aio-libs/aiomcache/blob/75d44b201aea91bc2856b10940922d5ebfbfcd7b/aiomcache/pool.py#L23-L27
def __init__(self, host, port, *, minsize, maxsize, loop=None):
loop = loop if loop is not None else asyncio.get_event_loop()
self._host = host
self._port = port
self._minsize = minsize
self._maxsize = maxsize
self._loop = loop
self._pool = asyncio.Queue(loop=loop)
self._in_use = set()
@asyncio.coroutine
def _do_close(self, conn):
conn.reader.feed_eof()
conn.writer.close()
@asyncio.coroutine
def acquire(self):
"""Acquire connection from the pool, or spawn new one
if pool maxsize permits.
:return: ``tuple`` (reader, writer)
"""
while self.size() == 0 or self.size() < self._minsize:
_conn = yield from self._create_new_conn()
if _conn is None:
break
self._pool.put_nowait(_conn)
conn = None
while not conn:
_conn = yield from self._pool.get()
if _conn.reader.at_eof() or _conn.reader.exception():
self._do_close(_conn)
conn = yield from self._create_new_conn()
else:
conn = _conn
self._in_use.add(conn)
return conn
def release(self, conn):
"""Releases connection back to the pool.
:param conn: ``namedtuple`` (reader, writer)
"""
self._in_use.remove(conn)
if conn.reader.at_eof() or conn.reader.exception():
self._do_close(conn)
else:
self._pool.put_nowait(conn)
@asyncio.coroutine
def _create_new_conn(self):
if self.size() < self._maxsize:
reader, writer = yield from asyncio.open_connection(
self._host, self._port, loop=self._loop)
if self.size() < self._maxsize:
return _connection(reader, writer)
else:
reader.feed_eof()
writer.close()
return None
else:
return None
def size(self):
return self._pool.qsize() + len(self._in_use)
|
aio-libs/aiomcache | aiomcache/pool.py | MemcachePool.acquire | python | def acquire(self):
"""Acquire connection from the pool, or spawn new one
if pool maxsize permits.

:return: ``tuple`` (reader, writer)
"""
# Top up the idle queue until it holds at least _minsize connections;
# _create_new_conn() returns None once _maxsize is reached.
while self.size() == 0 or self.size() < self._minsize:
_conn = yield from self._create_new_conn()
if _conn is None:
break
self._pool.put_nowait(_conn)
conn = None
while not conn:
_conn = yield from self._pool.get()
# Discard dead connections (EOF or transport error) and replace them.
if _conn.reader.at_eof() or _conn.reader.exception():
self._do_close(_conn)
conn = yield from self._create_new_conn()
else:
conn = _conn
# Track the connection as checked out until release() is called.
self._in_use.add(conn)
return conn | Acquire connection from the pool, or spawn new one
if pool maxsize permits.
:return: ``tuple`` (reader, writer) | train | https://github.com/aio-libs/aiomcache/blob/75d44b201aea91bc2856b10940922d5ebfbfcd7b/aiomcache/pool.py#L34-L56
"def size(self):\n return self._pool.qsize() + len(self._in_use)\n"
] | class MemcachePool:
def __init__(self, host, port, *, minsize, maxsize, loop=None):
loop = loop if loop is not None else asyncio.get_event_loop()
self._host = host
self._port = port
self._minsize = minsize
self._maxsize = maxsize
self._loop = loop
self._pool = asyncio.Queue(loop=loop)
self._in_use = set()
@asyncio.coroutine
def clear(self):
"""Clear pool connections."""
while not self._pool.empty():
conn = yield from self._pool.get()
self._do_close(conn)
def _do_close(self, conn):
conn.reader.feed_eof()
conn.writer.close()
@asyncio.coroutine
def release(self, conn):
"""Releases connection back to the pool.
:param conn: ``namedtuple`` (reader, writer)
"""
self._in_use.remove(conn)
if conn.reader.at_eof() or conn.reader.exception():
self._do_close(conn)
else:
self._pool.put_nowait(conn)
@asyncio.coroutine
def _create_new_conn(self):
if self.size() < self._maxsize:
reader, writer = yield from asyncio.open_connection(
self._host, self._port, loop=self._loop)
if self.size() < self._maxsize:
return _connection(reader, writer)
else:
reader.feed_eof()
writer.close()
return None
else:
return None
def size(self):
return self._pool.qsize() + len(self._in_use)
|
aio-libs/aiomcache | aiomcache/pool.py | MemcachePool.release | python | def release(self, conn):
self._in_use.remove(conn)
if conn.reader.at_eof() or conn.reader.exception():
self._do_close(conn)
else:
self._pool.put_nowait(conn) | Releases connection back to the pool.
:param conn: ``namedtuple`` (reader, writer) | train | https://github.com/aio-libs/aiomcache/blob/75d44b201aea91bc2856b10940922d5ebfbfcd7b/aiomcache/pool.py#L58-L67 | [
"def _do_close(self, conn):\n conn.reader.feed_eof()\n conn.writer.close()\n"
] | class MemcachePool:
def __init__(self, host, port, *, minsize, maxsize, loop=None):
loop = loop if loop is not None else asyncio.get_event_loop()
self._host = host
self._port = port
self._minsize = minsize
self._maxsize = maxsize
self._loop = loop
self._pool = asyncio.Queue(loop=loop)
self._in_use = set()
@asyncio.coroutine
def clear(self):
"""Clear pool connections."""
while not self._pool.empty():
conn = yield from self._pool.get()
self._do_close(conn)
def _do_close(self, conn):
conn.reader.feed_eof()
conn.writer.close()
@asyncio.coroutine
def acquire(self):
"""Acquire connection from the pool, or spawn new one
if pool maxsize permits.
:return: ``tuple`` (reader, writer)
"""
while self.size() == 0 or self.size() < self._minsize:
_conn = yield from self._create_new_conn()
if _conn is None:
break
self._pool.put_nowait(_conn)
conn = None
while not conn:
_conn = yield from self._pool.get()
if _conn.reader.at_eof() or _conn.reader.exception():
self._do_close(_conn)
conn = yield from self._create_new_conn()
else:
conn = _conn
self._in_use.add(conn)
return conn
@asyncio.coroutine
def _create_new_conn(self):
if self.size() < self._maxsize:
reader, writer = yield from asyncio.open_connection(
self._host, self._port, loop=self._loop)
if self.size() < self._maxsize:
return _connection(reader, writer)
else:
reader.feed_eof()
writer.close()
return None
else:
return None
def size(self):
return self._pool.qsize() + len(self._in_use)
|
aio-libs/aiomcache | aiomcache/client.py | Client.delete | python | def delete(self, conn, key):
assert self._validate_key(key)
command = b'delete ' + key + b'\r\n'
response = yield from self._execute_simple_command(conn, command)
if response not in (const.DELETED, const.NOT_FOUND):
raise ClientException('Memcached delete failed', response)
return response == const.DELETED | Deletes a key/value pair from the server.
:param key: is the key to delete.
:return: True if case values was deleted or False to indicate
that the item with this key was not found. | train | https://github.com/aio-libs/aiomcache/blob/75d44b201aea91bc2856b10940922d5ebfbfcd7b/aiomcache/client.py#L125-L139 | [
"def _validate_key(self, key):\n if not isinstance(key, bytes): # avoid bugs subtle and otherwise\n raise ValidationException('key must be bytes', key)\n\n m = self._valid_key_re.match(key)\n if m:\n # in python re, $ matches either end of line or right before\n # \\n at end of line. We can't allow latter case, so\n # making sure length matches is simplest way to detect\n if len(m.group(0)) != len(key):\n raise ValidationException('trailing newline', key)\n else:\n raise ValidationException('invalid key', key)\n\n return key\n"
] | class Client(object):
def __init__(self, host, port=11211, *,
pool_size=2, pool_minsize=None, loop=None):
if not pool_minsize:
pool_minsize = pool_size
self._pool = MemcachePool(
host, port, minsize=pool_minsize, maxsize=pool_size, loop=loop)
# key supports ascii sans space and control chars
# \x21 is !, right after space, and \x7e is -, right before DEL
# also 1 <= len <= 250 as per the spec
_valid_key_re = re.compile(b'^[\x21-\x7e]{1,250}$')
def _validate_key(self, key):
if not isinstance(key, bytes): # avoid bugs subtle and otherwise
raise ValidationException('key must be bytes', key)
m = self._valid_key_re.match(key)
if m:
# in python re, $ matches either end of line or right before
# \n at end of line. We can't allow latter case, so
# making sure length matches is simplest way to detect
if len(m.group(0)) != len(key):
raise ValidationException('trailing newline', key)
else:
raise ValidationException('invalid key', key)
return key
@asyncio.coroutine
def _execute_simple_command(self, conn, raw_command):
response, line = bytearray(), b''
conn.writer.write(raw_command)
yield from conn.writer.drain()
while not line.endswith(b'\r\n'):
line = yield from conn.reader.readline()
response.extend(line)
return response[:-2]
@asyncio.coroutine
def close(self):
"""Closes the sockets if its open."""
yield from self._pool.clear()
@asyncio.coroutine
def _multi_get(self, conn, *keys, with_cas=True):
# req - get <key> [<key> ...]\r\n
# resp - VALUE <key> <flags> <bytes> [<cas unique>]\r\n
# <data block>\r\n (if exists)
# [...]
# END\r\n
if not keys:
return {}, {}
[self._validate_key(key) for key in keys]
if len(set(keys)) != len(keys):
raise ClientException('duplicate keys passed to multi_get')
cmd = b'gets ' if with_cas else b'get '
conn.writer.write(cmd + b' '.join(keys) + b'\r\n')
received = {}
cas_tokens = {}
line = yield from conn.reader.readline()
while line != b'END\r\n':
terms = line.split()
if terms[0] == b'VALUE': # exists
key = terms[1]
flags = int(terms[2])
length = int(terms[3])
if flags != 0:
raise ClientException('received non zero flags')
val = (yield from conn.reader.readexactly(length+2))[:-2]
if key in received:
raise ClientException('duplicate results from server')
received[key] = val
cas_tokens[key] = int(terms[4]) if with_cas else None
else:
raise ClientException('get failed', line)
line = yield from conn.reader.readline()
if len(received) > len(keys):
raise ClientException('received too many responses')
return received, cas_tokens
@acquire
@acquire
def get(self, conn, key, default=None):
"""Gets a single value from the server.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, is the data for this specified key.
"""
values, _ = yield from self._multi_get(conn, key)
return values.get(key, default)
@acquire
def gets(self, conn, key, default=None):
"""Gets a single value from the server together with the cas token.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, ``bytes tuple with the value and the cas
"""
values, cas_tokens = yield from self._multi_get(
conn, key, with_cas=True)
return values.get(key, default), cas_tokens.get(key)
@acquire
def multi_get(self, conn, *keys):
"""Takes a list of keys and returns a list of values.
:param keys: ``list`` keys for the item being fetched.
:return: ``list`` of values for the specified keys.
:raises:``ValidationException``, ``ClientException``,
and socket errors
"""
values, _ = yield from self._multi_get(conn, *keys)
return tuple(values.get(key) for key in keys)
@acquire
def stats(self, conn, args=None):
"""Runs a stats command on the server."""
# req - stats [additional args]\r\n
# resp - STAT <name> <value>\r\n (one per result)
# END\r\n
if args is None:
args = b''
conn.writer.write(b''.join((b'stats ', args, b'\r\n')))
result = {}
resp = yield from conn.reader.readline()
while resp != b'END\r\n':
terms = resp.split()
if len(terms) == 2 and terms[0] == b'STAT':
result[terms[1]] = None
elif len(terms) == 3 and terms[0] == b'STAT':
result[terms[1]] = terms[2]
elif len(terms) >= 3 and terms[0] == b'STAT':
result[terms[1]] = b' '.join(terms[2:])
else:
raise ClientException('stats failed', resp)
resp = yield from conn.reader.readline()
return result
@asyncio.coroutine
def _storage_command(self, conn, command, key, value,
flags=0, exptime=0, cas=None):
# req - set <key> <flags> <exptime> <bytes> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# req - set <key> <flags> <exptime> <bytes> <cas> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# typically, if val is > 1024**2 bytes server returns:
# SERVER_ERROR object too large for cache\r\n
# however custom-compiled memcached can have different limit
# so, we'll let the server decide what's too much
assert self._validate_key(key)
if not isinstance(exptime, int):
raise ValidationException('exptime not int', exptime)
elif exptime < 0:
raise ValidationException('exptime negative', exptime)
args = [str(a).encode('utf-8') for a in (flags, exptime, len(value))]
_cmd = b' '.join([command, key] + args)
if cas:
_cmd += b' ' + str(cas).encode('utf-8')
cmd = _cmd + b'\r\n' + value + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (
const.STORED, const.NOT_STORED, const.EXISTS, const.NOT_FOUND):
raise ClientException('stats {} failed'.format(command), resp)
return resp == const.STORED
@acquire
def set(self, conn, key, value, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'set', key, value, flags, exptime)
return resp
@acquire
def cas(self, conn, key, value, cas_token, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
only if value hasn't change from first retrieval
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:param cas_token: ``int``, unique cas token retrieve from previous
``gets``
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'cas', key, value, flags, exptime, cas=cas_token)
return resp
@acquire
def add(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *doesn't* already
hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'add', key, value, flags, exptime))
@acquire
def replace(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *does*
already hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'replace', key, value, flags, exptime))
@acquire
def append(self, conn, key, value, exptime=0):
"""Add data to an existing key after existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'append', key, value, flags, exptime))
@acquire
def prepend(self, conn, key, value, exptime=0):
"""Add data to an existing key before existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'prepend', key, value, flags, exptime))
@asyncio.coroutine
def _incr_decr(self, conn, command, key, delta):
delta_byte = str(delta).encode('utf-8')
cmd = b' '.join([command, key, delta_byte]) + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if not resp.isdigit() or resp == const.NOT_FOUND:
raise ClientException(
'Memcached {} command failed'.format(str(command)), resp)
return int(resp) if resp.isdigit() else None
@acquire
def incr(self, conn, key, increment=1):
"""Command is used to change data for some item in-place,
incrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param increment: ``int``, is the amount by which the client
wants to increase the item.
:return: ``int``, new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'incr', key, increment)
return resp
@acquire
def decr(self, conn, key, decrement=1):
"""Command is used to change data for some item in-place,
decrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param decrement: ``int``, is the amount by which the client
wants to decrease the item.
:return: ``int`` new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'decr', key, decrement)
return resp
@acquire
def touch(self, conn, key, exptime):
"""The command is used to update the expiration time of
an existing item without fetching it.
:param key: ``bytes``, is the key to update expiration time
:param exptime: ``int``, is expiration time. This replaces the existing
expiration time.
:return: ``bool``, True in case of success.
"""
assert self._validate_key(key)
_cmd = b' '.join([b'touch', key, str(exptime).encode('utf-8')])
cmd = _cmd + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (const.TOUCHED, const.NOT_FOUND):
raise ClientException('Memcached touch failed', resp)
return resp == const.TOUCHED
@acquire
def version(self, conn):
"""Current version of the server.
:return: ``bytes``, memcached version for current the server.
"""
command = b'version\r\n'
response = yield from self._execute_simple_command(
conn, command)
if not response.startswith(const.VERSION):
raise ClientException('Memcached version failed', response)
version, number = response.split()
return number
@acquire
def flush_all(self, conn):
"""Its effect is to invalidate all existing items immediately"""
command = b'flush_all\r\n'
response = yield from self._execute_simple_command(
conn, command)
if const.OK != response:
raise ClientException('Memcached flush_all failed', response)
|
aio-libs/aiomcache | aiomcache/client.py | Client.get | python | def get(self, conn, key, default=None):
values, _ = yield from self._multi_get(conn, key)
return values.get(key, default) | Gets a single value from the server.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, is the data for this specified key. | train | https://github.com/aio-libs/aiomcache/blob/75d44b201aea91bc2856b10940922d5ebfbfcd7b/aiomcache/client.py#L142-L150 | null | class Client(object):
def __init__(self, host, port=11211, *,
pool_size=2, pool_minsize=None, loop=None):
if not pool_minsize:
pool_minsize = pool_size
self._pool = MemcachePool(
host, port, minsize=pool_minsize, maxsize=pool_size, loop=loop)
# key supports ascii sans space and control chars
# \x21 is !, right after space, and \x7e is -, right before DEL
# also 1 <= len <= 250 as per the spec
_valid_key_re = re.compile(b'^[\x21-\x7e]{1,250}$')
def _validate_key(self, key):
if not isinstance(key, bytes): # avoid bugs subtle and otherwise
raise ValidationException('key must be bytes', key)
m = self._valid_key_re.match(key)
if m:
# in python re, $ matches either end of line or right before
# \n at end of line. We can't allow latter case, so
# making sure length matches is simplest way to detect
if len(m.group(0)) != len(key):
raise ValidationException('trailing newline', key)
else:
raise ValidationException('invalid key', key)
return key
@asyncio.coroutine
def _execute_simple_command(self, conn, raw_command):
response, line = bytearray(), b''
conn.writer.write(raw_command)
yield from conn.writer.drain()
while not line.endswith(b'\r\n'):
line = yield from conn.reader.readline()
response.extend(line)
return response[:-2]
@asyncio.coroutine
def close(self):
"""Closes the sockets if its open."""
yield from self._pool.clear()
@asyncio.coroutine
def _multi_get(self, conn, *keys, with_cas=True):
# req - get <key> [<key> ...]\r\n
# resp - VALUE <key> <flags> <bytes> [<cas unique>]\r\n
# <data block>\r\n (if exists)
# [...]
# END\r\n
if not keys:
return {}, {}
[self._validate_key(key) for key in keys]
if len(set(keys)) != len(keys):
raise ClientException('duplicate keys passed to multi_get')
cmd = b'gets ' if with_cas else b'get '
conn.writer.write(cmd + b' '.join(keys) + b'\r\n')
received = {}
cas_tokens = {}
line = yield from conn.reader.readline()
while line != b'END\r\n':
terms = line.split()
if terms[0] == b'VALUE': # exists
key = terms[1]
flags = int(terms[2])
length = int(terms[3])
if flags != 0:
raise ClientException('received non zero flags')
val = (yield from conn.reader.readexactly(length+2))[:-2]
if key in received:
raise ClientException('duplicate results from server')
received[key] = val
cas_tokens[key] = int(terms[4]) if with_cas else None
else:
raise ClientException('get failed', line)
line = yield from conn.reader.readline()
if len(received) > len(keys):
raise ClientException('received too many responses')
return received, cas_tokens
@acquire
def delete(self, conn, key):
"""Deletes a key/value pair from the server.
:param key: is the key to delete.
:return: True if case values was deleted or False to indicate
that the item with this key was not found.
"""
assert self._validate_key(key)
command = b'delete ' + key + b'\r\n'
response = yield from self._execute_simple_command(conn, command)
if response not in (const.DELETED, const.NOT_FOUND):
raise ClientException('Memcached delete failed', response)
return response == const.DELETED
@acquire
@acquire
def gets(self, conn, key, default=None):
"""Gets a single value from the server together with the cas token.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, ``bytes tuple with the value and the cas
"""
values, cas_tokens = yield from self._multi_get(
conn, key, with_cas=True)
return values.get(key, default), cas_tokens.get(key)
@acquire
def multi_get(self, conn, *keys):
"""Takes a list of keys and returns a list of values.
:param keys: ``list`` keys for the item being fetched.
:return: ``list`` of values for the specified keys.
:raises:``ValidationException``, ``ClientException``,
and socket errors
"""
values, _ = yield from self._multi_get(conn, *keys)
return tuple(values.get(key) for key in keys)
@acquire
def stats(self, conn, args=None):
"""Runs a stats command on the server."""
# req - stats [additional args]\r\n
# resp - STAT <name> <value>\r\n (one per result)
# END\r\n
if args is None:
args = b''
conn.writer.write(b''.join((b'stats ', args, b'\r\n')))
result = {}
resp = yield from conn.reader.readline()
while resp != b'END\r\n':
terms = resp.split()
if len(terms) == 2 and terms[0] == b'STAT':
result[terms[1]] = None
elif len(terms) == 3 and terms[0] == b'STAT':
result[terms[1]] = terms[2]
elif len(terms) >= 3 and terms[0] == b'STAT':
result[terms[1]] = b' '.join(terms[2:])
else:
raise ClientException('stats failed', resp)
resp = yield from conn.reader.readline()
return result
@asyncio.coroutine
def _storage_command(self, conn, command, key, value,
flags=0, exptime=0, cas=None):
# req - set <key> <flags> <exptime> <bytes> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# req - set <key> <flags> <exptime> <bytes> <cas> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# typically, if val is > 1024**2 bytes server returns:
# SERVER_ERROR object too large for cache\r\n
# however custom-compiled memcached can have different limit
# so, we'll let the server decide what's too much
assert self._validate_key(key)
if not isinstance(exptime, int):
raise ValidationException('exptime not int', exptime)
elif exptime < 0:
raise ValidationException('exptime negative', exptime)
args = [str(a).encode('utf-8') for a in (flags, exptime, len(value))]
_cmd = b' '.join([command, key] + args)
if cas:
_cmd += b' ' + str(cas).encode('utf-8')
cmd = _cmd + b'\r\n' + value + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (
const.STORED, const.NOT_STORED, const.EXISTS, const.NOT_FOUND):
raise ClientException('stats {} failed'.format(command), resp)
return resp == const.STORED
@acquire
def set(self, conn, key, value, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'set', key, value, flags, exptime)
return resp
@acquire
def cas(self, conn, key, value, cas_token, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
only if value hasn't change from first retrieval
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:param cas_token: ``int``, unique cas token retrieve from previous
``gets``
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'cas', key, value, flags, exptime, cas=cas_token)
return resp
@acquire
def add(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *doesn't* already
hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'add', key, value, flags, exptime))
@acquire
def replace(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *does*
already hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'replace', key, value, flags, exptime))
@acquire
def append(self, conn, key, value, exptime=0):
"""Add data to an existing key after existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'append', key, value, flags, exptime))
@acquire
def prepend(self, conn, key, value, exptime=0):
"""Add data to an existing key before existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'prepend', key, value, flags, exptime))
@asyncio.coroutine
def _incr_decr(self, conn, command, key, delta):
delta_byte = str(delta).encode('utf-8')
cmd = b' '.join([command, key, delta_byte]) + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if not resp.isdigit() or resp == const.NOT_FOUND:
raise ClientException(
'Memcached {} command failed'.format(str(command)), resp)
return int(resp) if resp.isdigit() else None
@acquire
def incr(self, conn, key, increment=1):
"""Command is used to change data for some item in-place,
incrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param increment: ``int``, is the amount by which the client
wants to increase the item.
:return: ``int``, new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'incr', key, increment)
return resp
@acquire
def decr(self, conn, key, decrement=1):
"""Command is used to change data for some item in-place,
decrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param decrement: ``int``, is the amount by which the client
wants to decrease the item.
:return: ``int`` new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'decr', key, decrement)
return resp
@acquire
def touch(self, conn, key, exptime):
"""The command is used to update the expiration time of
an existing item without fetching it.
:param key: ``bytes``, is the key to update expiration time
:param exptime: ``int``, is expiration time. This replaces the existing
expiration time.
:return: ``bool``, True in case of success.
"""
assert self._validate_key(key)
_cmd = b' '.join([b'touch', key, str(exptime).encode('utf-8')])
cmd = _cmd + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (const.TOUCHED, const.NOT_FOUND):
raise ClientException('Memcached touch failed', resp)
return resp == const.TOUCHED
@acquire
def version(self, conn):
"""Current version of the server.
:return: ``bytes``, memcached version for current the server.
"""
command = b'version\r\n'
response = yield from self._execute_simple_command(
conn, command)
if not response.startswith(const.VERSION):
raise ClientException('Memcached version failed', response)
version, number = response.split()
return number
@acquire
def flush_all(self, conn):
"""Its effect is to invalidate all existing items immediately"""
command = b'flush_all\r\n'
response = yield from self._execute_simple_command(
conn, command)
if const.OK != response:
raise ClientException('Memcached flush_all failed', response)
|
aio-libs/aiomcache | aiomcache/client.py | Client.gets | python | def gets(self, conn, key, default=None):
values, cas_tokens = yield from self._multi_get(
conn, key, with_cas=True)
return values.get(key, default), cas_tokens.get(key) | Gets a single value from the server together with the cas token.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, ``bytes tuple with the value and the cas | train | https://github.com/aio-libs/aiomcache/blob/75d44b201aea91bc2856b10940922d5ebfbfcd7b/aiomcache/client.py#L153-L162 | null | class Client(object):
def __init__(self, host, port=11211, *,
pool_size=2, pool_minsize=None, loop=None):
if not pool_minsize:
pool_minsize = pool_size
self._pool = MemcachePool(
host, port, minsize=pool_minsize, maxsize=pool_size, loop=loop)
# key supports ascii sans space and control chars
# \x21 is !, right after space, and \x7e is -, right before DEL
# also 1 <= len <= 250 as per the spec
_valid_key_re = re.compile(b'^[\x21-\x7e]{1,250}$')
def _validate_key(self, key):
if not isinstance(key, bytes): # avoid bugs subtle and otherwise
raise ValidationException('key must be bytes', key)
m = self._valid_key_re.match(key)
if m:
# in python re, $ matches either end of line or right before
# \n at end of line. We can't allow latter case, so
# making sure length matches is simplest way to detect
if len(m.group(0)) != len(key):
raise ValidationException('trailing newline', key)
else:
raise ValidationException('invalid key', key)
return key
@asyncio.coroutine
def _execute_simple_command(self, conn, raw_command):
response, line = bytearray(), b''
conn.writer.write(raw_command)
yield from conn.writer.drain()
while not line.endswith(b'\r\n'):
line = yield from conn.reader.readline()
response.extend(line)
return response[:-2]
@asyncio.coroutine
def close(self):
"""Closes the sockets if its open."""
yield from self._pool.clear()
@asyncio.coroutine
def _multi_get(self, conn, *keys, with_cas=True):
# req - get <key> [<key> ...]\r\n
# resp - VALUE <key> <flags> <bytes> [<cas unique>]\r\n
# <data block>\r\n (if exists)
# [...]
# END\r\n
if not keys:
return {}, {}
[self._validate_key(key) for key in keys]
if len(set(keys)) != len(keys):
raise ClientException('duplicate keys passed to multi_get')
cmd = b'gets ' if with_cas else b'get '
conn.writer.write(cmd + b' '.join(keys) + b'\r\n')
received = {}
cas_tokens = {}
line = yield from conn.reader.readline()
while line != b'END\r\n':
terms = line.split()
if terms[0] == b'VALUE': # exists
key = terms[1]
flags = int(terms[2])
length = int(terms[3])
if flags != 0:
raise ClientException('received non zero flags')
val = (yield from conn.reader.readexactly(length+2))[:-2]
if key in received:
raise ClientException('duplicate results from server')
received[key] = val
cas_tokens[key] = int(terms[4]) if with_cas else None
else:
raise ClientException('get failed', line)
line = yield from conn.reader.readline()
if len(received) > len(keys):
raise ClientException('received too many responses')
return received, cas_tokens
@acquire
def delete(self, conn, key):
"""Deletes a key/value pair from the server.
:param key: is the key to delete.
:return: True if case values was deleted or False to indicate
that the item with this key was not found.
"""
assert self._validate_key(key)
command = b'delete ' + key + b'\r\n'
response = yield from self._execute_simple_command(conn, command)
if response not in (const.DELETED, const.NOT_FOUND):
raise ClientException('Memcached delete failed', response)
return response == const.DELETED
@acquire
def get(self, conn, key, default=None):
"""Gets a single value from the server.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, is the data for this specified key.
"""
values, _ = yield from self._multi_get(conn, key)
return values.get(key, default)
@acquire
@acquire
def multi_get(self, conn, *keys):
"""Takes a list of keys and returns a list of values.
:param keys: ``list`` keys for the item being fetched.
:return: ``list`` of values for the specified keys.
:raises:``ValidationException``, ``ClientException``,
and socket errors
"""
values, _ = yield from self._multi_get(conn, *keys)
return tuple(values.get(key) for key in keys)
@acquire
def stats(self, conn, args=None):
"""Runs a stats command on the server."""
# req - stats [additional args]\r\n
# resp - STAT <name> <value>\r\n (one per result)
# END\r\n
if args is None:
args = b''
conn.writer.write(b''.join((b'stats ', args, b'\r\n')))
result = {}
resp = yield from conn.reader.readline()
while resp != b'END\r\n':
terms = resp.split()
if len(terms) == 2 and terms[0] == b'STAT':
result[terms[1]] = None
elif len(terms) == 3 and terms[0] == b'STAT':
result[terms[1]] = terms[2]
elif len(terms) >= 3 and terms[0] == b'STAT':
result[terms[1]] = b' '.join(terms[2:])
else:
raise ClientException('stats failed', resp)
resp = yield from conn.reader.readline()
return result
@asyncio.coroutine
def _storage_command(self, conn, command, key, value,
flags=0, exptime=0, cas=None):
# req - set <key> <flags> <exptime> <bytes> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# req - set <key> <flags> <exptime> <bytes> <cas> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# typically, if val is > 1024**2 bytes server returns:
# SERVER_ERROR object too large for cache\r\n
# however custom-compiled memcached can have different limit
# so, we'll let the server decide what's too much
assert self._validate_key(key)
if not isinstance(exptime, int):
raise ValidationException('exptime not int', exptime)
elif exptime < 0:
raise ValidationException('exptime negative', exptime)
args = [str(a).encode('utf-8') for a in (flags, exptime, len(value))]
_cmd = b' '.join([command, key] + args)
if cas:
_cmd += b' ' + str(cas).encode('utf-8')
cmd = _cmd + b'\r\n' + value + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (
const.STORED, const.NOT_STORED, const.EXISTS, const.NOT_FOUND):
raise ClientException('stats {} failed'.format(command), resp)
return resp == const.STORED
@acquire
def set(self, conn, key, value, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'set', key, value, flags, exptime)
return resp
@acquire
def cas(self, conn, key, value, cas_token, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
only if value hasn't change from first retrieval
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:param cas_token: ``int``, unique cas token retrieve from previous
``gets``
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'cas', key, value, flags, exptime, cas=cas_token)
return resp
@acquire
def add(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *doesn't* already
hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'add', key, value, flags, exptime))
@acquire
def replace(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *does*
already hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'replace', key, value, flags, exptime))
@acquire
def append(self, conn, key, value, exptime=0):
"""Add data to an existing key after existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'append', key, value, flags, exptime))
@acquire
def prepend(self, conn, key, value, exptime=0):
"""Add data to an existing key before existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'prepend', key, value, flags, exptime))
@asyncio.coroutine
def _incr_decr(self, conn, command, key, delta):
delta_byte = str(delta).encode('utf-8')
cmd = b' '.join([command, key, delta_byte]) + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if not resp.isdigit() or resp == const.NOT_FOUND:
raise ClientException(
'Memcached {} command failed'.format(str(command)), resp)
return int(resp) if resp.isdigit() else None
@acquire
def incr(self, conn, key, increment=1):
"""Command is used to change data for some item in-place,
incrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param increment: ``int``, is the amount by which the client
wants to increase the item.
:return: ``int``, new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'incr', key, increment)
return resp
@acquire
def decr(self, conn, key, decrement=1):
"""Command is used to change data for some item in-place,
decrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param decrement: ``int``, is the amount by which the client
wants to decrease the item.
:return: ``int`` new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'decr', key, decrement)
return resp
@acquire
def touch(self, conn, key, exptime):
"""The command is used to update the expiration time of
an existing item without fetching it.
:param key: ``bytes``, is the key to update expiration time
:param exptime: ``int``, is expiration time. This replaces the existing
expiration time.
:return: ``bool``, True in case of success.
"""
assert self._validate_key(key)
_cmd = b' '.join([b'touch', key, str(exptime).encode('utf-8')])
cmd = _cmd + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (const.TOUCHED, const.NOT_FOUND):
raise ClientException('Memcached touch failed', resp)
return resp == const.TOUCHED
@acquire
def version(self, conn):
"""Current version of the server.
:return: ``bytes``, memcached version for current the server.
"""
command = b'version\r\n'
response = yield from self._execute_simple_command(
conn, command)
if not response.startswith(const.VERSION):
raise ClientException('Memcached version failed', response)
version, number = response.split()
return number
@acquire
def flush_all(self, conn):
"""Its effect is to invalidate all existing items immediately"""
command = b'flush_all\r\n'
response = yield from self._execute_simple_command(
conn, command)
if const.OK != response:
raise ClientException('Memcached flush_all failed', response)
|
aio-libs/aiomcache | aiomcache/client.py | Client.multi_get | python | def multi_get(self, conn, *keys):
values, _ = yield from self._multi_get(conn, *keys)
return tuple(values.get(key) for key in keys) | Takes a list of keys and returns a list of values.
:param keys: ``list`` keys for the item being fetched.
:return: ``list`` of values for the specified keys.
:raises:``ValidationException``, ``ClientException``,
and socket errors | train | https://github.com/aio-libs/aiomcache/blob/75d44b201aea91bc2856b10940922d5ebfbfcd7b/aiomcache/client.py#L165-L174 | null | class Client(object):
    def __init__(self, host, port=11211, *,
                 pool_size=2, pool_minsize=None, loop=None):
        """Create a memcached client backed by a connection pool.

        :param host: server hostname or address.
        :param port: ``int``, server port (memcached default 11211).
        :param pool_size: ``int``, maximum number of pooled connections.
        :param pool_minsize: ``int`` or None, minimum pool size; any
            falsy value falls back to *pool_size* (a fixed-size pool).
        :param loop: optional asyncio event loop.
        """
        if not pool_minsize:
            pool_minsize = pool_size
        self._pool = MemcachePool(
            host, port, minsize=pool_minsize, maxsize=pool_size, loop=loop)
    # key supports ascii sans space and control chars
    # \x21 is !, right after space, and \x7e is ~, right before DEL
    # also 1 <= len <= 250 as per the spec
    _valid_key_re = re.compile(b'^[\x21-\x7e]{1,250}$')

    def _validate_key(self, key):
        """Check that *key* is a legal memcached key.

        :param key: ``bytes``, candidate key.
        :return: the key itself, unchanged, when it is valid.
        :raises ValidationException: if the key is not bytes, contains
            characters outside the printable-ASCII range, is empty or
            longer than 250 bytes, or ends with a newline.
        """
        if not isinstance(key, bytes):  # avoid bugs subtle and otherwise
            raise ValidationException('key must be bytes', key)
        m = self._valid_key_re.match(key)
        if m:
            # in python re, $ matches either end of line or right before
            # \n at end of line. We can't allow latter case, so
            # making sure length matches is simplest way to detect
            if len(m.group(0)) != len(key):
                raise ValidationException('trailing newline', key)
        else:
            raise ValidationException('invalid key', key)
        return key
    @asyncio.coroutine
    def _execute_simple_command(self, conn, raw_command):
        """Send *raw_command* and read one CRLF-terminated reply line.

        :param conn: pooled connection exposing ``reader``/``writer``
            asyncio streams.
        :param raw_command: ``bytes``, complete command including the
            trailing ``\\r\\n``.
        :return: ``bytearray``, the response with its trailing ``\\r\\n``
            stripped.
        """
        response, line = bytearray(), b''
        conn.writer.write(raw_command)
        yield from conn.writer.drain()
        # accumulate readline() results until a full CRLF-terminated
        # line has been collected
        while not line.endswith(b'\r\n'):
            line = yield from conn.reader.readline()
            response.extend(line)
        return response[:-2]
@asyncio.coroutine
def close(self):
"""Closes the sockets if its open."""
yield from self._pool.clear()
    @asyncio.coroutine
    def _multi_get(self, conn, *keys, with_cas=True):
        """Fetch several keys in one round trip.

        :param keys: ``bytes`` keys to fetch (must be unique).
        :param with_cas: when True, use ``gets`` so the server also
            returns cas tokens.
        :return: ``(values, cas_tokens)`` dict pair, keyed by the keys
            actually found; ``cas_tokens`` values are ``None`` when
            *with_cas* is False.
        :raises ClientException: on protocol errors, non-zero flags,
            duplicate or excess results.
        """
        # req - get <key> [<key> ...]\r\n
        # resp - VALUE <key> <flags> <bytes> [<cas unique>]\r\n
        #        <data block>\r\n (if exists)
        #        [...]
        #        END\r\n
        if not keys:
            return {}, {}
        [self._validate_key(key) for key in keys]
        if len(set(keys)) != len(keys):
            raise ClientException('duplicate keys passed to multi_get')
        cmd = b'gets ' if with_cas else b'get '
        conn.writer.write(cmd + b' '.join(keys) + b'\r\n')
        received = {}
        cas_tokens = {}
        line = yield from conn.reader.readline()
        while line != b'END\r\n':
            terms = line.split()
            if terms[0] == b'VALUE':  # exists
                key = terms[1]
                flags = int(terms[2])
                length = int(terms[3])
                # client-side flags are not supported yet
                if flags != 0:
                    raise ClientException('received non zero flags')
                # read <length> data bytes plus the trailing \r\n
                val = (yield from conn.reader.readexactly(length+2))[:-2]
                if key in received:
                    raise ClientException('duplicate results from server')
                received[key] = val
                cas_tokens[key] = int(terms[4]) if with_cas else None
            else:
                raise ClientException('get failed', line)
            line = yield from conn.reader.readline()
        if len(received) > len(keys):
            raise ClientException('received too many responses')
        return received, cas_tokens
@acquire
def delete(self, conn, key):
"""Deletes a key/value pair from the server.
:param key: is the key to delete.
:return: True if case values was deleted or False to indicate
that the item with this key was not found.
"""
assert self._validate_key(key)
command = b'delete ' + key + b'\r\n'
response = yield from self._execute_simple_command(conn, command)
if response not in (const.DELETED, const.NOT_FOUND):
raise ClientException('Memcached delete failed', response)
return response == const.DELETED
@acquire
def get(self, conn, key, default=None):
"""Gets a single value from the server.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, is the data for this specified key.
"""
values, _ = yield from self._multi_get(conn, key)
return values.get(key, default)
@acquire
def gets(self, conn, key, default=None):
"""Gets a single value from the server together with the cas token.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, ``bytes tuple with the value and the cas
"""
values, cas_tokens = yield from self._multi_get(
conn, key, with_cas=True)
return values.get(key, default), cas_tokens.get(key)
@acquire
@acquire
def stats(self, conn, args=None):
"""Runs a stats command on the server."""
# req - stats [additional args]\r\n
# resp - STAT <name> <value>\r\n (one per result)
# END\r\n
if args is None:
args = b''
conn.writer.write(b''.join((b'stats ', args, b'\r\n')))
result = {}
resp = yield from conn.reader.readline()
while resp != b'END\r\n':
terms = resp.split()
if len(terms) == 2 and terms[0] == b'STAT':
result[terms[1]] = None
elif len(terms) == 3 and terms[0] == b'STAT':
result[terms[1]] = terms[2]
elif len(terms) >= 3 and terms[0] == b'STAT':
result[terms[1]] = b' '.join(terms[2:])
else:
raise ClientException('stats failed', resp)
resp = yield from conn.reader.readline()
return result
@asyncio.coroutine
def _storage_command(self, conn, command, key, value,
flags=0, exptime=0, cas=None):
# req - set <key> <flags> <exptime> <bytes> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# req - set <key> <flags> <exptime> <bytes> <cas> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# typically, if val is > 1024**2 bytes server returns:
# SERVER_ERROR object too large for cache\r\n
# however custom-compiled memcached can have different limit
# so, we'll let the server decide what's too much
assert self._validate_key(key)
if not isinstance(exptime, int):
raise ValidationException('exptime not int', exptime)
elif exptime < 0:
raise ValidationException('exptime negative', exptime)
args = [str(a).encode('utf-8') for a in (flags, exptime, len(value))]
_cmd = b' '.join([command, key] + args)
if cas:
_cmd += b' ' + str(cas).encode('utf-8')
cmd = _cmd + b'\r\n' + value + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (
const.STORED, const.NOT_STORED, const.EXISTS, const.NOT_FOUND):
raise ClientException('stats {} failed'.format(command), resp)
return resp == const.STORED
@acquire
def set(self, conn, key, value, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'set', key, value, flags, exptime)
return resp
@acquire
def cas(self, conn, key, value, cas_token, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
only if value hasn't change from first retrieval
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:param cas_token: ``int``, unique cas token retrieve from previous
``gets``
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'cas', key, value, flags, exptime, cas=cas_token)
return resp
@acquire
def add(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *doesn't* already
hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'add', key, value, flags, exptime))
@acquire
def replace(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *does*
already hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'replace', key, value, flags, exptime))
@acquire
def append(self, conn, key, value, exptime=0):
"""Add data to an existing key after existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'append', key, value, flags, exptime))
@acquire
def prepend(self, conn, key, value, exptime=0):
"""Add data to an existing key before existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'prepend', key, value, flags, exptime))
@asyncio.coroutine
def _incr_decr(self, conn, command, key, delta):
delta_byte = str(delta).encode('utf-8')
cmd = b' '.join([command, key, delta_byte]) + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if not resp.isdigit() or resp == const.NOT_FOUND:
raise ClientException(
'Memcached {} command failed'.format(str(command)), resp)
return int(resp) if resp.isdigit() else None
@acquire
def incr(self, conn, key, increment=1):
"""Command is used to change data for some item in-place,
incrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param increment: ``int``, is the amount by which the client
wants to increase the item.
:return: ``int``, new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'incr', key, increment)
return resp
@acquire
def decr(self, conn, key, decrement=1):
"""Command is used to change data for some item in-place,
decrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param decrement: ``int``, is the amount by which the client
wants to decrease the item.
:return: ``int`` new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'decr', key, decrement)
return resp
@acquire
def touch(self, conn, key, exptime):
"""The command is used to update the expiration time of
an existing item without fetching it.
:param key: ``bytes``, is the key to update expiration time
:param exptime: ``int``, is expiration time. This replaces the existing
expiration time.
:return: ``bool``, True in case of success.
"""
assert self._validate_key(key)
_cmd = b' '.join([b'touch', key, str(exptime).encode('utf-8')])
cmd = _cmd + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (const.TOUCHED, const.NOT_FOUND):
raise ClientException('Memcached touch failed', resp)
return resp == const.TOUCHED
@acquire
def version(self, conn):
"""Current version of the server.
:return: ``bytes``, memcached version for current the server.
"""
command = b'version\r\n'
response = yield from self._execute_simple_command(
conn, command)
if not response.startswith(const.VERSION):
raise ClientException('Memcached version failed', response)
version, number = response.split()
return number
@acquire
def flush_all(self, conn):
"""Its effect is to invalidate all existing items immediately"""
command = b'flush_all\r\n'
response = yield from self._execute_simple_command(
conn, command)
if const.OK != response:
raise ClientException('Memcached flush_all failed', response)
|
aio-libs/aiomcache | aiomcache/client.py | Client.stats | python | def stats(self, conn, args=None):
# req - stats [additional args]\r\n
# resp - STAT <name> <value>\r\n (one per result)
# END\r\n
if args is None:
args = b''
conn.writer.write(b''.join((b'stats ', args, b'\r\n')))
result = {}
resp = yield from conn.reader.readline()
while resp != b'END\r\n':
terms = resp.split()
if len(terms) == 2 and terms[0] == b'STAT':
result[terms[1]] = None
elif len(terms) == 3 and terms[0] == b'STAT':
result[terms[1]] = terms[2]
elif len(terms) >= 3 and terms[0] == b'STAT':
result[terms[1]] = b' '.join(terms[2:])
else:
raise ClientException('stats failed', resp)
resp = yield from conn.reader.readline()
return result | Runs a stats command on the server. | train | https://github.com/aio-libs/aiomcache/blob/75d44b201aea91bc2856b10940922d5ebfbfcd7b/aiomcache/client.py#L177-L204 | null | class Client(object):
def __init__(self, host, port=11211, *,
pool_size=2, pool_minsize=None, loop=None):
if not pool_minsize:
pool_minsize = pool_size
self._pool = MemcachePool(
host, port, minsize=pool_minsize, maxsize=pool_size, loop=loop)
# key supports ascii sans space and control chars
# \x21 is !, right after space, and \x7e is -, right before DEL
# also 1 <= len <= 250 as per the spec
_valid_key_re = re.compile(b'^[\x21-\x7e]{1,250}$')
def _validate_key(self, key):
if not isinstance(key, bytes): # avoid bugs subtle and otherwise
raise ValidationException('key must be bytes', key)
m = self._valid_key_re.match(key)
if m:
# in python re, $ matches either end of line or right before
# \n at end of line. We can't allow latter case, so
# making sure length matches is simplest way to detect
if len(m.group(0)) != len(key):
raise ValidationException('trailing newline', key)
else:
raise ValidationException('invalid key', key)
return key
@asyncio.coroutine
def _execute_simple_command(self, conn, raw_command):
response, line = bytearray(), b''
conn.writer.write(raw_command)
yield from conn.writer.drain()
while not line.endswith(b'\r\n'):
line = yield from conn.reader.readline()
response.extend(line)
return response[:-2]
@asyncio.coroutine
def close(self):
"""Closes the sockets if its open."""
yield from self._pool.clear()
@asyncio.coroutine
def _multi_get(self, conn, *keys, with_cas=True):
# req - get <key> [<key> ...]\r\n
# resp - VALUE <key> <flags> <bytes> [<cas unique>]\r\n
# <data block>\r\n (if exists)
# [...]
# END\r\n
if not keys:
return {}, {}
[self._validate_key(key) for key in keys]
if len(set(keys)) != len(keys):
raise ClientException('duplicate keys passed to multi_get')
cmd = b'gets ' if with_cas else b'get '
conn.writer.write(cmd + b' '.join(keys) + b'\r\n')
received = {}
cas_tokens = {}
line = yield from conn.reader.readline()
while line != b'END\r\n':
terms = line.split()
if terms[0] == b'VALUE': # exists
key = terms[1]
flags = int(terms[2])
length = int(terms[3])
if flags != 0:
raise ClientException('received non zero flags')
val = (yield from conn.reader.readexactly(length+2))[:-2]
if key in received:
raise ClientException('duplicate results from server')
received[key] = val
cas_tokens[key] = int(terms[4]) if with_cas else None
else:
raise ClientException('get failed', line)
line = yield from conn.reader.readline()
if len(received) > len(keys):
raise ClientException('received too many responses')
return received, cas_tokens
@acquire
def delete(self, conn, key):
"""Deletes a key/value pair from the server.
:param key: is the key to delete.
:return: True if case values was deleted or False to indicate
that the item with this key was not found.
"""
assert self._validate_key(key)
command = b'delete ' + key + b'\r\n'
response = yield from self._execute_simple_command(conn, command)
if response not in (const.DELETED, const.NOT_FOUND):
raise ClientException('Memcached delete failed', response)
return response == const.DELETED
@acquire
def get(self, conn, key, default=None):
"""Gets a single value from the server.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, is the data for this specified key.
"""
values, _ = yield from self._multi_get(conn, key)
return values.get(key, default)
@acquire
def gets(self, conn, key, default=None):
"""Gets a single value from the server together with the cas token.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, ``bytes tuple with the value and the cas
"""
values, cas_tokens = yield from self._multi_get(
conn, key, with_cas=True)
return values.get(key, default), cas_tokens.get(key)
@acquire
def multi_get(self, conn, *keys):
"""Takes a list of keys and returns a list of values.
:param keys: ``list`` keys for the item being fetched.
:return: ``list`` of values for the specified keys.
:raises:``ValidationException``, ``ClientException``,
and socket errors
"""
values, _ = yield from self._multi_get(conn, *keys)
return tuple(values.get(key) for key in keys)
@acquire
@asyncio.coroutine
def _storage_command(self, conn, command, key, value,
flags=0, exptime=0, cas=None):
# req - set <key> <flags> <exptime> <bytes> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# req - set <key> <flags> <exptime> <bytes> <cas> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# typically, if val is > 1024**2 bytes server returns:
# SERVER_ERROR object too large for cache\r\n
# however custom-compiled memcached can have different limit
# so, we'll let the server decide what's too much
assert self._validate_key(key)
if not isinstance(exptime, int):
raise ValidationException('exptime not int', exptime)
elif exptime < 0:
raise ValidationException('exptime negative', exptime)
args = [str(a).encode('utf-8') for a in (flags, exptime, len(value))]
_cmd = b' '.join([command, key] + args)
if cas:
_cmd += b' ' + str(cas).encode('utf-8')
cmd = _cmd + b'\r\n' + value + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (
const.STORED, const.NOT_STORED, const.EXISTS, const.NOT_FOUND):
raise ClientException('stats {} failed'.format(command), resp)
return resp == const.STORED
@acquire
def set(self, conn, key, value, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'set', key, value, flags, exptime)
return resp
@acquire
def cas(self, conn, key, value, cas_token, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
only if value hasn't change from first retrieval
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:param cas_token: ``int``, unique cas token retrieve from previous
``gets``
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'cas', key, value, flags, exptime, cas=cas_token)
return resp
@acquire
def add(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *doesn't* already
hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'add', key, value, flags, exptime))
@acquire
def replace(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *does*
already hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'replace', key, value, flags, exptime))
@acquire
def append(self, conn, key, value, exptime=0):
"""Add data to an existing key after existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'append', key, value, flags, exptime))
@acquire
def prepend(self, conn, key, value, exptime=0):
"""Add data to an existing key before existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'prepend', key, value, flags, exptime))
@asyncio.coroutine
def _incr_decr(self, conn, command, key, delta):
delta_byte = str(delta).encode('utf-8')
cmd = b' '.join([command, key, delta_byte]) + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if not resp.isdigit() or resp == const.NOT_FOUND:
raise ClientException(
'Memcached {} command failed'.format(str(command)), resp)
return int(resp) if resp.isdigit() else None
@acquire
def incr(self, conn, key, increment=1):
"""Command is used to change data for some item in-place,
incrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param increment: ``int``, is the amount by which the client
wants to increase the item.
:return: ``int``, new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'incr', key, increment)
return resp
@acquire
def decr(self, conn, key, decrement=1):
"""Command is used to change data for some item in-place,
decrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param decrement: ``int``, is the amount by which the client
wants to decrease the item.
:return: ``int`` new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'decr', key, decrement)
return resp
@acquire
def touch(self, conn, key, exptime):
"""The command is used to update the expiration time of
an existing item without fetching it.
:param key: ``bytes``, is the key to update expiration time
:param exptime: ``int``, is expiration time. This replaces the existing
expiration time.
:return: ``bool``, True in case of success.
"""
assert self._validate_key(key)
_cmd = b' '.join([b'touch', key, str(exptime).encode('utf-8')])
cmd = _cmd + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (const.TOUCHED, const.NOT_FOUND):
raise ClientException('Memcached touch failed', resp)
return resp == const.TOUCHED
@acquire
def version(self, conn):
"""Current version of the server.
:return: ``bytes``, memcached version for current the server.
"""
command = b'version\r\n'
response = yield from self._execute_simple_command(
conn, command)
if not response.startswith(const.VERSION):
raise ClientException('Memcached version failed', response)
version, number = response.split()
return number
@acquire
def flush_all(self, conn):
"""Its effect is to invalidate all existing items immediately"""
command = b'flush_all\r\n'
response = yield from self._execute_simple_command(
conn, command)
if const.OK != response:
raise ClientException('Memcached flush_all failed', response)
|
aio-libs/aiomcache | aiomcache/client.py | Client.set | python | def set(self, conn, key, value, exptime=0):
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'set', key, value, flags, exptime)
return resp | Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success. | train | https://github.com/aio-libs/aiomcache/blob/75d44b201aea91bc2856b10940922d5ebfbfcd7b/aiomcache/client.py#L240-L253 | null | class Client(object):
def __init__(self, host, port=11211, *,
             pool_size=2, pool_minsize=None, loop=None):
    """Create a memcached client backed by a connection pool.

    :param host: server host name or address.
    :param port: ``int``, server port (memcached default 11211).
    :param pool_size: ``int``, maximum number of pooled connections.
    :param pool_minsize: ``int``, minimum pool size; falls back to
        ``pool_size`` when not given (or zero).
    :param loop: optional asyncio event loop.
    """
    minsize = pool_minsize if pool_minsize else pool_size
    self._pool = MemcachePool(
        host, port, minsize=minsize, maxsize=pool_size, loop=loop)
# Valid keys are printable ASCII without spaces or control characters:
# \x21 is '!', right after space, and \x7e is '~', right before DEL.
# Length must satisfy 1 <= len <= 250 as per the memcached spec.
_valid_key_re = re.compile(b'^[\x21-\x7e]{1,250}$')
def _validate_key(self, key):
    """Check that *key* is a legal memcached key and return it.

    :param key: ``bytes``, the key to validate.
    :return: the key itself when it is valid.
    :raises ValidationException: for a non-bytes key, illegal characters,
        bad length, or a trailing newline.
    """
    if not isinstance(key, bytes):  # avoid bugs subtle and otherwise
        raise ValidationException('key must be bytes', key)
    matched = self._valid_key_re.match(key)
    if matched is None:
        raise ValidationException('invalid key', key)
    # in python re, ``$`` matches either at the end of the string or just
    # before a trailing '\n'; the latter must be rejected, and comparing
    # the matched length against the key length is the simplest detection
    if len(matched.group(0)) != len(key):
        raise ValidationException('trailing newline', key)
    return key
@asyncio.coroutine
def _execute_simple_command(self, conn, raw_command):
    """Send *raw_command* and read the reply up to the first CRLF.

    :return: ``bytearray`` with the response, CRLF terminator stripped.
    """
    conn.writer.write(raw_command)
    yield from conn.writer.drain()

    buf = bytearray()
    chunk = b''
    while not chunk.endswith(b'\r\n'):
        chunk = yield from conn.reader.readline()
        buf.extend(chunk)
    return buf[:-2]
@asyncio.coroutine
def close(self):
    """Close the pooled sockets if they are open."""
    yield from self._pool.clear()
@asyncio.coroutine
def _multi_get(self, conn, *keys, with_cas=True):
    """Fetch several keys in one round trip.

    :param keys: ``bytes`` keys to fetch (must be unique).
    :param with_cas: when True, issue ``gets`` and collect cas tokens.
    :return: ``(values, cas_tokens)`` dicts keyed by the original keys;
        cas tokens are None when *with_cas* is False.
    """
    # req  - get <key> [<key> ...]\r\n
    # resp - VALUE <key> <flags> <bytes> [<cas unique>]\r\n
    #        <data block>\r\n (if exists)
    #        [...]
    #        END\r\n
    if not keys:
        return {}, {}

    [self._validate_key(key) for key in keys]
    if len(set(keys)) != len(keys):
        raise ClientException('duplicate keys passed to multi_get')

    cmd = b'gets ' if with_cas else b'get '
    conn.writer.write(cmd + b' '.join(keys) + b'\r\n')

    received = {}
    cas_tokens = {}
    line = yield from conn.reader.readline()
    while line != b'END\r\n':
        terms = line.split()
        if terms[0] == b'VALUE':  # exists
            key = terms[1]
            flags = int(terms[2])
            length = int(terms[3])
            # this client always stores with flags == 0, so anything
            # else signals a foreign/corrupt entry
            if flags != 0:
                raise ClientException('received non zero flags')
            # read the data block plus its trailing CRLF, then strip it
            val = (yield from conn.reader.readexactly(length+2))[:-2]
            if key in received:
                raise ClientException('duplicate results from server')
            received[key] = val
            cas_tokens[key] = int(terms[4]) if with_cas else None
        else:
            raise ClientException('get failed', line)
        line = yield from conn.reader.readline()

    if len(received) > len(keys):
        raise ClientException('received too many responses')
    return received, cas_tokens
@acquire
def delete(self, conn, key):
    """Remove a key/value pair from the server.

    :param key: the key to delete.
    :return: ``bool``, True when the item was deleted, False when no
        item with this key was found.
    """
    assert self._validate_key(key)
    response = yield from self._execute_simple_command(
        conn, b'delete ' + key + b'\r\n')
    if response == const.DELETED:
        return True
    if response == const.NOT_FOUND:
        return False
    raise ClientException('Memcached delete failed', response)
@acquire
def get(self, conn, key, default=None):
    """Fetch a single value from the server.

    :param key: ``bytes``, the key of the item being fetched.
    :param default: value returned when the key is absent.
    :return: ``bytes``, the data stored under *key*, or *default*.
    """
    values, _ = yield from self._multi_get(conn, key)
    if key in values:
        return values[key]
    return default
@acquire
def gets(self, conn, key, default=None):
    """Fetch a single value together with its cas token.

    :param key: ``bytes``, the key of the item being fetched.
    :param default: value returned when the key is absent.
    :return: ``(value, cas_token)`` tuple; the token is None when the
        key is absent.
    """
    result = yield from self._multi_get(conn, key, with_cas=True)
    values, cas_tokens = result
    return values.get(key, default), cas_tokens.get(key)
@acquire
def multi_get(self, conn, *keys):
    """Fetch several keys at once.

    :param keys: keys of the items being fetched.
    :return: ``tuple`` of values in the same order as *keys*
        (None for missing items).
    :raises: ``ValidationException``, ``ClientException``,
        and socket errors
    """
    values, _ = yield from self._multi_get(conn, *keys)
    ordered = []
    for key in keys:
        ordered.append(values.get(key))
    return tuple(ordered)
@acquire
def stats(self, conn, args=None):
    """Run a ``stats`` command on the server and parse the reply.

    :param args: optional ``bytes`` argument string appended to ``stats``.
    :return: ``dict`` mapping stat names to values (None for bare names).
    """
    # req  - stats [additional args]\r\n
    # resp - STAT <name> <value>\r\n (one per result)
    #        END\r\n
    if args is None:
        args = b''
    conn.writer.write(b''.join((b'stats ', args, b'\r\n')))

    result = {}
    resp = yield from conn.reader.readline()
    while resp != b'END\r\n':
        terms = resp.split()
        if terms[0] != b'STAT' or len(terms) < 2:
            raise ClientException('stats failed', resp)
        if len(terms) == 2:
            result[terms[1]] = None
        else:
            # joining covers the common single-value case as well as
            # multi-word values; the old separate ``len == 3`` branch
            # was redundant with this one
            result[terms[1]] = b' '.join(terms[2:])
        resp = yield from conn.reader.readline()
    return result
@asyncio.coroutine
def _storage_command(self, conn, command, key, value,
                     flags=0, exptime=0, cas=None):
    """Issue one of the storage commands (set/add/replace/append/...).

    :param command: ``bytes`` verb, e.g. ``b'set'``.
    :param key: ``bytes``, the item key.
    :param value: ``bytes`` payload to store.
    :param flags: ``int``, opaque server-side flags (this client uses 0).
    :param exptime: ``int``, expiration time; 0 means never expire.
    :param cas: optional ``int`` cas token appended to the request.
    :return: ``bool``, True when the server answered STORED.
    """
    # req  - <command> <key> <flags> <exptime> <bytes> [<cas>] [noreply]\r\n
    #        <data block>\r\n
    # resp - STORED\r\n (or others)
    # typically, if val is > 1024**2 bytes server returns:
    #   SERVER_ERROR object too large for cache\r\n
    # however custom-compiled memcached can have different limit
    # so, we'll let the server decide what's too much
    assert self._validate_key(key)
    if not isinstance(exptime, int):
        raise ValidationException('exptime not int', exptime)
    elif exptime < 0:
        raise ValidationException('exptime negative', exptime)

    args = [str(a).encode('utf-8') for a in (flags, exptime, len(value))]
    _cmd = b' '.join([command, key] + args)
    if cas:
        _cmd += b' ' + str(cas).encode('utf-8')
    cmd = _cmd + b'\r\n' + value + b'\r\n'
    resp = yield from self._execute_simple_command(conn, cmd)
    if resp not in (
            const.STORED, const.NOT_STORED, const.EXISTS, const.NOT_FOUND):
        # BUG FIX: the message previously read 'stats {} failed' (copied
        # from the stats handler); report the actual storage command.
        raise ClientException('{} failed'.format(command), resp)
    return resp == const.STORED
@acquire
def cas(self, conn, key, value, cas_token, exptime=0):
    """Set *key* to *value* with an optional exptime (0 means don't
    auto-expire), but only if the value hasn't changed since it was
    fetched (check-and-set).

    :param key: ``bytes``, is the key of the item.
    :param value: ``bytes``, data to store.
    :param exptime: ``int``, is expiration time. If it's 0, the
        item never expires.
    :param cas_token: ``int``, unique cas token retrieved from a
        previous ``gets``.
    :return: ``bool``, True in case of success.
    """
    # BUG FIX: ``@acquire`` was applied twice, which would check out two
    # pooled connections per call; a single application is correct.
    flags = 0  # TODO: fix when exception removed
    resp = yield from self._storage_command(
        conn, b'cas', key, value, flags, exptime, cas=cas_token)
    return resp
@acquire
def add(self, conn, key, value, exptime=0):
    """Store *value*, but only if the server *doesn't* already hold
    data for *key*.

    :param key: ``bytes``, is the key of the item.
    :param value: ``bytes``, data to store.
    :param exptime: ``int``, expiration time. If it's 0, the item
        never expires.
    :return: ``bool``, True in case of success.
    """
    flags = 0  # TODO: fix when exception removed
    stored = yield from self._storage_command(
        conn, b'add', key, value, flags, exptime)
    return stored
@acquire
def replace(self, conn, key, value, exptime=0):
    """Store *value*, but only if the server *does* already hold
    data for *key*.

    :param key: ``bytes``, is the key of the item.
    :param value: ``bytes``, data to store.
    :param exptime: ``int``, expiration time. If it's 0, the item
        never expires.
    :return: ``bool``, True in case of success.
    """
    flags = 0  # TODO: fix when exception removed
    stored = yield from self._storage_command(
        conn, b'replace', key, value, flags, exptime)
    return stored
@acquire
def append(self, conn, key, value, exptime=0):
    """Add *value* after the existing data for *key*.

    :param key: ``bytes``, is the key of the item.
    :param value: ``bytes``, data to store.
    :param exptime: ``int``, expiration time. If it's 0, the item
        never expires.
    :return: ``bool``, True in case of success.
    """
    flags = 0  # TODO: fix when exception removed
    stored = yield from self._storage_command(
        conn, b'append', key, value, flags, exptime)
    return stored
@acquire
def prepend(self, conn, key, value, exptime=0):
    """Add *value* before the existing data for *key*.

    :param key: ``bytes``, is the key of the item.
    :param value: ``bytes``, data to store.
    :param exptime: ``int``, expiration time. If it's 0, the item
        never expires.
    :return: ``bool``, True in case of success.
    """
    flags = 0  # TODO: fix when exception removed
    stored = yield from self._storage_command(
        conn, b'prepend', key, value, flags, exptime)
    return stored
@asyncio.coroutine
def _incr_decr(self, conn, command, key, delta):
    """Shared implementation of ``incr``/``decr``.

    :param command: ``bytes`` verb, b'incr' or b'decr'.
    :param delta: ``int``, amount to add or subtract.
    :return: ``int`` new value, or None when the key does not exist.
    """
    delta_byte = str(delta).encode('utf-8')
    cmd = b' '.join([command, key, delta_byte]) + b'\r\n'
    resp = yield from self._execute_simple_command(conn, cmd)
    # BUG FIX: the old test used ``or``, so a NOT_FOUND reply raised and
    # the documented ``None`` return for missing keys was unreachable;
    # only a reply that is neither numeric nor NOT_FOUND is an error.
    if not resp.isdigit() and resp != const.NOT_FOUND:
        raise ClientException(
            'Memcached {} command failed'.format(str(command)), resp)
    return int(resp) if resp.isdigit() else None
@acquire
def incr(self, conn, key, increment=1):
    """Change the item's data in place by incrementing it. The data is
    treated as the decimal representation of a 64-bit unsigned integer.

    :param key: ``bytes``, the key of the item the client wishes
        to change.
    :param increment: ``int``, the amount by which the client wants to
        increase the item.
    :return: ``int``, the new value of the item's data after the
        increment, or ``None`` when the item was not found.
    """
    assert self._validate_key(key)
    new_value = yield from self._incr_decr(conn, b'incr', key, increment)
    return new_value
@acquire
def decr(self, conn, key, decrement=1):
    """Change the item's data in place by decrementing it. The data is
    treated as the decimal representation of a 64-bit unsigned integer.

    :param key: ``bytes``, the key of the item the client wishes
        to change.
    :param decrement: ``int``, the amount by which the client wants to
        decrease the item.
    :return: ``int``, the new value of the item's data after the
        decrement, or ``None`` when the item was not found.
    """
    assert self._validate_key(key)
    new_value = yield from self._incr_decr(conn, b'decr', key, decrement)
    return new_value
@acquire
def touch(self, conn, key, exptime):
    """Update the expiration time of an existing item without
    fetching it.

    :param key: ``bytes``, the key whose expiration time is replaced.
    :param exptime: ``int``, the new expiration time.
    :return: ``bool``, True in case of success.
    """
    assert self._validate_key(key)
    cmd = b' '.join([b'touch', key, str(exptime).encode('utf-8')]) + b'\r\n'
    resp = yield from self._execute_simple_command(conn, cmd)
    if resp == const.TOUCHED:
        return True
    if resp == const.NOT_FOUND:
        return False
    raise ClientException('Memcached touch failed', resp)
@acquire
def version(self, conn):
    """Query the server for its version.

    :return: ``bytes``, the memcached version number of the server.
    """
    response = yield from self._execute_simple_command(conn, b'version\r\n')
    if not response.startswith(const.VERSION):
        raise ClientException('Memcached version failed', response)
    # reply is b'VERSION <number>'; only the number is returned
    _, number = response.split()
    return number
@acquire
def flush_all(self, conn):
    """Invalidate all existing items on the server immediately."""
    response = yield from self._execute_simple_command(conn, b'flush_all\r\n')
    if response != const.OK:
        raise ClientException('Memcached flush_all failed', response)
|
aio-libs/aiomcache | aiomcache/client.py | Client.cas | python | def cas(self, conn, key, value, cas_token, exptime=0):
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'cas', key, value, flags, exptime, cas=cas_token)
return resp | Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
only if value hasn't change from first retrieval
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:param cas_token: ``int``, unique cas token retrieve from previous
``gets``
:return: ``bool``, True in case of success. | train | https://github.com/aio-libs/aiomcache/blob/75d44b201aea91bc2856b10940922d5ebfbfcd7b/aiomcache/client.py#L256-L272 | null | class Client(object):
def __init__(self, host, port=11211, *,
pool_size=2, pool_minsize=None, loop=None):
if not pool_minsize:
pool_minsize = pool_size
self._pool = MemcachePool(
host, port, minsize=pool_minsize, maxsize=pool_size, loop=loop)
# key supports ascii sans space and control chars
# \x21 is !, right after space, and \x7e is -, right before DEL
# also 1 <= len <= 250 as per the spec
_valid_key_re = re.compile(b'^[\x21-\x7e]{1,250}$')
def _validate_key(self, key):
if not isinstance(key, bytes): # avoid bugs subtle and otherwise
raise ValidationException('key must be bytes', key)
m = self._valid_key_re.match(key)
if m:
# in python re, $ matches either end of line or right before
# \n at end of line. We can't allow latter case, so
# making sure length matches is simplest way to detect
if len(m.group(0)) != len(key):
raise ValidationException('trailing newline', key)
else:
raise ValidationException('invalid key', key)
return key
@asyncio.coroutine
def _execute_simple_command(self, conn, raw_command):
response, line = bytearray(), b''
conn.writer.write(raw_command)
yield from conn.writer.drain()
while not line.endswith(b'\r\n'):
line = yield from conn.reader.readline()
response.extend(line)
return response[:-2]
@asyncio.coroutine
def close(self):
"""Closes the sockets if its open."""
yield from self._pool.clear()
@asyncio.coroutine
def _multi_get(self, conn, *keys, with_cas=True):
# req - get <key> [<key> ...]\r\n
# resp - VALUE <key> <flags> <bytes> [<cas unique>]\r\n
# <data block>\r\n (if exists)
# [...]
# END\r\n
if not keys:
return {}, {}
[self._validate_key(key) for key in keys]
if len(set(keys)) != len(keys):
raise ClientException('duplicate keys passed to multi_get')
cmd = b'gets ' if with_cas else b'get '
conn.writer.write(cmd + b' '.join(keys) + b'\r\n')
received = {}
cas_tokens = {}
line = yield from conn.reader.readline()
while line != b'END\r\n':
terms = line.split()
if terms[0] == b'VALUE': # exists
key = terms[1]
flags = int(terms[2])
length = int(terms[3])
if flags != 0:
raise ClientException('received non zero flags')
val = (yield from conn.reader.readexactly(length+2))[:-2]
if key in received:
raise ClientException('duplicate results from server')
received[key] = val
cas_tokens[key] = int(terms[4]) if with_cas else None
else:
raise ClientException('get failed', line)
line = yield from conn.reader.readline()
if len(received) > len(keys):
raise ClientException('received too many responses')
return received, cas_tokens
@acquire
def delete(self, conn, key):
"""Deletes a key/value pair from the server.
:param key: is the key to delete.
:return: True if case values was deleted or False to indicate
that the item with this key was not found.
"""
assert self._validate_key(key)
command = b'delete ' + key + b'\r\n'
response = yield from self._execute_simple_command(conn, command)
if response not in (const.DELETED, const.NOT_FOUND):
raise ClientException('Memcached delete failed', response)
return response == const.DELETED
@acquire
def get(self, conn, key, default=None):
"""Gets a single value from the server.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, is the data for this specified key.
"""
values, _ = yield from self._multi_get(conn, key)
return values.get(key, default)
@acquire
def gets(self, conn, key, default=None):
"""Gets a single value from the server together with the cas token.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, ``bytes tuple with the value and the cas
"""
values, cas_tokens = yield from self._multi_get(
conn, key, with_cas=True)
return values.get(key, default), cas_tokens.get(key)
@acquire
def multi_get(self, conn, *keys):
"""Takes a list of keys and returns a list of values.
:param keys: ``list`` keys for the item being fetched.
:return: ``list`` of values for the specified keys.
:raises:``ValidationException``, ``ClientException``,
and socket errors
"""
values, _ = yield from self._multi_get(conn, *keys)
return tuple(values.get(key) for key in keys)
@acquire
def stats(self, conn, args=None):
"""Runs a stats command on the server."""
# req - stats [additional args]\r\n
# resp - STAT <name> <value>\r\n (one per result)
# END\r\n
if args is None:
args = b''
conn.writer.write(b''.join((b'stats ', args, b'\r\n')))
result = {}
resp = yield from conn.reader.readline()
while resp != b'END\r\n':
terms = resp.split()
if len(terms) == 2 and terms[0] == b'STAT':
result[terms[1]] = None
elif len(terms) == 3 and terms[0] == b'STAT':
result[terms[1]] = terms[2]
elif len(terms) >= 3 and terms[0] == b'STAT':
result[terms[1]] = b' '.join(terms[2:])
else:
raise ClientException('stats failed', resp)
resp = yield from conn.reader.readline()
return result
@asyncio.coroutine
def _storage_command(self, conn, command, key, value,
flags=0, exptime=0, cas=None):
# req - set <key> <flags> <exptime> <bytes> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# req - set <key> <flags> <exptime> <bytes> <cas> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# typically, if val is > 1024**2 bytes server returns:
# SERVER_ERROR object too large for cache\r\n
# however custom-compiled memcached can have different limit
# so, we'll let the server decide what's too much
assert self._validate_key(key)
if not isinstance(exptime, int):
raise ValidationException('exptime not int', exptime)
elif exptime < 0:
raise ValidationException('exptime negative', exptime)
args = [str(a).encode('utf-8') for a in (flags, exptime, len(value))]
_cmd = b' '.join([command, key] + args)
if cas:
_cmd += b' ' + str(cas).encode('utf-8')
cmd = _cmd + b'\r\n' + value + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (
const.STORED, const.NOT_STORED, const.EXISTS, const.NOT_FOUND):
raise ClientException('stats {} failed'.format(command), resp)
return resp == const.STORED
@acquire
def set(self, conn, key, value, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'set', key, value, flags, exptime)
return resp
@acquire
def add(self, conn, key, value, exptime=0):
    """Store this data, but only if the server *doesn't* already
    hold data for this key.

    :param key: ``bytes``, is the key of the item.
    :param value: ``bytes``, data to store.
    :param exptime: ``int``, expiration time. If it's 0, the item
        never expires.
    :return: ``bool``, True in case of success.
    """
    # BUG FIX: ``@acquire`` was applied twice, which would check out two
    # pooled connections per call; a single application is correct.
    flags = 0  # TODO: fix when exception removed
    return (yield from self._storage_command(
        conn, b'add', key, value, flags, exptime))
@acquire
def replace(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *does*
already hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'replace', key, value, flags, exptime))
@acquire
def append(self, conn, key, value, exptime=0):
"""Add data to an existing key after existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'append', key, value, flags, exptime))
@acquire
def prepend(self, conn, key, value, exptime=0):
"""Add data to an existing key before existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'prepend', key, value, flags, exptime))
@asyncio.coroutine
def _incr_decr(self, conn, command, key, delta):
    """Shared implementation of ``incr``/``decr``.

    :param command: ``bytes`` verb, b'incr' or b'decr'.
    :param delta: ``int``, amount to add or subtract.
    :return: ``int`` new value, or None when the key does not exist.
    """
    delta_byte = str(delta).encode('utf-8')
    cmd = b' '.join([command, key, delta_byte]) + b'\r\n'
    resp = yield from self._execute_simple_command(conn, cmd)
    # BUG FIX: the old test used ``or``, so a NOT_FOUND reply raised and
    # the documented ``None`` return for missing keys was unreachable;
    # only a reply that is neither numeric nor NOT_FOUND is an error.
    if not resp.isdigit() and resp != const.NOT_FOUND:
        raise ClientException(
            'Memcached {} command failed'.format(str(command)), resp)
    return int(resp) if resp.isdigit() else None
@acquire
def incr(self, conn, key, increment=1):
"""Command is used to change data for some item in-place,
incrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param increment: ``int``, is the amount by which the client
wants to increase the item.
:return: ``int``, new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'incr', key, increment)
return resp
@acquire
def decr(self, conn, key, decrement=1):
"""Command is used to change data for some item in-place,
decrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param decrement: ``int``, is the amount by which the client
wants to decrease the item.
:return: ``int`` new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'decr', key, decrement)
return resp
@acquire
def touch(self, conn, key, exptime):
"""The command is used to update the expiration time of
an existing item without fetching it.
:param key: ``bytes``, is the key to update expiration time
:param exptime: ``int``, is expiration time. This replaces the existing
expiration time.
:return: ``bool``, True in case of success.
"""
assert self._validate_key(key)
_cmd = b' '.join([b'touch', key, str(exptime).encode('utf-8')])
cmd = _cmd + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (const.TOUCHED, const.NOT_FOUND):
raise ClientException('Memcached touch failed', resp)
return resp == const.TOUCHED
@acquire
def version(self, conn):
"""Current version of the server.
:return: ``bytes``, memcached version for current the server.
"""
command = b'version\r\n'
response = yield from self._execute_simple_command(
conn, command)
if not response.startswith(const.VERSION):
raise ClientException('Memcached version failed', response)
version, number = response.split()
return number
@acquire
def flush_all(self, conn):
"""Its effect is to invalidate all existing items immediately"""
command = b'flush_all\r\n'
response = yield from self._execute_simple_command(
conn, command)
if const.OK != response:
raise ClientException('Memcached flush_all failed', response)
|
aio-libs/aiomcache | aiomcache/client.py | Client.add | python | def add(self, conn, key, value, exptime=0):
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'add', key, value, flags, exptime)) | Store this data, but only if the server *doesn't* already
hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success. | train | https://github.com/aio-libs/aiomcache/blob/75d44b201aea91bc2856b10940922d5ebfbfcd7b/aiomcache/client.py#L275-L287 | null | class Client(object):
def __init__(self, host, port=11211, *,
pool_size=2, pool_minsize=None, loop=None):
if not pool_minsize:
pool_minsize = pool_size
self._pool = MemcachePool(
host, port, minsize=pool_minsize, maxsize=pool_size, loop=loop)
# key supports ascii sans space and control chars
# \x21 is !, right after space, and \x7e is -, right before DEL
# also 1 <= len <= 250 as per the spec
_valid_key_re = re.compile(b'^[\x21-\x7e]{1,250}$')
def _validate_key(self, key):
if not isinstance(key, bytes): # avoid bugs subtle and otherwise
raise ValidationException('key must be bytes', key)
m = self._valid_key_re.match(key)
if m:
# in python re, $ matches either end of line or right before
# \n at end of line. We can't allow latter case, so
# making sure length matches is simplest way to detect
if len(m.group(0)) != len(key):
raise ValidationException('trailing newline', key)
else:
raise ValidationException('invalid key', key)
return key
@asyncio.coroutine
def _execute_simple_command(self, conn, raw_command):
response, line = bytearray(), b''
conn.writer.write(raw_command)
yield from conn.writer.drain()
while not line.endswith(b'\r\n'):
line = yield from conn.reader.readline()
response.extend(line)
return response[:-2]
@asyncio.coroutine
def close(self):
"""Closes the sockets if its open."""
yield from self._pool.clear()
@asyncio.coroutine
def _multi_get(self, conn, *keys, with_cas=True):
# req - get <key> [<key> ...]\r\n
# resp - VALUE <key> <flags> <bytes> [<cas unique>]\r\n
# <data block>\r\n (if exists)
# [...]
# END\r\n
if not keys:
return {}, {}
[self._validate_key(key) for key in keys]
if len(set(keys)) != len(keys):
raise ClientException('duplicate keys passed to multi_get')
cmd = b'gets ' if with_cas else b'get '
conn.writer.write(cmd + b' '.join(keys) + b'\r\n')
received = {}
cas_tokens = {}
line = yield from conn.reader.readline()
while line != b'END\r\n':
terms = line.split()
if terms[0] == b'VALUE': # exists
key = terms[1]
flags = int(terms[2])
length = int(terms[3])
if flags != 0:
raise ClientException('received non zero flags')
val = (yield from conn.reader.readexactly(length+2))[:-2]
if key in received:
raise ClientException('duplicate results from server')
received[key] = val
cas_tokens[key] = int(terms[4]) if with_cas else None
else:
raise ClientException('get failed', line)
line = yield from conn.reader.readline()
if len(received) > len(keys):
raise ClientException('received too many responses')
return received, cas_tokens
@acquire
def delete(self, conn, key):
"""Deletes a key/value pair from the server.
:param key: is the key to delete.
:return: True if case values was deleted or False to indicate
that the item with this key was not found.
"""
assert self._validate_key(key)
command = b'delete ' + key + b'\r\n'
response = yield from self._execute_simple_command(conn, command)
if response not in (const.DELETED, const.NOT_FOUND):
raise ClientException('Memcached delete failed', response)
return response == const.DELETED
@acquire
def get(self, conn, key, default=None):
"""Gets a single value from the server.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, is the data for this specified key.
"""
values, _ = yield from self._multi_get(conn, key)
return values.get(key, default)
@acquire
def gets(self, conn, key, default=None):
"""Gets a single value from the server together with the cas token.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, ``bytes tuple with the value and the cas
"""
values, cas_tokens = yield from self._multi_get(
conn, key, with_cas=True)
return values.get(key, default), cas_tokens.get(key)
@acquire
def multi_get(self, conn, *keys):
"""Takes a list of keys and returns a list of values.
:param keys: ``list`` keys for the item being fetched.
:return: ``list`` of values for the specified keys.
:raises:``ValidationException``, ``ClientException``,
and socket errors
"""
values, _ = yield from self._multi_get(conn, *keys)
return tuple(values.get(key) for key in keys)
@acquire
def stats(self, conn, args=None):
"""Runs a stats command on the server."""
# req - stats [additional args]\r\n
# resp - STAT <name> <value>\r\n (one per result)
# END\r\n
if args is None:
args = b''
conn.writer.write(b''.join((b'stats ', args, b'\r\n')))
result = {}
resp = yield from conn.reader.readline()
while resp != b'END\r\n':
terms = resp.split()
if len(terms) == 2 and terms[0] == b'STAT':
result[terms[1]] = None
elif len(terms) == 3 and terms[0] == b'STAT':
result[terms[1]] = terms[2]
elif len(terms) >= 3 and terms[0] == b'STAT':
result[terms[1]] = b' '.join(terms[2:])
else:
raise ClientException('stats failed', resp)
resp = yield from conn.reader.readline()
return result
@asyncio.coroutine
def _storage_command(self, conn, command, key, value,
flags=0, exptime=0, cas=None):
# req - set <key> <flags> <exptime> <bytes> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# req - set <key> <flags> <exptime> <bytes> <cas> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# typically, if val is > 1024**2 bytes server returns:
# SERVER_ERROR object too large for cache\r\n
# however custom-compiled memcached can have different limit
# so, we'll let the server decide what's too much
assert self._validate_key(key)
if not isinstance(exptime, int):
raise ValidationException('exptime not int', exptime)
elif exptime < 0:
raise ValidationException('exptime negative', exptime)
args = [str(a).encode('utf-8') for a in (flags, exptime, len(value))]
_cmd = b' '.join([command, key] + args)
if cas:
_cmd += b' ' + str(cas).encode('utf-8')
cmd = _cmd + b'\r\n' + value + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (
const.STORED, const.NOT_STORED, const.EXISTS, const.NOT_FOUND):
raise ClientException('stats {} failed'.format(command), resp)
return resp == const.STORED
@acquire
def set(self, conn, key, value, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'set', key, value, flags, exptime)
return resp
@acquire
def cas(self, conn, key, value, cas_token, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
only if value hasn't change from first retrieval
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:param cas_token: ``int``, unique cas token retrieve from previous
``gets``
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'cas', key, value, flags, exptime, cas=cas_token)
return resp
@acquire
def replace(self, conn, key, value, exptime=0):
    """Store this data, but only if the server *does*
    already hold data for this key.

    :param key: ``bytes``, is the key of the item.
    :param value: ``bytes``, data to store.
    :param exptime: ``int``, expiration time. If it's 0, the item
        never expires.
    :return: ``bool``, True in case of success.
    """
    # BUG FIX: ``@acquire`` was applied twice, which would check out two
    # pooled connections per call; a single application is correct.
    flags = 0  # TODO: fix when exception removed
    return (yield from self._storage_command(
        conn, b'replace', key, value, flags, exptime))
@acquire
def append(self, conn, key, value, exptime=0):
"""Add data to an existing key after existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'append', key, value, flags, exptime))
@acquire
def prepend(self, conn, key, value, exptime=0):
"""Add data to an existing key before existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'prepend', key, value, flags, exptime))
@asyncio.coroutine
def _incr_decr(self, conn, command, key, delta):
delta_byte = str(delta).encode('utf-8')
cmd = b' '.join([command, key, delta_byte]) + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if not resp.isdigit() or resp == const.NOT_FOUND:
raise ClientException(
'Memcached {} command failed'.format(str(command)), resp)
return int(resp) if resp.isdigit() else None
@acquire
def incr(self, conn, key, increment=1):
"""Command is used to change data for some item in-place,
incrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param increment: ``int``, is the amount by which the client
wants to increase the item.
:return: ``int``, new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'incr', key, increment)
return resp
@acquire
def decr(self, conn, key, decrement=1):
"""Command is used to change data for some item in-place,
decrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param decrement: ``int``, is the amount by which the client
wants to decrease the item.
:return: ``int`` new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'decr', key, decrement)
return resp
@acquire
def touch(self, conn, key, exptime):
"""The command is used to update the expiration time of
an existing item without fetching it.
:param key: ``bytes``, is the key to update expiration time
:param exptime: ``int``, is expiration time. This replaces the existing
expiration time.
:return: ``bool``, True in case of success.
"""
assert self._validate_key(key)
_cmd = b' '.join([b'touch', key, str(exptime).encode('utf-8')])
cmd = _cmd + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (const.TOUCHED, const.NOT_FOUND):
raise ClientException('Memcached touch failed', resp)
return resp == const.TOUCHED
@acquire
def version(self, conn):
"""Current version of the server.
:return: ``bytes``, memcached version for current the server.
"""
command = b'version\r\n'
response = yield from self._execute_simple_command(
conn, command)
if not response.startswith(const.VERSION):
raise ClientException('Memcached version failed', response)
version, number = response.split()
return number
@acquire
def flush_all(self, conn):
"""Its effect is to invalidate all existing items immediately"""
command = b'flush_all\r\n'
response = yield from self._execute_simple_command(
conn, command)
if const.OK != response:
raise ClientException('Memcached flush_all failed', response)
|
aio-libs/aiomcache | aiomcache/client.py | Client.replace | python | def replace(self, conn, key, value, exptime=0):
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'replace', key, value, flags, exptime)) | Store this data, but only if the server *does*
already hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success. | train | https://github.com/aio-libs/aiomcache/blob/75d44b201aea91bc2856b10940922d5ebfbfcd7b/aiomcache/client.py#L290-L302 | null | class Client(object):
def __init__(self, host, port=11211, *,
pool_size=2, pool_minsize=None, loop=None):
if not pool_minsize:
pool_minsize = pool_size
self._pool = MemcachePool(
host, port, minsize=pool_minsize, maxsize=pool_size, loop=loop)
# key supports ascii sans space and control chars
# \x21 is !, right after space, and \x7e is -, right before DEL
# also 1 <= len <= 250 as per the spec
_valid_key_re = re.compile(b'^[\x21-\x7e]{1,250}$')
def _validate_key(self, key):
if not isinstance(key, bytes): # avoid bugs subtle and otherwise
raise ValidationException('key must be bytes', key)
m = self._valid_key_re.match(key)
if m:
# in python re, $ matches either end of line or right before
# \n at end of line. We can't allow latter case, so
# making sure length matches is simplest way to detect
if len(m.group(0)) != len(key):
raise ValidationException('trailing newline', key)
else:
raise ValidationException('invalid key', key)
return key
@asyncio.coroutine
def _execute_simple_command(self, conn, raw_command):
response, line = bytearray(), b''
conn.writer.write(raw_command)
yield from conn.writer.drain()
while not line.endswith(b'\r\n'):
line = yield from conn.reader.readline()
response.extend(line)
return response[:-2]
@asyncio.coroutine
def close(self):
"""Closes the sockets if its open."""
yield from self._pool.clear()
@asyncio.coroutine
def _multi_get(self, conn, *keys, with_cas=True):
# req - get <key> [<key> ...]\r\n
# resp - VALUE <key> <flags> <bytes> [<cas unique>]\r\n
# <data block>\r\n (if exists)
# [...]
# END\r\n
if not keys:
return {}, {}
[self._validate_key(key) for key in keys]
if len(set(keys)) != len(keys):
raise ClientException('duplicate keys passed to multi_get')
cmd = b'gets ' if with_cas else b'get '
conn.writer.write(cmd + b' '.join(keys) + b'\r\n')
received = {}
cas_tokens = {}
line = yield from conn.reader.readline()
while line != b'END\r\n':
terms = line.split()
if terms[0] == b'VALUE': # exists
key = terms[1]
flags = int(terms[2])
length = int(terms[3])
if flags != 0:
raise ClientException('received non zero flags')
val = (yield from conn.reader.readexactly(length+2))[:-2]
if key in received:
raise ClientException('duplicate results from server')
received[key] = val
cas_tokens[key] = int(terms[4]) if with_cas else None
else:
raise ClientException('get failed', line)
line = yield from conn.reader.readline()
if len(received) > len(keys):
raise ClientException('received too many responses')
return received, cas_tokens
@acquire
def delete(self, conn, key):
"""Deletes a key/value pair from the server.
:param key: is the key to delete.
:return: True if case values was deleted or False to indicate
that the item with this key was not found.
"""
assert self._validate_key(key)
command = b'delete ' + key + b'\r\n'
response = yield from self._execute_simple_command(conn, command)
if response not in (const.DELETED, const.NOT_FOUND):
raise ClientException('Memcached delete failed', response)
return response == const.DELETED
@acquire
def get(self, conn, key, default=None):
"""Gets a single value from the server.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, is the data for this specified key.
"""
values, _ = yield from self._multi_get(conn, key)
return values.get(key, default)
@acquire
def gets(self, conn, key, default=None):
"""Gets a single value from the server together with the cas token.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, ``bytes tuple with the value and the cas
"""
values, cas_tokens = yield from self._multi_get(
conn, key, with_cas=True)
return values.get(key, default), cas_tokens.get(key)
@acquire
def multi_get(self, conn, *keys):
"""Takes a list of keys and returns a list of values.
:param keys: ``list`` keys for the item being fetched.
:return: ``list`` of values for the specified keys.
:raises:``ValidationException``, ``ClientException``,
and socket errors
"""
values, _ = yield from self._multi_get(conn, *keys)
return tuple(values.get(key) for key in keys)
@acquire
def stats(self, conn, args=None):
"""Runs a stats command on the server."""
# req - stats [additional args]\r\n
# resp - STAT <name> <value>\r\n (one per result)
# END\r\n
if args is None:
args = b''
conn.writer.write(b''.join((b'stats ', args, b'\r\n')))
result = {}
resp = yield from conn.reader.readline()
while resp != b'END\r\n':
terms = resp.split()
if len(terms) == 2 and terms[0] == b'STAT':
result[terms[1]] = None
elif len(terms) == 3 and terms[0] == b'STAT':
result[terms[1]] = terms[2]
elif len(terms) >= 3 and terms[0] == b'STAT':
result[terms[1]] = b' '.join(terms[2:])
else:
raise ClientException('stats failed', resp)
resp = yield from conn.reader.readline()
return result
@asyncio.coroutine
def _storage_command(self, conn, command, key, value,
flags=0, exptime=0, cas=None):
# req - set <key> <flags> <exptime> <bytes> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# req - set <key> <flags> <exptime> <bytes> <cas> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# typically, if val is > 1024**2 bytes server returns:
# SERVER_ERROR object too large for cache\r\n
# however custom-compiled memcached can have different limit
# so, we'll let the server decide what's too much
assert self._validate_key(key)
if not isinstance(exptime, int):
raise ValidationException('exptime not int', exptime)
elif exptime < 0:
raise ValidationException('exptime negative', exptime)
args = [str(a).encode('utf-8') for a in (flags, exptime, len(value))]
_cmd = b' '.join([command, key] + args)
if cas:
_cmd += b' ' + str(cas).encode('utf-8')
cmd = _cmd + b'\r\n' + value + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (
const.STORED, const.NOT_STORED, const.EXISTS, const.NOT_FOUND):
raise ClientException('stats {} failed'.format(command), resp)
return resp == const.STORED
@acquire
def set(self, conn, key, value, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'set', key, value, flags, exptime)
return resp
@acquire
def cas(self, conn, key, value, cas_token, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
only if value hasn't change from first retrieval
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:param cas_token: ``int``, unique cas token retrieve from previous
``gets``
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'cas', key, value, flags, exptime, cas=cas_token)
return resp
@acquire
def add(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *doesn't* already
hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'add', key, value, flags, exptime))
@acquire
@acquire
def append(self, conn, key, value, exptime=0):
"""Add data to an existing key after existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'append', key, value, flags, exptime))
@acquire
def prepend(self, conn, key, value, exptime=0):
"""Add data to an existing key before existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'prepend', key, value, flags, exptime))
@asyncio.coroutine
def _incr_decr(self, conn, command, key, delta):
delta_byte = str(delta).encode('utf-8')
cmd = b' '.join([command, key, delta_byte]) + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if not resp.isdigit() or resp == const.NOT_FOUND:
raise ClientException(
'Memcached {} command failed'.format(str(command)), resp)
return int(resp) if resp.isdigit() else None
@acquire
def incr(self, conn, key, increment=1):
"""Command is used to change data for some item in-place,
incrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param increment: ``int``, is the amount by which the client
wants to increase the item.
:return: ``int``, new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'incr', key, increment)
return resp
@acquire
def decr(self, conn, key, decrement=1):
"""Command is used to change data for some item in-place,
decrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param decrement: ``int``, is the amount by which the client
wants to decrease the item.
:return: ``int`` new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'decr', key, decrement)
return resp
@acquire
def touch(self, conn, key, exptime):
"""The command is used to update the expiration time of
an existing item without fetching it.
:param key: ``bytes``, is the key to update expiration time
:param exptime: ``int``, is expiration time. This replaces the existing
expiration time.
:return: ``bool``, True in case of success.
"""
assert self._validate_key(key)
_cmd = b' '.join([b'touch', key, str(exptime).encode('utf-8')])
cmd = _cmd + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (const.TOUCHED, const.NOT_FOUND):
raise ClientException('Memcached touch failed', resp)
return resp == const.TOUCHED
@acquire
def version(self, conn):
"""Current version of the server.
:return: ``bytes``, memcached version for current the server.
"""
command = b'version\r\n'
response = yield from self._execute_simple_command(
conn, command)
if not response.startswith(const.VERSION):
raise ClientException('Memcached version failed', response)
version, number = response.split()
return number
@acquire
def flush_all(self, conn):
"""Its effect is to invalidate all existing items immediately"""
command = b'flush_all\r\n'
response = yield from self._execute_simple_command(
conn, command)
if const.OK != response:
raise ClientException('Memcached flush_all failed', response)
|
aio-libs/aiomcache | aiomcache/client.py | Client.append | python | def append(self, conn, key, value, exptime=0):
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'append', key, value, flags, exptime)) | Add data to an existing key after existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success. | train | https://github.com/aio-libs/aiomcache/blob/75d44b201aea91bc2856b10940922d5ebfbfcd7b/aiomcache/client.py#L305-L316 | null | class Client(object):
def __init__(self, host, port=11211, *,
pool_size=2, pool_minsize=None, loop=None):
if not pool_minsize:
pool_minsize = pool_size
self._pool = MemcachePool(
host, port, minsize=pool_minsize, maxsize=pool_size, loop=loop)
# key supports ascii sans space and control chars
# \x21 is !, right after space, and \x7e is -, right before DEL
# also 1 <= len <= 250 as per the spec
_valid_key_re = re.compile(b'^[\x21-\x7e]{1,250}$')
def _validate_key(self, key):
if not isinstance(key, bytes): # avoid bugs subtle and otherwise
raise ValidationException('key must be bytes', key)
m = self._valid_key_re.match(key)
if m:
# in python re, $ matches either end of line or right before
# \n at end of line. We can't allow latter case, so
# making sure length matches is simplest way to detect
if len(m.group(0)) != len(key):
raise ValidationException('trailing newline', key)
else:
raise ValidationException('invalid key', key)
return key
@asyncio.coroutine
def _execute_simple_command(self, conn, raw_command):
response, line = bytearray(), b''
conn.writer.write(raw_command)
yield from conn.writer.drain()
while not line.endswith(b'\r\n'):
line = yield from conn.reader.readline()
response.extend(line)
return response[:-2]
@asyncio.coroutine
def close(self):
"""Closes the sockets if its open."""
yield from self._pool.clear()
@asyncio.coroutine
def _multi_get(self, conn, *keys, with_cas=True):
# req - get <key> [<key> ...]\r\n
# resp - VALUE <key> <flags> <bytes> [<cas unique>]\r\n
# <data block>\r\n (if exists)
# [...]
# END\r\n
if not keys:
return {}, {}
[self._validate_key(key) for key in keys]
if len(set(keys)) != len(keys):
raise ClientException('duplicate keys passed to multi_get')
cmd = b'gets ' if with_cas else b'get '
conn.writer.write(cmd + b' '.join(keys) + b'\r\n')
received = {}
cas_tokens = {}
line = yield from conn.reader.readline()
while line != b'END\r\n':
terms = line.split()
if terms[0] == b'VALUE': # exists
key = terms[1]
flags = int(terms[2])
length = int(terms[3])
if flags != 0:
raise ClientException('received non zero flags')
val = (yield from conn.reader.readexactly(length+2))[:-2]
if key in received:
raise ClientException('duplicate results from server')
received[key] = val
cas_tokens[key] = int(terms[4]) if with_cas else None
else:
raise ClientException('get failed', line)
line = yield from conn.reader.readline()
if len(received) > len(keys):
raise ClientException('received too many responses')
return received, cas_tokens
@acquire
def delete(self, conn, key):
"""Deletes a key/value pair from the server.
:param key: is the key to delete.
:return: True if case values was deleted or False to indicate
that the item with this key was not found.
"""
assert self._validate_key(key)
command = b'delete ' + key + b'\r\n'
response = yield from self._execute_simple_command(conn, command)
if response not in (const.DELETED, const.NOT_FOUND):
raise ClientException('Memcached delete failed', response)
return response == const.DELETED
@acquire
def get(self, conn, key, default=None):
"""Gets a single value from the server.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, is the data for this specified key.
"""
values, _ = yield from self._multi_get(conn, key)
return values.get(key, default)
@acquire
def gets(self, conn, key, default=None):
"""Gets a single value from the server together with the cas token.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, ``bytes tuple with the value and the cas
"""
values, cas_tokens = yield from self._multi_get(
conn, key, with_cas=True)
return values.get(key, default), cas_tokens.get(key)
@acquire
def multi_get(self, conn, *keys):
"""Takes a list of keys and returns a list of values.
:param keys: ``list`` keys for the item being fetched.
:return: ``list`` of values for the specified keys.
:raises:``ValidationException``, ``ClientException``,
and socket errors
"""
values, _ = yield from self._multi_get(conn, *keys)
return tuple(values.get(key) for key in keys)
@acquire
def stats(self, conn, args=None):
"""Runs a stats command on the server."""
# req - stats [additional args]\r\n
# resp - STAT <name> <value>\r\n (one per result)
# END\r\n
if args is None:
args = b''
conn.writer.write(b''.join((b'stats ', args, b'\r\n')))
result = {}
resp = yield from conn.reader.readline()
while resp != b'END\r\n':
terms = resp.split()
if len(terms) == 2 and terms[0] == b'STAT':
result[terms[1]] = None
elif len(terms) == 3 and terms[0] == b'STAT':
result[terms[1]] = terms[2]
elif len(terms) >= 3 and terms[0] == b'STAT':
result[terms[1]] = b' '.join(terms[2:])
else:
raise ClientException('stats failed', resp)
resp = yield from conn.reader.readline()
return result
@asyncio.coroutine
def _storage_command(self, conn, command, key, value,
flags=0, exptime=0, cas=None):
# req - set <key> <flags> <exptime> <bytes> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# req - set <key> <flags> <exptime> <bytes> <cas> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# typically, if val is > 1024**2 bytes server returns:
# SERVER_ERROR object too large for cache\r\n
# however custom-compiled memcached can have different limit
# so, we'll let the server decide what's too much
assert self._validate_key(key)
if not isinstance(exptime, int):
raise ValidationException('exptime not int', exptime)
elif exptime < 0:
raise ValidationException('exptime negative', exptime)
args = [str(a).encode('utf-8') for a in (flags, exptime, len(value))]
_cmd = b' '.join([command, key] + args)
if cas:
_cmd += b' ' + str(cas).encode('utf-8')
cmd = _cmd + b'\r\n' + value + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (
const.STORED, const.NOT_STORED, const.EXISTS, const.NOT_FOUND):
raise ClientException('stats {} failed'.format(command), resp)
return resp == const.STORED
@acquire
def set(self, conn, key, value, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'set', key, value, flags, exptime)
return resp
@acquire
def cas(self, conn, key, value, cas_token, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
only if value hasn't change from first retrieval
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:param cas_token: ``int``, unique cas token retrieve from previous
``gets``
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'cas', key, value, flags, exptime, cas=cas_token)
return resp
@acquire
def add(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *doesn't* already
hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'add', key, value, flags, exptime))
@acquire
def replace(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *does*
already hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'replace', key, value, flags, exptime))
@acquire
@acquire
def prepend(self, conn, key, value, exptime=0):
"""Add data to an existing key before existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'prepend', key, value, flags, exptime))
@asyncio.coroutine
def _incr_decr(self, conn, command, key, delta):
delta_byte = str(delta).encode('utf-8')
cmd = b' '.join([command, key, delta_byte]) + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if not resp.isdigit() or resp == const.NOT_FOUND:
raise ClientException(
'Memcached {} command failed'.format(str(command)), resp)
return int(resp) if resp.isdigit() else None
@acquire
def incr(self, conn, key, increment=1):
"""Command is used to change data for some item in-place,
incrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param increment: ``int``, is the amount by which the client
wants to increase the item.
:return: ``int``, new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'incr', key, increment)
return resp
@acquire
def decr(self, conn, key, decrement=1):
"""Command is used to change data for some item in-place,
decrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param decrement: ``int``, is the amount by which the client
wants to decrease the item.
:return: ``int`` new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'decr', key, decrement)
return resp
@acquire
def touch(self, conn, key, exptime):
"""The command is used to update the expiration time of
an existing item without fetching it.
:param key: ``bytes``, is the key to update expiration time
:param exptime: ``int``, is expiration time. This replaces the existing
expiration time.
:return: ``bool``, True in case of success.
"""
assert self._validate_key(key)
_cmd = b' '.join([b'touch', key, str(exptime).encode('utf-8')])
cmd = _cmd + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (const.TOUCHED, const.NOT_FOUND):
raise ClientException('Memcached touch failed', resp)
return resp == const.TOUCHED
@acquire
def version(self, conn):
"""Current version of the server.
:return: ``bytes``, memcached version for current the server.
"""
command = b'version\r\n'
response = yield from self._execute_simple_command(
conn, command)
if not response.startswith(const.VERSION):
raise ClientException('Memcached version failed', response)
version, number = response.split()
return number
@acquire
def flush_all(self, conn):
"""Its effect is to invalidate all existing items immediately"""
command = b'flush_all\r\n'
response = yield from self._execute_simple_command(
conn, command)
if const.OK != response:
raise ClientException('Memcached flush_all failed', response)
|
aio-libs/aiomcache | aiomcache/client.py | Client.prepend | python | def prepend(self, conn, key, value, exptime=0):
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'prepend', key, value, flags, exptime)) | Add data to an existing key before existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success. | train | https://github.com/aio-libs/aiomcache/blob/75d44b201aea91bc2856b10940922d5ebfbfcd7b/aiomcache/client.py#L319-L330 | null | class Client(object):
def __init__(self, host, port=11211, *,
pool_size=2, pool_minsize=None, loop=None):
if not pool_minsize:
pool_minsize = pool_size
self._pool = MemcachePool(
host, port, minsize=pool_minsize, maxsize=pool_size, loop=loop)
# key supports ascii sans space and control chars
# \x21 is !, right after space, and \x7e is -, right before DEL
# also 1 <= len <= 250 as per the spec
_valid_key_re = re.compile(b'^[\x21-\x7e]{1,250}$')
def _validate_key(self, key):
if not isinstance(key, bytes): # avoid bugs subtle and otherwise
raise ValidationException('key must be bytes', key)
m = self._valid_key_re.match(key)
if m:
# in python re, $ matches either end of line or right before
# \n at end of line. We can't allow latter case, so
# making sure length matches is simplest way to detect
if len(m.group(0)) != len(key):
raise ValidationException('trailing newline', key)
else:
raise ValidationException('invalid key', key)
return key
@asyncio.coroutine
def _execute_simple_command(self, conn, raw_command):
response, line = bytearray(), b''
conn.writer.write(raw_command)
yield from conn.writer.drain()
while not line.endswith(b'\r\n'):
line = yield from conn.reader.readline()
response.extend(line)
return response[:-2]
@asyncio.coroutine
def close(self):
"""Closes the sockets if its open."""
yield from self._pool.clear()
@asyncio.coroutine
def _multi_get(self, conn, *keys, with_cas=True):
# req - get <key> [<key> ...]\r\n
# resp - VALUE <key> <flags> <bytes> [<cas unique>]\r\n
# <data block>\r\n (if exists)
# [...]
# END\r\n
if not keys:
return {}, {}
[self._validate_key(key) for key in keys]
if len(set(keys)) != len(keys):
raise ClientException('duplicate keys passed to multi_get')
cmd = b'gets ' if with_cas else b'get '
conn.writer.write(cmd + b' '.join(keys) + b'\r\n')
received = {}
cas_tokens = {}
line = yield from conn.reader.readline()
while line != b'END\r\n':
terms = line.split()
if terms[0] == b'VALUE': # exists
key = terms[1]
flags = int(terms[2])
length = int(terms[3])
if flags != 0:
raise ClientException('received non zero flags')
val = (yield from conn.reader.readexactly(length+2))[:-2]
if key in received:
raise ClientException('duplicate results from server')
received[key] = val
cas_tokens[key] = int(terms[4]) if with_cas else None
else:
raise ClientException('get failed', line)
line = yield from conn.reader.readline()
if len(received) > len(keys):
raise ClientException('received too many responses')
return received, cas_tokens
@acquire
def delete(self, conn, key):
"""Deletes a key/value pair from the server.
:param key: is the key to delete.
:return: True if case values was deleted or False to indicate
that the item with this key was not found.
"""
assert self._validate_key(key)
command = b'delete ' + key + b'\r\n'
response = yield from self._execute_simple_command(conn, command)
if response not in (const.DELETED, const.NOT_FOUND):
raise ClientException('Memcached delete failed', response)
return response == const.DELETED
@acquire
def get(self, conn, key, default=None):
"""Gets a single value from the server.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, is the data for this specified key.
"""
values, _ = yield from self._multi_get(conn, key)
return values.get(key, default)
@acquire
def gets(self, conn, key, default=None):
"""Gets a single value from the server together with the cas token.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, ``bytes tuple with the value and the cas
"""
values, cas_tokens = yield from self._multi_get(
conn, key, with_cas=True)
return values.get(key, default), cas_tokens.get(key)
@acquire
def multi_get(self, conn, *keys):
"""Takes a list of keys and returns a list of values.
:param keys: ``list`` keys for the item being fetched.
:return: ``list`` of values for the specified keys.
:raises:``ValidationException``, ``ClientException``,
and socket errors
"""
values, _ = yield from self._multi_get(conn, *keys)
return tuple(values.get(key) for key in keys)
@acquire
def stats(self, conn, args=None):
"""Runs a stats command on the server."""
# req - stats [additional args]\r\n
# resp - STAT <name> <value>\r\n (one per result)
# END\r\n
if args is None:
args = b''
conn.writer.write(b''.join((b'stats ', args, b'\r\n')))
result = {}
resp = yield from conn.reader.readline()
while resp != b'END\r\n':
terms = resp.split()
if len(terms) == 2 and terms[0] == b'STAT':
result[terms[1]] = None
elif len(terms) == 3 and terms[0] == b'STAT':
result[terms[1]] = terms[2]
elif len(terms) >= 3 and terms[0] == b'STAT':
result[terms[1]] = b' '.join(terms[2:])
else:
raise ClientException('stats failed', resp)
resp = yield from conn.reader.readline()
return result
@asyncio.coroutine
def _storage_command(self, conn, command, key, value,
flags=0, exptime=0, cas=None):
# req - set <key> <flags> <exptime> <bytes> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# req - set <key> <flags> <exptime> <bytes> <cas> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# typically, if val is > 1024**2 bytes server returns:
# SERVER_ERROR object too large for cache\r\n
# however custom-compiled memcached can have different limit
# so, we'll let the server decide what's too much
assert self._validate_key(key)
if not isinstance(exptime, int):
raise ValidationException('exptime not int', exptime)
elif exptime < 0:
raise ValidationException('exptime negative', exptime)
args = [str(a).encode('utf-8') for a in (flags, exptime, len(value))]
_cmd = b' '.join([command, key] + args)
if cas:
_cmd += b' ' + str(cas).encode('utf-8')
cmd = _cmd + b'\r\n' + value + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (
const.STORED, const.NOT_STORED, const.EXISTS, const.NOT_FOUND):
raise ClientException('stats {} failed'.format(command), resp)
return resp == const.STORED
@acquire
def set(self, conn, key, value, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'set', key, value, flags, exptime)
return resp
@acquire
def cas(self, conn, key, value, cas_token, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
only if value hasn't change from first retrieval
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:param cas_token: ``int``, unique cas token retrieve from previous
``gets``
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'cas', key, value, flags, exptime, cas=cas_token)
return resp
@acquire
def add(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *doesn't* already
hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'add', key, value, flags, exptime))
@acquire
def replace(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *does*
already hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'replace', key, value, flags, exptime))
@acquire
def append(self, conn, key, value, exptime=0):
"""Add data to an existing key after existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'append', key, value, flags, exptime))
@acquire
@asyncio.coroutine
def _incr_decr(self, conn, command, key, delta):
delta_byte = str(delta).encode('utf-8')
cmd = b' '.join([command, key, delta_byte]) + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if not resp.isdigit() or resp == const.NOT_FOUND:
raise ClientException(
'Memcached {} command failed'.format(str(command)), resp)
return int(resp) if resp.isdigit() else None
@acquire
def incr(self, conn, key, increment=1):
"""Command is used to change data for some item in-place,
incrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param increment: ``int``, is the amount by which the client
wants to increase the item.
:return: ``int``, new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'incr', key, increment)
return resp
@acquire
def decr(self, conn, key, decrement=1):
"""Command is used to change data for some item in-place,
decrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param decrement: ``int``, is the amount by which the client
wants to decrease the item.
:return: ``int`` new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'decr', key, decrement)
return resp
@acquire
def touch(self, conn, key, exptime):
"""The command is used to update the expiration time of
an existing item without fetching it.
:param key: ``bytes``, is the key to update expiration time
:param exptime: ``int``, is expiration time. This replaces the existing
expiration time.
:return: ``bool``, True in case of success.
"""
assert self._validate_key(key)
_cmd = b' '.join([b'touch', key, str(exptime).encode('utf-8')])
cmd = _cmd + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (const.TOUCHED, const.NOT_FOUND):
raise ClientException('Memcached touch failed', resp)
return resp == const.TOUCHED
@acquire
def version(self, conn):
"""Current version of the server.
:return: ``bytes``, memcached version for current the server.
"""
command = b'version\r\n'
response = yield from self._execute_simple_command(
conn, command)
if not response.startswith(const.VERSION):
raise ClientException('Memcached version failed', response)
version, number = response.split()
return number
@acquire
def flush_all(self, conn):
"""Its effect is to invalidate all existing items immediately"""
command = b'flush_all\r\n'
response = yield from self._execute_simple_command(
conn, command)
if const.OK != response:
raise ClientException('Memcached flush_all failed', response)
|
aio-libs/aiomcache | aiomcache/client.py | Client.incr | python | def incr(self, conn, key, increment=1):
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'incr', key, increment)
return resp | Command is used to change data for some item in-place,
incrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param increment: ``int``, is the amount by which the client
wants to increase the item.
:return: ``int``, new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found | train | https://github.com/aio-libs/aiomcache/blob/75d44b201aea91bc2856b10940922d5ebfbfcd7b/aiomcache/client.py#L343-L359 | [
"def _validate_key(self, key):\n if not isinstance(key, bytes): # avoid bugs subtle and otherwise\n raise ValidationException('key must be bytes', key)\n\n m = self._valid_key_re.match(key)\n if m:\n # in python re, $ matches either end of line or right before\n # \\n at end of line. We can't allow latter case, so\n # making sure length matches is simplest way to detect\n if len(m.group(0)) != len(key):\n raise ValidationException('trailing newline', key)\n else:\n raise ValidationException('invalid key', key)\n\n return key\n"
] | class Client(object):
def __init__(self, host, port=11211, *,
pool_size=2, pool_minsize=None, loop=None):
if not pool_minsize:
pool_minsize = pool_size
self._pool = MemcachePool(
host, port, minsize=pool_minsize, maxsize=pool_size, loop=loop)
# key supports ascii sans space and control chars
# \x21 is !, right after space, and \x7e is -, right before DEL
# also 1 <= len <= 250 as per the spec
_valid_key_re = re.compile(b'^[\x21-\x7e]{1,250}$')
def _validate_key(self, key):
if not isinstance(key, bytes): # avoid bugs subtle and otherwise
raise ValidationException('key must be bytes', key)
m = self._valid_key_re.match(key)
if m:
# in python re, $ matches either end of line or right before
# \n at end of line. We can't allow latter case, so
# making sure length matches is simplest way to detect
if len(m.group(0)) != len(key):
raise ValidationException('trailing newline', key)
else:
raise ValidationException('invalid key', key)
return key
@asyncio.coroutine
def _execute_simple_command(self, conn, raw_command):
response, line = bytearray(), b''
conn.writer.write(raw_command)
yield from conn.writer.drain()
while not line.endswith(b'\r\n'):
line = yield from conn.reader.readline()
response.extend(line)
return response[:-2]
@asyncio.coroutine
def close(self):
"""Closes the sockets if its open."""
yield from self._pool.clear()
@asyncio.coroutine
def _multi_get(self, conn, *keys, with_cas=True):
# req - get <key> [<key> ...]\r\n
# resp - VALUE <key> <flags> <bytes> [<cas unique>]\r\n
# <data block>\r\n (if exists)
# [...]
# END\r\n
if not keys:
return {}, {}
[self._validate_key(key) for key in keys]
if len(set(keys)) != len(keys):
raise ClientException('duplicate keys passed to multi_get')
cmd = b'gets ' if with_cas else b'get '
conn.writer.write(cmd + b' '.join(keys) + b'\r\n')
received = {}
cas_tokens = {}
line = yield from conn.reader.readline()
while line != b'END\r\n':
terms = line.split()
if terms[0] == b'VALUE': # exists
key = terms[1]
flags = int(terms[2])
length = int(terms[3])
if flags != 0:
raise ClientException('received non zero flags')
val = (yield from conn.reader.readexactly(length+2))[:-2]
if key in received:
raise ClientException('duplicate results from server')
received[key] = val
cas_tokens[key] = int(terms[4]) if with_cas else None
else:
raise ClientException('get failed', line)
line = yield from conn.reader.readline()
if len(received) > len(keys):
raise ClientException('received too many responses')
return received, cas_tokens
@acquire
def delete(self, conn, key):
"""Deletes a key/value pair from the server.
:param key: is the key to delete.
:return: True if case values was deleted or False to indicate
that the item with this key was not found.
"""
assert self._validate_key(key)
command = b'delete ' + key + b'\r\n'
response = yield from self._execute_simple_command(conn, command)
if response not in (const.DELETED, const.NOT_FOUND):
raise ClientException('Memcached delete failed', response)
return response == const.DELETED
@acquire
def get(self, conn, key, default=None):
"""Gets a single value from the server.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, is the data for this specified key.
"""
values, _ = yield from self._multi_get(conn, key)
return values.get(key, default)
@acquire
def gets(self, conn, key, default=None):
"""Gets a single value from the server together with the cas token.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, ``bytes tuple with the value and the cas
"""
values, cas_tokens = yield from self._multi_get(
conn, key, with_cas=True)
return values.get(key, default), cas_tokens.get(key)
@acquire
def multi_get(self, conn, *keys):
"""Takes a list of keys and returns a list of values.
:param keys: ``list`` keys for the item being fetched.
:return: ``list`` of values for the specified keys.
:raises:``ValidationException``, ``ClientException``,
and socket errors
"""
values, _ = yield from self._multi_get(conn, *keys)
return tuple(values.get(key) for key in keys)
@acquire
def stats(self, conn, args=None):
"""Runs a stats command on the server."""
# req - stats [additional args]\r\n
# resp - STAT <name> <value>\r\n (one per result)
# END\r\n
if args is None:
args = b''
conn.writer.write(b''.join((b'stats ', args, b'\r\n')))
result = {}
resp = yield from conn.reader.readline()
while resp != b'END\r\n':
terms = resp.split()
if len(terms) == 2 and terms[0] == b'STAT':
result[terms[1]] = None
elif len(terms) == 3 and terms[0] == b'STAT':
result[terms[1]] = terms[2]
elif len(terms) >= 3 and terms[0] == b'STAT':
result[terms[1]] = b' '.join(terms[2:])
else:
raise ClientException('stats failed', resp)
resp = yield from conn.reader.readline()
return result
@asyncio.coroutine
def _storage_command(self, conn, command, key, value,
flags=0, exptime=0, cas=None):
# req - set <key> <flags> <exptime> <bytes> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# req - set <key> <flags> <exptime> <bytes> <cas> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# typically, if val is > 1024**2 bytes server returns:
# SERVER_ERROR object too large for cache\r\n
# however custom-compiled memcached can have different limit
# so, we'll let the server decide what's too much
assert self._validate_key(key)
if not isinstance(exptime, int):
raise ValidationException('exptime not int', exptime)
elif exptime < 0:
raise ValidationException('exptime negative', exptime)
args = [str(a).encode('utf-8') for a in (flags, exptime, len(value))]
_cmd = b' '.join([command, key] + args)
if cas:
_cmd += b' ' + str(cas).encode('utf-8')
cmd = _cmd + b'\r\n' + value + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (
const.STORED, const.NOT_STORED, const.EXISTS, const.NOT_FOUND):
raise ClientException('stats {} failed'.format(command), resp)
return resp == const.STORED
@acquire
def set(self, conn, key, value, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'set', key, value, flags, exptime)
return resp
@acquire
def cas(self, conn, key, value, cas_token, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
only if value hasn't change from first retrieval
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:param cas_token: ``int``, unique cas token retrieve from previous
``gets``
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'cas', key, value, flags, exptime, cas=cas_token)
return resp
@acquire
def add(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *doesn't* already
hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'add', key, value, flags, exptime))
@acquire
def replace(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *does*
already hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'replace', key, value, flags, exptime))
@acquire
def append(self, conn, key, value, exptime=0):
"""Add data to an existing key after existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'append', key, value, flags, exptime))
@acquire
def prepend(self, conn, key, value, exptime=0):
"""Add data to an existing key before existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'prepend', key, value, flags, exptime))
@asyncio.coroutine
def _incr_decr(self, conn, command, key, delta):
delta_byte = str(delta).encode('utf-8')
cmd = b' '.join([command, key, delta_byte]) + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if not resp.isdigit() or resp == const.NOT_FOUND:
raise ClientException(
'Memcached {} command failed'.format(str(command)), resp)
return int(resp) if resp.isdigit() else None
@acquire
@acquire
def decr(self, conn, key, decrement=1):
"""Command is used to change data for some item in-place,
decrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param decrement: ``int``, is the amount by which the client
wants to decrease the item.
:return: ``int`` new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'decr', key, decrement)
return resp
@acquire
def touch(self, conn, key, exptime):
"""The command is used to update the expiration time of
an existing item without fetching it.
:param key: ``bytes``, is the key to update expiration time
:param exptime: ``int``, is expiration time. This replaces the existing
expiration time.
:return: ``bool``, True in case of success.
"""
assert self._validate_key(key)
_cmd = b' '.join([b'touch', key, str(exptime).encode('utf-8')])
cmd = _cmd + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (const.TOUCHED, const.NOT_FOUND):
raise ClientException('Memcached touch failed', resp)
return resp == const.TOUCHED
@acquire
def version(self, conn):
"""Current version of the server.
:return: ``bytes``, memcached version for current the server.
"""
command = b'version\r\n'
response = yield from self._execute_simple_command(
conn, command)
if not response.startswith(const.VERSION):
raise ClientException('Memcached version failed', response)
version, number = response.split()
return number
@acquire
def flush_all(self, conn):
"""Its effect is to invalidate all existing items immediately"""
command = b'flush_all\r\n'
response = yield from self._execute_simple_command(
conn, command)
if const.OK != response:
raise ClientException('Memcached flush_all failed', response)
|
aio-libs/aiomcache | aiomcache/client.py | Client.decr | python | def decr(self, conn, key, decrement=1):
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'decr', key, decrement)
return resp | Command is used to change data for some item in-place,
decrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param decrement: ``int``, is the amount by which the client
wants to decrease the item.
:return: ``int`` new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found | train | https://github.com/aio-libs/aiomcache/blob/75d44b201aea91bc2856b10940922d5ebfbfcd7b/aiomcache/client.py#L362-L378 | [
"def _validate_key(self, key):\n if not isinstance(key, bytes): # avoid bugs subtle and otherwise\n raise ValidationException('key must be bytes', key)\n\n m = self._valid_key_re.match(key)\n if m:\n # in python re, $ matches either end of line or right before\n # \\n at end of line. We can't allow latter case, so\n # making sure length matches is simplest way to detect\n if len(m.group(0)) != len(key):\n raise ValidationException('trailing newline', key)\n else:\n raise ValidationException('invalid key', key)\n\n return key\n"
] | class Client(object):
def __init__(self, host, port=11211, *,
pool_size=2, pool_minsize=None, loop=None):
if not pool_minsize:
pool_minsize = pool_size
self._pool = MemcachePool(
host, port, minsize=pool_minsize, maxsize=pool_size, loop=loop)
# key supports ascii sans space and control chars
# \x21 is !, right after space, and \x7e is -, right before DEL
# also 1 <= len <= 250 as per the spec
_valid_key_re = re.compile(b'^[\x21-\x7e]{1,250}$')
def _validate_key(self, key):
if not isinstance(key, bytes): # avoid bugs subtle and otherwise
raise ValidationException('key must be bytes', key)
m = self._valid_key_re.match(key)
if m:
# in python re, $ matches either end of line or right before
# \n at end of line. We can't allow latter case, so
# making sure length matches is simplest way to detect
if len(m.group(0)) != len(key):
raise ValidationException('trailing newline', key)
else:
raise ValidationException('invalid key', key)
return key
@asyncio.coroutine
def _execute_simple_command(self, conn, raw_command):
response, line = bytearray(), b''
conn.writer.write(raw_command)
yield from conn.writer.drain()
while not line.endswith(b'\r\n'):
line = yield from conn.reader.readline()
response.extend(line)
return response[:-2]
@asyncio.coroutine
def close(self):
"""Closes the sockets if its open."""
yield from self._pool.clear()
@asyncio.coroutine
def _multi_get(self, conn, *keys, with_cas=True):
# req - get <key> [<key> ...]\r\n
# resp - VALUE <key> <flags> <bytes> [<cas unique>]\r\n
# <data block>\r\n (if exists)
# [...]
# END\r\n
if not keys:
return {}, {}
[self._validate_key(key) for key in keys]
if len(set(keys)) != len(keys):
raise ClientException('duplicate keys passed to multi_get')
cmd = b'gets ' if with_cas else b'get '
conn.writer.write(cmd + b' '.join(keys) + b'\r\n')
received = {}
cas_tokens = {}
line = yield from conn.reader.readline()
while line != b'END\r\n':
terms = line.split()
if terms[0] == b'VALUE': # exists
key = terms[1]
flags = int(terms[2])
length = int(terms[3])
if flags != 0:
raise ClientException('received non zero flags')
val = (yield from conn.reader.readexactly(length+2))[:-2]
if key in received:
raise ClientException('duplicate results from server')
received[key] = val
cas_tokens[key] = int(terms[4]) if with_cas else None
else:
raise ClientException('get failed', line)
line = yield from conn.reader.readline()
if len(received) > len(keys):
raise ClientException('received too many responses')
return received, cas_tokens
@acquire
def delete(self, conn, key):
"""Deletes a key/value pair from the server.
:param key: is the key to delete.
:return: True if case values was deleted or False to indicate
that the item with this key was not found.
"""
assert self._validate_key(key)
command = b'delete ' + key + b'\r\n'
response = yield from self._execute_simple_command(conn, command)
if response not in (const.DELETED, const.NOT_FOUND):
raise ClientException('Memcached delete failed', response)
return response == const.DELETED
@acquire
def get(self, conn, key, default=None):
"""Gets a single value from the server.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, is the data for this specified key.
"""
values, _ = yield from self._multi_get(conn, key)
return values.get(key, default)
@acquire
def gets(self, conn, key, default=None):
"""Gets a single value from the server together with the cas token.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, ``bytes tuple with the value and the cas
"""
values, cas_tokens = yield from self._multi_get(
conn, key, with_cas=True)
return values.get(key, default), cas_tokens.get(key)
@acquire
def multi_get(self, conn, *keys):
"""Takes a list of keys and returns a list of values.
:param keys: ``list`` keys for the item being fetched.
:return: ``list`` of values for the specified keys.
:raises:``ValidationException``, ``ClientException``,
and socket errors
"""
values, _ = yield from self._multi_get(conn, *keys)
return tuple(values.get(key) for key in keys)
@acquire
def stats(self, conn, args=None):
"""Runs a stats command on the server."""
# req - stats [additional args]\r\n
# resp - STAT <name> <value>\r\n (one per result)
# END\r\n
if args is None:
args = b''
conn.writer.write(b''.join((b'stats ', args, b'\r\n')))
result = {}
resp = yield from conn.reader.readline()
while resp != b'END\r\n':
terms = resp.split()
if len(terms) == 2 and terms[0] == b'STAT':
result[terms[1]] = None
elif len(terms) == 3 and terms[0] == b'STAT':
result[terms[1]] = terms[2]
elif len(terms) >= 3 and terms[0] == b'STAT':
result[terms[1]] = b' '.join(terms[2:])
else:
raise ClientException('stats failed', resp)
resp = yield from conn.reader.readline()
return result
@asyncio.coroutine
def _storage_command(self, conn, command, key, value,
flags=0, exptime=0, cas=None):
# req - set <key> <flags> <exptime> <bytes> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# req - set <key> <flags> <exptime> <bytes> <cas> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# typically, if val is > 1024**2 bytes server returns:
# SERVER_ERROR object too large for cache\r\n
# however custom-compiled memcached can have different limit
# so, we'll let the server decide what's too much
assert self._validate_key(key)
if not isinstance(exptime, int):
raise ValidationException('exptime not int', exptime)
elif exptime < 0:
raise ValidationException('exptime negative', exptime)
args = [str(a).encode('utf-8') for a in (flags, exptime, len(value))]
_cmd = b' '.join([command, key] + args)
if cas:
_cmd += b' ' + str(cas).encode('utf-8')
cmd = _cmd + b'\r\n' + value + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (
const.STORED, const.NOT_STORED, const.EXISTS, const.NOT_FOUND):
raise ClientException('stats {} failed'.format(command), resp)
return resp == const.STORED
@acquire
def set(self, conn, key, value, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'set', key, value, flags, exptime)
return resp
@acquire
def cas(self, conn, key, value, cas_token, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
only if value hasn't change from first retrieval
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:param cas_token: ``int``, unique cas token retrieve from previous
``gets``
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'cas', key, value, flags, exptime, cas=cas_token)
return resp
@acquire
def add(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *doesn't* already
hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'add', key, value, flags, exptime))
@acquire
def replace(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *does*
already hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'replace', key, value, flags, exptime))
@acquire
def append(self, conn, key, value, exptime=0):
"""Add data to an existing key after existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'append', key, value, flags, exptime))
@acquire
def prepend(self, conn, key, value, exptime=0):
"""Add data to an existing key before existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'prepend', key, value, flags, exptime))
@asyncio.coroutine
def _incr_decr(self, conn, command, key, delta):
delta_byte = str(delta).encode('utf-8')
cmd = b' '.join([command, key, delta_byte]) + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if not resp.isdigit() or resp == const.NOT_FOUND:
raise ClientException(
'Memcached {} command failed'.format(str(command)), resp)
return int(resp) if resp.isdigit() else None
@acquire
def incr(self, conn, key, increment=1):
"""Command is used to change data for some item in-place,
incrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param increment: ``int``, is the amount by which the client
wants to increase the item.
:return: ``int``, new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'incr', key, increment)
return resp
@acquire
@acquire
def touch(self, conn, key, exptime):
"""The command is used to update the expiration time of
an existing item without fetching it.
:param key: ``bytes``, is the key to update expiration time
:param exptime: ``int``, is expiration time. This replaces the existing
expiration time.
:return: ``bool``, True in case of success.
"""
assert self._validate_key(key)
_cmd = b' '.join([b'touch', key, str(exptime).encode('utf-8')])
cmd = _cmd + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (const.TOUCHED, const.NOT_FOUND):
raise ClientException('Memcached touch failed', resp)
return resp == const.TOUCHED
@acquire
def version(self, conn):
"""Current version of the server.
:return: ``bytes``, memcached version for current the server.
"""
command = b'version\r\n'
response = yield from self._execute_simple_command(
conn, command)
if not response.startswith(const.VERSION):
raise ClientException('Memcached version failed', response)
version, number = response.split()
return number
@acquire
def flush_all(self, conn):
"""Its effect is to invalidate all existing items immediately"""
command = b'flush_all\r\n'
response = yield from self._execute_simple_command(
conn, command)
if const.OK != response:
raise ClientException('Memcached flush_all failed', response)
|
aio-libs/aiomcache | aiomcache/client.py | Client.touch | python | def touch(self, conn, key, exptime):
assert self._validate_key(key)
_cmd = b' '.join([b'touch', key, str(exptime).encode('utf-8')])
cmd = _cmd + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (const.TOUCHED, const.NOT_FOUND):
raise ClientException('Memcached touch failed', resp)
return resp == const.TOUCHED | The command is used to update the expiration time of
an existing item without fetching it.
:param key: ``bytes``, is the key to update expiration time
:param exptime: ``int``, is expiration time. This replaces the existing
expiration time.
:return: ``bool``, True in case of success. | train | https://github.com/aio-libs/aiomcache/blob/75d44b201aea91bc2856b10940922d5ebfbfcd7b/aiomcache/client.py#L381-L397 | [
"def _validate_key(self, key):\n if not isinstance(key, bytes): # avoid bugs subtle and otherwise\n raise ValidationException('key must be bytes', key)\n\n m = self._valid_key_re.match(key)\n if m:\n # in python re, $ matches either end of line or right before\n # \\n at end of line. We can't allow latter case, so\n # making sure length matches is simplest way to detect\n if len(m.group(0)) != len(key):\n raise ValidationException('trailing newline', key)\n else:\n raise ValidationException('invalid key', key)\n\n return key\n"
] | class Client(object):
def __init__(self, host, port=11211, *,
pool_size=2, pool_minsize=None, loop=None):
if not pool_minsize:
pool_minsize = pool_size
self._pool = MemcachePool(
host, port, minsize=pool_minsize, maxsize=pool_size, loop=loop)
# key supports ascii sans space and control chars
# \x21 is !, right after space, and \x7e is -, right before DEL
# also 1 <= len <= 250 as per the spec
_valid_key_re = re.compile(b'^[\x21-\x7e]{1,250}$')
def _validate_key(self, key):
if not isinstance(key, bytes): # avoid bugs subtle and otherwise
raise ValidationException('key must be bytes', key)
m = self._valid_key_re.match(key)
if m:
# in python re, $ matches either end of line or right before
# \n at end of line. We can't allow latter case, so
# making sure length matches is simplest way to detect
if len(m.group(0)) != len(key):
raise ValidationException('trailing newline', key)
else:
raise ValidationException('invalid key', key)
return key
@asyncio.coroutine
def _execute_simple_command(self, conn, raw_command):
response, line = bytearray(), b''
conn.writer.write(raw_command)
yield from conn.writer.drain()
while not line.endswith(b'\r\n'):
line = yield from conn.reader.readline()
response.extend(line)
return response[:-2]
@asyncio.coroutine
def close(self):
"""Closes the sockets if its open."""
yield from self._pool.clear()
@asyncio.coroutine
def _multi_get(self, conn, *keys, with_cas=True):
# req - get <key> [<key> ...]\r\n
# resp - VALUE <key> <flags> <bytes> [<cas unique>]\r\n
# <data block>\r\n (if exists)
# [...]
# END\r\n
if not keys:
return {}, {}
[self._validate_key(key) for key in keys]
if len(set(keys)) != len(keys):
raise ClientException('duplicate keys passed to multi_get')
cmd = b'gets ' if with_cas else b'get '
conn.writer.write(cmd + b' '.join(keys) + b'\r\n')
received = {}
cas_tokens = {}
line = yield from conn.reader.readline()
while line != b'END\r\n':
terms = line.split()
if terms[0] == b'VALUE': # exists
key = terms[1]
flags = int(terms[2])
length = int(terms[3])
if flags != 0:
raise ClientException('received non zero flags')
val = (yield from conn.reader.readexactly(length+2))[:-2]
if key in received:
raise ClientException('duplicate results from server')
received[key] = val
cas_tokens[key] = int(terms[4]) if with_cas else None
else:
raise ClientException('get failed', line)
line = yield from conn.reader.readline()
if len(received) > len(keys):
raise ClientException('received too many responses')
return received, cas_tokens
@acquire
def delete(self, conn, key):
"""Deletes a key/value pair from the server.
:param key: is the key to delete.
:return: True if case values was deleted or False to indicate
that the item with this key was not found.
"""
assert self._validate_key(key)
command = b'delete ' + key + b'\r\n'
response = yield from self._execute_simple_command(conn, command)
if response not in (const.DELETED, const.NOT_FOUND):
raise ClientException('Memcached delete failed', response)
return response == const.DELETED
@acquire
def get(self, conn, key, default=None):
"""Gets a single value from the server.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, is the data for this specified key.
"""
values, _ = yield from self._multi_get(conn, key)
return values.get(key, default)
@acquire
def gets(self, conn, key, default=None):
"""Gets a single value from the server together with the cas token.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, ``bytes tuple with the value and the cas
"""
values, cas_tokens = yield from self._multi_get(
conn, key, with_cas=True)
return values.get(key, default), cas_tokens.get(key)
@acquire
def multi_get(self, conn, *keys):
"""Takes a list of keys and returns a list of values.
:param keys: ``list`` keys for the item being fetched.
:return: ``list`` of values for the specified keys.
:raises:``ValidationException``, ``ClientException``,
and socket errors
"""
values, _ = yield from self._multi_get(conn, *keys)
return tuple(values.get(key) for key in keys)
@acquire
def stats(self, conn, args=None):
"""Runs a stats command on the server."""
# req - stats [additional args]\r\n
# resp - STAT <name> <value>\r\n (one per result)
# END\r\n
if args is None:
args = b''
conn.writer.write(b''.join((b'stats ', args, b'\r\n')))
result = {}
resp = yield from conn.reader.readline()
while resp != b'END\r\n':
terms = resp.split()
if len(terms) == 2 and terms[0] == b'STAT':
result[terms[1]] = None
elif len(terms) == 3 and terms[0] == b'STAT':
result[terms[1]] = terms[2]
elif len(terms) >= 3 and terms[0] == b'STAT':
result[terms[1]] = b' '.join(terms[2:])
else:
raise ClientException('stats failed', resp)
resp = yield from conn.reader.readline()
return result
@asyncio.coroutine
def _storage_command(self, conn, command, key, value,
flags=0, exptime=0, cas=None):
# req - set <key> <flags> <exptime> <bytes> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# req - set <key> <flags> <exptime> <bytes> <cas> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# typically, if val is > 1024**2 bytes server returns:
# SERVER_ERROR object too large for cache\r\n
# however custom-compiled memcached can have different limit
# so, we'll let the server decide what's too much
assert self._validate_key(key)
if not isinstance(exptime, int):
raise ValidationException('exptime not int', exptime)
elif exptime < 0:
raise ValidationException('exptime negative', exptime)
args = [str(a).encode('utf-8') for a in (flags, exptime, len(value))]
_cmd = b' '.join([command, key] + args)
if cas:
_cmd += b' ' + str(cas).encode('utf-8')
cmd = _cmd + b'\r\n' + value + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (
const.STORED, const.NOT_STORED, const.EXISTS, const.NOT_FOUND):
raise ClientException('stats {} failed'.format(command), resp)
return resp == const.STORED
@acquire
def set(self, conn, key, value, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'set', key, value, flags, exptime)
return resp
@acquire
def cas(self, conn, key, value, cas_token, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
only if value hasn't change from first retrieval
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:param cas_token: ``int``, unique cas token retrieve from previous
``gets``
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'cas', key, value, flags, exptime, cas=cas_token)
return resp
@acquire
def add(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *doesn't* already
hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'add', key, value, flags, exptime))
@acquire
def replace(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *does*
already hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'replace', key, value, flags, exptime))
@acquire
def append(self, conn, key, value, exptime=0):
"""Add data to an existing key after existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'append', key, value, flags, exptime))
@acquire
def prepend(self, conn, key, value, exptime=0):
"""Add data to an existing key before existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'prepend', key, value, flags, exptime))
@asyncio.coroutine
def _incr_decr(self, conn, command, key, delta):
delta_byte = str(delta).encode('utf-8')
cmd = b' '.join([command, key, delta_byte]) + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if not resp.isdigit() or resp == const.NOT_FOUND:
raise ClientException(
'Memcached {} command failed'.format(str(command)), resp)
return int(resp) if resp.isdigit() else None
@acquire
def incr(self, conn, key, increment=1):
"""Command is used to change data for some item in-place,
incrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param increment: ``int``, is the amount by which the client
wants to increase the item.
:return: ``int``, new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'incr', key, increment)
return resp
@acquire
def decr(self, conn, key, decrement=1):
"""Command is used to change data for some item in-place,
decrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param decrement: ``int``, is the amount by which the client
wants to decrease the item.
:return: ``int`` new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'decr', key, decrement)
return resp
@acquire
@acquire
def version(self, conn):
"""Current version of the server.
:return: ``bytes``, memcached version for current the server.
"""
command = b'version\r\n'
response = yield from self._execute_simple_command(
conn, command)
if not response.startswith(const.VERSION):
raise ClientException('Memcached version failed', response)
version, number = response.split()
return number
@acquire
def flush_all(self, conn):
"""Its effect is to invalidate all existing items immediately"""
command = b'flush_all\r\n'
response = yield from self._execute_simple_command(
conn, command)
if const.OK != response:
raise ClientException('Memcached flush_all failed', response)
|
aio-libs/aiomcache | aiomcache/client.py | Client.version | python | def version(self, conn):
command = b'version\r\n'
response = yield from self._execute_simple_command(
conn, command)
if not response.startswith(const.VERSION):
raise ClientException('Memcached version failed', response)
version, number = response.split()
return number | Current version of the server.
:return: ``bytes``, memcached version for current the server. | train | https://github.com/aio-libs/aiomcache/blob/75d44b201aea91bc2856b10940922d5ebfbfcd7b/aiomcache/client.py#L400-L412 | null | class Client(object):
def __init__(self, host, port=11211, *,
pool_size=2, pool_minsize=None, loop=None):
if not pool_minsize:
pool_minsize = pool_size
self._pool = MemcachePool(
host, port, minsize=pool_minsize, maxsize=pool_size, loop=loop)
# key supports ascii sans space and control chars
# \x21 is !, right after space, and \x7e is -, right before DEL
# also 1 <= len <= 250 as per the spec
_valid_key_re = re.compile(b'^[\x21-\x7e]{1,250}$')
def _validate_key(self, key):
if not isinstance(key, bytes): # avoid bugs subtle and otherwise
raise ValidationException('key must be bytes', key)
m = self._valid_key_re.match(key)
if m:
# in python re, $ matches either end of line or right before
# \n at end of line. We can't allow latter case, so
# making sure length matches is simplest way to detect
if len(m.group(0)) != len(key):
raise ValidationException('trailing newline', key)
else:
raise ValidationException('invalid key', key)
return key
@asyncio.coroutine
def _execute_simple_command(self, conn, raw_command):
response, line = bytearray(), b''
conn.writer.write(raw_command)
yield from conn.writer.drain()
while not line.endswith(b'\r\n'):
line = yield from conn.reader.readline()
response.extend(line)
return response[:-2]
@asyncio.coroutine
def close(self):
"""Closes the sockets if its open."""
yield from self._pool.clear()
@asyncio.coroutine
def _multi_get(self, conn, *keys, with_cas=True):
# req - get <key> [<key> ...]\r\n
# resp - VALUE <key> <flags> <bytes> [<cas unique>]\r\n
# <data block>\r\n (if exists)
# [...]
# END\r\n
if not keys:
return {}, {}
[self._validate_key(key) for key in keys]
if len(set(keys)) != len(keys):
raise ClientException('duplicate keys passed to multi_get')
cmd = b'gets ' if with_cas else b'get '
conn.writer.write(cmd + b' '.join(keys) + b'\r\n')
received = {}
cas_tokens = {}
line = yield from conn.reader.readline()
while line != b'END\r\n':
terms = line.split()
if terms[0] == b'VALUE': # exists
key = terms[1]
flags = int(terms[2])
length = int(terms[3])
if flags != 0:
raise ClientException('received non zero flags')
val = (yield from conn.reader.readexactly(length+2))[:-2]
if key in received:
raise ClientException('duplicate results from server')
received[key] = val
cas_tokens[key] = int(terms[4]) if with_cas else None
else:
raise ClientException('get failed', line)
line = yield from conn.reader.readline()
if len(received) > len(keys):
raise ClientException('received too many responses')
return received, cas_tokens
@acquire
def delete(self, conn, key):
"""Deletes a key/value pair from the server.
:param key: is the key to delete.
:return: True if case values was deleted or False to indicate
that the item with this key was not found.
"""
assert self._validate_key(key)
command = b'delete ' + key + b'\r\n'
response = yield from self._execute_simple_command(conn, command)
if response not in (const.DELETED, const.NOT_FOUND):
raise ClientException('Memcached delete failed', response)
return response == const.DELETED
@acquire
def get(self, conn, key, default=None):
"""Gets a single value from the server.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, is the data for this specified key.
"""
values, _ = yield from self._multi_get(conn, key)
return values.get(key, default)
@acquire
def gets(self, conn, key, default=None):
"""Gets a single value from the server together with the cas token.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, ``bytes tuple with the value and the cas
"""
values, cas_tokens = yield from self._multi_get(
conn, key, with_cas=True)
return values.get(key, default), cas_tokens.get(key)
@acquire
def multi_get(self, conn, *keys):
"""Takes a list of keys and returns a list of values.
:param keys: ``list`` keys for the item being fetched.
:return: ``list`` of values for the specified keys.
:raises:``ValidationException``, ``ClientException``,
and socket errors
"""
values, _ = yield from self._multi_get(conn, *keys)
return tuple(values.get(key) for key in keys)
@acquire
def stats(self, conn, args=None):
"""Runs a stats command on the server."""
# req - stats [additional args]\r\n
# resp - STAT <name> <value>\r\n (one per result)
# END\r\n
if args is None:
args = b''
conn.writer.write(b''.join((b'stats ', args, b'\r\n')))
result = {}
resp = yield from conn.reader.readline()
while resp != b'END\r\n':
terms = resp.split()
if len(terms) == 2 and terms[0] == b'STAT':
result[terms[1]] = None
elif len(terms) == 3 and terms[0] == b'STAT':
result[terms[1]] = terms[2]
elif len(terms) >= 3 and terms[0] == b'STAT':
result[terms[1]] = b' '.join(terms[2:])
else:
raise ClientException('stats failed', resp)
resp = yield from conn.reader.readline()
return result
@asyncio.coroutine
def _storage_command(self, conn, command, key, value,
flags=0, exptime=0, cas=None):
# req - set <key> <flags> <exptime> <bytes> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# req - set <key> <flags> <exptime> <bytes> <cas> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# typically, if val is > 1024**2 bytes server returns:
# SERVER_ERROR object too large for cache\r\n
# however custom-compiled memcached can have different limit
# so, we'll let the server decide what's too much
assert self._validate_key(key)
if not isinstance(exptime, int):
raise ValidationException('exptime not int', exptime)
elif exptime < 0:
raise ValidationException('exptime negative', exptime)
args = [str(a).encode('utf-8') for a in (flags, exptime, len(value))]
_cmd = b' '.join([command, key] + args)
if cas:
_cmd += b' ' + str(cas).encode('utf-8')
cmd = _cmd + b'\r\n' + value + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (
const.STORED, const.NOT_STORED, const.EXISTS, const.NOT_FOUND):
raise ClientException('stats {} failed'.format(command), resp)
return resp == const.STORED
@acquire
def set(self, conn, key, value, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'set', key, value, flags, exptime)
return resp
@acquire
def cas(self, conn, key, value, cas_token, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
only if value hasn't change from first retrieval
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:param cas_token: ``int``, unique cas token retrieve from previous
``gets``
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'cas', key, value, flags, exptime, cas=cas_token)
return resp
@acquire
def add(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *doesn't* already
hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'add', key, value, flags, exptime))
@acquire
def replace(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *does*
already hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'replace', key, value, flags, exptime))
@acquire
def append(self, conn, key, value, exptime=0):
"""Add data to an existing key after existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'append', key, value, flags, exptime))
@acquire
def prepend(self, conn, key, value, exptime=0):
"""Add data to an existing key before existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'prepend', key, value, flags, exptime))
@asyncio.coroutine
def _incr_decr(self, conn, command, key, delta):
delta_byte = str(delta).encode('utf-8')
cmd = b' '.join([command, key, delta_byte]) + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if not resp.isdigit() or resp == const.NOT_FOUND:
raise ClientException(
'Memcached {} command failed'.format(str(command)), resp)
return int(resp) if resp.isdigit() else None
@acquire
def incr(self, conn, key, increment=1):
"""Command is used to change data for some item in-place,
incrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param increment: ``int``, is the amount by which the client
wants to increase the item.
:return: ``int``, new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'incr', key, increment)
return resp
@acquire
def decr(self, conn, key, decrement=1):
"""Command is used to change data for some item in-place,
decrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param decrement: ``int``, is the amount by which the client
wants to decrease the item.
:return: ``int`` new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'decr', key, decrement)
return resp
@acquire
def touch(self, conn, key, exptime):
"""The command is used to update the expiration time of
an existing item without fetching it.
:param key: ``bytes``, is the key to update expiration time
:param exptime: ``int``, is expiration time. This replaces the existing
expiration time.
:return: ``bool``, True in case of success.
"""
assert self._validate_key(key)
_cmd = b' '.join([b'touch', key, str(exptime).encode('utf-8')])
cmd = _cmd + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (const.TOUCHED, const.NOT_FOUND):
raise ClientException('Memcached touch failed', resp)
return resp == const.TOUCHED
@acquire
@acquire
def flush_all(self, conn):
"""Its effect is to invalidate all existing items immediately"""
command = b'flush_all\r\n'
response = yield from self._execute_simple_command(
conn, command)
if const.OK != response:
raise ClientException('Memcached flush_all failed', response)
|
aio-libs/aiomcache | aiomcache/client.py | Client.flush_all | python | def flush_all(self, conn):
command = b'flush_all\r\n'
response = yield from self._execute_simple_command(
conn, command)
if const.OK != response:
raise ClientException('Memcached flush_all failed', response) | Its effect is to invalidate all existing items immediately | train | https://github.com/aio-libs/aiomcache/blob/75d44b201aea91bc2856b10940922d5ebfbfcd7b/aiomcache/client.py#L415-L422 | null | class Client(object):
def __init__(self, host, port=11211, *,
pool_size=2, pool_minsize=None, loop=None):
if not pool_minsize:
pool_minsize = pool_size
self._pool = MemcachePool(
host, port, minsize=pool_minsize, maxsize=pool_size, loop=loop)
# key supports ascii sans space and control chars
# \x21 is !, right after space, and \x7e is -, right before DEL
# also 1 <= len <= 250 as per the spec
_valid_key_re = re.compile(b'^[\x21-\x7e]{1,250}$')
def _validate_key(self, key):
if not isinstance(key, bytes): # avoid bugs subtle and otherwise
raise ValidationException('key must be bytes', key)
m = self._valid_key_re.match(key)
if m:
# in python re, $ matches either end of line or right before
# \n at end of line. We can't allow latter case, so
# making sure length matches is simplest way to detect
if len(m.group(0)) != len(key):
raise ValidationException('trailing newline', key)
else:
raise ValidationException('invalid key', key)
return key
@asyncio.coroutine
def _execute_simple_command(self, conn, raw_command):
response, line = bytearray(), b''
conn.writer.write(raw_command)
yield from conn.writer.drain()
while not line.endswith(b'\r\n'):
line = yield from conn.reader.readline()
response.extend(line)
return response[:-2]
@asyncio.coroutine
def close(self):
"""Closes the sockets if its open."""
yield from self._pool.clear()
@asyncio.coroutine
def _multi_get(self, conn, *keys, with_cas=True):
# req - get <key> [<key> ...]\r\n
# resp - VALUE <key> <flags> <bytes> [<cas unique>]\r\n
# <data block>\r\n (if exists)
# [...]
# END\r\n
if not keys:
return {}, {}
[self._validate_key(key) for key in keys]
if len(set(keys)) != len(keys):
raise ClientException('duplicate keys passed to multi_get')
cmd = b'gets ' if with_cas else b'get '
conn.writer.write(cmd + b' '.join(keys) + b'\r\n')
received = {}
cas_tokens = {}
line = yield from conn.reader.readline()
while line != b'END\r\n':
terms = line.split()
if terms[0] == b'VALUE': # exists
key = terms[1]
flags = int(terms[2])
length = int(terms[3])
if flags != 0:
raise ClientException('received non zero flags')
val = (yield from conn.reader.readexactly(length+2))[:-2]
if key in received:
raise ClientException('duplicate results from server')
received[key] = val
cas_tokens[key] = int(terms[4]) if with_cas else None
else:
raise ClientException('get failed', line)
line = yield from conn.reader.readline()
if len(received) > len(keys):
raise ClientException('received too many responses')
return received, cas_tokens
@acquire
def delete(self, conn, key):
"""Deletes a key/value pair from the server.
:param key: is the key to delete.
:return: True if case values was deleted or False to indicate
that the item with this key was not found.
"""
assert self._validate_key(key)
command = b'delete ' + key + b'\r\n'
response = yield from self._execute_simple_command(conn, command)
if response not in (const.DELETED, const.NOT_FOUND):
raise ClientException('Memcached delete failed', response)
return response == const.DELETED
@acquire
def get(self, conn, key, default=None):
"""Gets a single value from the server.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, is the data for this specified key.
"""
values, _ = yield from self._multi_get(conn, key)
return values.get(key, default)
@acquire
def gets(self, conn, key, default=None):
"""Gets a single value from the server together with the cas token.
:param key: ``bytes``, is the key for the item being fetched
:param default: default value if there is no value.
:return: ``bytes``, ``bytes tuple with the value and the cas
"""
values, cas_tokens = yield from self._multi_get(
conn, key, with_cas=True)
return values.get(key, default), cas_tokens.get(key)
@acquire
def multi_get(self, conn, *keys):
"""Takes a list of keys and returns a list of values.
:param keys: ``list`` keys for the item being fetched.
:return: ``list`` of values for the specified keys.
:raises:``ValidationException``, ``ClientException``,
and socket errors
"""
values, _ = yield from self._multi_get(conn, *keys)
return tuple(values.get(key) for key in keys)
@acquire
def stats(self, conn, args=None):
"""Runs a stats command on the server."""
# req - stats [additional args]\r\n
# resp - STAT <name> <value>\r\n (one per result)
# END\r\n
if args is None:
args = b''
conn.writer.write(b''.join((b'stats ', args, b'\r\n')))
result = {}
resp = yield from conn.reader.readline()
while resp != b'END\r\n':
terms = resp.split()
if len(terms) == 2 and terms[0] == b'STAT':
result[terms[1]] = None
elif len(terms) == 3 and terms[0] == b'STAT':
result[terms[1]] = terms[2]
elif len(terms) >= 3 and terms[0] == b'STAT':
result[terms[1]] = b' '.join(terms[2:])
else:
raise ClientException('stats failed', resp)
resp = yield from conn.reader.readline()
return result
@asyncio.coroutine
def _storage_command(self, conn, command, key, value,
flags=0, exptime=0, cas=None):
# req - set <key> <flags> <exptime> <bytes> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# req - set <key> <flags> <exptime> <bytes> <cas> [noreply]\r\n
# <data block>\r\n
# resp - STORED\r\n (or others)
# typically, if val is > 1024**2 bytes server returns:
# SERVER_ERROR object too large for cache\r\n
# however custom-compiled memcached can have different limit
# so, we'll let the server decide what's too much
assert self._validate_key(key)
if not isinstance(exptime, int):
raise ValidationException('exptime not int', exptime)
elif exptime < 0:
raise ValidationException('exptime negative', exptime)
args = [str(a).encode('utf-8') for a in (flags, exptime, len(value))]
_cmd = b' '.join([command, key] + args)
if cas:
_cmd += b' ' + str(cas).encode('utf-8')
cmd = _cmd + b'\r\n' + value + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (
const.STORED, const.NOT_STORED, const.EXISTS, const.NOT_FOUND):
raise ClientException('stats {} failed'.format(command), resp)
return resp == const.STORED
@acquire
def set(self, conn, key, value, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'set', key, value, flags, exptime)
return resp
@acquire
def cas(self, conn, key, value, cas_token, exptime=0):
"""Sets a key to a value on the server
with an optional exptime (0 means don't auto-expire)
only if value hasn't change from first retrieval
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int``, is expiration time. If it's 0, the
item never expires.
:param cas_token: ``int``, unique cas token retrieve from previous
``gets``
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
resp = yield from self._storage_command(
conn, b'cas', key, value, flags, exptime, cas=cas_token)
return resp
@acquire
def add(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *doesn't* already
hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'add', key, value, flags, exptime))
@acquire
def replace(self, conn, key, value, exptime=0):
"""Store this data, but only if the server *does*
already hold data for this key.
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'replace', key, value, flags, exptime))
@acquire
def append(self, conn, key, value, exptime=0):
"""Add data to an existing key after existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'append', key, value, flags, exptime))
@acquire
def prepend(self, conn, key, value, exptime=0):
"""Add data to an existing key before existing data
:param key: ``bytes``, is the key of the item.
:param value: ``bytes``, data to store.
:param exptime: ``int`` is expiration time. If it's 0, the
item never expires.
:return: ``bool``, True in case of success.
"""
flags = 0 # TODO: fix when exception removed
return (yield from self._storage_command(
conn, b'prepend', key, value, flags, exptime))
@asyncio.coroutine
def _incr_decr(self, conn, command, key, delta):
delta_byte = str(delta).encode('utf-8')
cmd = b' '.join([command, key, delta_byte]) + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if not resp.isdigit() or resp == const.NOT_FOUND:
raise ClientException(
'Memcached {} command failed'.format(str(command)), resp)
return int(resp) if resp.isdigit() else None
@acquire
def incr(self, conn, key, increment=1):
"""Command is used to change data for some item in-place,
incrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param increment: ``int``, is the amount by which the client
wants to increase the item.
:return: ``int``, new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'incr', key, increment)
return resp
@acquire
def decr(self, conn, key, decrement=1):
"""Command is used to change data for some item in-place,
decrementing it. The data for the item is treated as decimal
representation of a 64-bit unsigned integer.
:param key: ``bytes``, is the key of the item the client wishes
to change
:param decrement: ``int``, is the amount by which the client
wants to decrease the item.
:return: ``int`` new value of the item's data,
after the increment or ``None`` to indicate the item with
this value was not found
"""
assert self._validate_key(key)
resp = yield from self._incr_decr(
conn, b'decr', key, decrement)
return resp
@acquire
def touch(self, conn, key, exptime):
"""The command is used to update the expiration time of
an existing item without fetching it.
:param key: ``bytes``, is the key to update expiration time
:param exptime: ``int``, is expiration time. This replaces the existing
expiration time.
:return: ``bool``, True in case of success.
"""
assert self._validate_key(key)
_cmd = b' '.join([b'touch', key, str(exptime).encode('utf-8')])
cmd = _cmd + b'\r\n'
resp = yield from self._execute_simple_command(conn, cmd)
if resp not in (const.TOUCHED, const.NOT_FOUND):
raise ClientException('Memcached touch failed', resp)
return resp == const.TOUCHED
@acquire
def version(self, conn):
"""Current version of the server.
:return: ``bytes``, memcached version for current the server.
"""
command = b'version\r\n'
response = yield from self._execute_simple_command(
conn, command)
if not response.startswith(const.VERSION):
raise ClientException('Memcached version failed', response)
version, number = response.split()
return number
@acquire
|
jacobtomlinson/datapoint-python | datapoint/__init__.py | connection | python | def connection(profile_name='default', api_key=None):
if api_key is None:
profile_fname = datapoint.profile.API_profile_fname(profile_name)
if not os.path.exists(profile_fname):
raise ValueError('Profile not found in {}. Please install your API \n'
'key with datapoint.profile.install_API_key('
'"<YOUR-KEY>")'.format(profile_fname))
with open(profile_fname) as fh:
api_key = fh.readlines()
return Manager(api_key=api_key) | Connect to DataPoint with the given API key profile name. | train | https://github.com/jacobtomlinson/datapoint-python/blob/1d3f596f21975f42c1484f5a9c3ff057de0b47ae/datapoint/__init__.py#L12-L22 | [
"def API_profile_fname(profile_name='default'):\n \"\"\"Get the API key profile filename.\"\"\"\n return os.path.join(appdirs.user_data_dir('DataPoint'),\n profile_name + '.key')\n"
] | """Datapoint API to retrieve Met Office data"""
__author__ = "Jacob Tomlinson"
__author_email__ = "jacob.tomlinson@metoffice.gov.uk"
import os.path
from datapoint.Manager import Manager
import datapoint.profile
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
jacobtomlinson/datapoint-python | datapoint/Timestep.py | Timestep.elements | python | def elements(self):
elements = []
for el in ct:
if isinstance(el[1], datapoint.Element.Element):
elements.append(el[1])
return elements | Return a list of the elements which are not None | train | https://github.com/jacobtomlinson/datapoint-python/blob/1d3f596f21975f42c1484f5a9c3ff057de0b47ae/datapoint/Timestep.py#L25-L34 | null | class Timestep(object):
def __init__(self, api_key=""):
self.api_key = api_key
self.name = None
self.date = None
self.weather = None
self.temperature = None
self.feels_like_temperature = None
self.wind_speed = None
self.wind_direction = None
self.wind_gust = None
self.visibility = None
self.uv = None
self.precipitation = None
self.humidity = None
self.pressure = None
self.pressure_tendency = None
self.dew_point = None
def __iter__(self):
for attr, value in self.__dict__.items():
yield attr, value
|
jacobtomlinson/datapoint-python | datapoint/Manager.py | Manager.__retry_session | python | def __retry_session(self, retries=10, backoff_factor=0.3,
status_forcelist=(500, 502, 504),
session=None):
# requests.Session allows finer control, which is needed to use the
# retrying code
the_session = session or requests.Session()
# The Retry object manages the actual retrying
retry = Retry(total=retries, read=retries, connect=retries,
backoff_factor=backoff_factor,
status_forcelist=status_forcelist)
adapter = HTTPAdapter(max_retries=retry)
the_session.mount('http://', adapter)
the_session.mount('https://', adapter)
return the_session | Retry the connection using requests if it fails. Use this as a wrapper
to request from datapoint | train | https://github.com/jacobtomlinson/datapoint-python/blob/1d3f596f21975f42c1484f5a9c3ff057de0b47ae/datapoint/Manager.py#L109-L131 | null | class Manager(object):
"""
Datapoint Manager object
"""
def __init__(self, api_key=""):
self.api_key = api_key
self.call_response = None
# The list of sites changes infrequently so limit to requesting it
# every hour.
self.forecast_sites_last_update = 0
self.forecast_sites_last_request = None
self.forecast_sites_update_time = 3600
self.observation_sites_last_update = 0
self.observation_sites_last_request = None
self.observation_sites_update_time = 3600
self.regions = RegionManager(self.api_key)
def __call_api(self, path, params=None, api_url=FORECAST_URL):
"""
Call the datapoint api using the requests module
"""
if not params:
params = dict()
payload = {'key': self.api_key}
payload.update(params)
url = "%s/%s" % (api_url, path)
# Add a timeout to the request.
# The value of 1 second is based on attempting 100 connections to
# datapoint and taking ten times the mean connection time (rounded up).
# Could expose to users in the functions which need to call the api.
#req = requests.get(url, params=payload, timeout=1)
# The wrapper function __retry_session returns a requests.Session
# object. This has a .get() function like requests.get(), so the use
# doesn't change here.
sess = self.__retry_session()
req = sess.get(url, params=payload, timeout=1)
try:
data = req.json()
except ValueError:
raise APIException("DataPoint has not returned any data, this could be due to an incorrect API key")
self.call_response = data
if req.status_code != 200:
msg = [data[m] for m in ("message", "error_message", "status") \
if m in data][0]
raise Exception(msg)
return data
def _distance_between_coords(self, lon1, lat1, lon2, lat2):
"""
Calculate the great circle distance between two points
on the earth (specified in decimal degrees).
Haversine formula states that:
d = 2 * r * arcsin(sqrt(sin^2((lat1 - lat2) / 2 +
cos(lat1)cos(lat2)sin^2((lon1 - lon2) / 2))))
where r is the radius of the sphere. This assumes the earth is spherical.
"""
# Convert the coordinates of the points to radians.
lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
r = 6371
d_hav = 2 * r * asin(sqrt((sin((lat1 - lat2) / 2))**2 + \
cos(lat1) * cos(lat2) * (sin((lon1 - lon2) / 2)**2 )))
return d_hav
def _get_wx_units(self, params, name):
"""
Give the Wx array returned from datapoint and an element
name and return the units for that element.
"""
units = ""
for param in params:
if str(name) == str(param['name']):
units = param['units']
return units
def _weather_to_text(self, code):
if not isinstance(code, (int, long)):
raise ValueError("Weather code must be an integer not", type(code))
if code < 0 or code > 30:
raise ValueError("Weather code outof bounds, should be 0-30")
text = WEATHER_CODES[str(code)]
return text
def _visibility_to_text(self, distance):
"""
Convert observed visibility in metres to text used in forecast
"""
if not isinstance(distance, (int, long)):
raise ValueError("Distance must be an integer not", type(distance))
if distance < 0:
raise ValueError("Distance out of bounds, should be 0 or greater")
if 0 <= distance < 1000:
return 'VP'
elif 1000 <= distance < 4000:
return 'PO'
elif 4000 <= distance < 10000:
return 'MO'
elif 10000 <= distance < 20000:
return 'GO'
elif 20000 <= distance < 40000:
return 'VG'
else:
return 'EX'
def get_all_sites(self):
"""
Deprecated. This function returns a list of Site object.
"""
warning_message = 'This function is deprecated. Use get_forecast_sites() instead'
warn(warning_message, DeprecationWarning, stacklevel=2)
return self.get_forecast_sites()
def get_forecast_sites(self):
"""
This function returns a list of Site object.
"""
time_now = time()
if (time_now - self.forecast_sites_last_update) > self.forecast_sites_update_time or self.forecast_sites_last_request is None:
data = self.__call_api("sitelist/")
sites = list()
for jsoned in data['Locations']['Location']:
site = Site()
site.name = jsoned['name']
site.id = jsoned['id']
site.latitude = jsoned['latitude']
site.longitude = jsoned['longitude']
if 'region' in jsoned:
site.region = jsoned['region']
if 'elevation' in jsoned:
site.elevation = jsoned['elevation']
if 'unitaryAuthArea' in jsoned:
site.unitaryAuthArea = jsoned['unitaryAuthArea']
if 'nationalPark' in jsoned:
site.nationalPark = jsoned['nationalPark']
site.api_key = self.api_key
sites.append(site)
self.forecast_sites_last_request = sites
# Only set self.sites_last_update once self.sites_last_request has
# been set
self.forecast_sites_last_update = time_now
else:
sites = self.forecast_sites_last_request
return sites
def get_nearest_site(self, latitude=None, longitude=None):
"""
Deprecated. This function returns nearest Site object to the specified
coordinates.
"""
warning_message = 'This function is deprecated. Use get_nearest_forecast_site() instead'
warn(warning_message, DeprecationWarning, stacklevel=2)
return self.get_nearest_forecast_site(latitude, longitude)
def get_nearest_forecast_site(self, latitude=None, longitude=None):
"""
This function returns the nearest Site object to the specified
coordinates.
"""
if longitude is None:
print('ERROR: No latitude given.')
return False
if latitude is None:
print('ERROR: No latitude given.')
return False
nearest = False
distance = None
sites = self.get_forecast_sites()
# Sometimes there is a TypeError exception here: sites is None
# So, sometimes self.get_all_sites() has returned None.
for site in sites:
new_distance = \
self._distance_between_coords(
float(site.longitude),
float(site.latitude),
float(longitude),
float(latitude))
if ((distance == None) or (new_distance < distance)):
distance = new_distance
nearest = site
# If the nearest site is more than 30km away, raise an error
if distance > 30:
raise APIException("There is no site within 30km.")
return nearest
def get_forecast_for_site(self, site_id, frequency="daily"):
"""
Get a forecast for the provided site
A frequency of "daily" will return two timesteps:
"Day" and "Night".
A frequency of "3hourly" will return 8 timesteps:
0, 180, 360 ... 1260 (minutes since midnight UTC)
"""
data = self.__call_api(site_id, {"res":frequency})
params = data['SiteRep']['Wx']['Param']
forecast = Forecast()
forecast.data_date = data['SiteRep']['DV']['dataDate']
forecast.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)
forecast.continent = data['SiteRep']['DV']['Location']['continent']
forecast.country = data['SiteRep']['DV']['Location']['country']
forecast.name = data['SiteRep']['DV']['Location']['name']
forecast.longitude = data['SiteRep']['DV']['Location']['lon']
forecast.latitude = data['SiteRep']['DV']['Location']['lat']
forecast.id = data['SiteRep']['DV']['Location']['i']
forecast.elevation = data['SiteRep']['DV']['Location']['elevation']
for day in data['SiteRep']['DV']['Location']['Period']:
new_day = Day()
new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
for timestep in day['Rep']:
new_timestep = Timestep()
if timestep['$'] == "Day":
cur_elements = ELEMENTS['Day']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
+ timedelta(hours=12)
elif timestep['$'] == "Night":
cur_elements = ELEMENTS['Night']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
else:
cur_elements = ELEMENTS['Default']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
+ timedelta(minutes=int(timestep['$']))
if frequency == 'daily':
new_timestep.name = timestep['$']
elif frequency == '3hourly':
new_timestep.name = int(timestep['$'])
new_timestep.weather = \
Element(cur_elements['W'],
timestep[cur_elements['W']],
self._get_wx_units(params, cur_elements['W']))
new_timestep.weather.text = self._weather_to_text(int(timestep[cur_elements['W']]))
new_timestep.temperature = \
Element(cur_elements['T'],
int(timestep[cur_elements['T']]),
self._get_wx_units(params, cur_elements['T']))
new_timestep.feels_like_temperature = \
Element(cur_elements['F'],
int(timestep[cur_elements['F']]),
self._get_wx_units(params, cur_elements['F']))
new_timestep.wind_speed = \
Element(cur_elements['S'],
int(timestep[cur_elements['S']]),
self._get_wx_units(params, cur_elements['S']))
new_timestep.wind_direction = \
Element(cur_elements['D'],
timestep[cur_elements['D']],
self._get_wx_units(params, cur_elements['D']))
new_timestep.wind_gust = \
Element(cur_elements['G'],
int(timestep[cur_elements['G']]),
self._get_wx_units(params, cur_elements['G']))
new_timestep.visibility = \
Element(cur_elements['V'],
timestep[cur_elements['V']],
self._get_wx_units(params, cur_elements['V']))
new_timestep.precipitation = \
Element(cur_elements['Pp'],
int(timestep[cur_elements['Pp']]),
self._get_wx_units(params, cur_elements['Pp']))
new_timestep.humidity = \
Element(cur_elements['H'],
int(timestep[cur_elements['H']]),
self._get_wx_units(params, cur_elements['H']))
if 'U' in cur_elements and cur_elements['U'] in timestep:
new_timestep.uv = \
Element(cur_elements['U'],
timestep[cur_elements['U']],
self._get_wx_units(params, cur_elements['U']))
new_day.timesteps.append(new_timestep)
forecast.days.append(new_day)
return forecast
def get_observation_sites(self):
"""
This function returns a list of Site objects for which observations are available.
"""
if (time() - self.observation_sites_last_update) > self.observation_sites_update_time:
self.observation_sites_last_update = time()
data = self.__call_api("sitelist/", None, OBSERVATION_URL)
sites = list()
for jsoned in data['Locations']['Location']:
site = Site()
site.name = jsoned['name']
site.id = jsoned['id']
site.latitude = jsoned['latitude']
site.longitude = jsoned['longitude']
if 'region' in jsoned:
site.region = jsoned['region']
if 'elevation' in jsoned:
site.elevation = jsoned['elevation']
if 'unitaryAuthArea' in jsoned:
site.unitaryAuthArea = jsoned['unitaryAuthArea']
if 'nationalPark' in jsoned:
site.nationalPark = jsoned['nationalPark']
site.api_key = self.api_key
sites.append(site)
self.observation_sites_last_request = sites
else:
sites = observation_self.sites_last_request
return sites
def get_nearest_observation_site(self, latitude=None, longitude=None):
"""
This function returns the nearest Site to the specified
coordinates that supports observations
"""
if longitude is None:
print('ERROR: No longitude given.')
return False
if latitude is None:
print('ERROR: No latitude given.')
return False
nearest = False
distance = None
sites = self.get_observation_sites()
for site in sites:
new_distance = \
self._distance_between_coords(
float(site.longitude),
float(site.latitude),
float(longitude),
float(latitude))
if ((distance == None) or (new_distance < distance)):
distance = new_distance
nearest = site
# If the nearest site is more than 20km away, raise an error
if distance > 20:
raise APIException("There is no site within 30km.")
return nearest
def get_observations_for_site(self, site_id, frequency='hourly'):
"""
Get observations for the provided site
Returns hourly observations for the previous 24 hours
"""
data = self.__call_api(site_id,{"res":frequency}, OBSERVATION_URL)
params = data['SiteRep']['Wx']['Param']
observation = Observation()
observation.data_date = data['SiteRep']['DV']['dataDate']
observation.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)
observation.continent = data['SiteRep']['DV']['Location']['continent']
observation.country = data['SiteRep']['DV']['Location']['country']
observation.name = data['SiteRep']['DV']['Location']['name']
observation.longitude = data['SiteRep']['DV']['Location']['lon']
observation.latitude = data['SiteRep']['DV']['Location']['lat']
observation.id = data['SiteRep']['DV']['Location']['i']
observation.elevation = data['SiteRep']['DV']['Location']['elevation']
for day in data['SiteRep']['DV']['Location']['Period']:
new_day = Day()
new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
# If the day only has 1 timestep, put it into a list by itself so it can be treated
# the same as a day with multiple timesteps
if type(day['Rep']) is not list:
day['Rep'] = [day['Rep']]
for timestep in day['Rep']:
# As stated in
# https://www.metoffice.gov.uk/datapoint/product/uk-hourly-site-specific-observations,
# some sites do not have all parameters available for
# observations. The documentation does not state which
# fields may be absent. If the parameter is not available,
# nothing is returned from the API. If this happens the
# value of the element is set to 'Not reported'. This may
# change to the element not being assigned to the timestep.
new_timestep = Timestep()
# Assume the '$' field is always present.
new_timestep.name = int(timestep['$'])
cur_elements = ELEMENTS['Observation']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) + timedelta(minutes=int(timestep['$']))
if cur_elements['W'] in timestep:
new_timestep.weather = \
Element(cur_elements['W'],
timestep[cur_elements['W']],
self._get_wx_units(params, cur_elements['W']))
new_timestep.weather.text = \
self._weather_to_text(int(timestep[cur_elements['W']]))
else:
new_timestep.weather = \
Element(cur_elements['W'],
'Not reported')
if cur_elements['T'] in timestep:
new_timestep.temperature = \
Element(cur_elements['T'],
float(timestep[cur_elements['T']]),
self._get_wx_units(params, cur_elements['T']))
else:
new_timestep.temperature = \
Element(cur_elements['T'],
'Not reported')
if 'S' in timestep:
new_timestep.wind_speed = \
Element(cur_elements['S'],
int(timestep[cur_elements['S']]),
self._get_wx_units(params, cur_elements['S']))
else:
new_timestep.wind_speed = \
Element(cur_elements['S'],
'Not reported')
if 'D' in timestep:
new_timestep.wind_direction = \
Element(cur_elements['D'],
timestep[cur_elements['D']],
self._get_wx_units(params, cur_elements['D']))
else:
new_timestep.wind_direction = \
Element(cur_elements['D'],
'Not reported')
if cur_elements['V'] in timestep:
new_timestep.visibility = \
Element(cur_elements['V'],
int(timestep[cur_elements['V']]),
self._get_wx_units(params, cur_elements['V']))
new_timestep.visibility.text = self._visibility_to_text(int(timestep[cur_elements['V']]))
else:
new_timestep.visibility = \
Element(cur_elements['V'],
'Not reported')
if cur_elements['H'] in timestep:
new_timestep.humidity = \
Element(cur_elements['H'],
float(timestep[cur_elements['H']]),
self._get_wx_units(params, cur_elements['H']))
else:
new_timestep.humidity = \
Element(cur_elements['H'],
'Not reported')
if cur_elements['Dp'] in timestep:
new_timestep.dew_point = \
Element(cur_elements['Dp'],
float(timestep[cur_elements['Dp']]),
self._get_wx_units(params,
cur_elements['Dp']))
else:
new_timestep.dew_point = \
Element(cur_elements['Dp'],
'Not reported')
if cur_elements['P'] in timestep:
new_timestep.pressure = \
Element(cur_elements['P'],
float(timestep[cur_elements['P']]),
self._get_wx_units(params, cur_elements['P']))
else:
new_timestep.pressure = \
Element(cur_elements['P'],
'Not reported')
if cur_elements['Pt'] in timestep:
new_timestep.pressure_tendency = \
Element(cur_elements['Pt'],
timestep[cur_elements['Pt']],
self._get_wx_units(params, cur_elements['Pt']))
else:
new_timestep.pressure_tendency = \
Element(cur_elements['Pt'],
'Not reported')
new_day.timesteps.append(new_timestep)
observation.days.append(new_day)
return observation
|
jacobtomlinson/datapoint-python | datapoint/Manager.py | Manager.__call_api | python | def __call_api(self, path, params=None, api_url=FORECAST_URL):
if not params:
params = dict()
payload = {'key': self.api_key}
payload.update(params)
url = "%s/%s" % (api_url, path)
# Add a timeout to the request.
# The value of 1 second is based on attempting 100 connections to
# datapoint and taking ten times the mean connection time (rounded up).
# Could expose to users in the functions which need to call the api.
#req = requests.get(url, params=payload, timeout=1)
# The wrapper function __retry_session returns a requests.Session
# object. This has a .get() function like requests.get(), so the use
# doesn't change here.
sess = self.__retry_session()
req = sess.get(url, params=payload, timeout=1)
try:
data = req.json()
except ValueError:
raise APIException("DataPoint has not returned any data, this could be due to an incorrect API key")
self.call_response = data
if req.status_code != 200:
msg = [data[m] for m in ("message", "error_message", "status") \
if m in data][0]
raise Exception(msg)
return data | Call the datapoint api using the requests module | train | https://github.com/jacobtomlinson/datapoint-python/blob/1d3f596f21975f42c1484f5a9c3ff057de0b47ae/datapoint/Manager.py#L133-L165 | [
"def __retry_session(self, retries=10, backoff_factor=0.3,\n status_forcelist=(500, 502, 504),\n session=None):\n \"\"\"\n Retry the connection using requests if it fails. Use this as a wrapper\n to request from datapoint\n \"\"\"\n\n # requests.Session allows finer control, which is needed to use the\n # retrying code\n the_session = session or requests.Session()\n\n # The Retry object manages the actual retrying\n retry = Retry(total=retries, read=retries, connect=retries,\n backoff_factor=backoff_factor,\n status_forcelist=status_forcelist)\n\n adapter = HTTPAdapter(max_retries=retry)\n\n the_session.mount('http://', adapter)\n the_session.mount('https://', adapter)\n\n return the_session\n"
] | class Manager(object):
"""
Datapoint Manager object
"""
def __init__(self, api_key=""):
self.api_key = api_key
self.call_response = None
# The list of sites changes infrequently so limit to requesting it
# every hour.
self.forecast_sites_last_update = 0
self.forecast_sites_last_request = None
self.forecast_sites_update_time = 3600
self.observation_sites_last_update = 0
self.observation_sites_last_request = None
self.observation_sites_update_time = 3600
self.regions = RegionManager(self.api_key)
def __retry_session(self, retries=10, backoff_factor=0.3,
status_forcelist=(500, 502, 504),
session=None):
"""
Retry the connection using requests if it fails. Use this as a wrapper
to request from datapoint
"""
# requests.Session allows finer control, which is needed to use the
# retrying code
the_session = session or requests.Session()
# The Retry object manages the actual retrying
retry = Retry(total=retries, read=retries, connect=retries,
backoff_factor=backoff_factor,
status_forcelist=status_forcelist)
adapter = HTTPAdapter(max_retries=retry)
the_session.mount('http://', adapter)
the_session.mount('https://', adapter)
return the_session
def _distance_between_coords(self, lon1, lat1, lon2, lat2):
"""
Calculate the great circle distance between two points
on the earth (specified in decimal degrees).
Haversine formula states that:
d = 2 * r * arcsin(sqrt(sin^2((lat1 - lat2) / 2 +
cos(lat1)cos(lat2)sin^2((lon1 - lon2) / 2))))
where r is the radius of the sphere. This assumes the earth is spherical.
"""
# Convert the coordinates of the points to radians.
lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
r = 6371
d_hav = 2 * r * asin(sqrt((sin((lat1 - lat2) / 2))**2 + \
cos(lat1) * cos(lat2) * (sin((lon1 - lon2) / 2)**2 )))
return d_hav
def _get_wx_units(self, params, name):
"""
Give the Wx array returned from datapoint and an element
name and return the units for that element.
"""
units = ""
for param in params:
if str(name) == str(param['name']):
units = param['units']
return units
def _weather_to_text(self, code):
if not isinstance(code, (int, long)):
raise ValueError("Weather code must be an integer not", type(code))
if code < 0 or code > 30:
raise ValueError("Weather code outof bounds, should be 0-30")
text = WEATHER_CODES[str(code)]
return text
def _visibility_to_text(self, distance):
"""
Convert observed visibility in metres to text used in forecast
"""
if not isinstance(distance, (int, long)):
raise ValueError("Distance must be an integer not", type(distance))
if distance < 0:
raise ValueError("Distance out of bounds, should be 0 or greater")
if 0 <= distance < 1000:
return 'VP'
elif 1000 <= distance < 4000:
return 'PO'
elif 4000 <= distance < 10000:
return 'MO'
elif 10000 <= distance < 20000:
return 'GO'
elif 20000 <= distance < 40000:
return 'VG'
else:
return 'EX'
def get_all_sites(self):
"""
Deprecated. This function returns a list of Site object.
"""
warning_message = 'This function is deprecated. Use get_forecast_sites() instead'
warn(warning_message, DeprecationWarning, stacklevel=2)
return self.get_forecast_sites()
def get_forecast_sites(self):
"""
This function returns a list of Site object.
"""
time_now = time()
if (time_now - self.forecast_sites_last_update) > self.forecast_sites_update_time or self.forecast_sites_last_request is None:
data = self.__call_api("sitelist/")
sites = list()
for jsoned in data['Locations']['Location']:
site = Site()
site.name = jsoned['name']
site.id = jsoned['id']
site.latitude = jsoned['latitude']
site.longitude = jsoned['longitude']
if 'region' in jsoned:
site.region = jsoned['region']
if 'elevation' in jsoned:
site.elevation = jsoned['elevation']
if 'unitaryAuthArea' in jsoned:
site.unitaryAuthArea = jsoned['unitaryAuthArea']
if 'nationalPark' in jsoned:
site.nationalPark = jsoned['nationalPark']
site.api_key = self.api_key
sites.append(site)
self.forecast_sites_last_request = sites
# Only set self.sites_last_update once self.sites_last_request has
# been set
self.forecast_sites_last_update = time_now
else:
sites = self.forecast_sites_last_request
return sites
def get_nearest_site(self, latitude=None, longitude=None):
"""
Deprecated. This function returns nearest Site object to the specified
coordinates.
"""
warning_message = 'This function is deprecated. Use get_nearest_forecast_site() instead'
warn(warning_message, DeprecationWarning, stacklevel=2)
return self.get_nearest_forecast_site(latitude, longitude)
def get_nearest_forecast_site(self, latitude=None, longitude=None):
"""
This function returns the nearest Site object to the specified
coordinates.
"""
if longitude is None:
print('ERROR: No latitude given.')
return False
if latitude is None:
print('ERROR: No latitude given.')
return False
nearest = False
distance = None
sites = self.get_forecast_sites()
# Sometimes there is a TypeError exception here: sites is None
# So, sometimes self.get_all_sites() has returned None.
for site in sites:
new_distance = \
self._distance_between_coords(
float(site.longitude),
float(site.latitude),
float(longitude),
float(latitude))
if ((distance == None) or (new_distance < distance)):
distance = new_distance
nearest = site
# If the nearest site is more than 30km away, raise an error
if distance > 30:
raise APIException("There is no site within 30km.")
return nearest
def get_forecast_for_site(self, site_id, frequency="daily"):
"""
Get a forecast for the provided site
A frequency of "daily" will return two timesteps:
"Day" and "Night".
A frequency of "3hourly" will return 8 timesteps:
0, 180, 360 ... 1260 (minutes since midnight UTC)
"""
data = self.__call_api(site_id, {"res":frequency})
params = data['SiteRep']['Wx']['Param']
forecast = Forecast()
forecast.data_date = data['SiteRep']['DV']['dataDate']
forecast.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)
forecast.continent = data['SiteRep']['DV']['Location']['continent']
forecast.country = data['SiteRep']['DV']['Location']['country']
forecast.name = data['SiteRep']['DV']['Location']['name']
forecast.longitude = data['SiteRep']['DV']['Location']['lon']
forecast.latitude = data['SiteRep']['DV']['Location']['lat']
forecast.id = data['SiteRep']['DV']['Location']['i']
forecast.elevation = data['SiteRep']['DV']['Location']['elevation']
for day in data['SiteRep']['DV']['Location']['Period']:
new_day = Day()
new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
for timestep in day['Rep']:
new_timestep = Timestep()
if timestep['$'] == "Day":
cur_elements = ELEMENTS['Day']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
+ timedelta(hours=12)
elif timestep['$'] == "Night":
cur_elements = ELEMENTS['Night']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
else:
cur_elements = ELEMENTS['Default']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
+ timedelta(minutes=int(timestep['$']))
if frequency == 'daily':
new_timestep.name = timestep['$']
elif frequency == '3hourly':
new_timestep.name = int(timestep['$'])
new_timestep.weather = \
Element(cur_elements['W'],
timestep[cur_elements['W']],
self._get_wx_units(params, cur_elements['W']))
new_timestep.weather.text = self._weather_to_text(int(timestep[cur_elements['W']]))
new_timestep.temperature = \
Element(cur_elements['T'],
int(timestep[cur_elements['T']]),
self._get_wx_units(params, cur_elements['T']))
new_timestep.feels_like_temperature = \
Element(cur_elements['F'],
int(timestep[cur_elements['F']]),
self._get_wx_units(params, cur_elements['F']))
new_timestep.wind_speed = \
Element(cur_elements['S'],
int(timestep[cur_elements['S']]),
self._get_wx_units(params, cur_elements['S']))
new_timestep.wind_direction = \
Element(cur_elements['D'],
timestep[cur_elements['D']],
self._get_wx_units(params, cur_elements['D']))
new_timestep.wind_gust = \
Element(cur_elements['G'],
int(timestep[cur_elements['G']]),
self._get_wx_units(params, cur_elements['G']))
new_timestep.visibility = \
Element(cur_elements['V'],
timestep[cur_elements['V']],
self._get_wx_units(params, cur_elements['V']))
new_timestep.precipitation = \
Element(cur_elements['Pp'],
int(timestep[cur_elements['Pp']]),
self._get_wx_units(params, cur_elements['Pp']))
new_timestep.humidity = \
Element(cur_elements['H'],
int(timestep[cur_elements['H']]),
self._get_wx_units(params, cur_elements['H']))
if 'U' in cur_elements and cur_elements['U'] in timestep:
new_timestep.uv = \
Element(cur_elements['U'],
timestep[cur_elements['U']],
self._get_wx_units(params, cur_elements['U']))
new_day.timesteps.append(new_timestep)
forecast.days.append(new_day)
return forecast
def get_observation_sites(self):
"""
This function returns a list of Site objects for which observations are available.
"""
if (time() - self.observation_sites_last_update) > self.observation_sites_update_time:
self.observation_sites_last_update = time()
data = self.__call_api("sitelist/", None, OBSERVATION_URL)
sites = list()
for jsoned in data['Locations']['Location']:
site = Site()
site.name = jsoned['name']
site.id = jsoned['id']
site.latitude = jsoned['latitude']
site.longitude = jsoned['longitude']
if 'region' in jsoned:
site.region = jsoned['region']
if 'elevation' in jsoned:
site.elevation = jsoned['elevation']
if 'unitaryAuthArea' in jsoned:
site.unitaryAuthArea = jsoned['unitaryAuthArea']
if 'nationalPark' in jsoned:
site.nationalPark = jsoned['nationalPark']
site.api_key = self.api_key
sites.append(site)
self.observation_sites_last_request = sites
else:
sites = observation_self.sites_last_request
return sites
def get_nearest_observation_site(self, latitude=None, longitude=None):
"""
This function returns the nearest Site to the specified
coordinates that supports observations
"""
if longitude is None:
print('ERROR: No longitude given.')
return False
if latitude is None:
print('ERROR: No latitude given.')
return False
nearest = False
distance = None
sites = self.get_observation_sites()
for site in sites:
new_distance = \
self._distance_between_coords(
float(site.longitude),
float(site.latitude),
float(longitude),
float(latitude))
if ((distance == None) or (new_distance < distance)):
distance = new_distance
nearest = site
# If the nearest site is more than 20km away, raise an error
if distance > 20:
raise APIException("There is no site within 30km.")
return nearest
def get_observations_for_site(self, site_id, frequency='hourly'):
"""
Get observations for the provided site
Returns hourly observations for the previous 24 hours
"""
data = self.__call_api(site_id,{"res":frequency}, OBSERVATION_URL)
params = data['SiteRep']['Wx']['Param']
observation = Observation()
observation.data_date = data['SiteRep']['DV']['dataDate']
observation.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)
observation.continent = data['SiteRep']['DV']['Location']['continent']
observation.country = data['SiteRep']['DV']['Location']['country']
observation.name = data['SiteRep']['DV']['Location']['name']
observation.longitude = data['SiteRep']['DV']['Location']['lon']
observation.latitude = data['SiteRep']['DV']['Location']['lat']
observation.id = data['SiteRep']['DV']['Location']['i']
observation.elevation = data['SiteRep']['DV']['Location']['elevation']
for day in data['SiteRep']['DV']['Location']['Period']:
new_day = Day()
new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
# If the day only has 1 timestep, put it into a list by itself so it can be treated
# the same as a day with multiple timesteps
if type(day['Rep']) is not list:
day['Rep'] = [day['Rep']]
for timestep in day['Rep']:
# As stated in
# https://www.metoffice.gov.uk/datapoint/product/uk-hourly-site-specific-observations,
# some sites do not have all parameters available for
# observations. The documentation does not state which
# fields may be absent. If the parameter is not available,
# nothing is returned from the API. If this happens the
# value of the element is set to 'Not reported'. This may
# change to the element not being assigned to the timestep.
new_timestep = Timestep()
# Assume the '$' field is always present.
new_timestep.name = int(timestep['$'])
cur_elements = ELEMENTS['Observation']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) + timedelta(minutes=int(timestep['$']))
if cur_elements['W'] in timestep:
new_timestep.weather = \
Element(cur_elements['W'],
timestep[cur_elements['W']],
self._get_wx_units(params, cur_elements['W']))
new_timestep.weather.text = \
self._weather_to_text(int(timestep[cur_elements['W']]))
else:
new_timestep.weather = \
Element(cur_elements['W'],
'Not reported')
if cur_elements['T'] in timestep:
new_timestep.temperature = \
Element(cur_elements['T'],
float(timestep[cur_elements['T']]),
self._get_wx_units(params, cur_elements['T']))
else:
new_timestep.temperature = \
Element(cur_elements['T'],
'Not reported')
if 'S' in timestep:
new_timestep.wind_speed = \
Element(cur_elements['S'],
int(timestep[cur_elements['S']]),
self._get_wx_units(params, cur_elements['S']))
else:
new_timestep.wind_speed = \
Element(cur_elements['S'],
'Not reported')
if 'D' in timestep:
new_timestep.wind_direction = \
Element(cur_elements['D'],
timestep[cur_elements['D']],
self._get_wx_units(params, cur_elements['D']))
else:
new_timestep.wind_direction = \
Element(cur_elements['D'],
'Not reported')
if cur_elements['V'] in timestep:
new_timestep.visibility = \
Element(cur_elements['V'],
int(timestep[cur_elements['V']]),
self._get_wx_units(params, cur_elements['V']))
new_timestep.visibility.text = self._visibility_to_text(int(timestep[cur_elements['V']]))
else:
new_timestep.visibility = \
Element(cur_elements['V'],
'Not reported')
if cur_elements['H'] in timestep:
new_timestep.humidity = \
Element(cur_elements['H'],
float(timestep[cur_elements['H']]),
self._get_wx_units(params, cur_elements['H']))
else:
new_timestep.humidity = \
Element(cur_elements['H'],
'Not reported')
if cur_elements['Dp'] in timestep:
new_timestep.dew_point = \
Element(cur_elements['Dp'],
float(timestep[cur_elements['Dp']]),
self._get_wx_units(params,
cur_elements['Dp']))
else:
new_timestep.dew_point = \
Element(cur_elements['Dp'],
'Not reported')
if cur_elements['P'] in timestep:
new_timestep.pressure = \
Element(cur_elements['P'],
float(timestep[cur_elements['P']]),
self._get_wx_units(params, cur_elements['P']))
else:
new_timestep.pressure = \
Element(cur_elements['P'],
'Not reported')
if cur_elements['Pt'] in timestep:
new_timestep.pressure_tendency = \
Element(cur_elements['Pt'],
timestep[cur_elements['Pt']],
self._get_wx_units(params, cur_elements['Pt']))
else:
new_timestep.pressure_tendency = \
Element(cur_elements['Pt'],
'Not reported')
new_day.timesteps.append(new_timestep)
observation.days.append(new_day)
return observation
|
jacobtomlinson/datapoint-python | datapoint/Manager.py | Manager._distance_between_coords | python | def _distance_between_coords(self, lon1, lat1, lon2, lat2):
# Convert the coordinates of the points to radians.
lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
r = 6371
d_hav = 2 * r * asin(sqrt((sin((lat1 - lat2) / 2))**2 + \
cos(lat1) * cos(lat2) * (sin((lon1 - lon2) / 2)**2 )))
return d_hav | Calculate the great circle distance between two points
on the earth (specified in decimal degrees).
Haversine formula states that:
d = 2 * r * arcsin(sqrt(sin^2((lat1 - lat2) / 2 +
cos(lat1)cos(lat2)sin^2((lon1 - lon2) / 2))))
where r is the radius of the sphere. This assumes the earth is spherical. | train | https://github.com/jacobtomlinson/datapoint-python/blob/1d3f596f21975f42c1484f5a9c3ff057de0b47ae/datapoint/Manager.py#L167-L186 | null | class Manager(object):
"""
Datapoint Manager object
"""
def __init__(self, api_key=""):
self.api_key = api_key
self.call_response = None
# The list of sites changes infrequently so limit to requesting it
# every hour.
self.forecast_sites_last_update = 0
self.forecast_sites_last_request = None
self.forecast_sites_update_time = 3600
self.observation_sites_last_update = 0
self.observation_sites_last_request = None
self.observation_sites_update_time = 3600
self.regions = RegionManager(self.api_key)
def __retry_session(self, retries=10, backoff_factor=0.3,
status_forcelist=(500, 502, 504),
session=None):
"""
Retry the connection using requests if it fails. Use this as a wrapper
to request from datapoint
"""
# requests.Session allows finer control, which is needed to use the
# retrying code
the_session = session or requests.Session()
# The Retry object manages the actual retrying
retry = Retry(total=retries, read=retries, connect=retries,
backoff_factor=backoff_factor,
status_forcelist=status_forcelist)
adapter = HTTPAdapter(max_retries=retry)
the_session.mount('http://', adapter)
the_session.mount('https://', adapter)
return the_session
def __call_api(self, path, params=None, api_url=FORECAST_URL):
"""
Call the datapoint api using the requests module
"""
if not params:
params = dict()
payload = {'key': self.api_key}
payload.update(params)
url = "%s/%s" % (api_url, path)
# Add a timeout to the request.
# The value of 1 second is based on attempting 100 connections to
# datapoint and taking ten times the mean connection time (rounded up).
# Could expose to users in the functions which need to call the api.
#req = requests.get(url, params=payload, timeout=1)
# The wrapper function __retry_session returns a requests.Session
# object. This has a .get() function like requests.get(), so the use
# doesn't change here.
sess = self.__retry_session()
req = sess.get(url, params=payload, timeout=1)
try:
data = req.json()
except ValueError:
raise APIException("DataPoint has not returned any data, this could be due to an incorrect API key")
self.call_response = data
if req.status_code != 200:
msg = [data[m] for m in ("message", "error_message", "status") \
if m in data][0]
raise Exception(msg)
return data
def _get_wx_units(self, params, name):
"""
Give the Wx array returned from datapoint and an element
name and return the units for that element.
"""
units = ""
for param in params:
if str(name) == str(param['name']):
units = param['units']
return units
def _weather_to_text(self, code):
if not isinstance(code, (int, long)):
raise ValueError("Weather code must be an integer not", type(code))
if code < 0 or code > 30:
raise ValueError("Weather code outof bounds, should be 0-30")
text = WEATHER_CODES[str(code)]
return text
def _visibility_to_text(self, distance):
"""
Convert observed visibility in metres to text used in forecast
"""
if not isinstance(distance, (int, long)):
raise ValueError("Distance must be an integer not", type(distance))
if distance < 0:
raise ValueError("Distance out of bounds, should be 0 or greater")
if 0 <= distance < 1000:
return 'VP'
elif 1000 <= distance < 4000:
return 'PO'
elif 4000 <= distance < 10000:
return 'MO'
elif 10000 <= distance < 20000:
return 'GO'
elif 20000 <= distance < 40000:
return 'VG'
else:
return 'EX'
def get_all_sites(self):
"""
Deprecated. This function returns a list of Site object.
"""
warning_message = 'This function is deprecated. Use get_forecast_sites() instead'
warn(warning_message, DeprecationWarning, stacklevel=2)
return self.get_forecast_sites()
def get_forecast_sites(self):
"""
This function returns a list of Site object.
"""
time_now = time()
if (time_now - self.forecast_sites_last_update) > self.forecast_sites_update_time or self.forecast_sites_last_request is None:
data = self.__call_api("sitelist/")
sites = list()
for jsoned in data['Locations']['Location']:
site = Site()
site.name = jsoned['name']
site.id = jsoned['id']
site.latitude = jsoned['latitude']
site.longitude = jsoned['longitude']
if 'region' in jsoned:
site.region = jsoned['region']
if 'elevation' in jsoned:
site.elevation = jsoned['elevation']
if 'unitaryAuthArea' in jsoned:
site.unitaryAuthArea = jsoned['unitaryAuthArea']
if 'nationalPark' in jsoned:
site.nationalPark = jsoned['nationalPark']
site.api_key = self.api_key
sites.append(site)
self.forecast_sites_last_request = sites
# Only set self.sites_last_update once self.sites_last_request has
# been set
self.forecast_sites_last_update = time_now
else:
sites = self.forecast_sites_last_request
return sites
def get_nearest_site(self, latitude=None, longitude=None):
"""
Deprecated. This function returns nearest Site object to the specified
coordinates.
"""
warning_message = 'This function is deprecated. Use get_nearest_forecast_site() instead'
warn(warning_message, DeprecationWarning, stacklevel=2)
return self.get_nearest_forecast_site(latitude, longitude)
def get_nearest_forecast_site(self, latitude=None, longitude=None):
"""
This function returns the nearest Site object to the specified
coordinates.
"""
if longitude is None:
print('ERROR: No latitude given.')
return False
if latitude is None:
print('ERROR: No latitude given.')
return False
nearest = False
distance = None
sites = self.get_forecast_sites()
# Sometimes there is a TypeError exception here: sites is None
# So, sometimes self.get_all_sites() has returned None.
for site in sites:
new_distance = \
self._distance_between_coords(
float(site.longitude),
float(site.latitude),
float(longitude),
float(latitude))
if ((distance == None) or (new_distance < distance)):
distance = new_distance
nearest = site
# If the nearest site is more than 30km away, raise an error
if distance > 30:
raise APIException("There is no site within 30km.")
return nearest
def get_forecast_for_site(self, site_id, frequency="daily"):
"""
Get a forecast for the provided site
A frequency of "daily" will return two timesteps:
"Day" and "Night".
A frequency of "3hourly" will return 8 timesteps:
0, 180, 360 ... 1260 (minutes since midnight UTC)
"""
data = self.__call_api(site_id, {"res":frequency})
params = data['SiteRep']['Wx']['Param']
forecast = Forecast()
forecast.data_date = data['SiteRep']['DV']['dataDate']
forecast.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)
forecast.continent = data['SiteRep']['DV']['Location']['continent']
forecast.country = data['SiteRep']['DV']['Location']['country']
forecast.name = data['SiteRep']['DV']['Location']['name']
forecast.longitude = data['SiteRep']['DV']['Location']['lon']
forecast.latitude = data['SiteRep']['DV']['Location']['lat']
forecast.id = data['SiteRep']['DV']['Location']['i']
forecast.elevation = data['SiteRep']['DV']['Location']['elevation']
for day in data['SiteRep']['DV']['Location']['Period']:
new_day = Day()
new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
for timestep in day['Rep']:
new_timestep = Timestep()
if timestep['$'] == "Day":
cur_elements = ELEMENTS['Day']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
+ timedelta(hours=12)
elif timestep['$'] == "Night":
cur_elements = ELEMENTS['Night']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
else:
cur_elements = ELEMENTS['Default']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
+ timedelta(minutes=int(timestep['$']))
if frequency == 'daily':
new_timestep.name = timestep['$']
elif frequency == '3hourly':
new_timestep.name = int(timestep['$'])
new_timestep.weather = \
Element(cur_elements['W'],
timestep[cur_elements['W']],
self._get_wx_units(params, cur_elements['W']))
new_timestep.weather.text = self._weather_to_text(int(timestep[cur_elements['W']]))
new_timestep.temperature = \
Element(cur_elements['T'],
int(timestep[cur_elements['T']]),
self._get_wx_units(params, cur_elements['T']))
new_timestep.feels_like_temperature = \
Element(cur_elements['F'],
int(timestep[cur_elements['F']]),
self._get_wx_units(params, cur_elements['F']))
new_timestep.wind_speed = \
Element(cur_elements['S'],
int(timestep[cur_elements['S']]),
self._get_wx_units(params, cur_elements['S']))
new_timestep.wind_direction = \
Element(cur_elements['D'],
timestep[cur_elements['D']],
self._get_wx_units(params, cur_elements['D']))
new_timestep.wind_gust = \
Element(cur_elements['G'],
int(timestep[cur_elements['G']]),
self._get_wx_units(params, cur_elements['G']))
new_timestep.visibility = \
Element(cur_elements['V'],
timestep[cur_elements['V']],
self._get_wx_units(params, cur_elements['V']))
new_timestep.precipitation = \
Element(cur_elements['Pp'],
int(timestep[cur_elements['Pp']]),
self._get_wx_units(params, cur_elements['Pp']))
new_timestep.humidity = \
Element(cur_elements['H'],
int(timestep[cur_elements['H']]),
self._get_wx_units(params, cur_elements['H']))
if 'U' in cur_elements and cur_elements['U'] in timestep:
new_timestep.uv = \
Element(cur_elements['U'],
timestep[cur_elements['U']],
self._get_wx_units(params, cur_elements['U']))
new_day.timesteps.append(new_timestep)
forecast.days.append(new_day)
return forecast
def get_observation_sites(self):
"""
This function returns a list of Site objects for which observations are available.
"""
if (time() - self.observation_sites_last_update) > self.observation_sites_update_time:
self.observation_sites_last_update = time()
data = self.__call_api("sitelist/", None, OBSERVATION_URL)
sites = list()
for jsoned in data['Locations']['Location']:
site = Site()
site.name = jsoned['name']
site.id = jsoned['id']
site.latitude = jsoned['latitude']
site.longitude = jsoned['longitude']
if 'region' in jsoned:
site.region = jsoned['region']
if 'elevation' in jsoned:
site.elevation = jsoned['elevation']
if 'unitaryAuthArea' in jsoned:
site.unitaryAuthArea = jsoned['unitaryAuthArea']
if 'nationalPark' in jsoned:
site.nationalPark = jsoned['nationalPark']
site.api_key = self.api_key
sites.append(site)
self.observation_sites_last_request = sites
else:
sites = observation_self.sites_last_request
return sites
def get_nearest_observation_site(self, latitude=None, longitude=None):
"""
This function returns the nearest Site to the specified
coordinates that supports observations
"""
if longitude is None:
print('ERROR: No longitude given.')
return False
if latitude is None:
print('ERROR: No latitude given.')
return False
nearest = False
distance = None
sites = self.get_observation_sites()
for site in sites:
new_distance = \
self._distance_between_coords(
float(site.longitude),
float(site.latitude),
float(longitude),
float(latitude))
if ((distance == None) or (new_distance < distance)):
distance = new_distance
nearest = site
# If the nearest site is more than 20km away, raise an error
if distance > 20:
raise APIException("There is no site within 30km.")
return nearest
def get_observations_for_site(self, site_id, frequency='hourly'):
"""
Get observations for the provided site
Returns hourly observations for the previous 24 hours
"""
data = self.__call_api(site_id,{"res":frequency}, OBSERVATION_URL)
params = data['SiteRep']['Wx']['Param']
observation = Observation()
observation.data_date = data['SiteRep']['DV']['dataDate']
observation.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)
observation.continent = data['SiteRep']['DV']['Location']['continent']
observation.country = data['SiteRep']['DV']['Location']['country']
observation.name = data['SiteRep']['DV']['Location']['name']
observation.longitude = data['SiteRep']['DV']['Location']['lon']
observation.latitude = data['SiteRep']['DV']['Location']['lat']
observation.id = data['SiteRep']['DV']['Location']['i']
observation.elevation = data['SiteRep']['DV']['Location']['elevation']
for day in data['SiteRep']['DV']['Location']['Period']:
new_day = Day()
new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
# If the day only has 1 timestep, put it into a list by itself so it can be treated
# the same as a day with multiple timesteps
if type(day['Rep']) is not list:
day['Rep'] = [day['Rep']]
for timestep in day['Rep']:
# As stated in
# https://www.metoffice.gov.uk/datapoint/product/uk-hourly-site-specific-observations,
# some sites do not have all parameters available for
# observations. The documentation does not state which
# fields may be absent. If the parameter is not available,
# nothing is returned from the API. If this happens the
# value of the element is set to 'Not reported'. This may
# change to the element not being assigned to the timestep.
new_timestep = Timestep()
# Assume the '$' field is always present.
new_timestep.name = int(timestep['$'])
cur_elements = ELEMENTS['Observation']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) + timedelta(minutes=int(timestep['$']))
if cur_elements['W'] in timestep:
new_timestep.weather = \
Element(cur_elements['W'],
timestep[cur_elements['W']],
self._get_wx_units(params, cur_elements['W']))
new_timestep.weather.text = \
self._weather_to_text(int(timestep[cur_elements['W']]))
else:
new_timestep.weather = \
Element(cur_elements['W'],
'Not reported')
if cur_elements['T'] in timestep:
new_timestep.temperature = \
Element(cur_elements['T'],
float(timestep[cur_elements['T']]),
self._get_wx_units(params, cur_elements['T']))
else:
new_timestep.temperature = \
Element(cur_elements['T'],
'Not reported')
if 'S' in timestep:
new_timestep.wind_speed = \
Element(cur_elements['S'],
int(timestep[cur_elements['S']]),
self._get_wx_units(params, cur_elements['S']))
else:
new_timestep.wind_speed = \
Element(cur_elements['S'],
'Not reported')
if 'D' in timestep:
new_timestep.wind_direction = \
Element(cur_elements['D'],
timestep[cur_elements['D']],
self._get_wx_units(params, cur_elements['D']))
else:
new_timestep.wind_direction = \
Element(cur_elements['D'],
'Not reported')
if cur_elements['V'] in timestep:
new_timestep.visibility = \
Element(cur_elements['V'],
int(timestep[cur_elements['V']]),
self._get_wx_units(params, cur_elements['V']))
new_timestep.visibility.text = self._visibility_to_text(int(timestep[cur_elements['V']]))
else:
new_timestep.visibility = \
Element(cur_elements['V'],
'Not reported')
if cur_elements['H'] in timestep:
new_timestep.humidity = \
Element(cur_elements['H'],
float(timestep[cur_elements['H']]),
self._get_wx_units(params, cur_elements['H']))
else:
new_timestep.humidity = \
Element(cur_elements['H'],
'Not reported')
if cur_elements['Dp'] in timestep:
new_timestep.dew_point = \
Element(cur_elements['Dp'],
float(timestep[cur_elements['Dp']]),
self._get_wx_units(params,
cur_elements['Dp']))
else:
new_timestep.dew_point = \
Element(cur_elements['Dp'],
'Not reported')
if cur_elements['P'] in timestep:
new_timestep.pressure = \
Element(cur_elements['P'],
float(timestep[cur_elements['P']]),
self._get_wx_units(params, cur_elements['P']))
else:
new_timestep.pressure = \
Element(cur_elements['P'],
'Not reported')
if cur_elements['Pt'] in timestep:
new_timestep.pressure_tendency = \
Element(cur_elements['Pt'],
timestep[cur_elements['Pt']],
self._get_wx_units(params, cur_elements['Pt']))
else:
new_timestep.pressure_tendency = \
Element(cur_elements['Pt'],
'Not reported')
new_day.timesteps.append(new_timestep)
observation.days.append(new_day)
return observation
|
jacobtomlinson/datapoint-python | datapoint/Manager.py | Manager._get_wx_units | python | def _get_wx_units(self, params, name):
units = ""
for param in params:
if str(name) == str(param['name']):
units = param['units']
return units | Give the Wx array returned from datapoint and an element
name and return the units for that element. | train | https://github.com/jacobtomlinson/datapoint-python/blob/1d3f596f21975f42c1484f5a9c3ff057de0b47ae/datapoint/Manager.py#L188-L197 | null | class Manager(object):
"""
Datapoint Manager object
"""
def __init__(self, api_key=""):
self.api_key = api_key
self.call_response = None
# The list of sites changes infrequently so limit to requesting it
# every hour.
self.forecast_sites_last_update = 0
self.forecast_sites_last_request = None
self.forecast_sites_update_time = 3600
self.observation_sites_last_update = 0
self.observation_sites_last_request = None
self.observation_sites_update_time = 3600
self.regions = RegionManager(self.api_key)
def __retry_session(self, retries=10, backoff_factor=0.3,
status_forcelist=(500, 502, 504),
session=None):
"""
Retry the connection using requests if it fails. Use this as a wrapper
to request from datapoint
"""
# requests.Session allows finer control, which is needed to use the
# retrying code
the_session = session or requests.Session()
# The Retry object manages the actual retrying
retry = Retry(total=retries, read=retries, connect=retries,
backoff_factor=backoff_factor,
status_forcelist=status_forcelist)
adapter = HTTPAdapter(max_retries=retry)
the_session.mount('http://', adapter)
the_session.mount('https://', adapter)
return the_session
def __call_api(self, path, params=None, api_url=FORECAST_URL):
"""
Call the datapoint api using the requests module
"""
if not params:
params = dict()
payload = {'key': self.api_key}
payload.update(params)
url = "%s/%s" % (api_url, path)
# Add a timeout to the request.
# The value of 1 second is based on attempting 100 connections to
# datapoint and taking ten times the mean connection time (rounded up).
# Could expose to users in the functions which need to call the api.
#req = requests.get(url, params=payload, timeout=1)
# The wrapper function __retry_session returns a requests.Session
# object. This has a .get() function like requests.get(), so the use
# doesn't change here.
sess = self.__retry_session()
req = sess.get(url, params=payload, timeout=1)
try:
data = req.json()
except ValueError:
raise APIException("DataPoint has not returned any data, this could be due to an incorrect API key")
self.call_response = data
if req.status_code != 200:
msg = [data[m] for m in ("message", "error_message", "status") \
if m in data][0]
raise Exception(msg)
return data
def _distance_between_coords(self, lon1, lat1, lon2, lat2):
"""
Calculate the great circle distance between two points
on the earth (specified in decimal degrees).
Haversine formula states that:
d = 2 * r * arcsin(sqrt(sin^2((lat1 - lat2) / 2 +
cos(lat1)cos(lat2)sin^2((lon1 - lon2) / 2))))
where r is the radius of the sphere. This assumes the earth is spherical.
"""
# Convert the coordinates of the points to radians.
lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
r = 6371
d_hav = 2 * r * asin(sqrt((sin((lat1 - lat2) / 2))**2 + \
cos(lat1) * cos(lat2) * (sin((lon1 - lon2) / 2)**2 )))
return d_hav
def _weather_to_text(self, code):
if not isinstance(code, (int, long)):
raise ValueError("Weather code must be an integer not", type(code))
if code < 0 or code > 30:
raise ValueError("Weather code outof bounds, should be 0-30")
text = WEATHER_CODES[str(code)]
return text
def _visibility_to_text(self, distance):
"""
Convert observed visibility in metres to text used in forecast
"""
if not isinstance(distance, (int, long)):
raise ValueError("Distance must be an integer not", type(distance))
if distance < 0:
raise ValueError("Distance out of bounds, should be 0 or greater")
if 0 <= distance < 1000:
return 'VP'
elif 1000 <= distance < 4000:
return 'PO'
elif 4000 <= distance < 10000:
return 'MO'
elif 10000 <= distance < 20000:
return 'GO'
elif 20000 <= distance < 40000:
return 'VG'
else:
return 'EX'
def get_all_sites(self):
"""
Deprecated. This function returns a list of Site object.
"""
warning_message = 'This function is deprecated. Use get_forecast_sites() instead'
warn(warning_message, DeprecationWarning, stacklevel=2)
return self.get_forecast_sites()
def get_forecast_sites(self):
"""
This function returns a list of Site object.
"""
time_now = time()
if (time_now - self.forecast_sites_last_update) > self.forecast_sites_update_time or self.forecast_sites_last_request is None:
data = self.__call_api("sitelist/")
sites = list()
for jsoned in data['Locations']['Location']:
site = Site()
site.name = jsoned['name']
site.id = jsoned['id']
site.latitude = jsoned['latitude']
site.longitude = jsoned['longitude']
if 'region' in jsoned:
site.region = jsoned['region']
if 'elevation' in jsoned:
site.elevation = jsoned['elevation']
if 'unitaryAuthArea' in jsoned:
site.unitaryAuthArea = jsoned['unitaryAuthArea']
if 'nationalPark' in jsoned:
site.nationalPark = jsoned['nationalPark']
site.api_key = self.api_key
sites.append(site)
self.forecast_sites_last_request = sites
# Only set self.sites_last_update once self.sites_last_request has
# been set
self.forecast_sites_last_update = time_now
else:
sites = self.forecast_sites_last_request
return sites
def get_nearest_site(self, latitude=None, longitude=None):
"""
Deprecated. This function returns nearest Site object to the specified
coordinates.
"""
warning_message = 'This function is deprecated. Use get_nearest_forecast_site() instead'
warn(warning_message, DeprecationWarning, stacklevel=2)
return self.get_nearest_forecast_site(latitude, longitude)
def get_nearest_forecast_site(self, latitude=None, longitude=None):
"""
This function returns the nearest Site object to the specified
coordinates.
"""
if longitude is None:
print('ERROR: No latitude given.')
return False
if latitude is None:
print('ERROR: No latitude given.')
return False
nearest = False
distance = None
sites = self.get_forecast_sites()
# Sometimes there is a TypeError exception here: sites is None
# So, sometimes self.get_all_sites() has returned None.
for site in sites:
new_distance = \
self._distance_between_coords(
float(site.longitude),
float(site.latitude),
float(longitude),
float(latitude))
if ((distance == None) or (new_distance < distance)):
distance = new_distance
nearest = site
# If the nearest site is more than 30km away, raise an error
if distance > 30:
raise APIException("There is no site within 30km.")
return nearest
    def get_forecast_for_site(self, site_id, frequency="daily"):
        """
        Get a forecast for the provided site.

        A frequency of "daily" will return two timesteps per day:
        "Day" and "Night".
        A frequency of "3hourly" will return 8 timesteps per day:
        0, 180, 360 ... 1260 (minutes since midnight UTC).

        :param site_id: DataPoint id of the site to forecast.
        :param frequency: "daily" or "3hourly".
        :returns: a Forecast populated with Day objects, each holding
            Timestep objects whose fields are Element instances.
        """
        data = self.__call_api(site_id, {"res":frequency})
        # 'Param' maps element short names (e.g. 'T') to their units.
        params = data['SiteRep']['Wx']['Param']
        forecast = Forecast()
        # NOTE(review): this first assignment is immediately overwritten by
        # the parsed datetime on the next line; it looks redundant.
        forecast.data_date = data['SiteRep']['DV']['dataDate']
        forecast.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)
        forecast.continent = data['SiteRep']['DV']['Location']['continent']
        forecast.country = data['SiteRep']['DV']['Location']['country']
        forecast.name = data['SiteRep']['DV']['Location']['name']
        forecast.longitude = data['SiteRep']['DV']['Location']['lon']
        forecast.latitude = data['SiteRep']['DV']['Location']['lat']
        forecast.id = data['SiteRep']['DV']['Location']['i']
        forecast.elevation = data['SiteRep']['DV']['Location']['elevation']
        for day in data['SiteRep']['DV']['Location']['Period']:
            new_day = Day()
            new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
            for timestep in day['Rep']:
                new_timestep = Timestep()
                # '$' is the timestep label: "Day"/"Night" for daily
                # forecasts, minutes since midnight UTC for 3hourly ones.
                # The element short names differ between these cases, so
                # pick the matching lookup table.
                if timestep['$'] == "Day":
                    cur_elements = ELEMENTS['Day']
                    # Day timesteps are pinned to midday UTC.
                    new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
                                        + timedelta(hours=12)
                elif timestep['$'] == "Night":
                    cur_elements = ELEMENTS['Night']
                    new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
                else:
                    cur_elements = ELEMENTS['Default']
                    new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
                                        + timedelta(minutes=int(timestep['$']))
                if frequency == 'daily':
                    new_timestep.name = timestep['$']
                elif frequency == '3hourly':
                    new_timestep.name = int(timestep['$'])
                # Build one Element per weather parameter, attaching the
                # units looked up from the 'Param' table above.
                new_timestep.weather = \
                    Element(cur_elements['W'],
                            timestep[cur_elements['W']],
                            self._get_wx_units(params, cur_elements['W']))
                new_timestep.weather.text = self._weather_to_text(int(timestep[cur_elements['W']]))
                new_timestep.temperature = \
                    Element(cur_elements['T'],
                            int(timestep[cur_elements['T']]),
                            self._get_wx_units(params, cur_elements['T']))
                new_timestep.feels_like_temperature = \
                    Element(cur_elements['F'],
                            int(timestep[cur_elements['F']]),
                            self._get_wx_units(params, cur_elements['F']))
                new_timestep.wind_speed = \
                    Element(cur_elements['S'],
                            int(timestep[cur_elements['S']]),
                            self._get_wx_units(params, cur_elements['S']))
                new_timestep.wind_direction = \
                    Element(cur_elements['D'],
                            timestep[cur_elements['D']],
                            self._get_wx_units(params, cur_elements['D']))
                new_timestep.wind_gust = \
                    Element(cur_elements['G'],
                            int(timestep[cur_elements['G']]),
                            self._get_wx_units(params, cur_elements['G']))
                new_timestep.visibility = \
                    Element(cur_elements['V'],
                            timestep[cur_elements['V']],
                            self._get_wx_units(params, cur_elements['V']))
                new_timestep.precipitation = \
                    Element(cur_elements['Pp'],
                            int(timestep[cur_elements['Pp']]),
                            self._get_wx_units(params, cur_elements['Pp']))
                new_timestep.humidity = \
                    Element(cur_elements['H'],
                            int(timestep[cur_elements['H']]),
                            self._get_wx_units(params, cur_elements['H']))
                # UV index is not reported for night timesteps, so it is
                # only attached when present.
                if 'U' in cur_elements and cur_elements['U'] in timestep:
                    new_timestep.uv = \
                        Element(cur_elements['U'],
                                timestep[cur_elements['U']],
                                self._get_wx_units(params, cur_elements['U']))
                new_day.timesteps.append(new_timestep)
            forecast.days.append(new_day)
        return forecast
def get_observation_sites(self):
"""
This function returns a list of Site objects for which observations are available.
"""
if (time() - self.observation_sites_last_update) > self.observation_sites_update_time:
self.observation_sites_last_update = time()
data = self.__call_api("sitelist/", None, OBSERVATION_URL)
sites = list()
for jsoned in data['Locations']['Location']:
site = Site()
site.name = jsoned['name']
site.id = jsoned['id']
site.latitude = jsoned['latitude']
site.longitude = jsoned['longitude']
if 'region' in jsoned:
site.region = jsoned['region']
if 'elevation' in jsoned:
site.elevation = jsoned['elevation']
if 'unitaryAuthArea' in jsoned:
site.unitaryAuthArea = jsoned['unitaryAuthArea']
if 'nationalPark' in jsoned:
site.nationalPark = jsoned['nationalPark']
site.api_key = self.api_key
sites.append(site)
self.observation_sites_last_request = sites
else:
sites = observation_self.sites_last_request
return sites
def get_nearest_observation_site(self, latitude=None, longitude=None):
"""
This function returns the nearest Site to the specified
coordinates that supports observations
"""
if longitude is None:
print('ERROR: No longitude given.')
return False
if latitude is None:
print('ERROR: No latitude given.')
return False
nearest = False
distance = None
sites = self.get_observation_sites()
for site in sites:
new_distance = \
self._distance_between_coords(
float(site.longitude),
float(site.latitude),
float(longitude),
float(latitude))
if ((distance == None) or (new_distance < distance)):
distance = new_distance
nearest = site
# If the nearest site is more than 20km away, raise an error
if distance > 20:
raise APIException("There is no site within 30km.")
return nearest
    def get_observations_for_site(self, site_id, frequency='hourly'):
        """
        Get observations for the provided site.

        Returns hourly observations for the previous 24 hours as an
        Observation holding Day objects, each with Timestep objects whose
        fields are Element instances. Parameters DataPoint omitted are
        given the value 'Not reported'.

        :param site_id: DataPoint id of the observation site.
        :param frequency: observation resolution; 'hourly' by default.
        """
        data = self.__call_api(site_id,{"res":frequency}, OBSERVATION_URL)
        # 'Param' maps element short names (e.g. 'T') to their units.
        params = data['SiteRep']['Wx']['Param']
        observation = Observation()
        # NOTE(review): this first assignment is immediately overwritten by
        # the parsed datetime on the next line; it looks redundant.
        observation.data_date = data['SiteRep']['DV']['dataDate']
        observation.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)
        observation.continent = data['SiteRep']['DV']['Location']['continent']
        observation.country = data['SiteRep']['DV']['Location']['country']
        observation.name = data['SiteRep']['DV']['Location']['name']
        observation.longitude = data['SiteRep']['DV']['Location']['lon']
        observation.latitude = data['SiteRep']['DV']['Location']['lat']
        observation.id = data['SiteRep']['DV']['Location']['i']
        observation.elevation = data['SiteRep']['DV']['Location']['elevation']
        for day in data['SiteRep']['DV']['Location']['Period']:
            new_day = Day()
            new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
            # If the day only has 1 timestep, put it into a list by itself so it can be treated
            # the same as a day with multiple timesteps
            if type(day['Rep']) is not list:
                day['Rep'] = [day['Rep']]
            for timestep in day['Rep']:
                # As stated in
                # https://www.metoffice.gov.uk/datapoint/product/uk-hourly-site-specific-observations,
                # some sites do not have all parameters available for
                # observations. The documentation does not state which
                # fields may be absent. If the parameter is not available,
                # nothing is returned from the API. If this happens the
                # value of the element is set to 'Not reported'. This may
                # change to the element not being assigned to the timestep.
                new_timestep = Timestep()
                # Assume the '$' field is always present.
                # '$' is minutes since midnight UTC for this timestep.
                new_timestep.name = int(timestep['$'])
                cur_elements = ELEMENTS['Observation']
                new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) + timedelta(minutes=int(timestep['$']))
                # Build one Element per weather parameter below, falling
                # back to 'Not reported' when DataPoint omitted it.
                if cur_elements['W'] in timestep:
                    new_timestep.weather = \
                        Element(cur_elements['W'],
                                timestep[cur_elements['W']],
                                self._get_wx_units(params, cur_elements['W']))
                    new_timestep.weather.text = \
                        self._weather_to_text(int(timestep[cur_elements['W']]))
                else:
                    new_timestep.weather = \
                        Element(cur_elements['W'],
                                'Not reported')
                if cur_elements['T'] in timestep:
                    new_timestep.temperature = \
                        Element(cur_elements['T'],
                                float(timestep[cur_elements['T']]),
                                self._get_wx_units(params, cur_elements['T']))
                else:
                    new_timestep.temperature = \
                        Element(cur_elements['T'],
                                'Not reported')
                # NOTE(review): 'S' and 'D' are tested as literal keys here
                # rather than via cur_elements like the other parameters —
                # presumably equivalent, but worth confirming against
                # ELEMENTS['Observation'].
                if 'S' in timestep:
                    new_timestep.wind_speed = \
                        Element(cur_elements['S'],
                                int(timestep[cur_elements['S']]),
                                self._get_wx_units(params, cur_elements['S']))
                else:
                    new_timestep.wind_speed = \
                        Element(cur_elements['S'],
                                'Not reported')
                if 'D' in timestep:
                    new_timestep.wind_direction = \
                        Element(cur_elements['D'],
                                timestep[cur_elements['D']],
                                self._get_wx_units(params, cur_elements['D']))
                else:
                    new_timestep.wind_direction = \
                        Element(cur_elements['D'],
                                'Not reported')
                if cur_elements['V'] in timestep:
                    new_timestep.visibility = \
                        Element(cur_elements['V'],
                                int(timestep[cur_elements['V']]),
                                self._get_wx_units(params, cur_elements['V']))
                    new_timestep.visibility.text = self._visibility_to_text(int(timestep[cur_elements['V']]))
                else:
                    new_timestep.visibility = \
                        Element(cur_elements['V'],
                                'Not reported')
                if cur_elements['H'] in timestep:
                    new_timestep.humidity = \
                        Element(cur_elements['H'],
                                float(timestep[cur_elements['H']]),
                                self._get_wx_units(params, cur_elements['H']))
                else:
                    new_timestep.humidity = \
                        Element(cur_elements['H'],
                                'Not reported')
                if cur_elements['Dp'] in timestep:
                    new_timestep.dew_point = \
                        Element(cur_elements['Dp'],
                                float(timestep[cur_elements['Dp']]),
                                self._get_wx_units(params,
                                                   cur_elements['Dp']))
                else:
                    new_timestep.dew_point = \
                        Element(cur_elements['Dp'],
                                'Not reported')
                if cur_elements['P'] in timestep:
                    new_timestep.pressure = \
                        Element(cur_elements['P'],
                                float(timestep[cur_elements['P']]),
                                self._get_wx_units(params, cur_elements['P']))
                else:
                    new_timestep.pressure = \
                        Element(cur_elements['P'],
                                'Not reported')
                if cur_elements['Pt'] in timestep:
                    new_timestep.pressure_tendency = \
                        Element(cur_elements['Pt'],
                                timestep[cur_elements['Pt']],
                                self._get_wx_units(params, cur_elements['Pt']))
                else:
                    new_timestep.pressure_tendency = \
                        Element(cur_elements['Pt'],
                                'Not reported')
                new_day.timesteps.append(new_timestep)
            observation.days.append(new_day)
        return observation
|
jacobtomlinson/datapoint-python | datapoint/Manager.py | Manager._visibility_to_text | python | def _visibility_to_text(self, distance):
if not isinstance(distance, (int, long)):
raise ValueError("Distance must be an integer not", type(distance))
if distance < 0:
raise ValueError("Distance out of bounds, should be 0 or greater")
if 0 <= distance < 1000:
return 'VP'
elif 1000 <= distance < 4000:
return 'PO'
elif 4000 <= distance < 10000:
return 'MO'
elif 10000 <= distance < 20000:
return 'GO'
elif 20000 <= distance < 40000:
return 'VG'
else:
return 'EX' | Convert observed visibility in metres to text used in forecast | train | https://github.com/jacobtomlinson/datapoint-python/blob/1d3f596f21975f42c1484f5a9c3ff057de0b47ae/datapoint/Manager.py#L207-L228 | null | class Manager(object):
"""
Datapoint Manager object
"""
    def __init__(self, api_key=""):
        """Create a Manager that talks to DataPoint with the given API key."""
        self.api_key = api_key
        # Raw JSON of the most recent API response, kept for inspection.
        self.call_response = None
        # The list of sites changes infrequently so limit to requesting it
        # every hour.
        self.forecast_sites_last_update = 0
        self.forecast_sites_last_request = None
        self.forecast_sites_update_time = 3600
        # Same caching scheme for the observation site list.
        self.observation_sites_last_update = 0
        self.observation_sites_last_request = None
        self.observation_sites_update_time = 3600
        self.regions = RegionManager(self.api_key)
def __retry_session(self, retries=10, backoff_factor=0.3,
status_forcelist=(500, 502, 504),
session=None):
"""
Retry the connection using requests if it fails. Use this as a wrapper
to request from datapoint
"""
# requests.Session allows finer control, which is needed to use the
# retrying code
the_session = session or requests.Session()
# The Retry object manages the actual retrying
retry = Retry(total=retries, read=retries, connect=retries,
backoff_factor=backoff_factor,
status_forcelist=status_forcelist)
adapter = HTTPAdapter(max_retries=retry)
the_session.mount('http://', adapter)
the_session.mount('https://', adapter)
return the_session
    def __call_api(self, path, params=None, api_url=FORECAST_URL):
        """
        Call the datapoint api using the requests module.

        :param path: resource path appended to api_url (e.g. a site id).
        :param params: extra query parameters; the API key is always added.
        :param api_url: base URL; defaults to the forecast endpoint.
        :returns: the decoded JSON response (also stored in call_response).
        :raises APIException: if the response body is not valid JSON.
        :raises Exception: with the server-supplied message on a non-200
            status.
        """
        if not params:
            params = dict()
        payload = {'key': self.api_key}
        payload.update(params)
        url = "%s/%s" % (api_url, path)
        # Add a timeout to the request.
        # The value of 1 second is based on attempting 100 connections to
        # datapoint and taking ten times the mean connection time (rounded up).
        # Could expose to users in the functions which need to call the api.
        #req = requests.get(url, params=payload, timeout=1)
        # The wrapper function __retry_session returns a requests.Session
        # object. This has a .get() function like requests.get(), so the use
        # doesn't change here.
        sess = self.__retry_session()
        req = sess.get(url, params=payload, timeout=1)
        try:
            data = req.json()
        except ValueError:
            raise APIException("DataPoint has not returned any data, this could be due to an incorrect API key")
        self.call_response = data
        if req.status_code != 200:
            # NOTE(review): if none of these keys is present this raises
            # IndexError instead of the intended error message — confirm
            # the API always supplies one of them on error responses.
            msg = [data[m] for m in ("message", "error_message", "status") \
                   if m in data][0]
            raise Exception(msg)
        return data
def _distance_between_coords(self, lon1, lat1, lon2, lat2):
"""
Calculate the great circle distance between two points
on the earth (specified in decimal degrees).
Haversine formula states that:
d = 2 * r * arcsin(sqrt(sin^2((lat1 - lat2) / 2 +
cos(lat1)cos(lat2)sin^2((lon1 - lon2) / 2))))
where r is the radius of the sphere. This assumes the earth is spherical.
"""
# Convert the coordinates of the points to radians.
lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
r = 6371
d_hav = 2 * r * asin(sqrt((sin((lat1 - lat2) / 2))**2 + \
cos(lat1) * cos(lat2) * (sin((lon1 - lon2) / 2)**2 )))
return d_hav
def _get_wx_units(self, params, name):
"""
Give the Wx array returned from datapoint and an element
name and return the units for that element.
"""
units = ""
for param in params:
if str(name) == str(param['name']):
units = param['units']
return units
def _weather_to_text(self, code):
if not isinstance(code, (int, long)):
raise ValueError("Weather code must be an integer not", type(code))
if code < 0 or code > 30:
raise ValueError("Weather code outof bounds, should be 0-30")
text = WEATHER_CODES[str(code)]
return text
def get_all_sites(self):
"""
Deprecated. This function returns a list of Site object.
"""
warning_message = 'This function is deprecated. Use get_forecast_sites() instead'
warn(warning_message, DeprecationWarning, stacklevel=2)
return self.get_forecast_sites()
def get_forecast_sites(self):
"""
This function returns a list of Site object.
"""
time_now = time()
if (time_now - self.forecast_sites_last_update) > self.forecast_sites_update_time or self.forecast_sites_last_request is None:
data = self.__call_api("sitelist/")
sites = list()
for jsoned in data['Locations']['Location']:
site = Site()
site.name = jsoned['name']
site.id = jsoned['id']
site.latitude = jsoned['latitude']
site.longitude = jsoned['longitude']
if 'region' in jsoned:
site.region = jsoned['region']
if 'elevation' in jsoned:
site.elevation = jsoned['elevation']
if 'unitaryAuthArea' in jsoned:
site.unitaryAuthArea = jsoned['unitaryAuthArea']
if 'nationalPark' in jsoned:
site.nationalPark = jsoned['nationalPark']
site.api_key = self.api_key
sites.append(site)
self.forecast_sites_last_request = sites
# Only set self.sites_last_update once self.sites_last_request has
# been set
self.forecast_sites_last_update = time_now
else:
sites = self.forecast_sites_last_request
return sites
def get_nearest_site(self, latitude=None, longitude=None):
"""
Deprecated. This function returns nearest Site object to the specified
coordinates.
"""
warning_message = 'This function is deprecated. Use get_nearest_forecast_site() instead'
warn(warning_message, DeprecationWarning, stacklevel=2)
return self.get_nearest_forecast_site(latitude, longitude)
def get_nearest_forecast_site(self, latitude=None, longitude=None):
"""
This function returns the nearest Site object to the specified
coordinates.
"""
if longitude is None:
print('ERROR: No latitude given.')
return False
if latitude is None:
print('ERROR: No latitude given.')
return False
nearest = False
distance = None
sites = self.get_forecast_sites()
# Sometimes there is a TypeError exception here: sites is None
# So, sometimes self.get_all_sites() has returned None.
for site in sites:
new_distance = \
self._distance_between_coords(
float(site.longitude),
float(site.latitude),
float(longitude),
float(latitude))
if ((distance == None) or (new_distance < distance)):
distance = new_distance
nearest = site
# If the nearest site is more than 30km away, raise an error
if distance > 30:
raise APIException("There is no site within 30km.")
return nearest
def get_forecast_for_site(self, site_id, frequency="daily"):
"""
Get a forecast for the provided site
A frequency of "daily" will return two timesteps:
"Day" and "Night".
A frequency of "3hourly" will return 8 timesteps:
0, 180, 360 ... 1260 (minutes since midnight UTC)
"""
data = self.__call_api(site_id, {"res":frequency})
params = data['SiteRep']['Wx']['Param']
forecast = Forecast()
forecast.data_date = data['SiteRep']['DV']['dataDate']
forecast.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)
forecast.continent = data['SiteRep']['DV']['Location']['continent']
forecast.country = data['SiteRep']['DV']['Location']['country']
forecast.name = data['SiteRep']['DV']['Location']['name']
forecast.longitude = data['SiteRep']['DV']['Location']['lon']
forecast.latitude = data['SiteRep']['DV']['Location']['lat']
forecast.id = data['SiteRep']['DV']['Location']['i']
forecast.elevation = data['SiteRep']['DV']['Location']['elevation']
for day in data['SiteRep']['DV']['Location']['Period']:
new_day = Day()
new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
for timestep in day['Rep']:
new_timestep = Timestep()
if timestep['$'] == "Day":
cur_elements = ELEMENTS['Day']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
+ timedelta(hours=12)
elif timestep['$'] == "Night":
cur_elements = ELEMENTS['Night']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
else:
cur_elements = ELEMENTS['Default']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
+ timedelta(minutes=int(timestep['$']))
if frequency == 'daily':
new_timestep.name = timestep['$']
elif frequency == '3hourly':
new_timestep.name = int(timestep['$'])
new_timestep.weather = \
Element(cur_elements['W'],
timestep[cur_elements['W']],
self._get_wx_units(params, cur_elements['W']))
new_timestep.weather.text = self._weather_to_text(int(timestep[cur_elements['W']]))
new_timestep.temperature = \
Element(cur_elements['T'],
int(timestep[cur_elements['T']]),
self._get_wx_units(params, cur_elements['T']))
new_timestep.feels_like_temperature = \
Element(cur_elements['F'],
int(timestep[cur_elements['F']]),
self._get_wx_units(params, cur_elements['F']))
new_timestep.wind_speed = \
Element(cur_elements['S'],
int(timestep[cur_elements['S']]),
self._get_wx_units(params, cur_elements['S']))
new_timestep.wind_direction = \
Element(cur_elements['D'],
timestep[cur_elements['D']],
self._get_wx_units(params, cur_elements['D']))
new_timestep.wind_gust = \
Element(cur_elements['G'],
int(timestep[cur_elements['G']]),
self._get_wx_units(params, cur_elements['G']))
new_timestep.visibility = \
Element(cur_elements['V'],
timestep[cur_elements['V']],
self._get_wx_units(params, cur_elements['V']))
new_timestep.precipitation = \
Element(cur_elements['Pp'],
int(timestep[cur_elements['Pp']]),
self._get_wx_units(params, cur_elements['Pp']))
new_timestep.humidity = \
Element(cur_elements['H'],
int(timestep[cur_elements['H']]),
self._get_wx_units(params, cur_elements['H']))
if 'U' in cur_elements and cur_elements['U'] in timestep:
new_timestep.uv = \
Element(cur_elements['U'],
timestep[cur_elements['U']],
self._get_wx_units(params, cur_elements['U']))
new_day.timesteps.append(new_timestep)
forecast.days.append(new_day)
return forecast
def get_observation_sites(self):
"""
This function returns a list of Site objects for which observations are available.
"""
if (time() - self.observation_sites_last_update) > self.observation_sites_update_time:
self.observation_sites_last_update = time()
data = self.__call_api("sitelist/", None, OBSERVATION_URL)
sites = list()
for jsoned in data['Locations']['Location']:
site = Site()
site.name = jsoned['name']
site.id = jsoned['id']
site.latitude = jsoned['latitude']
site.longitude = jsoned['longitude']
if 'region' in jsoned:
site.region = jsoned['region']
if 'elevation' in jsoned:
site.elevation = jsoned['elevation']
if 'unitaryAuthArea' in jsoned:
site.unitaryAuthArea = jsoned['unitaryAuthArea']
if 'nationalPark' in jsoned:
site.nationalPark = jsoned['nationalPark']
site.api_key = self.api_key
sites.append(site)
self.observation_sites_last_request = sites
else:
sites = observation_self.sites_last_request
return sites
def get_nearest_observation_site(self, latitude=None, longitude=None):
"""
This function returns the nearest Site to the specified
coordinates that supports observations
"""
if longitude is None:
print('ERROR: No longitude given.')
return False
if latitude is None:
print('ERROR: No latitude given.')
return False
nearest = False
distance = None
sites = self.get_observation_sites()
for site in sites:
new_distance = \
self._distance_between_coords(
float(site.longitude),
float(site.latitude),
float(longitude),
float(latitude))
if ((distance == None) or (new_distance < distance)):
distance = new_distance
nearest = site
# If the nearest site is more than 20km away, raise an error
if distance > 20:
raise APIException("There is no site within 30km.")
return nearest
def get_observations_for_site(self, site_id, frequency='hourly'):
"""
Get observations for the provided site
Returns hourly observations for the previous 24 hours
"""
data = self.__call_api(site_id,{"res":frequency}, OBSERVATION_URL)
params = data['SiteRep']['Wx']['Param']
observation = Observation()
observation.data_date = data['SiteRep']['DV']['dataDate']
observation.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)
observation.continent = data['SiteRep']['DV']['Location']['continent']
observation.country = data['SiteRep']['DV']['Location']['country']
observation.name = data['SiteRep']['DV']['Location']['name']
observation.longitude = data['SiteRep']['DV']['Location']['lon']
observation.latitude = data['SiteRep']['DV']['Location']['lat']
observation.id = data['SiteRep']['DV']['Location']['i']
observation.elevation = data['SiteRep']['DV']['Location']['elevation']
for day in data['SiteRep']['DV']['Location']['Period']:
new_day = Day()
new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
# If the day only has 1 timestep, put it into a list by itself so it can be treated
# the same as a day with multiple timesteps
if type(day['Rep']) is not list:
day['Rep'] = [day['Rep']]
for timestep in day['Rep']:
# As stated in
# https://www.metoffice.gov.uk/datapoint/product/uk-hourly-site-specific-observations,
# some sites do not have all parameters available for
# observations. The documentation does not state which
# fields may be absent. If the parameter is not available,
# nothing is returned from the API. If this happens the
# value of the element is set to 'Not reported'. This may
# change to the element not being assigned to the timestep.
new_timestep = Timestep()
# Assume the '$' field is always present.
new_timestep.name = int(timestep['$'])
cur_elements = ELEMENTS['Observation']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) + timedelta(minutes=int(timestep['$']))
if cur_elements['W'] in timestep:
new_timestep.weather = \
Element(cur_elements['W'],
timestep[cur_elements['W']],
self._get_wx_units(params, cur_elements['W']))
new_timestep.weather.text = \
self._weather_to_text(int(timestep[cur_elements['W']]))
else:
new_timestep.weather = \
Element(cur_elements['W'],
'Not reported')
if cur_elements['T'] in timestep:
new_timestep.temperature = \
Element(cur_elements['T'],
float(timestep[cur_elements['T']]),
self._get_wx_units(params, cur_elements['T']))
else:
new_timestep.temperature = \
Element(cur_elements['T'],
'Not reported')
if 'S' in timestep:
new_timestep.wind_speed = \
Element(cur_elements['S'],
int(timestep[cur_elements['S']]),
self._get_wx_units(params, cur_elements['S']))
else:
new_timestep.wind_speed = \
Element(cur_elements['S'],
'Not reported')
if 'D' in timestep:
new_timestep.wind_direction = \
Element(cur_elements['D'],
timestep[cur_elements['D']],
self._get_wx_units(params, cur_elements['D']))
else:
new_timestep.wind_direction = \
Element(cur_elements['D'],
'Not reported')
if cur_elements['V'] in timestep:
new_timestep.visibility = \
Element(cur_elements['V'],
int(timestep[cur_elements['V']]),
self._get_wx_units(params, cur_elements['V']))
new_timestep.visibility.text = self._visibility_to_text(int(timestep[cur_elements['V']]))
else:
new_timestep.visibility = \
Element(cur_elements['V'],
'Not reported')
if cur_elements['H'] in timestep:
new_timestep.humidity = \
Element(cur_elements['H'],
float(timestep[cur_elements['H']]),
self._get_wx_units(params, cur_elements['H']))
else:
new_timestep.humidity = \
Element(cur_elements['H'],
'Not reported')
if cur_elements['Dp'] in timestep:
new_timestep.dew_point = \
Element(cur_elements['Dp'],
float(timestep[cur_elements['Dp']]),
self._get_wx_units(params,
cur_elements['Dp']))
else:
new_timestep.dew_point = \
Element(cur_elements['Dp'],
'Not reported')
if cur_elements['P'] in timestep:
new_timestep.pressure = \
Element(cur_elements['P'],
float(timestep[cur_elements['P']]),
self._get_wx_units(params, cur_elements['P']))
else:
new_timestep.pressure = \
Element(cur_elements['P'],
'Not reported')
if cur_elements['Pt'] in timestep:
new_timestep.pressure_tendency = \
Element(cur_elements['Pt'],
timestep[cur_elements['Pt']],
self._get_wx_units(params, cur_elements['Pt']))
else:
new_timestep.pressure_tendency = \
Element(cur_elements['Pt'],
'Not reported')
new_day.timesteps.append(new_timestep)
observation.days.append(new_day)
return observation
|
jacobtomlinson/datapoint-python | datapoint/Manager.py | Manager.get_forecast_sites | python | def get_forecast_sites(self):
time_now = time()
if (time_now - self.forecast_sites_last_update) > self.forecast_sites_update_time or self.forecast_sites_last_request is None:
data = self.__call_api("sitelist/")
sites = list()
for jsoned in data['Locations']['Location']:
site = Site()
site.name = jsoned['name']
site.id = jsoned['id']
site.latitude = jsoned['latitude']
site.longitude = jsoned['longitude']
if 'region' in jsoned:
site.region = jsoned['region']
if 'elevation' in jsoned:
site.elevation = jsoned['elevation']
if 'unitaryAuthArea' in jsoned:
site.unitaryAuthArea = jsoned['unitaryAuthArea']
if 'nationalPark' in jsoned:
site.nationalPark = jsoned['nationalPark']
site.api_key = self.api_key
sites.append(site)
self.forecast_sites_last_request = sites
# Only set self.sites_last_update once self.sites_last_request has
# been set
self.forecast_sites_last_update = time_now
else:
sites = self.forecast_sites_last_request
return sites | This function returns a list of Site object. | train | https://github.com/jacobtomlinson/datapoint-python/blob/1d3f596f21975f42c1484f5a9c3ff057de0b47ae/datapoint/Manager.py#L239-L278 | [
"def __call_api(self, path, params=None, api_url=FORECAST_URL):\n \"\"\"\n Call the datapoint api using the requests module\n\n \"\"\"\n if not params:\n params = dict()\n payload = {'key': self.api_key}\n payload.update(params)\n url = \"%s/%s\" % (api_url, path)\n\n # Add a timeout to the request.\n # The value of 1 second is based on attempting 100 connections to\n # datapoint and taking ten times the mean connection time (rounded up).\n # Could expose to users in the functions which need to call the api.\n #req = requests.get(url, params=payload, timeout=1)\n # The wrapper function __retry_session returns a requests.Session\n # object. This has a .get() function like requests.get(), so the use\n # doesn't change here.\n\n sess = self.__retry_session()\n req = sess.get(url, params=payload, timeout=1)\n\n try:\n data = req.json()\n except ValueError:\n raise APIException(\"DataPoint has not returned any data, this could be due to an incorrect API key\")\n self.call_response = data\n if req.status_code != 200:\n msg = [data[m] for m in (\"message\", \"error_message\", \"status\") \\\n if m in data][0]\n raise Exception(msg)\n return data\n"
] | class Manager(object):
"""
Datapoint Manager object
"""
def __init__(self, api_key=""):
self.api_key = api_key
self.call_response = None
# The list of sites changes infrequently so limit to requesting it
# every hour.
self.forecast_sites_last_update = 0
self.forecast_sites_last_request = None
self.forecast_sites_update_time = 3600
self.observation_sites_last_update = 0
self.observation_sites_last_request = None
self.observation_sites_update_time = 3600
self.regions = RegionManager(self.api_key)
def __retry_session(self, retries=10, backoff_factor=0.3,
status_forcelist=(500, 502, 504),
session=None):
"""
Retry the connection using requests if it fails. Use this as a wrapper
to request from datapoint
"""
# requests.Session allows finer control, which is needed to use the
# retrying code
the_session = session or requests.Session()
# The Retry object manages the actual retrying
retry = Retry(total=retries, read=retries, connect=retries,
backoff_factor=backoff_factor,
status_forcelist=status_forcelist)
adapter = HTTPAdapter(max_retries=retry)
the_session.mount('http://', adapter)
the_session.mount('https://', adapter)
return the_session
def __call_api(self, path, params=None, api_url=FORECAST_URL):
"""
Call the datapoint api using the requests module
"""
if not params:
params = dict()
payload = {'key': self.api_key}
payload.update(params)
url = "%s/%s" % (api_url, path)
# Add a timeout to the request.
# The value of 1 second is based on attempting 100 connections to
# datapoint and taking ten times the mean connection time (rounded up).
# Could expose to users in the functions which need to call the api.
#req = requests.get(url, params=payload, timeout=1)
# The wrapper function __retry_session returns a requests.Session
# object. This has a .get() function like requests.get(), so the use
# doesn't change here.
sess = self.__retry_session()
req = sess.get(url, params=payload, timeout=1)
try:
data = req.json()
except ValueError:
raise APIException("DataPoint has not returned any data, this could be due to an incorrect API key")
self.call_response = data
if req.status_code != 200:
msg = [data[m] for m in ("message", "error_message", "status") \
if m in data][0]
raise Exception(msg)
return data
def _distance_between_coords(self, lon1, lat1, lon2, lat2):
"""
Calculate the great circle distance between two points
on the earth (specified in decimal degrees).
Haversine formula states that:
d = 2 * r * arcsin(sqrt(sin^2((lat1 - lat2) / 2 +
cos(lat1)cos(lat2)sin^2((lon1 - lon2) / 2))))
where r is the radius of the sphere. This assumes the earth is spherical.
"""
# Convert the coordinates of the points to radians.
lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
r = 6371
d_hav = 2 * r * asin(sqrt((sin((lat1 - lat2) / 2))**2 + \
cos(lat1) * cos(lat2) * (sin((lon1 - lon2) / 2)**2 )))
return d_hav
def _get_wx_units(self, params, name):
"""
Give the Wx array returned from datapoint and an element
name and return the units for that element.
"""
units = ""
for param in params:
if str(name) == str(param['name']):
units = param['units']
return units
def _weather_to_text(self, code):
if not isinstance(code, (int, long)):
raise ValueError("Weather code must be an integer not", type(code))
if code < 0 or code > 30:
raise ValueError("Weather code outof bounds, should be 0-30")
text = WEATHER_CODES[str(code)]
return text
def _visibility_to_text(self, distance):
"""
Convert observed visibility in metres to text used in forecast
"""
if not isinstance(distance, (int, long)):
raise ValueError("Distance must be an integer not", type(distance))
if distance < 0:
raise ValueError("Distance out of bounds, should be 0 or greater")
if 0 <= distance < 1000:
return 'VP'
elif 1000 <= distance < 4000:
return 'PO'
elif 4000 <= distance < 10000:
return 'MO'
elif 10000 <= distance < 20000:
return 'GO'
elif 20000 <= distance < 40000:
return 'VG'
else:
return 'EX'
def get_all_sites(self):
    """
    Deprecated. This function returns a list of Site object.

    Kept as a backwards-compatible alias; new code should call
    get_forecast_sites() directly.
    """
    warning_message = 'This function is deprecated. Use get_forecast_sites() instead'
    # stacklevel=2 points the DeprecationWarning at the caller's line.
    warn(warning_message, DeprecationWarning, stacklevel=2)
    return self.get_forecast_sites()
def get_nearest_site(self, latitude=None, longitude=None):
    """
    Deprecated. This function returns nearest Site object to the specified
    coordinates.

    Kept as a backwards-compatible alias; new code should call
    get_nearest_forecast_site() directly.
    """
    warning_message = 'This function is deprecated. Use get_nearest_forecast_site() instead'
    # stacklevel=2 points the DeprecationWarning at the caller's line.
    warn(warning_message, DeprecationWarning, stacklevel=2)
    return self.get_nearest_forecast_site(latitude, longitude)
def get_nearest_forecast_site(self, latitude=None, longitude=None):
    """
    This function returns the nearest Site object to the specified
    coordinates.

    Returns False when either coordinate is missing or when no forecast
    sites are available. Raises APIException when the nearest site is
    more than 30km away.
    """
    if longitude is None:
        # Fixed: this branch previously printed 'No latitude given.'
        print('ERROR: No longitude given.')
        return False
    if latitude is None:
        print('ERROR: No latitude given.')
        return False

    nearest = False
    distance = None
    sites = self.get_forecast_sites()
    # Sometimes there is a TypeError exception here: sites is None
    # So, sometimes self.get_all_sites() has returned None.
    for site in sites:
        new_distance = \
            self._distance_between_coords(
                float(site.longitude),
                float(site.latitude),
                float(longitude),
                float(latitude))

        if distance is None or new_distance < distance:
            distance = new_distance
            nearest = site

    # No sites at all: keep the historical 'False' result rather than
    # letting the comparison below fail on None (TypeError on Python 3).
    if distance is None:
        return nearest

    # If the nearest site is more than 30km away, raise an error
    if distance > 30:
        raise APIException("There is no site within 30km.")

    return nearest
def get_forecast_for_site(self, site_id, frequency="daily"):
    """
    Get a forecast for the provided site

    A frequency of "daily" will return two timesteps:
    "Day" and "Night".

    A frequency of "3hourly" will return 8 timesteps:
    0, 180, 360 ... 1260 (minutes since midnight UTC)

    Returns a Forecast object whose days each hold the parsed
    Timestep objects.
    """
    data = self.__call_api(site_id, {"res":frequency})
    params = data['SiteRep']['Wx']['Param']
    forecast = Forecast()

    # NOTE(review): the first assignment is immediately overwritten by
    # the parsed datetime below -- presumably leftover code.
    forecast.data_date = data['SiteRep']['DV']['dataDate']
    forecast.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)

    # Copy the location metadata onto the Forecast object.
    forecast.continent = data['SiteRep']['DV']['Location']['continent']
    forecast.country = data['SiteRep']['DV']['Location']['country']
    forecast.name = data['SiteRep']['DV']['Location']['name']
    forecast.longitude = data['SiteRep']['DV']['Location']['lon']
    forecast.latitude = data['SiteRep']['DV']['Location']['lat']
    forecast.id = data['SiteRep']['DV']['Location']['i']
    forecast.elevation = data['SiteRep']['DV']['Location']['elevation']

    for day in data['SiteRep']['DV']['Location']['Period']:
        new_day = Day()
        new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)

        for timestep in day['Rep']:
            new_timestep = Timestep()

            # '$' identifies the timestep: "Day"/"Night" for daily
            # forecasts, otherwise minutes since midnight UTC. Pick the
            # element-name mapping and the timestamp accordingly.
            if timestep['$'] == "Day":
                cur_elements = ELEMENTS['Day']
                # The "Day" timestep is stamped as midday UTC.
                new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
                    + timedelta(hours=12)
            elif timestep['$'] == "Night":
                cur_elements = ELEMENTS['Night']
                new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
            else:
                cur_elements = ELEMENTS['Default']
                new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
                    + timedelta(minutes=int(timestep['$']))

            # Daily timesteps keep the string name ("Day"/"Night");
            # 3hourly ones use the integer minute offset.
            if frequency == 'daily':
                new_timestep.name = timestep['$']
            elif frequency == '3hourly':
                new_timestep.name = int(timestep['$'])

            # Build one Element per weather parameter, resolving the
            # units from the Wx Param array.
            new_timestep.weather = \
                Element(cur_elements['W'],
                        timestep[cur_elements['W']],
                        self._get_wx_units(params, cur_elements['W']))
            new_timestep.weather.text = self._weather_to_text(int(timestep[cur_elements['W']]))

            new_timestep.temperature = \
                Element(cur_elements['T'],
                        int(timestep[cur_elements['T']]),
                        self._get_wx_units(params, cur_elements['T']))

            new_timestep.feels_like_temperature = \
                Element(cur_elements['F'],
                        int(timestep[cur_elements['F']]),
                        self._get_wx_units(params, cur_elements['F']))

            new_timestep.wind_speed = \
                Element(cur_elements['S'],
                        int(timestep[cur_elements['S']]),
                        self._get_wx_units(params, cur_elements['S']))

            new_timestep.wind_direction = \
                Element(cur_elements['D'],
                        timestep[cur_elements['D']],
                        self._get_wx_units(params, cur_elements['D']))

            new_timestep.wind_gust = \
                Element(cur_elements['G'],
                        int(timestep[cur_elements['G']]),
                        self._get_wx_units(params, cur_elements['G']))

            new_timestep.visibility = \
                Element(cur_elements['V'],
                        timestep[cur_elements['V']],
                        self._get_wx_units(params, cur_elements['V']))

            new_timestep.precipitation = \
                Element(cur_elements['Pp'],
                        int(timestep[cur_elements['Pp']]),
                        self._get_wx_units(params, cur_elements['Pp']))

            new_timestep.humidity = \
                Element(cur_elements['H'],
                        int(timestep[cur_elements['H']]),
                        self._get_wx_units(params, cur_elements['H']))

            # UV is not present in every response (e.g. night timesteps),
            # so only attach it when the field exists.
            if 'U' in cur_elements and cur_elements['U'] in timestep:
                new_timestep.uv = \
                    Element(cur_elements['U'],
                            timestep[cur_elements['U']],
                            self._get_wx_units(params, cur_elements['U']))

            new_day.timesteps.append(new_timestep)
        forecast.days.append(new_day)

    return forecast
def get_observation_sites(self):
    """
    This function returns a list of Site objects for which observations
    are available.

    The site list changes infrequently, so the result is cached and only
    re-requested once every ``observation_sites_update_time`` seconds
    (mirroring get_forecast_sites()).
    """
    time_now = time()
    if (time_now - self.observation_sites_last_update) > self.observation_sites_update_time or self.observation_sites_last_request is None:
        data = self.__call_api("sitelist/", None, OBSERVATION_URL)
        sites = list()
        for jsoned in data['Locations']['Location']:
            site = Site()
            site.name = jsoned['name']
            site.id = jsoned['id']
            site.latitude = jsoned['latitude']
            site.longitude = jsoned['longitude']

            # Optional metadata fields are only set when present.
            if 'region' in jsoned:
                site.region = jsoned['region']
            if 'elevation' in jsoned:
                site.elevation = jsoned['elevation']
            if 'unitaryAuthArea' in jsoned:
                site.unitaryAuthArea = jsoned['unitaryAuthArea']
            if 'nationalPark' in jsoned:
                site.nationalPark = jsoned['nationalPark']

            site.api_key = self.api_key
            sites.append(site)
        self.observation_sites_last_request = sites
        # Only record the update time once the request has succeeded,
        # matching get_forecast_sites().
        self.observation_sites_last_update = time_now
    else:
        # Fixed: this branch previously read the non-existent name
        # ``observation_self.sites_last_request`` and raised NameError.
        sites = self.observation_sites_last_request
    return sites
def get_nearest_observation_site(self, latitude=None, longitude=None):
    """
    This function returns the nearest Site to the specified
    coordinates that supports observations.

    Returns False when either coordinate is missing or when no
    observation sites are available. Raises APIException when the
    nearest site is more than 20km away.
    """
    if longitude is None:
        print('ERROR: No longitude given.')
        return False
    if latitude is None:
        print('ERROR: No latitude given.')
        return False

    nearest = False
    distance = None
    sites = self.get_observation_sites()
    for site in sites:
        new_distance = \
            self._distance_between_coords(
                float(site.longitude),
                float(site.latitude),
                float(longitude),
                float(latitude))

        if distance is None or new_distance < distance:
            distance = new_distance
            nearest = site

    # No sites at all: keep the historical 'False' result rather than
    # letting the comparison below fail on None (TypeError on Python 3).
    if distance is None:
        return nearest

    # If the nearest site is more than 20km away, raise an error.
    # Fixed: the message previously claimed 30km although the threshold
    # checked here is 20km.
    if distance > 20:
        raise APIException("There is no site within 20km.")

    return nearest
def get_observations_for_site(self, site_id, frequency='hourly'):
    """
    Get observations for the provided site

    Returns hourly observations for the previous 24 hours as an
    Observation object whose days each hold the parsed Timestep
    objects. Parameters missing from the API response are represented
    by Elements whose value is the string 'Not reported'.
    """
    data = self.__call_api(site_id,{"res":frequency}, OBSERVATION_URL)
    params = data['SiteRep']['Wx']['Param']
    observation = Observation()

    # NOTE(review): the first assignment is immediately overwritten by
    # the parsed datetime below -- presumably leftover code.
    observation.data_date = data['SiteRep']['DV']['dataDate']
    observation.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)

    # Copy the location metadata onto the Observation object.
    observation.continent = data['SiteRep']['DV']['Location']['continent']
    observation.country = data['SiteRep']['DV']['Location']['country']
    observation.name = data['SiteRep']['DV']['Location']['name']
    observation.longitude = data['SiteRep']['DV']['Location']['lon']
    observation.latitude = data['SiteRep']['DV']['Location']['lat']
    observation.id = data['SiteRep']['DV']['Location']['i']
    observation.elevation = data['SiteRep']['DV']['Location']['elevation']

    for day in data['SiteRep']['DV']['Location']['Period']:
        new_day = Day()
        new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)

        # If the day only has 1 timestep, put it into a list by itself so it can be treated
        # the same as a day with multiple timesteps
        if type(day['Rep']) is not list:
            day['Rep'] = [day['Rep']]

        for timestep in day['Rep']:
            # As stated in
            # https://www.metoffice.gov.uk/datapoint/product/uk-hourly-site-specific-observations,
            # some sites do not have all parameters available for
            # observations. The documentation does not state which
            # fields may be absent. If the parameter is not available,
            # nothing is returned from the API. If this happens the
            # value of the element is set to 'Not reported'. This may
            # change to the element not being assigned to the timestep.
            new_timestep = Timestep()
            # Assume the '$' field is always present. It is used below
            # as the number of minutes since midnight UTC.
            new_timestep.name = int(timestep['$'])
            cur_elements = ELEMENTS['Observation']
            new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) + timedelta(minutes=int(timestep['$']))

            if cur_elements['W'] in timestep:
                new_timestep.weather = \
                    Element(cur_elements['W'],
                            timestep[cur_elements['W']],
                            self._get_wx_units(params, cur_elements['W']))
                new_timestep.weather.text = \
                    self._weather_to_text(int(timestep[cur_elements['W']]))
            else:
                new_timestep.weather = \
                    Element(cur_elements['W'],
                            'Not reported')

            if cur_elements['T'] in timestep:
                new_timestep.temperature = \
                    Element(cur_elements['T'],
                            float(timestep[cur_elements['T']]),
                            self._get_wx_units(params, cur_elements['T']))
            else:
                new_timestep.temperature = \
                    Element(cur_elements['T'],
                            'Not reported')

            # NOTE(review): the wind checks below test the literal keys
            # 'S' and 'D' while every other element goes through
            # cur_elements -- presumably equivalent; confirm against
            # ELEMENTS['Observation'].
            if 'S' in timestep:
                new_timestep.wind_speed = \
                    Element(cur_elements['S'],
                            int(timestep[cur_elements['S']]),
                            self._get_wx_units(params, cur_elements['S']))
            else:
                new_timestep.wind_speed = \
                    Element(cur_elements['S'],
                            'Not reported')

            if 'D' in timestep:
                new_timestep.wind_direction = \
                    Element(cur_elements['D'],
                            timestep[cur_elements['D']],
                            self._get_wx_units(params, cur_elements['D']))
            else:
                new_timestep.wind_direction = \
                    Element(cur_elements['D'],
                            'Not reported')

            if cur_elements['V'] in timestep:
                new_timestep.visibility = \
                    Element(cur_elements['V'],
                            int(timestep[cur_elements['V']]),
                            self._get_wx_units(params, cur_elements['V']))
                new_timestep.visibility.text = self._visibility_to_text(int(timestep[cur_elements['V']]))
            else:
                new_timestep.visibility = \
                    Element(cur_elements['V'],
                            'Not reported')

            if cur_elements['H'] in timestep:
                new_timestep.humidity = \
                    Element(cur_elements['H'],
                            float(timestep[cur_elements['H']]),
                            self._get_wx_units(params, cur_elements['H']))
            else:
                new_timestep.humidity = \
                    Element(cur_elements['H'],
                            'Not reported')

            if cur_elements['Dp'] in timestep:
                new_timestep.dew_point = \
                    Element(cur_elements['Dp'],
                            float(timestep[cur_elements['Dp']]),
                            self._get_wx_units(params,
                                               cur_elements['Dp']))
            else:
                new_timestep.dew_point = \
                    Element(cur_elements['Dp'],
                            'Not reported')

            if cur_elements['P'] in timestep:
                new_timestep.pressure = \
                    Element(cur_elements['P'],
                            float(timestep[cur_elements['P']]),
                            self._get_wx_units(params, cur_elements['P']))
            else:
                new_timestep.pressure = \
                    Element(cur_elements['P'],
                            'Not reported')

            if cur_elements['Pt'] in timestep:
                new_timestep.pressure_tendency = \
                    Element(cur_elements['Pt'],
                            timestep[cur_elements['Pt']],
                            self._get_wx_units(params, cur_elements['Pt']))
            else:
                new_timestep.pressure_tendency = \
                    Element(cur_elements['Pt'],
                            'Not reported')

            new_day.timesteps.append(new_timestep)
        observation.days.append(new_day)

    return observation
|
jacobtomlinson/datapoint-python | datapoint/Manager.py | Manager.get_nearest_site | python | def get_nearest_site(self, latitude=None, longitude=None):
warning_message = 'This function is deprecated. Use get_nearest_forecast_site() instead'
warn(warning_message, DeprecationWarning, stacklevel=2)
return self.get_nearest_forecast_site(latitude, longitude) | Deprecated. This function returns nearest Site object to the specified
coordinates. | train | https://github.com/jacobtomlinson/datapoint-python/blob/1d3f596f21975f42c1484f5a9c3ff057de0b47ae/datapoint/Manager.py#L280-L288 | [
"def get_nearest_forecast_site(self, latitude=None, longitude=None):\n \"\"\"\n This function returns the nearest Site object to the specified\n coordinates.\n \"\"\"\n if longitude is None:\n print('ERROR: No latitude given.')\n return False\n\n if latitude is None:\n print('ERROR: No latitude given.')\n return False\n\n nearest = False\n distance = None\n sites = self.get_forecast_sites()\n # Sometimes there is a TypeError exception here: sites is None\n # So, sometimes self.get_all_sites() has returned None.\n for site in sites:\n new_distance = \\\n self._distance_between_coords(\n float(site.longitude),\n float(site.latitude),\n float(longitude),\n float(latitude))\n\n if ((distance == None) or (new_distance < distance)):\n distance = new_distance\n nearest = site\n\n # If the nearest site is more than 30km away, raise an error\n\n if distance > 30:\n raise APIException(\"There is no site within 30km.\")\n\n return nearest\n"
] | class Manager(object):
"""
Datapoint Manager object
"""
def __init__(self, api_key=""):
self.api_key = api_key
self.call_response = None
# The list of sites changes infrequently so limit to requesting it
# every hour.
self.forecast_sites_last_update = 0
self.forecast_sites_last_request = None
self.forecast_sites_update_time = 3600
self.observation_sites_last_update = 0
self.observation_sites_last_request = None
self.observation_sites_update_time = 3600
self.regions = RegionManager(self.api_key)
def __retry_session(self, retries=10, backoff_factor=0.3,
status_forcelist=(500, 502, 504),
session=None):
"""
Retry the connection using requests if it fails. Use this as a wrapper
to request from datapoint
"""
# requests.Session allows finer control, which is needed to use the
# retrying code
the_session = session or requests.Session()
# The Retry object manages the actual retrying
retry = Retry(total=retries, read=retries, connect=retries,
backoff_factor=backoff_factor,
status_forcelist=status_forcelist)
adapter = HTTPAdapter(max_retries=retry)
the_session.mount('http://', adapter)
the_session.mount('https://', adapter)
return the_session
def __call_api(self, path, params=None, api_url=FORECAST_URL):
"""
Call the datapoint api using the requests module
"""
if not params:
params = dict()
payload = {'key': self.api_key}
payload.update(params)
url = "%s/%s" % (api_url, path)
# Add a timeout to the request.
# The value of 1 second is based on attempting 100 connections to
# datapoint and taking ten times the mean connection time (rounded up).
# Could expose to users in the functions which need to call the api.
#req = requests.get(url, params=payload, timeout=1)
# The wrapper function __retry_session returns a requests.Session
# object. This has a .get() function like requests.get(), so the use
# doesn't change here.
sess = self.__retry_session()
req = sess.get(url, params=payload, timeout=1)
try:
data = req.json()
except ValueError:
raise APIException("DataPoint has not returned any data, this could be due to an incorrect API key")
self.call_response = data
if req.status_code != 200:
msg = [data[m] for m in ("message", "error_message", "status") \
if m in data][0]
raise Exception(msg)
return data
def _distance_between_coords(self, lon1, lat1, lon2, lat2):
"""
Calculate the great circle distance between two points
on the earth (specified in decimal degrees).
Haversine formula states that:
d = 2 * r * arcsin(sqrt(sin^2((lat1 - lat2) / 2 +
cos(lat1)cos(lat2)sin^2((lon1 - lon2) / 2))))
where r is the radius of the sphere. This assumes the earth is spherical.
"""
# Convert the coordinates of the points to radians.
lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
r = 6371
d_hav = 2 * r * asin(sqrt((sin((lat1 - lat2) / 2))**2 + \
cos(lat1) * cos(lat2) * (sin((lon1 - lon2) / 2)**2 )))
return d_hav
def _get_wx_units(self, params, name):
"""
Give the Wx array returned from datapoint and an element
name and return the units for that element.
"""
units = ""
for param in params:
if str(name) == str(param['name']):
units = param['units']
return units
def _weather_to_text(self, code):
if not isinstance(code, (int, long)):
raise ValueError("Weather code must be an integer not", type(code))
if code < 0 or code > 30:
raise ValueError("Weather code outof bounds, should be 0-30")
text = WEATHER_CODES[str(code)]
return text
def _visibility_to_text(self, distance):
"""
Convert observed visibility in metres to text used in forecast
"""
if not isinstance(distance, (int, long)):
raise ValueError("Distance must be an integer not", type(distance))
if distance < 0:
raise ValueError("Distance out of bounds, should be 0 or greater")
if 0 <= distance < 1000:
return 'VP'
elif 1000 <= distance < 4000:
return 'PO'
elif 4000 <= distance < 10000:
return 'MO'
elif 10000 <= distance < 20000:
return 'GO'
elif 20000 <= distance < 40000:
return 'VG'
else:
return 'EX'
def get_all_sites(self):
"""
Deprecated. This function returns a list of Site object.
"""
warning_message = 'This function is deprecated. Use get_forecast_sites() instead'
warn(warning_message, DeprecationWarning, stacklevel=2)
return self.get_forecast_sites()
def get_forecast_sites(self):
"""
This function returns a list of Site object.
"""
time_now = time()
if (time_now - self.forecast_sites_last_update) > self.forecast_sites_update_time or self.forecast_sites_last_request is None:
data = self.__call_api("sitelist/")
sites = list()
for jsoned in data['Locations']['Location']:
site = Site()
site.name = jsoned['name']
site.id = jsoned['id']
site.latitude = jsoned['latitude']
site.longitude = jsoned['longitude']
if 'region' in jsoned:
site.region = jsoned['region']
if 'elevation' in jsoned:
site.elevation = jsoned['elevation']
if 'unitaryAuthArea' in jsoned:
site.unitaryAuthArea = jsoned['unitaryAuthArea']
if 'nationalPark' in jsoned:
site.nationalPark = jsoned['nationalPark']
site.api_key = self.api_key
sites.append(site)
self.forecast_sites_last_request = sites
# Only set self.sites_last_update once self.sites_last_request has
# been set
self.forecast_sites_last_update = time_now
else:
sites = self.forecast_sites_last_request
return sites
def get_nearest_forecast_site(self, latitude=None, longitude=None):
"""
This function returns the nearest Site object to the specified
coordinates.
"""
if longitude is None:
print('ERROR: No latitude given.')
return False
if latitude is None:
print('ERROR: No latitude given.')
return False
nearest = False
distance = None
sites = self.get_forecast_sites()
# Sometimes there is a TypeError exception here: sites is None
# So, sometimes self.get_all_sites() has returned None.
for site in sites:
new_distance = \
self._distance_between_coords(
float(site.longitude),
float(site.latitude),
float(longitude),
float(latitude))
if ((distance == None) or (new_distance < distance)):
distance = new_distance
nearest = site
# If the nearest site is more than 30km away, raise an error
if distance > 30:
raise APIException("There is no site within 30km.")
return nearest
def get_forecast_for_site(self, site_id, frequency="daily"):
"""
Get a forecast for the provided site
A frequency of "daily" will return two timesteps:
"Day" and "Night".
A frequency of "3hourly" will return 8 timesteps:
0, 180, 360 ... 1260 (minutes since midnight UTC)
"""
data = self.__call_api(site_id, {"res":frequency})
params = data['SiteRep']['Wx']['Param']
forecast = Forecast()
forecast.data_date = data['SiteRep']['DV']['dataDate']
forecast.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)
forecast.continent = data['SiteRep']['DV']['Location']['continent']
forecast.country = data['SiteRep']['DV']['Location']['country']
forecast.name = data['SiteRep']['DV']['Location']['name']
forecast.longitude = data['SiteRep']['DV']['Location']['lon']
forecast.latitude = data['SiteRep']['DV']['Location']['lat']
forecast.id = data['SiteRep']['DV']['Location']['i']
forecast.elevation = data['SiteRep']['DV']['Location']['elevation']
for day in data['SiteRep']['DV']['Location']['Period']:
new_day = Day()
new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
for timestep in day['Rep']:
new_timestep = Timestep()
if timestep['$'] == "Day":
cur_elements = ELEMENTS['Day']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
+ timedelta(hours=12)
elif timestep['$'] == "Night":
cur_elements = ELEMENTS['Night']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
else:
cur_elements = ELEMENTS['Default']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
+ timedelta(minutes=int(timestep['$']))
if frequency == 'daily':
new_timestep.name = timestep['$']
elif frequency == '3hourly':
new_timestep.name = int(timestep['$'])
new_timestep.weather = \
Element(cur_elements['W'],
timestep[cur_elements['W']],
self._get_wx_units(params, cur_elements['W']))
new_timestep.weather.text = self._weather_to_text(int(timestep[cur_elements['W']]))
new_timestep.temperature = \
Element(cur_elements['T'],
int(timestep[cur_elements['T']]),
self._get_wx_units(params, cur_elements['T']))
new_timestep.feels_like_temperature = \
Element(cur_elements['F'],
int(timestep[cur_elements['F']]),
self._get_wx_units(params, cur_elements['F']))
new_timestep.wind_speed = \
Element(cur_elements['S'],
int(timestep[cur_elements['S']]),
self._get_wx_units(params, cur_elements['S']))
new_timestep.wind_direction = \
Element(cur_elements['D'],
timestep[cur_elements['D']],
self._get_wx_units(params, cur_elements['D']))
new_timestep.wind_gust = \
Element(cur_elements['G'],
int(timestep[cur_elements['G']]),
self._get_wx_units(params, cur_elements['G']))
new_timestep.visibility = \
Element(cur_elements['V'],
timestep[cur_elements['V']],
self._get_wx_units(params, cur_elements['V']))
new_timestep.precipitation = \
Element(cur_elements['Pp'],
int(timestep[cur_elements['Pp']]),
self._get_wx_units(params, cur_elements['Pp']))
new_timestep.humidity = \
Element(cur_elements['H'],
int(timestep[cur_elements['H']]),
self._get_wx_units(params, cur_elements['H']))
if 'U' in cur_elements and cur_elements['U'] in timestep:
new_timestep.uv = \
Element(cur_elements['U'],
timestep[cur_elements['U']],
self._get_wx_units(params, cur_elements['U']))
new_day.timesteps.append(new_timestep)
forecast.days.append(new_day)
return forecast
def get_observation_sites(self):
"""
This function returns a list of Site objects for which observations are available.
"""
if (time() - self.observation_sites_last_update) > self.observation_sites_update_time:
self.observation_sites_last_update = time()
data = self.__call_api("sitelist/", None, OBSERVATION_URL)
sites = list()
for jsoned in data['Locations']['Location']:
site = Site()
site.name = jsoned['name']
site.id = jsoned['id']
site.latitude = jsoned['latitude']
site.longitude = jsoned['longitude']
if 'region' in jsoned:
site.region = jsoned['region']
if 'elevation' in jsoned:
site.elevation = jsoned['elevation']
if 'unitaryAuthArea' in jsoned:
site.unitaryAuthArea = jsoned['unitaryAuthArea']
if 'nationalPark' in jsoned:
site.nationalPark = jsoned['nationalPark']
site.api_key = self.api_key
sites.append(site)
self.observation_sites_last_request = sites
else:
sites = observation_self.sites_last_request
return sites
def get_nearest_observation_site(self, latitude=None, longitude=None):
"""
This function returns the nearest Site to the specified
coordinates that supports observations
"""
if longitude is None:
print('ERROR: No longitude given.')
return False
if latitude is None:
print('ERROR: No latitude given.')
return False
nearest = False
distance = None
sites = self.get_observation_sites()
for site in sites:
new_distance = \
self._distance_between_coords(
float(site.longitude),
float(site.latitude),
float(longitude),
float(latitude))
if ((distance == None) or (new_distance < distance)):
distance = new_distance
nearest = site
# If the nearest site is more than 20km away, raise an error
if distance > 20:
raise APIException("There is no site within 30km.")
return nearest
def get_observations_for_site(self, site_id, frequency='hourly'):
"""
Get observations for the provided site
Returns hourly observations for the previous 24 hours
"""
data = self.__call_api(site_id,{"res":frequency}, OBSERVATION_URL)
params = data['SiteRep']['Wx']['Param']
observation = Observation()
observation.data_date = data['SiteRep']['DV']['dataDate']
observation.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)
observation.continent = data['SiteRep']['DV']['Location']['continent']
observation.country = data['SiteRep']['DV']['Location']['country']
observation.name = data['SiteRep']['DV']['Location']['name']
observation.longitude = data['SiteRep']['DV']['Location']['lon']
observation.latitude = data['SiteRep']['DV']['Location']['lat']
observation.id = data['SiteRep']['DV']['Location']['i']
observation.elevation = data['SiteRep']['DV']['Location']['elevation']
for day in data['SiteRep']['DV']['Location']['Period']:
new_day = Day()
new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
# If the day only has 1 timestep, put it into a list by itself so it can be treated
# the same as a day with multiple timesteps
if type(day['Rep']) is not list:
day['Rep'] = [day['Rep']]
for timestep in day['Rep']:
# As stated in
# https://www.metoffice.gov.uk/datapoint/product/uk-hourly-site-specific-observations,
# some sites do not have all parameters available for
# observations. The documentation does not state which
# fields may be absent. If the parameter is not available,
# nothing is returned from the API. If this happens the
# value of the element is set to 'Not reported'. This may
# change to the element not being assigned to the timestep.
new_timestep = Timestep()
# Assume the '$' field is always present.
new_timestep.name = int(timestep['$'])
cur_elements = ELEMENTS['Observation']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) + timedelta(minutes=int(timestep['$']))
if cur_elements['W'] in timestep:
new_timestep.weather = \
Element(cur_elements['W'],
timestep[cur_elements['W']],
self._get_wx_units(params, cur_elements['W']))
new_timestep.weather.text = \
self._weather_to_text(int(timestep[cur_elements['W']]))
else:
new_timestep.weather = \
Element(cur_elements['W'],
'Not reported')
if cur_elements['T'] in timestep:
new_timestep.temperature = \
Element(cur_elements['T'],
float(timestep[cur_elements['T']]),
self._get_wx_units(params, cur_elements['T']))
else:
new_timestep.temperature = \
Element(cur_elements['T'],
'Not reported')
if 'S' in timestep:
new_timestep.wind_speed = \
Element(cur_elements['S'],
int(timestep[cur_elements['S']]),
self._get_wx_units(params, cur_elements['S']))
else:
new_timestep.wind_speed = \
Element(cur_elements['S'],
'Not reported')
if 'D' in timestep:
new_timestep.wind_direction = \
Element(cur_elements['D'],
timestep[cur_elements['D']],
self._get_wx_units(params, cur_elements['D']))
else:
new_timestep.wind_direction = \
Element(cur_elements['D'],
'Not reported')
if cur_elements['V'] in timestep:
new_timestep.visibility = \
Element(cur_elements['V'],
int(timestep[cur_elements['V']]),
self._get_wx_units(params, cur_elements['V']))
new_timestep.visibility.text = self._visibility_to_text(int(timestep[cur_elements['V']]))
else:
new_timestep.visibility = \
Element(cur_elements['V'],
'Not reported')
if cur_elements['H'] in timestep:
new_timestep.humidity = \
Element(cur_elements['H'],
float(timestep[cur_elements['H']]),
self._get_wx_units(params, cur_elements['H']))
else:
new_timestep.humidity = \
Element(cur_elements['H'],
'Not reported')
if cur_elements['Dp'] in timestep:
new_timestep.dew_point = \
Element(cur_elements['Dp'],
float(timestep[cur_elements['Dp']]),
self._get_wx_units(params,
cur_elements['Dp']))
else:
new_timestep.dew_point = \
Element(cur_elements['Dp'],
'Not reported')
if cur_elements['P'] in timestep:
new_timestep.pressure = \
Element(cur_elements['P'],
float(timestep[cur_elements['P']]),
self._get_wx_units(params, cur_elements['P']))
else:
new_timestep.pressure = \
Element(cur_elements['P'],
'Not reported')
if cur_elements['Pt'] in timestep:
new_timestep.pressure_tendency = \
Element(cur_elements['Pt'],
timestep[cur_elements['Pt']],
self._get_wx_units(params, cur_elements['Pt']))
else:
new_timestep.pressure_tendency = \
Element(cur_elements['Pt'],
'Not reported')
new_day.timesteps.append(new_timestep)
observation.days.append(new_day)
return observation
|
jacobtomlinson/datapoint-python | datapoint/Manager.py | Manager.get_nearest_forecast_site | python | def get_nearest_forecast_site(self, latitude=None, longitude=None):
if longitude is None:
print('ERROR: No latitude given.')
return False
if latitude is None:
print('ERROR: No latitude given.')
return False
nearest = False
distance = None
sites = self.get_forecast_sites()
# Sometimes there is a TypeError exception here: sites is None
# So, sometimes self.get_all_sites() has returned None.
for site in sites:
new_distance = \
self._distance_between_coords(
float(site.longitude),
float(site.latitude),
float(longitude),
float(latitude))
if ((distance == None) or (new_distance < distance)):
distance = new_distance
nearest = site
# If the nearest site is more than 30km away, raise an error
if distance > 30:
raise APIException("There is no site within 30km.")
return nearest | This function returns the nearest Site object to the specified
coordinates. | train | https://github.com/jacobtomlinson/datapoint-python/blob/1d3f596f21975f42c1484f5a9c3ff057de0b47ae/datapoint/Manager.py#L290-L325 | [
"def _distance_between_coords(self, lon1, lat1, lon2, lat2):\n \"\"\"\n Calculate the great circle distance between two points\n on the earth (specified in decimal degrees).\n Haversine formula states that:\n\n d = 2 * r * arcsin(sqrt(sin^2((lat1 - lat2) / 2 +\n cos(lat1)cos(lat2)sin^2((lon1 - lon2) / 2))))\n\n where r is the radius of the sphere. This assumes the earth is spherical.\n \"\"\"\n\n # Convert the coordinates of the points to radians.\n lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])\n r = 6371\n\n d_hav = 2 * r * asin(sqrt((sin((lat1 - lat2) / 2))**2 + \\\n cos(lat1) * cos(lat2) * (sin((lon1 - lon2) / 2)**2 )))\n\n return d_hav\n",
"def get_forecast_sites(self):\n \"\"\"\n This function returns a list of Site object.\n \"\"\"\n\n time_now = time()\n if (time_now - self.forecast_sites_last_update) > self.forecast_sites_update_time or self.forecast_sites_last_request is None:\n\n data = self.__call_api(\"sitelist/\")\n sites = list()\n for jsoned in data['Locations']['Location']:\n site = Site()\n site.name = jsoned['name']\n site.id = jsoned['id']\n site.latitude = jsoned['latitude']\n site.longitude = jsoned['longitude']\n\n if 'region' in jsoned:\n site.region = jsoned['region']\n\n if 'elevation' in jsoned:\n site.elevation = jsoned['elevation']\n\n if 'unitaryAuthArea' in jsoned:\n site.unitaryAuthArea = jsoned['unitaryAuthArea']\n\n if 'nationalPark' in jsoned:\n site.nationalPark = jsoned['nationalPark']\n\n site.api_key = self.api_key\n\n sites.append(site)\n self.forecast_sites_last_request = sites\n # Only set self.sites_last_update once self.sites_last_request has\n # been set\n self.forecast_sites_last_update = time_now\n else:\n sites = self.forecast_sites_last_request\n\n return sites\n"
] | class Manager(object):
"""
Datapoint Manager object
"""
def __init__(self, api_key=""):
self.api_key = api_key
self.call_response = None
# The list of sites changes infrequently so limit to requesting it
# every hour.
self.forecast_sites_last_update = 0
self.forecast_sites_last_request = None
self.forecast_sites_update_time = 3600
self.observation_sites_last_update = 0
self.observation_sites_last_request = None
self.observation_sites_update_time = 3600
self.regions = RegionManager(self.api_key)
def __retry_session(self, retries=10, backoff_factor=0.3,
status_forcelist=(500, 502, 504),
session=None):
"""
Retry the connection using requests if it fails. Use this as a wrapper
to request from datapoint
"""
# requests.Session allows finer control, which is needed to use the
# retrying code
the_session = session or requests.Session()
# The Retry object manages the actual retrying
retry = Retry(total=retries, read=retries, connect=retries,
backoff_factor=backoff_factor,
status_forcelist=status_forcelist)
adapter = HTTPAdapter(max_retries=retry)
the_session.mount('http://', adapter)
the_session.mount('https://', adapter)
return the_session
def __call_api(self, path, params=None, api_url=FORECAST_URL):
"""
Call the datapoint api using the requests module
"""
if not params:
params = dict()
payload = {'key': self.api_key}
payload.update(params)
url = "%s/%s" % (api_url, path)
# Add a timeout to the request.
# The value of 1 second is based on attempting 100 connections to
# datapoint and taking ten times the mean connection time (rounded up).
# Could expose to users in the functions which need to call the api.
#req = requests.get(url, params=payload, timeout=1)
# The wrapper function __retry_session returns a requests.Session
# object. This has a .get() function like requests.get(), so the use
# doesn't change here.
sess = self.__retry_session()
req = sess.get(url, params=payload, timeout=1)
try:
data = req.json()
except ValueError:
raise APIException("DataPoint has not returned any data, this could be due to an incorrect API key")
self.call_response = data
if req.status_code != 200:
msg = [data[m] for m in ("message", "error_message", "status") \
if m in data][0]
raise Exception(msg)
return data
def _distance_between_coords(self, lon1, lat1, lon2, lat2):
"""
Calculate the great circle distance between two points
on the earth (specified in decimal degrees).
Haversine formula states that:
d = 2 * r * arcsin(sqrt(sin^2((lat1 - lat2) / 2 +
cos(lat1)cos(lat2)sin^2((lon1 - lon2) / 2))))
where r is the radius of the sphere. This assumes the earth is spherical.
"""
# Convert the coordinates of the points to radians.
lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
r = 6371
d_hav = 2 * r * asin(sqrt((sin((lat1 - lat2) / 2))**2 + \
cos(lat1) * cos(lat2) * (sin((lon1 - lon2) / 2)**2 )))
return d_hav
def _get_wx_units(self, params, name):
"""
Give the Wx array returned from datapoint and an element
name and return the units for that element.
"""
units = ""
for param in params:
if str(name) == str(param['name']):
units = param['units']
return units
def _weather_to_text(self, code):
if not isinstance(code, (int, long)):
raise ValueError("Weather code must be an integer not", type(code))
if code < 0 or code > 30:
raise ValueError("Weather code outof bounds, should be 0-30")
text = WEATHER_CODES[str(code)]
return text
def _visibility_to_text(self, distance):
"""
Convert observed visibility in metres to text used in forecast
"""
if not isinstance(distance, (int, long)):
raise ValueError("Distance must be an integer not", type(distance))
if distance < 0:
raise ValueError("Distance out of bounds, should be 0 or greater")
if 0 <= distance < 1000:
return 'VP'
elif 1000 <= distance < 4000:
return 'PO'
elif 4000 <= distance < 10000:
return 'MO'
elif 10000 <= distance < 20000:
return 'GO'
elif 20000 <= distance < 40000:
return 'VG'
else:
return 'EX'
def get_all_sites(self):
"""
Deprecated. This function returns a list of Site object.
"""
warning_message = 'This function is deprecated. Use get_forecast_sites() instead'
warn(warning_message, DeprecationWarning, stacklevel=2)
return self.get_forecast_sites()
def get_forecast_sites(self):
"""
This function returns a list of Site object.
"""
time_now = time()
if (time_now - self.forecast_sites_last_update) > self.forecast_sites_update_time or self.forecast_sites_last_request is None:
data = self.__call_api("sitelist/")
sites = list()
for jsoned in data['Locations']['Location']:
site = Site()
site.name = jsoned['name']
site.id = jsoned['id']
site.latitude = jsoned['latitude']
site.longitude = jsoned['longitude']
if 'region' in jsoned:
site.region = jsoned['region']
if 'elevation' in jsoned:
site.elevation = jsoned['elevation']
if 'unitaryAuthArea' in jsoned:
site.unitaryAuthArea = jsoned['unitaryAuthArea']
if 'nationalPark' in jsoned:
site.nationalPark = jsoned['nationalPark']
site.api_key = self.api_key
sites.append(site)
self.forecast_sites_last_request = sites
# Only set self.sites_last_update once self.sites_last_request has
# been set
self.forecast_sites_last_update = time_now
else:
sites = self.forecast_sites_last_request
return sites
def get_nearest_site(self, latitude=None, longitude=None):
"""
Deprecated. This function returns nearest Site object to the specified
coordinates.
"""
warning_message = 'This function is deprecated. Use get_nearest_forecast_site() instead'
warn(warning_message, DeprecationWarning, stacklevel=2)
return self.get_nearest_forecast_site(latitude, longitude)
def get_forecast_for_site(self, site_id, frequency="daily"):
"""
Get a forecast for the provided site
A frequency of "daily" will return two timesteps:
"Day" and "Night".
A frequency of "3hourly" will return 8 timesteps:
0, 180, 360 ... 1260 (minutes since midnight UTC)
"""
data = self.__call_api(site_id, {"res":frequency})
params = data['SiteRep']['Wx']['Param']
forecast = Forecast()
forecast.data_date = data['SiteRep']['DV']['dataDate']
forecast.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)
forecast.continent = data['SiteRep']['DV']['Location']['continent']
forecast.country = data['SiteRep']['DV']['Location']['country']
forecast.name = data['SiteRep']['DV']['Location']['name']
forecast.longitude = data['SiteRep']['DV']['Location']['lon']
forecast.latitude = data['SiteRep']['DV']['Location']['lat']
forecast.id = data['SiteRep']['DV']['Location']['i']
forecast.elevation = data['SiteRep']['DV']['Location']['elevation']
for day in data['SiteRep']['DV']['Location']['Period']:
new_day = Day()
new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
for timestep in day['Rep']:
new_timestep = Timestep()
if timestep['$'] == "Day":
cur_elements = ELEMENTS['Day']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
+ timedelta(hours=12)
elif timestep['$'] == "Night":
cur_elements = ELEMENTS['Night']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
else:
cur_elements = ELEMENTS['Default']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
+ timedelta(minutes=int(timestep['$']))
if frequency == 'daily':
new_timestep.name = timestep['$']
elif frequency == '3hourly':
new_timestep.name = int(timestep['$'])
new_timestep.weather = \
Element(cur_elements['W'],
timestep[cur_elements['W']],
self._get_wx_units(params, cur_elements['W']))
new_timestep.weather.text = self._weather_to_text(int(timestep[cur_elements['W']]))
new_timestep.temperature = \
Element(cur_elements['T'],
int(timestep[cur_elements['T']]),
self._get_wx_units(params, cur_elements['T']))
new_timestep.feels_like_temperature = \
Element(cur_elements['F'],
int(timestep[cur_elements['F']]),
self._get_wx_units(params, cur_elements['F']))
new_timestep.wind_speed = \
Element(cur_elements['S'],
int(timestep[cur_elements['S']]),
self._get_wx_units(params, cur_elements['S']))
new_timestep.wind_direction = \
Element(cur_elements['D'],
timestep[cur_elements['D']],
self._get_wx_units(params, cur_elements['D']))
new_timestep.wind_gust = \
Element(cur_elements['G'],
int(timestep[cur_elements['G']]),
self._get_wx_units(params, cur_elements['G']))
new_timestep.visibility = \
Element(cur_elements['V'],
timestep[cur_elements['V']],
self._get_wx_units(params, cur_elements['V']))
new_timestep.precipitation = \
Element(cur_elements['Pp'],
int(timestep[cur_elements['Pp']]),
self._get_wx_units(params, cur_elements['Pp']))
new_timestep.humidity = \
Element(cur_elements['H'],
int(timestep[cur_elements['H']]),
self._get_wx_units(params, cur_elements['H']))
if 'U' in cur_elements and cur_elements['U'] in timestep:
new_timestep.uv = \
Element(cur_elements['U'],
timestep[cur_elements['U']],
self._get_wx_units(params, cur_elements['U']))
new_day.timesteps.append(new_timestep)
forecast.days.append(new_day)
return forecast
def get_observation_sites(self):
"""
This function returns a list of Site objects for which observations are available.
"""
if (time() - self.observation_sites_last_update) > self.observation_sites_update_time:
self.observation_sites_last_update = time()
data = self.__call_api("sitelist/", None, OBSERVATION_URL)
sites = list()
for jsoned in data['Locations']['Location']:
site = Site()
site.name = jsoned['name']
site.id = jsoned['id']
site.latitude = jsoned['latitude']
site.longitude = jsoned['longitude']
if 'region' in jsoned:
site.region = jsoned['region']
if 'elevation' in jsoned:
site.elevation = jsoned['elevation']
if 'unitaryAuthArea' in jsoned:
site.unitaryAuthArea = jsoned['unitaryAuthArea']
if 'nationalPark' in jsoned:
site.nationalPark = jsoned['nationalPark']
site.api_key = self.api_key
sites.append(site)
self.observation_sites_last_request = sites
else:
sites = observation_self.sites_last_request
return sites
def get_nearest_observation_site(self, latitude=None, longitude=None):
"""
This function returns the nearest Site to the specified
coordinates that supports observations
"""
if longitude is None:
print('ERROR: No longitude given.')
return False
if latitude is None:
print('ERROR: No latitude given.')
return False
nearest = False
distance = None
sites = self.get_observation_sites()
for site in sites:
new_distance = \
self._distance_between_coords(
float(site.longitude),
float(site.latitude),
float(longitude),
float(latitude))
if ((distance == None) or (new_distance < distance)):
distance = new_distance
nearest = site
# If the nearest site is more than 20km away, raise an error
if distance > 20:
raise APIException("There is no site within 30km.")
return nearest
def get_observations_for_site(self, site_id, frequency='hourly'):
"""
Get observations for the provided site
Returns hourly observations for the previous 24 hours
"""
data = self.__call_api(site_id,{"res":frequency}, OBSERVATION_URL)
params = data['SiteRep']['Wx']['Param']
observation = Observation()
observation.data_date = data['SiteRep']['DV']['dataDate']
observation.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)
observation.continent = data['SiteRep']['DV']['Location']['continent']
observation.country = data['SiteRep']['DV']['Location']['country']
observation.name = data['SiteRep']['DV']['Location']['name']
observation.longitude = data['SiteRep']['DV']['Location']['lon']
observation.latitude = data['SiteRep']['DV']['Location']['lat']
observation.id = data['SiteRep']['DV']['Location']['i']
observation.elevation = data['SiteRep']['DV']['Location']['elevation']
for day in data['SiteRep']['DV']['Location']['Period']:
new_day = Day()
new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
# If the day only has 1 timestep, put it into a list by itself so it can be treated
# the same as a day with multiple timesteps
if type(day['Rep']) is not list:
day['Rep'] = [day['Rep']]
for timestep in day['Rep']:
# As stated in
# https://www.metoffice.gov.uk/datapoint/product/uk-hourly-site-specific-observations,
# some sites do not have all parameters available for
# observations. The documentation does not state which
# fields may be absent. If the parameter is not available,
# nothing is returned from the API. If this happens the
# value of the element is set to 'Not reported'. This may
# change to the element not being assigned to the timestep.
new_timestep = Timestep()
# Assume the '$' field is always present.
new_timestep.name = int(timestep['$'])
cur_elements = ELEMENTS['Observation']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) + timedelta(minutes=int(timestep['$']))
if cur_elements['W'] in timestep:
new_timestep.weather = \
Element(cur_elements['W'],
timestep[cur_elements['W']],
self._get_wx_units(params, cur_elements['W']))
new_timestep.weather.text = \
self._weather_to_text(int(timestep[cur_elements['W']]))
else:
new_timestep.weather = \
Element(cur_elements['W'],
'Not reported')
if cur_elements['T'] in timestep:
new_timestep.temperature = \
Element(cur_elements['T'],
float(timestep[cur_elements['T']]),
self._get_wx_units(params, cur_elements['T']))
else:
new_timestep.temperature = \
Element(cur_elements['T'],
'Not reported')
if 'S' in timestep:
new_timestep.wind_speed = \
Element(cur_elements['S'],
int(timestep[cur_elements['S']]),
self._get_wx_units(params, cur_elements['S']))
else:
new_timestep.wind_speed = \
Element(cur_elements['S'],
'Not reported')
if 'D' in timestep:
new_timestep.wind_direction = \
Element(cur_elements['D'],
timestep[cur_elements['D']],
self._get_wx_units(params, cur_elements['D']))
else:
new_timestep.wind_direction = \
Element(cur_elements['D'],
'Not reported')
if cur_elements['V'] in timestep:
new_timestep.visibility = \
Element(cur_elements['V'],
int(timestep[cur_elements['V']]),
self._get_wx_units(params, cur_elements['V']))
new_timestep.visibility.text = self._visibility_to_text(int(timestep[cur_elements['V']]))
else:
new_timestep.visibility = \
Element(cur_elements['V'],
'Not reported')
if cur_elements['H'] in timestep:
new_timestep.humidity = \
Element(cur_elements['H'],
float(timestep[cur_elements['H']]),
self._get_wx_units(params, cur_elements['H']))
else:
new_timestep.humidity = \
Element(cur_elements['H'],
'Not reported')
if cur_elements['Dp'] in timestep:
new_timestep.dew_point = \
Element(cur_elements['Dp'],
float(timestep[cur_elements['Dp']]),
self._get_wx_units(params,
cur_elements['Dp']))
else:
new_timestep.dew_point = \
Element(cur_elements['Dp'],
'Not reported')
if cur_elements['P'] in timestep:
new_timestep.pressure = \
Element(cur_elements['P'],
float(timestep[cur_elements['P']]),
self._get_wx_units(params, cur_elements['P']))
else:
new_timestep.pressure = \
Element(cur_elements['P'],
'Not reported')
if cur_elements['Pt'] in timestep:
new_timestep.pressure_tendency = \
Element(cur_elements['Pt'],
timestep[cur_elements['Pt']],
self._get_wx_units(params, cur_elements['Pt']))
else:
new_timestep.pressure_tendency = \
Element(cur_elements['Pt'],
'Not reported')
new_day.timesteps.append(new_timestep)
observation.days.append(new_day)
return observation
|
jacobtomlinson/datapoint-python | datapoint/Manager.py | Manager.get_forecast_for_site | python | def get_forecast_for_site(self, site_id, frequency="daily"):
data = self.__call_api(site_id, {"res":frequency})
params = data['SiteRep']['Wx']['Param']
forecast = Forecast()
forecast.data_date = data['SiteRep']['DV']['dataDate']
forecast.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)
forecast.continent = data['SiteRep']['DV']['Location']['continent']
forecast.country = data['SiteRep']['DV']['Location']['country']
forecast.name = data['SiteRep']['DV']['Location']['name']
forecast.longitude = data['SiteRep']['DV']['Location']['lon']
forecast.latitude = data['SiteRep']['DV']['Location']['lat']
forecast.id = data['SiteRep']['DV']['Location']['i']
forecast.elevation = data['SiteRep']['DV']['Location']['elevation']
for day in data['SiteRep']['DV']['Location']['Period']:
new_day = Day()
new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
for timestep in day['Rep']:
new_timestep = Timestep()
if timestep['$'] == "Day":
cur_elements = ELEMENTS['Day']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
+ timedelta(hours=12)
elif timestep['$'] == "Night":
cur_elements = ELEMENTS['Night']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
else:
cur_elements = ELEMENTS['Default']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
+ timedelta(minutes=int(timestep['$']))
if frequency == 'daily':
new_timestep.name = timestep['$']
elif frequency == '3hourly':
new_timestep.name = int(timestep['$'])
new_timestep.weather = \
Element(cur_elements['W'],
timestep[cur_elements['W']],
self._get_wx_units(params, cur_elements['W']))
new_timestep.weather.text = self._weather_to_text(int(timestep[cur_elements['W']]))
new_timestep.temperature = \
Element(cur_elements['T'],
int(timestep[cur_elements['T']]),
self._get_wx_units(params, cur_elements['T']))
new_timestep.feels_like_temperature = \
Element(cur_elements['F'],
int(timestep[cur_elements['F']]),
self._get_wx_units(params, cur_elements['F']))
new_timestep.wind_speed = \
Element(cur_elements['S'],
int(timestep[cur_elements['S']]),
self._get_wx_units(params, cur_elements['S']))
new_timestep.wind_direction = \
Element(cur_elements['D'],
timestep[cur_elements['D']],
self._get_wx_units(params, cur_elements['D']))
new_timestep.wind_gust = \
Element(cur_elements['G'],
int(timestep[cur_elements['G']]),
self._get_wx_units(params, cur_elements['G']))
new_timestep.visibility = \
Element(cur_elements['V'],
timestep[cur_elements['V']],
self._get_wx_units(params, cur_elements['V']))
new_timestep.precipitation = \
Element(cur_elements['Pp'],
int(timestep[cur_elements['Pp']]),
self._get_wx_units(params, cur_elements['Pp']))
new_timestep.humidity = \
Element(cur_elements['H'],
int(timestep[cur_elements['H']]),
self._get_wx_units(params, cur_elements['H']))
if 'U' in cur_elements and cur_elements['U'] in timestep:
new_timestep.uv = \
Element(cur_elements['U'],
timestep[cur_elements['U']],
self._get_wx_units(params, cur_elements['U']))
new_day.timesteps.append(new_timestep)
forecast.days.append(new_day)
return forecast | Get a forecast for the provided site
A frequency of "daily" will return two timesteps:
"Day" and "Night".
A frequency of "3hourly" will return 8 timesteps:
0, 180, 360 ... 1260 (minutes since midnight UTC) | train | https://github.com/jacobtomlinson/datapoint-python/blob/1d3f596f21975f42c1484f5a9c3ff057de0b47ae/datapoint/Manager.py#L327-L430 | [
"def __call_api(self, path, params=None, api_url=FORECAST_URL):\n \"\"\"\n Call the datapoint api using the requests module\n\n \"\"\"\n if not params:\n params = dict()\n payload = {'key': self.api_key}\n payload.update(params)\n url = \"%s/%s\" % (api_url, path)\n\n # Add a timeout to the request.\n # The value of 1 second is based on attempting 100 connections to\n # datapoint and taking ten times the mean connection time (rounded up).\n # Could expose to users in the functions which need to call the api.\n #req = requests.get(url, params=payload, timeout=1)\n # The wrapper function __retry_session returns a requests.Session\n # object. This has a .get() function like requests.get(), so the use\n # doesn't change here.\n\n sess = self.__retry_session()\n req = sess.get(url, params=payload, timeout=1)\n\n try:\n data = req.json()\n except ValueError:\n raise APIException(\"DataPoint has not returned any data, this could be due to an incorrect API key\")\n self.call_response = data\n if req.status_code != 200:\n msg = [data[m] for m in (\"message\", \"error_message\", \"status\") \\\n if m in data][0]\n raise Exception(msg)\n return data\n",
"def _get_wx_units(self, params, name):\n \"\"\"\n Give the Wx array returned from datapoint and an element\n name and return the units for that element.\n \"\"\"\n units = \"\"\n for param in params:\n if str(name) == str(param['name']):\n units = param['units']\n return units\n",
"def _weather_to_text(self, code):\n if not isinstance(code, (int, long)):\n raise ValueError(\"Weather code must be an integer not\", type(code))\n if code < 0 or code > 30:\n raise ValueError(\"Weather code outof bounds, should be 0-30\")\n text = WEATHER_CODES[str(code)]\n return text\n"
] | class Manager(object):
"""
Datapoint Manager object
"""
def __init__(self, api_key=""):
self.api_key = api_key
self.call_response = None
# The list of sites changes infrequently so limit to requesting it
# every hour.
self.forecast_sites_last_update = 0
self.forecast_sites_last_request = None
self.forecast_sites_update_time = 3600
self.observation_sites_last_update = 0
self.observation_sites_last_request = None
self.observation_sites_update_time = 3600
self.regions = RegionManager(self.api_key)
def __retry_session(self, retries=10, backoff_factor=0.3,
status_forcelist=(500, 502, 504),
session=None):
"""
Retry the connection using requests if it fails. Use this as a wrapper
to request from datapoint
"""
# requests.Session allows finer control, which is needed to use the
# retrying code
the_session = session or requests.Session()
# The Retry object manages the actual retrying
retry = Retry(total=retries, read=retries, connect=retries,
backoff_factor=backoff_factor,
status_forcelist=status_forcelist)
adapter = HTTPAdapter(max_retries=retry)
the_session.mount('http://', adapter)
the_session.mount('https://', adapter)
return the_session
def __call_api(self, path, params=None, api_url=FORECAST_URL):
"""
Call the datapoint api using the requests module
"""
if not params:
params = dict()
payload = {'key': self.api_key}
payload.update(params)
url = "%s/%s" % (api_url, path)
# Add a timeout to the request.
# The value of 1 second is based on attempting 100 connections to
# datapoint and taking ten times the mean connection time (rounded up).
# Could expose to users in the functions which need to call the api.
#req = requests.get(url, params=payload, timeout=1)
# The wrapper function __retry_session returns a requests.Session
# object. This has a .get() function like requests.get(), so the use
# doesn't change here.
sess = self.__retry_session()
req = sess.get(url, params=payload, timeout=1)
try:
data = req.json()
except ValueError:
raise APIException("DataPoint has not returned any data, this could be due to an incorrect API key")
self.call_response = data
if req.status_code != 200:
msg = [data[m] for m in ("message", "error_message", "status") \
if m in data][0]
raise Exception(msg)
return data
def _distance_between_coords(self, lon1, lat1, lon2, lat2):
"""
Calculate the great circle distance between two points
on the earth (specified in decimal degrees).
Haversine formula states that:
d = 2 * r * arcsin(sqrt(sin^2((lat1 - lat2) / 2 +
cos(lat1)cos(lat2)sin^2((lon1 - lon2) / 2))))
where r is the radius of the sphere. This assumes the earth is spherical.
"""
# Convert the coordinates of the points to radians.
lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
r = 6371
d_hav = 2 * r * asin(sqrt((sin((lat1 - lat2) / 2))**2 + \
cos(lat1) * cos(lat2) * (sin((lon1 - lon2) / 2)**2 )))
return d_hav
def _get_wx_units(self, params, name):
"""
Give the Wx array returned from datapoint and an element
name and return the units for that element.
"""
units = ""
for param in params:
if str(name) == str(param['name']):
units = param['units']
return units
def _weather_to_text(self, code):
if not isinstance(code, (int, long)):
raise ValueError("Weather code must be an integer not", type(code))
if code < 0 or code > 30:
raise ValueError("Weather code outof bounds, should be 0-30")
text = WEATHER_CODES[str(code)]
return text
def _visibility_to_text(self, distance):
"""
Convert observed visibility in metres to text used in forecast
"""
if not isinstance(distance, (int, long)):
raise ValueError("Distance must be an integer not", type(distance))
if distance < 0:
raise ValueError("Distance out of bounds, should be 0 or greater")
if 0 <= distance < 1000:
return 'VP'
elif 1000 <= distance < 4000:
return 'PO'
elif 4000 <= distance < 10000:
return 'MO'
elif 10000 <= distance < 20000:
return 'GO'
elif 20000 <= distance < 40000:
return 'VG'
else:
return 'EX'
def get_all_sites(self):
"""
Deprecated. This function returns a list of Site object.
"""
warning_message = 'This function is deprecated. Use get_forecast_sites() instead'
warn(warning_message, DeprecationWarning, stacklevel=2)
return self.get_forecast_sites()
def get_forecast_sites(self):
"""
This function returns a list of Site object.
"""
time_now = time()
if (time_now - self.forecast_sites_last_update) > self.forecast_sites_update_time or self.forecast_sites_last_request is None:
data = self.__call_api("sitelist/")
sites = list()
for jsoned in data['Locations']['Location']:
site = Site()
site.name = jsoned['name']
site.id = jsoned['id']
site.latitude = jsoned['latitude']
site.longitude = jsoned['longitude']
if 'region' in jsoned:
site.region = jsoned['region']
if 'elevation' in jsoned:
site.elevation = jsoned['elevation']
if 'unitaryAuthArea' in jsoned:
site.unitaryAuthArea = jsoned['unitaryAuthArea']
if 'nationalPark' in jsoned:
site.nationalPark = jsoned['nationalPark']
site.api_key = self.api_key
sites.append(site)
self.forecast_sites_last_request = sites
# Only set self.sites_last_update once self.sites_last_request has
# been set
self.forecast_sites_last_update = time_now
else:
sites = self.forecast_sites_last_request
return sites
def get_nearest_site(self, latitude=None, longitude=None):
"""
Deprecated. This function returns nearest Site object to the specified
coordinates.
"""
warning_message = 'This function is deprecated. Use get_nearest_forecast_site() instead'
warn(warning_message, DeprecationWarning, stacklevel=2)
return self.get_nearest_forecast_site(latitude, longitude)
def get_nearest_forecast_site(self, latitude=None, longitude=None):
"""
This function returns the nearest Site object to the specified
coordinates.
"""
if longitude is None:
print('ERROR: No latitude given.')
return False
if latitude is None:
print('ERROR: No latitude given.')
return False
nearest = False
distance = None
sites = self.get_forecast_sites()
# Sometimes there is a TypeError exception here: sites is None
# So, sometimes self.get_all_sites() has returned None.
for site in sites:
new_distance = \
self._distance_between_coords(
float(site.longitude),
float(site.latitude),
float(longitude),
float(latitude))
if ((distance == None) or (new_distance < distance)):
distance = new_distance
nearest = site
# If the nearest site is more than 30km away, raise an error
if distance > 30:
raise APIException("There is no site within 30km.")
return nearest
def get_observation_sites(self):
"""
This function returns a list of Site objects for which observations are available.
"""
if (time() - self.observation_sites_last_update) > self.observation_sites_update_time:
self.observation_sites_last_update = time()
data = self.__call_api("sitelist/", None, OBSERVATION_URL)
sites = list()
for jsoned in data['Locations']['Location']:
site = Site()
site.name = jsoned['name']
site.id = jsoned['id']
site.latitude = jsoned['latitude']
site.longitude = jsoned['longitude']
if 'region' in jsoned:
site.region = jsoned['region']
if 'elevation' in jsoned:
site.elevation = jsoned['elevation']
if 'unitaryAuthArea' in jsoned:
site.unitaryAuthArea = jsoned['unitaryAuthArea']
if 'nationalPark' in jsoned:
site.nationalPark = jsoned['nationalPark']
site.api_key = self.api_key
sites.append(site)
self.observation_sites_last_request = sites
else:
sites = observation_self.sites_last_request
return sites
def get_nearest_observation_site(self, latitude=None, longitude=None):
"""
This function returns the nearest Site to the specified
coordinates that supports observations
"""
if longitude is None:
print('ERROR: No longitude given.')
return False
if latitude is None:
print('ERROR: No latitude given.')
return False
nearest = False
distance = None
sites = self.get_observation_sites()
for site in sites:
new_distance = \
self._distance_between_coords(
float(site.longitude),
float(site.latitude),
float(longitude),
float(latitude))
if ((distance == None) or (new_distance < distance)):
distance = new_distance
nearest = site
# If the nearest site is more than 20km away, raise an error
if distance > 20:
raise APIException("There is no site within 30km.")
return nearest
def get_observations_for_site(self, site_id, frequency='hourly'):
"""
Get observations for the provided site
Returns hourly observations for the previous 24 hours
"""
data = self.__call_api(site_id,{"res":frequency}, OBSERVATION_URL)
params = data['SiteRep']['Wx']['Param']
observation = Observation()
observation.data_date = data['SiteRep']['DV']['dataDate']
observation.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)
observation.continent = data['SiteRep']['DV']['Location']['continent']
observation.country = data['SiteRep']['DV']['Location']['country']
observation.name = data['SiteRep']['DV']['Location']['name']
observation.longitude = data['SiteRep']['DV']['Location']['lon']
observation.latitude = data['SiteRep']['DV']['Location']['lat']
observation.id = data['SiteRep']['DV']['Location']['i']
observation.elevation = data['SiteRep']['DV']['Location']['elevation']
for day in data['SiteRep']['DV']['Location']['Period']:
new_day = Day()
new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
# If the day only has 1 timestep, put it into a list by itself so it can be treated
# the same as a day with multiple timesteps
if type(day['Rep']) is not list:
day['Rep'] = [day['Rep']]
for timestep in day['Rep']:
# As stated in
# https://www.metoffice.gov.uk/datapoint/product/uk-hourly-site-specific-observations,
# some sites do not have all parameters available for
# observations. The documentation does not state which
# fields may be absent. If the parameter is not available,
# nothing is returned from the API. If this happens the
# value of the element is set to 'Not reported'. This may
# change to the element not being assigned to the timestep.
new_timestep = Timestep()
# Assume the '$' field is always present.
new_timestep.name = int(timestep['$'])
cur_elements = ELEMENTS['Observation']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) + timedelta(minutes=int(timestep['$']))
if cur_elements['W'] in timestep:
new_timestep.weather = \
Element(cur_elements['W'],
timestep[cur_elements['W']],
self._get_wx_units(params, cur_elements['W']))
new_timestep.weather.text = \
self._weather_to_text(int(timestep[cur_elements['W']]))
else:
new_timestep.weather = \
Element(cur_elements['W'],
'Not reported')
if cur_elements['T'] in timestep:
new_timestep.temperature = \
Element(cur_elements['T'],
float(timestep[cur_elements['T']]),
self._get_wx_units(params, cur_elements['T']))
else:
new_timestep.temperature = \
Element(cur_elements['T'],
'Not reported')
if 'S' in timestep:
new_timestep.wind_speed = \
Element(cur_elements['S'],
int(timestep[cur_elements['S']]),
self._get_wx_units(params, cur_elements['S']))
else:
new_timestep.wind_speed = \
Element(cur_elements['S'],
'Not reported')
if 'D' in timestep:
new_timestep.wind_direction = \
Element(cur_elements['D'],
timestep[cur_elements['D']],
self._get_wx_units(params, cur_elements['D']))
else:
new_timestep.wind_direction = \
Element(cur_elements['D'],
'Not reported')
if cur_elements['V'] in timestep:
new_timestep.visibility = \
Element(cur_elements['V'],
int(timestep[cur_elements['V']]),
self._get_wx_units(params, cur_elements['V']))
new_timestep.visibility.text = self._visibility_to_text(int(timestep[cur_elements['V']]))
else:
new_timestep.visibility = \
Element(cur_elements['V'],
'Not reported')
if cur_elements['H'] in timestep:
new_timestep.humidity = \
Element(cur_elements['H'],
float(timestep[cur_elements['H']]),
self._get_wx_units(params, cur_elements['H']))
else:
new_timestep.humidity = \
Element(cur_elements['H'],
'Not reported')
if cur_elements['Dp'] in timestep:
new_timestep.dew_point = \
Element(cur_elements['Dp'],
float(timestep[cur_elements['Dp']]),
self._get_wx_units(params,
cur_elements['Dp']))
else:
new_timestep.dew_point = \
Element(cur_elements['Dp'],
'Not reported')
if cur_elements['P'] in timestep:
new_timestep.pressure = \
Element(cur_elements['P'],
float(timestep[cur_elements['P']]),
self._get_wx_units(params, cur_elements['P']))
else:
new_timestep.pressure = \
Element(cur_elements['P'],
'Not reported')
if cur_elements['Pt'] in timestep:
new_timestep.pressure_tendency = \
Element(cur_elements['Pt'],
timestep[cur_elements['Pt']],
self._get_wx_units(params, cur_elements['Pt']))
else:
new_timestep.pressure_tendency = \
Element(cur_elements['Pt'],
'Not reported')
new_day.timesteps.append(new_timestep)
observation.days.append(new_day)
return observation
|
jacobtomlinson/datapoint-python | datapoint/Manager.py | Manager.get_observation_sites | python | def get_observation_sites(self):
if (time() - self.observation_sites_last_update) > self.observation_sites_update_time:
self.observation_sites_last_update = time()
data = self.__call_api("sitelist/", None, OBSERVATION_URL)
sites = list()
for jsoned in data['Locations']['Location']:
site = Site()
site.name = jsoned['name']
site.id = jsoned['id']
site.latitude = jsoned['latitude']
site.longitude = jsoned['longitude']
if 'region' in jsoned:
site.region = jsoned['region']
if 'elevation' in jsoned:
site.elevation = jsoned['elevation']
if 'unitaryAuthArea' in jsoned:
site.unitaryAuthArea = jsoned['unitaryAuthArea']
if 'nationalPark' in jsoned:
site.nationalPark = jsoned['nationalPark']
site.api_key = self.api_key
sites.append(site)
self.observation_sites_last_request = sites
else:
sites = observation_self.sites_last_request
return sites | This function returns a list of Site objects for which observations are available. | train | https://github.com/jacobtomlinson/datapoint-python/blob/1d3f596f21975f42c1484f5a9c3ff057de0b47ae/datapoint/Manager.py#L433-L467 | [
"def __call_api(self, path, params=None, api_url=FORECAST_URL):\n \"\"\"\n Call the datapoint api using the requests module\n\n \"\"\"\n if not params:\n params = dict()\n payload = {'key': self.api_key}\n payload.update(params)\n url = \"%s/%s\" % (api_url, path)\n\n # Add a timeout to the request.\n # The value of 1 second is based on attempting 100 connections to\n # datapoint and taking ten times the mean connection time (rounded up).\n # Could expose to users in the functions which need to call the api.\n #req = requests.get(url, params=payload, timeout=1)\n # The wrapper function __retry_session returns a requests.Session\n # object. This has a .get() function like requests.get(), so the use\n # doesn't change here.\n\n sess = self.__retry_session()\n req = sess.get(url, params=payload, timeout=1)\n\n try:\n data = req.json()\n except ValueError:\n raise APIException(\"DataPoint has not returned any data, this could be due to an incorrect API key\")\n self.call_response = data\n if req.status_code != 200:\n msg = [data[m] for m in (\"message\", \"error_message\", \"status\") \\\n if m in data][0]\n raise Exception(msg)\n return data\n"
] | class Manager(object):
"""
Datapoint Manager object
"""
def __init__(self, api_key=""):
self.api_key = api_key
self.call_response = None
# The list of sites changes infrequently so limit to requesting it
# every hour.
self.forecast_sites_last_update = 0
self.forecast_sites_last_request = None
self.forecast_sites_update_time = 3600
self.observation_sites_last_update = 0
self.observation_sites_last_request = None
self.observation_sites_update_time = 3600
self.regions = RegionManager(self.api_key)
def __retry_session(self, retries=10, backoff_factor=0.3,
status_forcelist=(500, 502, 504),
session=None):
"""
Retry the connection using requests if it fails. Use this as a wrapper
to request from datapoint
"""
# requests.Session allows finer control, which is needed to use the
# retrying code
the_session = session or requests.Session()
# The Retry object manages the actual retrying
retry = Retry(total=retries, read=retries, connect=retries,
backoff_factor=backoff_factor,
status_forcelist=status_forcelist)
adapter = HTTPAdapter(max_retries=retry)
the_session.mount('http://', adapter)
the_session.mount('https://', adapter)
return the_session
def __call_api(self, path, params=None, api_url=FORECAST_URL):
"""
Call the datapoint api using the requests module
"""
if not params:
params = dict()
payload = {'key': self.api_key}
payload.update(params)
url = "%s/%s" % (api_url, path)
# Add a timeout to the request.
# The value of 1 second is based on attempting 100 connections to
# datapoint and taking ten times the mean connection time (rounded up).
# Could expose to users in the functions which need to call the api.
#req = requests.get(url, params=payload, timeout=1)
# The wrapper function __retry_session returns a requests.Session
# object. This has a .get() function like requests.get(), so the use
# doesn't change here.
sess = self.__retry_session()
req = sess.get(url, params=payload, timeout=1)
try:
data = req.json()
except ValueError:
raise APIException("DataPoint has not returned any data, this could be due to an incorrect API key")
self.call_response = data
if req.status_code != 200:
msg = [data[m] for m in ("message", "error_message", "status") \
if m in data][0]
raise Exception(msg)
return data
def _distance_between_coords(self, lon1, lat1, lon2, lat2):
"""
Calculate the great circle distance between two points
on the earth (specified in decimal degrees).
Haversine formula states that:
d = 2 * r * arcsin(sqrt(sin^2((lat1 - lat2) / 2 +
cos(lat1)cos(lat2)sin^2((lon1 - lon2) / 2))))
where r is the radius of the sphere. This assumes the earth is spherical.
"""
# Convert the coordinates of the points to radians.
lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
r = 6371
d_hav = 2 * r * asin(sqrt((sin((lat1 - lat2) / 2))**2 + \
cos(lat1) * cos(lat2) * (sin((lon1 - lon2) / 2)**2 )))
return d_hav
def _get_wx_units(self, params, name):
"""
Give the Wx array returned from datapoint and an element
name and return the units for that element.
"""
units = ""
for param in params:
if str(name) == str(param['name']):
units = param['units']
return units
def _weather_to_text(self, code):
if not isinstance(code, (int, long)):
raise ValueError("Weather code must be an integer not", type(code))
if code < 0 or code > 30:
raise ValueError("Weather code outof bounds, should be 0-30")
text = WEATHER_CODES[str(code)]
return text
def _visibility_to_text(self, distance):
"""
Convert observed visibility in metres to text used in forecast
"""
if not isinstance(distance, (int, long)):
raise ValueError("Distance must be an integer not", type(distance))
if distance < 0:
raise ValueError("Distance out of bounds, should be 0 or greater")
if 0 <= distance < 1000:
return 'VP'
elif 1000 <= distance < 4000:
return 'PO'
elif 4000 <= distance < 10000:
return 'MO'
elif 10000 <= distance < 20000:
return 'GO'
elif 20000 <= distance < 40000:
return 'VG'
else:
return 'EX'
def get_all_sites(self):
"""
Deprecated. This function returns a list of Site object.
"""
warning_message = 'This function is deprecated. Use get_forecast_sites() instead'
warn(warning_message, DeprecationWarning, stacklevel=2)
return self.get_forecast_sites()
def get_forecast_sites(self):
"""
This function returns a list of Site object.
"""
time_now = time()
if (time_now - self.forecast_sites_last_update) > self.forecast_sites_update_time or self.forecast_sites_last_request is None:
data = self.__call_api("sitelist/")
sites = list()
for jsoned in data['Locations']['Location']:
site = Site()
site.name = jsoned['name']
site.id = jsoned['id']
site.latitude = jsoned['latitude']
site.longitude = jsoned['longitude']
if 'region' in jsoned:
site.region = jsoned['region']
if 'elevation' in jsoned:
site.elevation = jsoned['elevation']
if 'unitaryAuthArea' in jsoned:
site.unitaryAuthArea = jsoned['unitaryAuthArea']
if 'nationalPark' in jsoned:
site.nationalPark = jsoned['nationalPark']
site.api_key = self.api_key
sites.append(site)
self.forecast_sites_last_request = sites
# Only set self.sites_last_update once self.sites_last_request has
# been set
self.forecast_sites_last_update = time_now
else:
sites = self.forecast_sites_last_request
return sites
def get_nearest_site(self, latitude=None, longitude=None):
"""
Deprecated. This function returns nearest Site object to the specified
coordinates.
"""
warning_message = 'This function is deprecated. Use get_nearest_forecast_site() instead'
warn(warning_message, DeprecationWarning, stacklevel=2)
return self.get_nearest_forecast_site(latitude, longitude)
def get_nearest_forecast_site(self, latitude=None, longitude=None):
"""
This function returns the nearest Site object to the specified
coordinates.
"""
if longitude is None:
print('ERROR: No latitude given.')
return False
if latitude is None:
print('ERROR: No latitude given.')
return False
nearest = False
distance = None
sites = self.get_forecast_sites()
# Sometimes there is a TypeError exception here: sites is None
# So, sometimes self.get_all_sites() has returned None.
for site in sites:
new_distance = \
self._distance_between_coords(
float(site.longitude),
float(site.latitude),
float(longitude),
float(latitude))
if ((distance == None) or (new_distance < distance)):
distance = new_distance
nearest = site
# If the nearest site is more than 30km away, raise an error
if distance > 30:
raise APIException("There is no site within 30km.")
return nearest
    def get_forecast_for_site(self, site_id, frequency="daily"):
        """
        Get a forecast for the provided site

        A frequency of "daily" will return two timesteps:
        "Day" and "Night".

        A frequency of "3hourly" will return 8 timesteps:
        0, 180, 360 ... 1260 (minutes since midnight UTC)

        :param site_id: DataPoint id of the site to forecast.
        :param frequency: "daily" or "3hourly".
        :return: a Forecast with one Day per forecast day and one Timestep
            per report within that day.
        """
        data = self.__call_api(site_id, {"res":frequency})
        # Wx/Param maps element names (e.g. "T") to their units.
        params = data['SiteRep']['Wx']['Param']
        forecast = Forecast()
        # The raw dataDate string is assigned first and then immediately
        # overwritten by a timezone-aware datetime parsed from the same field.
        forecast.data_date = data['SiteRep']['DV']['dataDate']
        forecast.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)
        forecast.continent = data['SiteRep']['DV']['Location']['continent']
        forecast.country = data['SiteRep']['DV']['Location']['country']
        forecast.name = data['SiteRep']['DV']['Location']['name']
        forecast.longitude = data['SiteRep']['DV']['Location']['lon']
        forecast.latitude = data['SiteRep']['DV']['Location']['lat']
        forecast.id = data['SiteRep']['DV']['Location']['i']
        forecast.elevation = data['SiteRep']['DV']['Location']['elevation']
        for day in data['SiteRep']['DV']['Location']['Period']:
            new_day = Day()
            # day['value'] is the date string for this period; treat as UTC.
            new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
            for timestep in day['Rep']:
                new_timestep = Timestep()
                # The '$' field is "Day"/"Night" at daily frequency, or the
                # number of minutes since midnight UTC at 3hourly frequency;
                # it selects which element-name mapping applies.
                if timestep['$'] == "Day":
                    cur_elements = ELEMENTS['Day']
                    # Represent the daytime report as midday.
                    new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
                        + timedelta(hours=12)
                elif timestep['$'] == "Night":
                    cur_elements = ELEMENTS['Night']
                    new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
                else:
                    cur_elements = ELEMENTS['Default']
                    new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
                        + timedelta(minutes=int(timestep['$']))
                if frequency == 'daily':
                    new_timestep.name = timestep['$']
                elif frequency == '3hourly':
                    new_timestep.name = int(timestep['$'])
                # Each weather quantity is wrapped in an Element carrying its
                # id, value, and units (looked up from the Param list above).
                new_timestep.weather = \
                    Element(cur_elements['W'],
                            timestep[cur_elements['W']],
                            self._get_wx_units(params, cur_elements['W']))
                new_timestep.weather.text = self._weather_to_text(int(timestep[cur_elements['W']]))
                new_timestep.temperature = \
                    Element(cur_elements['T'],
                            int(timestep[cur_elements['T']]),
                            self._get_wx_units(params, cur_elements['T']))
                new_timestep.feels_like_temperature = \
                    Element(cur_elements['F'],
                            int(timestep[cur_elements['F']]),
                            self._get_wx_units(params, cur_elements['F']))
                new_timestep.wind_speed = \
                    Element(cur_elements['S'],
                            int(timestep[cur_elements['S']]),
                            self._get_wx_units(params, cur_elements['S']))
                new_timestep.wind_direction = \
                    Element(cur_elements['D'],
                            timestep[cur_elements['D']],
                            self._get_wx_units(params, cur_elements['D']))
                new_timestep.wind_gust = \
                    Element(cur_elements['G'],
                            int(timestep[cur_elements['G']]),
                            self._get_wx_units(params, cur_elements['G']))
                new_timestep.visibility = \
                    Element(cur_elements['V'],
                            timestep[cur_elements['V']],
                            self._get_wx_units(params, cur_elements['V']))
                new_timestep.precipitation = \
                    Element(cur_elements['Pp'],
                            int(timestep[cur_elements['Pp']]),
                            self._get_wx_units(params, cur_elements['Pp']))
                new_timestep.humidity = \
                    Element(cur_elements['H'],
                            int(timestep[cur_elements['H']]),
                            self._get_wx_units(params, cur_elements['H']))
                # UV is not present in every feed; only attach it when both
                # the element mapping and the report provide it.
                if 'U' in cur_elements and cur_elements['U'] in timestep:
                    new_timestep.uv = \
                        Element(cur_elements['U'],
                                timestep[cur_elements['U']],
                                self._get_wx_units(params, cur_elements['U']))
                new_day.timesteps.append(new_timestep)
            forecast.days.append(new_day)
        return forecast
def get_nearest_observation_site(self, latitude=None, longitude=None):
"""
This function returns the nearest Site to the specified
coordinates that supports observations
"""
if longitude is None:
print('ERROR: No longitude given.')
return False
if latitude is None:
print('ERROR: No latitude given.')
return False
nearest = False
distance = None
sites = self.get_observation_sites()
for site in sites:
new_distance = \
self._distance_between_coords(
float(site.longitude),
float(site.latitude),
float(longitude),
float(latitude))
if ((distance == None) or (new_distance < distance)):
distance = new_distance
nearest = site
# If the nearest site is more than 20km away, raise an error
if distance > 20:
raise APIException("There is no site within 30km.")
return nearest
    def get_observations_for_site(self, site_id, frequency='hourly'):
        """
        Get observations for the provided site

        Returns hourly observations for the previous 24 hours

        :param site_id: DataPoint id of the observation site.
        :param frequency: report resolution requested from the API.
        :return: an Observation with one Day per period and one Timestep per
            report; any element the API omitted is set to 'Not reported'.
        """
        data = self.__call_api(site_id,{"res":frequency}, OBSERVATION_URL)
        # Wx/Param maps element names (e.g. "T") to their units.
        params = data['SiteRep']['Wx']['Param']
        observation = Observation()
        # The raw dataDate string is assigned first and then immediately
        # overwritten by a timezone-aware datetime parsed from the same field.
        observation.data_date = data['SiteRep']['DV']['dataDate']
        observation.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)
        observation.continent = data['SiteRep']['DV']['Location']['continent']
        observation.country = data['SiteRep']['DV']['Location']['country']
        observation.name = data['SiteRep']['DV']['Location']['name']
        observation.longitude = data['SiteRep']['DV']['Location']['lon']
        observation.latitude = data['SiteRep']['DV']['Location']['lat']
        observation.id = data['SiteRep']['DV']['Location']['i']
        observation.elevation = data['SiteRep']['DV']['Location']['elevation']
        for day in data['SiteRep']['DV']['Location']['Period']:
            new_day = Day()
            # day['value'] is the date string for this period; treat as UTC.
            new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
            # If the day only has 1 timestep, put it into a list by itself so it can be treated
            # the same as a day with multiple timesteps
            if type(day['Rep']) is not list:
                day['Rep'] = [day['Rep']]
            for timestep in day['Rep']:
                # As stated in
                # https://www.metoffice.gov.uk/datapoint/product/uk-hourly-site-specific-observations,
                # some sites do not have all parameters available for
                # observations. The documentation does not state which
                # fields may be absent. If the parameter is not available,
                # nothing is returned from the API. If this happens the
                # value of the element is set to 'Not reported'. This may
                # change to the element not being assigned to the timestep.
                new_timestep = Timestep()
                # Assume the '$' field is always present.
                # '$' is the offset of the report within the day in minutes.
                new_timestep.name = int(timestep['$'])
                cur_elements = ELEMENTS['Observation']
                new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) + timedelta(minutes=int(timestep['$']))
                if cur_elements['W'] in timestep:
                    new_timestep.weather = \
                        Element(cur_elements['W'],
                                timestep[cur_elements['W']],
                                self._get_wx_units(params, cur_elements['W']))
                    new_timestep.weather.text = \
                        self._weather_to_text(int(timestep[cur_elements['W']]))
                else:
                    new_timestep.weather = \
                        Element(cur_elements['W'],
                                'Not reported')
                if cur_elements['T'] in timestep:
                    new_timestep.temperature = \
                        Element(cur_elements['T'],
                                float(timestep[cur_elements['T']]),
                                self._get_wx_units(params, cur_elements['T']))
                else:
                    new_timestep.temperature = \
                        Element(cur_elements['T'],
                                'Not reported')
                # NOTE(review): the wind checks below test the literal keys
                # 'S' and 'D' rather than cur_elements['S']/cur_elements['D']
                # like every other element — presumably the mapping is the
                # identity for these; confirm against ELEMENTS['Observation'].
                if 'S' in timestep:
                    new_timestep.wind_speed = \
                        Element(cur_elements['S'],
                                int(timestep[cur_elements['S']]),
                                self._get_wx_units(params, cur_elements['S']))
                else:
                    new_timestep.wind_speed = \
                        Element(cur_elements['S'],
                                'Not reported')
                if 'D' in timestep:
                    new_timestep.wind_direction = \
                        Element(cur_elements['D'],
                                timestep[cur_elements['D']],
                                self._get_wx_units(params, cur_elements['D']))
                else:
                    new_timestep.wind_direction = \
                        Element(cur_elements['D'],
                                'Not reported')
                if cur_elements['V'] in timestep:
                    new_timestep.visibility = \
                        Element(cur_elements['V'],
                                int(timestep[cur_elements['V']]),
                                self._get_wx_units(params, cur_elements['V']))
                    # Observations report visibility in metres; convert to
                    # the forecast-style text band as well.
                    new_timestep.visibility.text = self._visibility_to_text(int(timestep[cur_elements['V']]))
                else:
                    new_timestep.visibility = \
                        Element(cur_elements['V'],
                                'Not reported')
                if cur_elements['H'] in timestep:
                    new_timestep.humidity = \
                        Element(cur_elements['H'],
                                float(timestep[cur_elements['H']]),
                                self._get_wx_units(params, cur_elements['H']))
                else:
                    new_timestep.humidity = \
                        Element(cur_elements['H'],
                                'Not reported')
                if cur_elements['Dp'] in timestep:
                    new_timestep.dew_point = \
                        Element(cur_elements['Dp'],
                                float(timestep[cur_elements['Dp']]),
                                self._get_wx_units(params,
                                                   cur_elements['Dp']))
                else:
                    new_timestep.dew_point = \
                        Element(cur_elements['Dp'],
                                'Not reported')
                if cur_elements['P'] in timestep:
                    new_timestep.pressure = \
                        Element(cur_elements['P'],
                                float(timestep[cur_elements['P']]),
                                self._get_wx_units(params, cur_elements['P']))
                else:
                    new_timestep.pressure = \
                        Element(cur_elements['P'],
                                'Not reported')
                if cur_elements['Pt'] in timestep:
                    new_timestep.pressure_tendency = \
                        Element(cur_elements['Pt'],
                                timestep[cur_elements['Pt']],
                                self._get_wx_units(params, cur_elements['Pt']))
                else:
                    new_timestep.pressure_tendency = \
                        Element(cur_elements['Pt'],
                                'Not reported')
                new_day.timesteps.append(new_timestep)
            observation.days.append(new_day)
        return observation
|
jacobtomlinson/datapoint-python | datapoint/Manager.py | Manager.get_nearest_observation_site | python | def get_nearest_observation_site(self, latitude=None, longitude=None):
if longitude is None:
print('ERROR: No longitude given.')
return False
if latitude is None:
print('ERROR: No latitude given.')
return False
nearest = False
distance = None
sites = self.get_observation_sites()
for site in sites:
new_distance = \
self._distance_between_coords(
float(site.longitude),
float(site.latitude),
float(longitude),
float(latitude))
if ((distance == None) or (new_distance < distance)):
distance = new_distance
nearest = site
# If the nearest site is more than 20km away, raise an error
if distance > 20:
raise APIException("There is no site within 30km.")
return nearest | This function returns the nearest Site to the specified
coordinates that supports observations | train | https://github.com/jacobtomlinson/datapoint-python/blob/1d3f596f21975f42c1484f5a9c3ff057de0b47ae/datapoint/Manager.py#L469-L501 | [
"def _distance_between_coords(self, lon1, lat1, lon2, lat2):\n \"\"\"\n Calculate the great circle distance between two points\n on the earth (specified in decimal degrees).\n Haversine formula states that:\n\n d = 2 * r * arcsin(sqrt(sin^2((lat1 - lat2) / 2 +\n cos(lat1)cos(lat2)sin^2((lon1 - lon2) / 2))))\n\n where r is the radius of the sphere. This assumes the earth is spherical.\n \"\"\"\n\n # Convert the coordinates of the points to radians.\n lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])\n r = 6371\n\n d_hav = 2 * r * asin(sqrt((sin((lat1 - lat2) / 2))**2 + \\\n cos(lat1) * cos(lat2) * (sin((lon1 - lon2) / 2)**2 )))\n\n return d_hav\n",
"def get_observation_sites(self):\n \"\"\"\n This function returns a list of Site objects for which observations are available.\n \"\"\"\n if (time() - self.observation_sites_last_update) > self.observation_sites_update_time:\n self.observation_sites_last_update = time()\n data = self.__call_api(\"sitelist/\", None, OBSERVATION_URL)\n sites = list()\n for jsoned in data['Locations']['Location']:\n site = Site()\n site.name = jsoned['name']\n site.id = jsoned['id']\n site.latitude = jsoned['latitude']\n site.longitude = jsoned['longitude']\n\n if 'region' in jsoned:\n site.region = jsoned['region']\n\n if 'elevation' in jsoned:\n site.elevation = jsoned['elevation']\n\n if 'unitaryAuthArea' in jsoned:\n site.unitaryAuthArea = jsoned['unitaryAuthArea']\n\n if 'nationalPark' in jsoned:\n site.nationalPark = jsoned['nationalPark']\n\n site.api_key = self.api_key\n\n sites.append(site)\n self.observation_sites_last_request = sites\n else:\n sites = observation_self.sites_last_request\n\n return sites\n"
] | class Manager(object):
"""
Datapoint Manager object
"""
def __init__(self, api_key=""):
self.api_key = api_key
self.call_response = None
# The list of sites changes infrequently so limit to requesting it
# every hour.
self.forecast_sites_last_update = 0
self.forecast_sites_last_request = None
self.forecast_sites_update_time = 3600
self.observation_sites_last_update = 0
self.observation_sites_last_request = None
self.observation_sites_update_time = 3600
self.regions = RegionManager(self.api_key)
def __retry_session(self, retries=10, backoff_factor=0.3,
status_forcelist=(500, 502, 504),
session=None):
"""
Retry the connection using requests if it fails. Use this as a wrapper
to request from datapoint
"""
# requests.Session allows finer control, which is needed to use the
# retrying code
the_session = session or requests.Session()
# The Retry object manages the actual retrying
retry = Retry(total=retries, read=retries, connect=retries,
backoff_factor=backoff_factor,
status_forcelist=status_forcelist)
adapter = HTTPAdapter(max_retries=retry)
the_session.mount('http://', adapter)
the_session.mount('https://', adapter)
return the_session
def __call_api(self, path, params=None, api_url=FORECAST_URL):
"""
Call the datapoint api using the requests module
"""
if not params:
params = dict()
payload = {'key': self.api_key}
payload.update(params)
url = "%s/%s" % (api_url, path)
# Add a timeout to the request.
# The value of 1 second is based on attempting 100 connections to
# datapoint and taking ten times the mean connection time (rounded up).
# Could expose to users in the functions which need to call the api.
#req = requests.get(url, params=payload, timeout=1)
# The wrapper function __retry_session returns a requests.Session
# object. This has a .get() function like requests.get(), so the use
# doesn't change here.
sess = self.__retry_session()
req = sess.get(url, params=payload, timeout=1)
try:
data = req.json()
except ValueError:
raise APIException("DataPoint has not returned any data, this could be due to an incorrect API key")
self.call_response = data
if req.status_code != 200:
msg = [data[m] for m in ("message", "error_message", "status") \
if m in data][0]
raise Exception(msg)
return data
def _distance_between_coords(self, lon1, lat1, lon2, lat2):
"""
Calculate the great circle distance between two points
on the earth (specified in decimal degrees).
Haversine formula states that:
d = 2 * r * arcsin(sqrt(sin^2((lat1 - lat2) / 2 +
cos(lat1)cos(lat2)sin^2((lon1 - lon2) / 2))))
where r is the radius of the sphere. This assumes the earth is spherical.
"""
# Convert the coordinates of the points to radians.
lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
r = 6371
d_hav = 2 * r * asin(sqrt((sin((lat1 - lat2) / 2))**2 + \
cos(lat1) * cos(lat2) * (sin((lon1 - lon2) / 2)**2 )))
return d_hav
def _get_wx_units(self, params, name):
"""
Give the Wx array returned from datapoint and an element
name and return the units for that element.
"""
units = ""
for param in params:
if str(name) == str(param['name']):
units = param['units']
return units
def _weather_to_text(self, code):
if not isinstance(code, (int, long)):
raise ValueError("Weather code must be an integer not", type(code))
if code < 0 or code > 30:
raise ValueError("Weather code outof bounds, should be 0-30")
text = WEATHER_CODES[str(code)]
return text
def _visibility_to_text(self, distance):
"""
Convert observed visibility in metres to text used in forecast
"""
if not isinstance(distance, (int, long)):
raise ValueError("Distance must be an integer not", type(distance))
if distance < 0:
raise ValueError("Distance out of bounds, should be 0 or greater")
if 0 <= distance < 1000:
return 'VP'
elif 1000 <= distance < 4000:
return 'PO'
elif 4000 <= distance < 10000:
return 'MO'
elif 10000 <= distance < 20000:
return 'GO'
elif 20000 <= distance < 40000:
return 'VG'
else:
return 'EX'
def get_all_sites(self):
"""
Deprecated. This function returns a list of Site object.
"""
warning_message = 'This function is deprecated. Use get_forecast_sites() instead'
warn(warning_message, DeprecationWarning, stacklevel=2)
return self.get_forecast_sites()
def get_forecast_sites(self):
"""
This function returns a list of Site object.
"""
time_now = time()
if (time_now - self.forecast_sites_last_update) > self.forecast_sites_update_time or self.forecast_sites_last_request is None:
data = self.__call_api("sitelist/")
sites = list()
for jsoned in data['Locations']['Location']:
site = Site()
site.name = jsoned['name']
site.id = jsoned['id']
site.latitude = jsoned['latitude']
site.longitude = jsoned['longitude']
if 'region' in jsoned:
site.region = jsoned['region']
if 'elevation' in jsoned:
site.elevation = jsoned['elevation']
if 'unitaryAuthArea' in jsoned:
site.unitaryAuthArea = jsoned['unitaryAuthArea']
if 'nationalPark' in jsoned:
site.nationalPark = jsoned['nationalPark']
site.api_key = self.api_key
sites.append(site)
self.forecast_sites_last_request = sites
# Only set self.sites_last_update once self.sites_last_request has
# been set
self.forecast_sites_last_update = time_now
else:
sites = self.forecast_sites_last_request
return sites
def get_nearest_site(self, latitude=None, longitude=None):
"""
Deprecated. This function returns nearest Site object to the specified
coordinates.
"""
warning_message = 'This function is deprecated. Use get_nearest_forecast_site() instead'
warn(warning_message, DeprecationWarning, stacklevel=2)
return self.get_nearest_forecast_site(latitude, longitude)
def get_nearest_forecast_site(self, latitude=None, longitude=None):
"""
This function returns the nearest Site object to the specified
coordinates.
"""
if longitude is None:
print('ERROR: No latitude given.')
return False
if latitude is None:
print('ERROR: No latitude given.')
return False
nearest = False
distance = None
sites = self.get_forecast_sites()
# Sometimes there is a TypeError exception here: sites is None
# So, sometimes self.get_all_sites() has returned None.
for site in sites:
new_distance = \
self._distance_between_coords(
float(site.longitude),
float(site.latitude),
float(longitude),
float(latitude))
if ((distance == None) or (new_distance < distance)):
distance = new_distance
nearest = site
# If the nearest site is more than 30km away, raise an error
if distance > 30:
raise APIException("There is no site within 30km.")
return nearest
def get_forecast_for_site(self, site_id, frequency="daily"):
"""
Get a forecast for the provided site
A frequency of "daily" will return two timesteps:
"Day" and "Night".
A frequency of "3hourly" will return 8 timesteps:
0, 180, 360 ... 1260 (minutes since midnight UTC)
"""
data = self.__call_api(site_id, {"res":frequency})
params = data['SiteRep']['Wx']['Param']
forecast = Forecast()
forecast.data_date = data['SiteRep']['DV']['dataDate']
forecast.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)
forecast.continent = data['SiteRep']['DV']['Location']['continent']
forecast.country = data['SiteRep']['DV']['Location']['country']
forecast.name = data['SiteRep']['DV']['Location']['name']
forecast.longitude = data['SiteRep']['DV']['Location']['lon']
forecast.latitude = data['SiteRep']['DV']['Location']['lat']
forecast.id = data['SiteRep']['DV']['Location']['i']
forecast.elevation = data['SiteRep']['DV']['Location']['elevation']
for day in data['SiteRep']['DV']['Location']['Period']:
new_day = Day()
new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
for timestep in day['Rep']:
new_timestep = Timestep()
if timestep['$'] == "Day":
cur_elements = ELEMENTS['Day']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
+ timedelta(hours=12)
elif timestep['$'] == "Night":
cur_elements = ELEMENTS['Night']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
else:
cur_elements = ELEMENTS['Default']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
+ timedelta(minutes=int(timestep['$']))
if frequency == 'daily':
new_timestep.name = timestep['$']
elif frequency == '3hourly':
new_timestep.name = int(timestep['$'])
new_timestep.weather = \
Element(cur_elements['W'],
timestep[cur_elements['W']],
self._get_wx_units(params, cur_elements['W']))
new_timestep.weather.text = self._weather_to_text(int(timestep[cur_elements['W']]))
new_timestep.temperature = \
Element(cur_elements['T'],
int(timestep[cur_elements['T']]),
self._get_wx_units(params, cur_elements['T']))
new_timestep.feels_like_temperature = \
Element(cur_elements['F'],
int(timestep[cur_elements['F']]),
self._get_wx_units(params, cur_elements['F']))
new_timestep.wind_speed = \
Element(cur_elements['S'],
int(timestep[cur_elements['S']]),
self._get_wx_units(params, cur_elements['S']))
new_timestep.wind_direction = \
Element(cur_elements['D'],
timestep[cur_elements['D']],
self._get_wx_units(params, cur_elements['D']))
new_timestep.wind_gust = \
Element(cur_elements['G'],
int(timestep[cur_elements['G']]),
self._get_wx_units(params, cur_elements['G']))
new_timestep.visibility = \
Element(cur_elements['V'],
timestep[cur_elements['V']],
self._get_wx_units(params, cur_elements['V']))
new_timestep.precipitation = \
Element(cur_elements['Pp'],
int(timestep[cur_elements['Pp']]),
self._get_wx_units(params, cur_elements['Pp']))
new_timestep.humidity = \
Element(cur_elements['H'],
int(timestep[cur_elements['H']]),
self._get_wx_units(params, cur_elements['H']))
if 'U' in cur_elements and cur_elements['U'] in timestep:
new_timestep.uv = \
Element(cur_elements['U'],
timestep[cur_elements['U']],
self._get_wx_units(params, cur_elements['U']))
new_day.timesteps.append(new_timestep)
forecast.days.append(new_day)
return forecast
def get_observation_sites(self):
"""
This function returns a list of Site objects for which observations are available.
"""
if (time() - self.observation_sites_last_update) > self.observation_sites_update_time:
self.observation_sites_last_update = time()
data = self.__call_api("sitelist/", None, OBSERVATION_URL)
sites = list()
for jsoned in data['Locations']['Location']:
site = Site()
site.name = jsoned['name']
site.id = jsoned['id']
site.latitude = jsoned['latitude']
site.longitude = jsoned['longitude']
if 'region' in jsoned:
site.region = jsoned['region']
if 'elevation' in jsoned:
site.elevation = jsoned['elevation']
if 'unitaryAuthArea' in jsoned:
site.unitaryAuthArea = jsoned['unitaryAuthArea']
if 'nationalPark' in jsoned:
site.nationalPark = jsoned['nationalPark']
site.api_key = self.api_key
sites.append(site)
self.observation_sites_last_request = sites
else:
sites = observation_self.sites_last_request
return sites
def get_observations_for_site(self, site_id, frequency='hourly'):
"""
Get observations for the provided site
Returns hourly observations for the previous 24 hours
"""
data = self.__call_api(site_id,{"res":frequency}, OBSERVATION_URL)
params = data['SiteRep']['Wx']['Param']
observation = Observation()
observation.data_date = data['SiteRep']['DV']['dataDate']
observation.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)
observation.continent = data['SiteRep']['DV']['Location']['continent']
observation.country = data['SiteRep']['DV']['Location']['country']
observation.name = data['SiteRep']['DV']['Location']['name']
observation.longitude = data['SiteRep']['DV']['Location']['lon']
observation.latitude = data['SiteRep']['DV']['Location']['lat']
observation.id = data['SiteRep']['DV']['Location']['i']
observation.elevation = data['SiteRep']['DV']['Location']['elevation']
for day in data['SiteRep']['DV']['Location']['Period']:
new_day = Day()
new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
# If the day only has 1 timestep, put it into a list by itself so it can be treated
# the same as a day with multiple timesteps
if type(day['Rep']) is not list:
day['Rep'] = [day['Rep']]
for timestep in day['Rep']:
# As stated in
# https://www.metoffice.gov.uk/datapoint/product/uk-hourly-site-specific-observations,
# some sites do not have all parameters available for
# observations. The documentation does not state which
# fields may be absent. If the parameter is not available,
# nothing is returned from the API. If this happens the
# value of the element is set to 'Not reported'. This may
# change to the element not being assigned to the timestep.
new_timestep = Timestep()
# Assume the '$' field is always present.
new_timestep.name = int(timestep['$'])
cur_elements = ELEMENTS['Observation']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) + timedelta(minutes=int(timestep['$']))
if cur_elements['W'] in timestep:
new_timestep.weather = \
Element(cur_elements['W'],
timestep[cur_elements['W']],
self._get_wx_units(params, cur_elements['W']))
new_timestep.weather.text = \
self._weather_to_text(int(timestep[cur_elements['W']]))
else:
new_timestep.weather = \
Element(cur_elements['W'],
'Not reported')
if cur_elements['T'] in timestep:
new_timestep.temperature = \
Element(cur_elements['T'],
float(timestep[cur_elements['T']]),
self._get_wx_units(params, cur_elements['T']))
else:
new_timestep.temperature = \
Element(cur_elements['T'],
'Not reported')
if 'S' in timestep:
new_timestep.wind_speed = \
Element(cur_elements['S'],
int(timestep[cur_elements['S']]),
self._get_wx_units(params, cur_elements['S']))
else:
new_timestep.wind_speed = \
Element(cur_elements['S'],
'Not reported')
if 'D' in timestep:
new_timestep.wind_direction = \
Element(cur_elements['D'],
timestep[cur_elements['D']],
self._get_wx_units(params, cur_elements['D']))
else:
new_timestep.wind_direction = \
Element(cur_elements['D'],
'Not reported')
if cur_elements['V'] in timestep:
new_timestep.visibility = \
Element(cur_elements['V'],
int(timestep[cur_elements['V']]),
self._get_wx_units(params, cur_elements['V']))
new_timestep.visibility.text = self._visibility_to_text(int(timestep[cur_elements['V']]))
else:
new_timestep.visibility = \
Element(cur_elements['V'],
'Not reported')
if cur_elements['H'] in timestep:
new_timestep.humidity = \
Element(cur_elements['H'],
float(timestep[cur_elements['H']]),
self._get_wx_units(params, cur_elements['H']))
else:
new_timestep.humidity = \
Element(cur_elements['H'],
'Not reported')
if cur_elements['Dp'] in timestep:
new_timestep.dew_point = \
Element(cur_elements['Dp'],
float(timestep[cur_elements['Dp']]),
self._get_wx_units(params,
cur_elements['Dp']))
else:
new_timestep.dew_point = \
Element(cur_elements['Dp'],
'Not reported')
if cur_elements['P'] in timestep:
new_timestep.pressure = \
Element(cur_elements['P'],
float(timestep[cur_elements['P']]),
self._get_wx_units(params, cur_elements['P']))
else:
new_timestep.pressure = \
Element(cur_elements['P'],
'Not reported')
if cur_elements['Pt'] in timestep:
new_timestep.pressure_tendency = \
Element(cur_elements['Pt'],
timestep[cur_elements['Pt']],
self._get_wx_units(params, cur_elements['Pt']))
else:
new_timestep.pressure_tendency = \
Element(cur_elements['Pt'],
'Not reported')
new_day.timesteps.append(new_timestep)
observation.days.append(new_day)
return observation
|
jacobtomlinson/datapoint-python | datapoint/Manager.py | Manager.get_observations_for_site | python | def get_observations_for_site(self, site_id, frequency='hourly'):
data = self.__call_api(site_id,{"res":frequency}, OBSERVATION_URL)
params = data['SiteRep']['Wx']['Param']
observation = Observation()
observation.data_date = data['SiteRep']['DV']['dataDate']
observation.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)
observation.continent = data['SiteRep']['DV']['Location']['continent']
observation.country = data['SiteRep']['DV']['Location']['country']
observation.name = data['SiteRep']['DV']['Location']['name']
observation.longitude = data['SiteRep']['DV']['Location']['lon']
observation.latitude = data['SiteRep']['DV']['Location']['lat']
observation.id = data['SiteRep']['DV']['Location']['i']
observation.elevation = data['SiteRep']['DV']['Location']['elevation']
for day in data['SiteRep']['DV']['Location']['Period']:
new_day = Day()
new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
# If the day only has 1 timestep, put it into a list by itself so it can be treated
# the same as a day with multiple timesteps
if type(day['Rep']) is not list:
day['Rep'] = [day['Rep']]
for timestep in day['Rep']:
# As stated in
# https://www.metoffice.gov.uk/datapoint/product/uk-hourly-site-specific-observations,
# some sites do not have all parameters available for
# observations. The documentation does not state which
# fields may be absent. If the parameter is not available,
# nothing is returned from the API. If this happens the
# value of the element is set to 'Not reported'. This may
# change to the element not being assigned to the timestep.
new_timestep = Timestep()
# Assume the '$' field is always present.
new_timestep.name = int(timestep['$'])
cur_elements = ELEMENTS['Observation']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) + timedelta(minutes=int(timestep['$']))
if cur_elements['W'] in timestep:
new_timestep.weather = \
Element(cur_elements['W'],
timestep[cur_elements['W']],
self._get_wx_units(params, cur_elements['W']))
new_timestep.weather.text = \
self._weather_to_text(int(timestep[cur_elements['W']]))
else:
new_timestep.weather = \
Element(cur_elements['W'],
'Not reported')
if cur_elements['T'] in timestep:
new_timestep.temperature = \
Element(cur_elements['T'],
float(timestep[cur_elements['T']]),
self._get_wx_units(params, cur_elements['T']))
else:
new_timestep.temperature = \
Element(cur_elements['T'],
'Not reported')
if 'S' in timestep:
new_timestep.wind_speed = \
Element(cur_elements['S'],
int(timestep[cur_elements['S']]),
self._get_wx_units(params, cur_elements['S']))
else:
new_timestep.wind_speed = \
Element(cur_elements['S'],
'Not reported')
if 'D' in timestep:
new_timestep.wind_direction = \
Element(cur_elements['D'],
timestep[cur_elements['D']],
self._get_wx_units(params, cur_elements['D']))
else:
new_timestep.wind_direction = \
Element(cur_elements['D'],
'Not reported')
if cur_elements['V'] in timestep:
new_timestep.visibility = \
Element(cur_elements['V'],
int(timestep[cur_elements['V']]),
self._get_wx_units(params, cur_elements['V']))
new_timestep.visibility.text = self._visibility_to_text(int(timestep[cur_elements['V']]))
else:
new_timestep.visibility = \
Element(cur_elements['V'],
'Not reported')
if cur_elements['H'] in timestep:
new_timestep.humidity = \
Element(cur_elements['H'],
float(timestep[cur_elements['H']]),
self._get_wx_units(params, cur_elements['H']))
else:
new_timestep.humidity = \
Element(cur_elements['H'],
'Not reported')
if cur_elements['Dp'] in timestep:
new_timestep.dew_point = \
Element(cur_elements['Dp'],
float(timestep[cur_elements['Dp']]),
self._get_wx_units(params,
cur_elements['Dp']))
else:
new_timestep.dew_point = \
Element(cur_elements['Dp'],
'Not reported')
if cur_elements['P'] in timestep:
new_timestep.pressure = \
Element(cur_elements['P'],
float(timestep[cur_elements['P']]),
self._get_wx_units(params, cur_elements['P']))
else:
new_timestep.pressure = \
Element(cur_elements['P'],
'Not reported')
if cur_elements['Pt'] in timestep:
new_timestep.pressure_tendency = \
Element(cur_elements['Pt'],
timestep[cur_elements['Pt']],
self._get_wx_units(params, cur_elements['Pt']))
else:
new_timestep.pressure_tendency = \
Element(cur_elements['Pt'],
'Not reported')
new_day.timesteps.append(new_timestep)
observation.days.append(new_day)
return observation | Get observations for the provided site
Returns hourly observations for the previous 24 hours | train | https://github.com/jacobtomlinson/datapoint-python/blob/1d3f596f21975f42c1484f5a9c3ff057de0b47ae/datapoint/Manager.py#L504-L649 | [
"def __call_api(self, path, params=None, api_url=FORECAST_URL):\n \"\"\"\n Call the datapoint api using the requests module\n\n \"\"\"\n if not params:\n params = dict()\n payload = {'key': self.api_key}\n payload.update(params)\n url = \"%s/%s\" % (api_url, path)\n\n # Add a timeout to the request.\n # The value of 1 second is based on attempting 100 connections to\n # datapoint and taking ten times the mean connection time (rounded up).\n # Could expose to users in the functions which need to call the api.\n #req = requests.get(url, params=payload, timeout=1)\n # The wrapper function __retry_session returns a requests.Session\n # object. This has a .get() function like requests.get(), so the use\n # doesn't change here.\n\n sess = self.__retry_session()\n req = sess.get(url, params=payload, timeout=1)\n\n try:\n data = req.json()\n except ValueError:\n raise APIException(\"DataPoint has not returned any data, this could be due to an incorrect API key\")\n self.call_response = data\n if req.status_code != 200:\n msg = [data[m] for m in (\"message\", \"error_message\", \"status\") \\\n if m in data][0]\n raise Exception(msg)\n return data\n",
"def _get_wx_units(self, params, name):\n \"\"\"\n Give the Wx array returned from datapoint and an element\n name and return the units for that element.\n \"\"\"\n units = \"\"\n for param in params:\n if str(name) == str(param['name']):\n units = param['units']\n return units\n",
"def _weather_to_text(self, code):\n if not isinstance(code, (int, long)):\n raise ValueError(\"Weather code must be an integer not\", type(code))\n if code < 0 or code > 30:\n raise ValueError(\"Weather code outof bounds, should be 0-30\")\n text = WEATHER_CODES[str(code)]\n return text\n",
"def _visibility_to_text(self, distance):\n \"\"\"\n Convert observed visibility in metres to text used in forecast\n \"\"\"\n\n if not isinstance(distance, (int, long)):\n raise ValueError(\"Distance must be an integer not\", type(distance))\n if distance < 0:\n raise ValueError(\"Distance out of bounds, should be 0 or greater\")\n\n if 0 <= distance < 1000:\n return 'VP'\n elif 1000 <= distance < 4000:\n return 'PO'\n elif 4000 <= distance < 10000:\n return 'MO'\n elif 10000 <= distance < 20000:\n return 'GO'\n elif 20000 <= distance < 40000:\n return 'VG'\n else:\n return 'EX'\n"
] | class Manager(object):
"""
Datapoint Manager object
"""
def __init__(self, api_key=""):
self.api_key = api_key
self.call_response = None
# The list of sites changes infrequently so limit to requesting it
# every hour.
self.forecast_sites_last_update = 0
self.forecast_sites_last_request = None
self.forecast_sites_update_time = 3600
self.observation_sites_last_update = 0
self.observation_sites_last_request = None
self.observation_sites_update_time = 3600
self.regions = RegionManager(self.api_key)
def __retry_session(self, retries=10, backoff_factor=0.3,
status_forcelist=(500, 502, 504),
session=None):
"""
Retry the connection using requests if it fails. Use this as a wrapper
to request from datapoint
"""
# requests.Session allows finer control, which is needed to use the
# retrying code
the_session = session or requests.Session()
# The Retry object manages the actual retrying
retry = Retry(total=retries, read=retries, connect=retries,
backoff_factor=backoff_factor,
status_forcelist=status_forcelist)
adapter = HTTPAdapter(max_retries=retry)
the_session.mount('http://', adapter)
the_session.mount('https://', adapter)
return the_session
def __call_api(self, path, params=None, api_url=FORECAST_URL):
"""
Call the datapoint api using the requests module
"""
if not params:
params = dict()
payload = {'key': self.api_key}
payload.update(params)
url = "%s/%s" % (api_url, path)
# Add a timeout to the request.
# The value of 1 second is based on attempting 100 connections to
# datapoint and taking ten times the mean connection time (rounded up).
# Could expose to users in the functions which need to call the api.
#req = requests.get(url, params=payload, timeout=1)
# The wrapper function __retry_session returns a requests.Session
# object. This has a .get() function like requests.get(), so the use
# doesn't change here.
sess = self.__retry_session()
req = sess.get(url, params=payload, timeout=1)
try:
data = req.json()
except ValueError:
raise APIException("DataPoint has not returned any data, this could be due to an incorrect API key")
self.call_response = data
if req.status_code != 200:
msg = [data[m] for m in ("message", "error_message", "status") \
if m in data][0]
raise Exception(msg)
return data
def _distance_between_coords(self, lon1, lat1, lon2, lat2):
"""
Calculate the great circle distance between two points
on the earth (specified in decimal degrees).
Haversine formula states that:
d = 2 * r * arcsin(sqrt(sin^2((lat1 - lat2) / 2 +
cos(lat1)cos(lat2)sin^2((lon1 - lon2) / 2))))
where r is the radius of the sphere. This assumes the earth is spherical.
"""
# Convert the coordinates of the points to radians.
lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
r = 6371
d_hav = 2 * r * asin(sqrt((sin((lat1 - lat2) / 2))**2 + \
cos(lat1) * cos(lat2) * (sin((lon1 - lon2) / 2)**2 )))
return d_hav
def _get_wx_units(self, params, name):
"""
Give the Wx array returned from datapoint and an element
name and return the units for that element.
"""
units = ""
for param in params:
if str(name) == str(param['name']):
units = param['units']
return units
def _weather_to_text(self, code):
if not isinstance(code, (int, long)):
raise ValueError("Weather code must be an integer not", type(code))
if code < 0 or code > 30:
raise ValueError("Weather code outof bounds, should be 0-30")
text = WEATHER_CODES[str(code)]
return text
    def _visibility_to_text(self, distance):
        """
        Convert observed visibility in metres to the text code used in
        forecasts (VP/PO/MO/GO/VG/EX).

        :param distance: visibility in metres; must be a non-negative integer.
        :raises ValueError: if distance is not an integer or is negative.
        """
        # NOTE(review): the ``long`` check implies this module also targets
        # Python 2; ``long`` does not exist on Python 3.
        if not isinstance(distance, (int, long)):
            raise ValueError("Distance must be an integer not", type(distance))
        if distance < 0:
            raise ValueError("Distance out of bounds, should be 0 or greater")

        # Bucket the metres into the textual visibility categories.
        if 0 <= distance < 1000:
            return 'VP'
        elif 1000 <= distance < 4000:
            return 'PO'
        elif 4000 <= distance < 10000:
            return 'MO'
        elif 10000 <= distance < 20000:
            return 'GO'
        elif 20000 <= distance < 40000:
            return 'VG'
        else:
            return 'EX'
def get_all_sites(self):
"""
Deprecated. This function returns a list of Site object.
"""
warning_message = 'This function is deprecated. Use get_forecast_sites() instead'
warn(warning_message, DeprecationWarning, stacklevel=2)
return self.get_forecast_sites()
    def get_forecast_sites(self):
        """
        Return the list of Site objects for which forecasts are available.

        The site list changes infrequently, so the parsed response is cached
        and only refreshed once ``forecast_sites_update_time`` seconds have
        passed since the last successful request.
        """
        time_now = time()
        # Refresh when the cache is stale or no request has succeeded yet.
        if (time_now - self.forecast_sites_last_update) > self.forecast_sites_update_time or self.forecast_sites_last_request is None:
            data = self.__call_api("sitelist/")
            sites = list()
            for jsoned in data['Locations']['Location']:
                site = Site()
                site.name = jsoned['name']
                site.id = jsoned['id']
                site.latitude = jsoned['latitude']
                site.longitude = jsoned['longitude']
                # Optional fields: only some sites report these.
                if 'region' in jsoned:
                    site.region = jsoned['region']
                if 'elevation' in jsoned:
                    site.elevation = jsoned['elevation']
                if 'unitaryAuthArea' in jsoned:
                    site.unitaryAuthArea = jsoned['unitaryAuthArea']
                if 'nationalPark' in jsoned:
                    site.nationalPark = jsoned['nationalPark']
                site.api_key = self.api_key
                sites.append(site)
            self.forecast_sites_last_request = sites
            # Only set self.sites_last_update once self.sites_last_request has
            # been set
            self.forecast_sites_last_update = time_now
        else:
            sites = self.forecast_sites_last_request
        return sites
def get_nearest_site(self, latitude=None, longitude=None):
"""
Deprecated. This function returns nearest Site object to the specified
coordinates.
"""
warning_message = 'This function is deprecated. Use get_nearest_forecast_site() instead'
warn(warning_message, DeprecationWarning, stacklevel=2)
return self.get_nearest_forecast_site(latitude, longitude)
def get_nearest_forecast_site(self, latitude=None, longitude=None):
"""
This function returns the nearest Site object to the specified
coordinates.
"""
if longitude is None:
print('ERROR: No latitude given.')
return False
if latitude is None:
print('ERROR: No latitude given.')
return False
nearest = False
distance = None
sites = self.get_forecast_sites()
# Sometimes there is a TypeError exception here: sites is None
# So, sometimes self.get_all_sites() has returned None.
for site in sites:
new_distance = \
self._distance_between_coords(
float(site.longitude),
float(site.latitude),
float(longitude),
float(latitude))
if ((distance == None) or (new_distance < distance)):
distance = new_distance
nearest = site
# If the nearest site is more than 30km away, raise an error
if distance > 30:
raise APIException("There is no site within 30km.")
return nearest
def get_forecast_for_site(self, site_id, frequency="daily"):
"""
Get a forecast for the provided site
A frequency of "daily" will return two timesteps:
"Day" and "Night".
A frequency of "3hourly" will return 8 timesteps:
0, 180, 360 ... 1260 (minutes since midnight UTC)
"""
data = self.__call_api(site_id, {"res":frequency})
params = data['SiteRep']['Wx']['Param']
forecast = Forecast()
forecast.data_date = data['SiteRep']['DV']['dataDate']
forecast.data_date = datetime.strptime(data['SiteRep']['DV']['dataDate'], DATA_DATE_FORMAT).replace(tzinfo=pytz.UTC)
forecast.continent = data['SiteRep']['DV']['Location']['continent']
forecast.country = data['SiteRep']['DV']['Location']['country']
forecast.name = data['SiteRep']['DV']['Location']['name']
forecast.longitude = data['SiteRep']['DV']['Location']['lon']
forecast.latitude = data['SiteRep']['DV']['Location']['lat']
forecast.id = data['SiteRep']['DV']['Location']['i']
forecast.elevation = data['SiteRep']['DV']['Location']['elevation']
for day in data['SiteRep']['DV']['Location']['Period']:
new_day = Day()
new_day.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
for timestep in day['Rep']:
new_timestep = Timestep()
if timestep['$'] == "Day":
cur_elements = ELEMENTS['Day']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
+ timedelta(hours=12)
elif timestep['$'] == "Night":
cur_elements = ELEMENTS['Night']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC)
else:
cur_elements = ELEMENTS['Default']
new_timestep.date = datetime.strptime(day['value'], DATE_FORMAT).replace(tzinfo=pytz.UTC) \
+ timedelta(minutes=int(timestep['$']))
if frequency == 'daily':
new_timestep.name = timestep['$']
elif frequency == '3hourly':
new_timestep.name = int(timestep['$'])
new_timestep.weather = \
Element(cur_elements['W'],
timestep[cur_elements['W']],
self._get_wx_units(params, cur_elements['W']))
new_timestep.weather.text = self._weather_to_text(int(timestep[cur_elements['W']]))
new_timestep.temperature = \
Element(cur_elements['T'],
int(timestep[cur_elements['T']]),
self._get_wx_units(params, cur_elements['T']))
new_timestep.feels_like_temperature = \
Element(cur_elements['F'],
int(timestep[cur_elements['F']]),
self._get_wx_units(params, cur_elements['F']))
new_timestep.wind_speed = \
Element(cur_elements['S'],
int(timestep[cur_elements['S']]),
self._get_wx_units(params, cur_elements['S']))
new_timestep.wind_direction = \
Element(cur_elements['D'],
timestep[cur_elements['D']],
self._get_wx_units(params, cur_elements['D']))
new_timestep.wind_gust = \
Element(cur_elements['G'],
int(timestep[cur_elements['G']]),
self._get_wx_units(params, cur_elements['G']))
new_timestep.visibility = \
Element(cur_elements['V'],
timestep[cur_elements['V']],
self._get_wx_units(params, cur_elements['V']))
new_timestep.precipitation = \
Element(cur_elements['Pp'],
int(timestep[cur_elements['Pp']]),
self._get_wx_units(params, cur_elements['Pp']))
new_timestep.humidity = \
Element(cur_elements['H'],
int(timestep[cur_elements['H']]),
self._get_wx_units(params, cur_elements['H']))
if 'U' in cur_elements and cur_elements['U'] in timestep:
new_timestep.uv = \
Element(cur_elements['U'],
timestep[cur_elements['U']],
self._get_wx_units(params, cur_elements['U']))
new_day.timesteps.append(new_timestep)
forecast.days.append(new_day)
return forecast
def get_observation_sites(self):
"""
This function returns a list of Site objects for which observations are available.
"""
if (time() - self.observation_sites_last_update) > self.observation_sites_update_time:
self.observation_sites_last_update = time()
data = self.__call_api("sitelist/", None, OBSERVATION_URL)
sites = list()
for jsoned in data['Locations']['Location']:
site = Site()
site.name = jsoned['name']
site.id = jsoned['id']
site.latitude = jsoned['latitude']
site.longitude = jsoned['longitude']
if 'region' in jsoned:
site.region = jsoned['region']
if 'elevation' in jsoned:
site.elevation = jsoned['elevation']
if 'unitaryAuthArea' in jsoned:
site.unitaryAuthArea = jsoned['unitaryAuthArea']
if 'nationalPark' in jsoned:
site.nationalPark = jsoned['nationalPark']
site.api_key = self.api_key
sites.append(site)
self.observation_sites_last_request = sites
else:
sites = observation_self.sites_last_request
return sites
def get_nearest_observation_site(self, latitude=None, longitude=None):
"""
This function returns the nearest Site to the specified
coordinates that supports observations
"""
if longitude is None:
print('ERROR: No longitude given.')
return False
if latitude is None:
print('ERROR: No latitude given.')
return False
nearest = False
distance = None
sites = self.get_observation_sites()
for site in sites:
new_distance = \
self._distance_between_coords(
float(site.longitude),
float(site.latitude),
float(longitude),
float(latitude))
if ((distance == None) or (new_distance < distance)):
distance = new_distance
nearest = site
# If the nearest site is more than 20km away, raise an error
if distance > 20:
raise APIException("There is no site within 30km.")
return nearest
|
jacobtomlinson/datapoint-python | datapoint/regions/RegionManager.py | RegionManager.call_api | python | def call_api(self, path, **kwargs):
'''
Call datapoint api
'''
if 'key' not in kwargs:
kwargs['key'] = self.api_key
req = requests.get('{0}{1}'.format(self.base_url, path), params=kwargs)
if req.status_code != requests.codes.ok:
req.raise_for_status()
return req.json() | Call datapoint api | train | https://github.com/jacobtomlinson/datapoint-python/blob/1d3f596f21975f42c1484f5a9c3ff057de0b47ae/datapoint/regions/RegionManager.py#L27-L38 | null | class RegionManager(object):
'''
Datapoint Manager for national and regional text forecasts
'''
def __init__(self, api_key, base_url=None):
self.api_key = api_key
self.all_regions_path = '/sitelist'
if not base_url:
self.base_url = REGIONS_BASE_URL
# The list of regions changes infrequently so limit to requesting it
# every hour.
self.regions_last_update = 0
self.regions_last_request = None
self.regions_update_time = 3600
    def get_all_regions(self):
        '''
        Request a list of regions from Datapoint. Returns each Region
        as a Site object. Regions rarely change, so we cache the response
        for one hour to minimise requests to API.
        '''
        # Serve the cached list while it is still fresh.
        # NOTE(review): regions_last_update starts at 0, so the first call
        # always misses the cache and hits the API.
        if (time() - self.regions_last_update) < self.regions_update_time:
            return self.regions_last_request
        response = self.call_api(self.all_regions_path)
        regions = []
        for location in response['Locations']['Location']:
            region = Site()
            # Keys are '@'-prefixed in this endpoint's response.
            region.id = location['@id']
            region.region = location['@name']
            # Map the short region code to its human-readable name.
            region.name = REGION_NAMES[location['@name']]
            regions.append(region)
        # Record the successful request for the one-hour cache.
        self.regions_last_update = time()
        self.regions_last_request = regions
        return regions
def get_raw_forecast(self, region_id):
'''
Request unformatted forecast for a specific region_id.
'''
return self.call_api('/{0}'.format(region_id))
|
jacobtomlinson/datapoint-python | datapoint/regions/RegionManager.py | RegionManager.get_all_regions | python | def get_all_regions(self):
'''
Request a list of regions from Datapoint. Returns each Region
as a Site object. Regions rarely change, so we cache the response
for one hour to minimise requests to API.
'''
if (time() - self.regions_last_update) < self.regions_update_time:
return self.regions_last_request
response = self.call_api(self.all_regions_path)
regions = []
for location in response['Locations']['Location']:
region = Site()
region.id = location['@id']
region.region = location['@name']
region.name = REGION_NAMES[location['@name']]
regions.append(region)
self.regions_last_update = time()
self.regions_last_request = regions
return regions | Request a list of regions from Datapoint. Returns each Region
as a Site object. Regions rarely change, so we cache the response
for one hour to minimise requests to API. | train | https://github.com/jacobtomlinson/datapoint-python/blob/1d3f596f21975f42c1484f5a9c3ff057de0b47ae/datapoint/regions/RegionManager.py#L40-L60 | [
"def call_api(self, path, **kwargs):\n '''\n Call datapoint api\n '''\n if 'key' not in kwargs:\n kwargs['key'] = self.api_key\n req = requests.get('{0}{1}'.format(self.base_url, path), params=kwargs)\n\n if req.status_code != requests.codes.ok:\n req.raise_for_status()\n\n return req.json()\n"
] | class RegionManager(object):
'''
Datapoint Manager for national and regional text forecasts
'''
def __init__(self, api_key, base_url=None):
self.api_key = api_key
self.all_regions_path = '/sitelist'
if not base_url:
self.base_url = REGIONS_BASE_URL
# The list of regions changes infrequently so limit to requesting it
# every hour.
self.regions_last_update = 0
self.regions_last_request = None
self.regions_update_time = 3600
def call_api(self, path, **kwargs):
'''
Call datapoint api
'''
if 'key' not in kwargs:
kwargs['key'] = self.api_key
req = requests.get('{0}{1}'.format(self.base_url, path), params=kwargs)
if req.status_code != requests.codes.ok:
req.raise_for_status()
return req.json()
def get_raw_forecast(self, region_id):
'''
Request unformatted forecast for a specific region_id.
'''
return self.call_api('/{0}'.format(region_id))
|
jacobtomlinson/datapoint-python | datapoint/Forecast.py | Forecast.now | python | def now(self):
# From the comments in issue 19: forecast.days[0] is dated for the
# previous day shortly after midnight
now = None
# Set the time now to be in the same time zone as the first timestep in
# the forecast. This shouldn't cause problems with daylight savings as
# the change is far enough after midnight.
d = datetime.datetime.now(tz=self.days[0].date.tzinfo)
# d is something like datetime.datetime(2019, 1, 19, 17, 5, 28, 337439)
# d.replace(...) is datetime.datetime(2019, 1, 19, 0, 0)
# for_total_seconds is then: datetime.timedelta(seconds=61528,
# microseconds=337439)
# In this example, this is (17*60*60) + (5*60) + 28 = 61528
# this is the number of seconds through the day
for_total_seconds = d - \
d.replace(hour=0, minute=0, second=0, microsecond=0)
# In the example time,
# for_total_seconds.total_seconds() = 61528 + 0.337439
# This is the number of seconds after midnight
# msm is then the number of minutes after midnight
msm = for_total_seconds.total_seconds() / 60
# If the date now and the date in the forecast are the same, proceed
if self.days[0].date.strftime("%Y-%m-%dZ") == d.strftime("%Y-%m-%dZ"):
# We have determined that the date in the forecast and the date now
# are the same.
#
# Now, test if timestep.name is larger than the number of minutes
# since midnight for each timestep.
# The timestep we keep is the one with the largest timestep.name
# which is less than the number of minutes since midnight
for timestep in self.days[0].timesteps:
if timestep.name > msm:
# break here stops the for loop
break
# now is assigned to the last timestep that did not break the
# loop
now = timestep
return now
# Bodge to get around problems near midnight:
# Previous method does not account for the end of the month. The test
# trying to be evaluated is that the absolute difference between the
# last timestep of the first day and the current time is less than 4
# hours. 4 hours is because the final timestep of the previous day is
# for 21:00
elif abs(self.days[0].timesteps[-1].date - d).total_seconds() < 14400:
# This is verbose to check that the returned data makes sense
timestep_to_return = self.days[0].timesteps[-1]
return timestep_to_return
else:
return False | Function to return just the current timestep from this forecast | train | https://github.com/jacobtomlinson/datapoint-python/blob/1d3f596f21975f42c1484f5a9c3ff057de0b47ae/datapoint/Forecast.py#L22-L80 | null | class Forecast(object):
def __init__(self, api_key=""):
self.api_key = api_key
self.data_date = None
self.continent = None
self.country = None
self.name = None
self.longitude = None
self.latitude = None
self.id = None
self.elevation = None
self.days = []
def timedelta_total_seconds(self, timedelta):
return (
timedelta.microseconds + 0.0 +
(timedelta.seconds + timedelta.days * 24 * 3600) * 10 ** 6) / 10 ** 6
def future(self,in_days=None,in_hours=None,in_minutes=None,in_seconds=None):
"""
Function to return a future timestep
"""
future = None
# Initialize variables to 0
dd, hh, mm, ss = [0 for i in range(4)]
if (in_days != None):
dd = dd + in_days
if (in_hours != None):
hh = hh + in_hours
if (in_minutes != None):
mm = mm + in_minutes
if (in_seconds != None):
ss = ss + in_seconds
# Set the hours, minutes and seconds from now (minus the days)
dnow = datetime.datetime.utcnow() # Now
d = dnow + \
datetime.timedelta(hours=hh, minutes=mm, seconds = ss)
# Time from midnight
for_total_seconds = d - \
d.replace(hour=0, minute=0, second=0, microsecond=0)
# Convert into minutes since midnight
try:
msm = for_total_seconds.total_seconds()/60.
except:
# For versions before 2.7
msm = self.timedelta_total_seconds(for_total_seconds)/60.
if (dd<len(self.days)):
for timestep in self.days[dd].timesteps:
if timestep.name >= msm:
future = timestep
return future
else:
print('ERROR: requested date is outside the forecast range selected,' + str(len(self.days)))
return False
|
jacobtomlinson/datapoint-python | datapoint/Forecast.py | Forecast.future | python | def future(self,in_days=None,in_hours=None,in_minutes=None,in_seconds=None):
future = None
# Initialize variables to 0
dd, hh, mm, ss = [0 for i in range(4)]
if (in_days != None):
dd = dd + in_days
if (in_hours != None):
hh = hh + in_hours
if (in_minutes != None):
mm = mm + in_minutes
if (in_seconds != None):
ss = ss + in_seconds
# Set the hours, minutes and seconds from now (minus the days)
dnow = datetime.datetime.utcnow() # Now
d = dnow + \
datetime.timedelta(hours=hh, minutes=mm, seconds = ss)
# Time from midnight
for_total_seconds = d - \
d.replace(hour=0, minute=0, second=0, microsecond=0)
# Convert into minutes since midnight
try:
msm = for_total_seconds.total_seconds()/60.
except:
# For versions before 2.7
msm = self.timedelta_total_seconds(for_total_seconds)/60.
if (dd<len(self.days)):
for timestep in self.days[dd].timesteps:
if timestep.name >= msm:
future = timestep
return future
else:
print('ERROR: requested date is outside the forecast range selected,' + str(len(self.days)))
return False | Function to return a future timestep | train | https://github.com/jacobtomlinson/datapoint-python/blob/1d3f596f21975f42c1484f5a9c3ff057de0b47ae/datapoint/Forecast.py#L82-L121 | [
"def timedelta_total_seconds(self, timedelta):\n return (\n timedelta.microseconds + 0.0 +\n (timedelta.seconds + timedelta.days * 24 * 3600) * 10 ** 6) / 10 ** 6\n"
] | class Forecast(object):
def __init__(self, api_key=""):
self.api_key = api_key
self.data_date = None
self.continent = None
self.country = None
self.name = None
self.longitude = None
self.latitude = None
self.id = None
self.elevation = None
self.days = []
def timedelta_total_seconds(self, timedelta):
return (
timedelta.microseconds + 0.0 +
(timedelta.seconds + timedelta.days * 24 * 3600) * 10 ** 6) / 10 ** 6
def now(self):
"""
Function to return just the current timestep from this forecast
"""
# From the comments in issue 19: forecast.days[0] is dated for the
# previous day shortly after midnight
now = None
# Set the time now to be in the same time zone as the first timestep in
# the forecast. This shouldn't cause problems with daylight savings as
# the change is far enough after midnight.
d = datetime.datetime.now(tz=self.days[0].date.tzinfo)
# d is something like datetime.datetime(2019, 1, 19, 17, 5, 28, 337439)
# d.replace(...) is datetime.datetime(2019, 1, 19, 0, 0)
# for_total_seconds is then: datetime.timedelta(seconds=61528,
# microseconds=337439)
# In this example, this is (17*60*60) + (5*60) + 28 = 61528
# this is the number of seconds through the day
for_total_seconds = d - \
d.replace(hour=0, minute=0, second=0, microsecond=0)
# In the example time,
# for_total_seconds.total_seconds() = 61528 + 0.337439
# This is the number of seconds after midnight
# msm is then the number of minutes after midnight
msm = for_total_seconds.total_seconds() / 60
# If the date now and the date in the forecast are the same, proceed
if self.days[0].date.strftime("%Y-%m-%dZ") == d.strftime("%Y-%m-%dZ"):
# We have determined that the date in the forecast and the date now
# are the same.
#
# Now, test if timestep.name is larger than the number of minutes
# since midnight for each timestep.
# The timestep we keep is the one with the largest timestep.name
# which is less than the number of minutes since midnight
for timestep in self.days[0].timesteps:
if timestep.name > msm:
# break here stops the for loop
break
# now is assigned to the last timestep that did not break the
# loop
now = timestep
return now
# Bodge to get around problems near midnight:
# Previous method does not account for the end of the month. The test
# trying to be evaluated is that the absolute difference between the
# last timestep of the first day and the current time is less than 4
# hours. 4 hours is because the final timestep of the previous day is
# for 21:00
elif abs(self.days[0].timesteps[-1].date - d).total_seconds() < 14400:
# This is verbose to check that the returned data makes sense
timestep_to_return = self.days[0].timesteps[-1]
return timestep_to_return
else:
return False
|
jacobtomlinson/datapoint-python | datapoint/profile.py | install_API_key | python | def install_API_key(api_key, profile_name='default'):
fname = API_profile_fname(profile_name)
if not os.path.isdir(os.path.dirname(fname)):
os.makedirs(os.path.dirname(fname))
with open(fname, 'w') as fh:
fh.write(api_key) | Put the given API key into the given profile name. | train | https://github.com/jacobtomlinson/datapoint-python/blob/1d3f596f21975f42c1484f5a9c3ff057de0b47ae/datapoint/profile.py#L12-L18 | [
"def API_profile_fname(profile_name='default'):\n \"\"\"Get the API key profile filename.\"\"\"\n return os.path.join(appdirs.user_data_dir('DataPoint'),\n profile_name + '.key')\n"
] | import os
import appdirs
def API_profile_fname(profile_name='default'):
"""Get the API key profile filename."""
return os.path.join(appdirs.user_data_dir('DataPoint'),
profile_name + '.key')
|
openai/pachi-py | pachi_py/pachi/tools/twogtp.py | GTP_game.writesgf | python | def writesgf(self, sgffilename):
"Write the game to an SGF file after a game"
size = self.size
outfile = open(sgffilename, "w")
if not outfile:
print "Couldn't create " + sgffilename
return
black_name = self.blackplayer.get_program_name()
white_name = self.whiteplayer.get_program_name()
black_seed = self.blackplayer.get_random_seed()
white_seed = self.whiteplayer.get_random_seed()
handicap = self.handicap
komi = self.komi
result = self.resultw
outfile.write("(;GM[1]FF[4]RU[Japanese]SZ[%s]HA[%s]KM[%s]RE[%s]\n" %
(size, handicap, komi, result))
outfile.write("PW[%s (random seed %s)]PB[%s (random seed %s)]\n" %
(white_name, white_seed, black_name, black_seed))
outfile.write(self.sgffilestart)
if handicap > 1:
outfile.write("AB");
for stone in self.handicap_stones:
outfile.write("[%s]" %(coords_to_sgf(size, stone)))
outfile.write("PL[W]\n")
to_play = self.first_to_play
for move in self.moves:
sgfmove = coords_to_sgf(size, move)
outfile.write(";%s[%s]\n" % (to_play, sgfmove))
if to_play == "B":
to_play = "W"
else:
to_play = "B"
outfile.write(")\n")
outfile.close | Write the game to an SGF file after a game | train | https://github.com/openai/pachi-py/blob/65f29fdd28747d34f2c3001f4016913e4aaeb8fc/pachi_py/pachi/tools/twogtp.py#L272-L310 | [
"def coords_to_sgf(size, board_coords):\n global debug\n\n board_coords = string.lower(board_coords)\n if board_coords == \"pass\":\n return \"\"\n if debug:\n print \"Coords: <\" + board_coords + \">\"\n letter = board_coords[0]\n digits = board_coords[1:]\n if letter > \"i\":\n sgffirst = chr(ord(letter) - 1)\n else:\n sgffirst = letter\n sgfsecond = chr(ord(\"a\") + int(size) - int(digits))\n return sgffirst + sgfsecond\n"
] | class GTP_game:
# Class members:
# whiteplayer GTP_player
# blackplayer GTP_player
# size int
# komi float
# handicap int
# handicap_type string
# handicap_stones int
# moves list of string
# resultw
# resultb
def __init__(self, whitecommand, blackcommand, size, komi, handicap,
handicap_type, endgamefile):
self.whiteplayer = GTP_player(whitecommand)
self.blackplayer = GTP_player(blackcommand)
self.size = size
self.komi = komi
self.handicap = handicap
self.handicap_type = handicap_type
self.endgamefile = endgamefile
self.sgffilestart = ""
if endgamefile != "":
self.init_endgame_contest_game()
else:
self.sgffilestart = ""
def init_endgame_contest_game(self):
infile = open(self.endgamefile)
if not infile:
print "Couldn't read " + self.endgamefile
sys.exit(2)
sgflines = infile.readlines()
infile.close
size = re.compile("SZ\[[0-9]+\]")
move = re.compile(";[BW]\[[a-z]{0,2}\]")
sgf_start = []
for line in sgflines:
match = size.search(line)
if match:
self.size = match.group()[3:-1]
match = move.search(line)
while match:
sgf_start.append("A" + match.group()[1:])
line = line[match.end():]
match = move.search(line)
self.endgame_start = len(sgf_start) - endgame_start_at
self.sgffilestart = ";" + string.join(
sgf_start[:self.endgame_start-1], "") + "\n"
if self.endgame_start % 2 == 0:
self.first_to_play = "W"
else:
self.first_to_play = "B"
def get_position_from_engine(self, engine):
black_stones = engine.list_stones("black")
white_stones = engine.list_stones("white")
self.sgffilestart = ";"
if len(black_stones) > 0:
self.sgffilestart += "AB"
for stone in black_stones:
self.sgffilestart += "[%s]" % coords_to_sgf(self.size, stone)
self.sgffilestart += "\n"
if len(white_stones) > 0:
self.sgffilestart += "AW"
for stone in white_stones:
self.sgffilestart += "[%s]" % coords_to_sgf(self.size, stone)
self.sgffilestart += "\n"
def set_handicap(self, handicap):
self.handicap = handicap
def swap_players(self):
temp = self.whiteplayer
self.whiteplayer = self.blackplayer
self.blackplayer = temp
def play(self, sgffile):
"Play a game"
global verbose
if verbose >= 1:
print "Setting boardsize and komi for black\n"
self.blackplayer.boardsize(self.size)
self.blackplayer.komi(self.komi)
if verbose >= 1:
print "Setting boardsize and komi for white\n"
self.whiteplayer.boardsize(self.size)
self.whiteplayer.komi(self.komi)
self.handicap_stones = []
if self.endgamefile == "":
if self.handicap < 2:
self.first_to_play = "B"
else:
self.handicap_stones = self.blackplayer.handicap(self.handicap, self.handicap_type)
for stone in self.handicap_stones:
self.whiteplayer.black(stone)
self.first_to_play = "W"
else:
self.blackplayer.loadsgf(self.endgamefile, self.endgame_start)
self.blackplayer.set_random_seed("0")
self.whiteplayer.loadsgf(self.endgamefile, self.endgame_start)
self.whiteplayer.set_random_seed("0")
if self.blackplayer.is_known_command("list_stones"):
self.get_position_from_engine(self.blackplayer)
elif self.whiteplayer.is_known_command("list_stones"):
self.get_position_from_engine(self.whiteplayer)
to_play = self.first_to_play
self.moves = []
passes = 0
won_by_resignation = ""
while passes < 2:
if to_play == "B":
move = self.blackplayer.genmove("black")
if move[:5] == "ERROR":
# FIXME: write_sgf
sys.exit(1)
if move[:6] == "resign":
if verbose >= 1:
print "Black resigns"
won_by_resignation = "W+Resign"
break
else:
self.moves.append(move)
if string.lower(move[:4]) == "pass":
passes = passes + 1
if verbose >= 1:
print "Black passes"
else:
passes = 0
self.whiteplayer.black(move)
if verbose >= 1:
print "Black plays " + move
to_play = "W"
else:
move = self.whiteplayer.genmove("white")
if move[:5] == "ERROR":
# FIXME: write_sgf
sys.exit(1)
if move[:6] == "resign":
if verbose >= 1:
print "White resigns"
won_by_resignation = "B+Resign"
break
else:
self.moves.append(move)
if string.lower(move[:4]) == "pass":
passes = passes + 1
if verbose >= 1:
print "White passes"
else:
passes = 0
self.blackplayer.white(move)
if verbose >= 1:
print "White plays " + move
to_play = "B"
if verbose >= 2:
print self.whiteplayer.showboard() + "\n"
if won_by_resignation == "":
self.resultw = self.whiteplayer.final_score()
self.resultb = self.blackplayer.final_score()
else:
self.resultw = won_by_resignation;
self.resultb = won_by_resignation;
# if self.resultb == self.resultw:
# print "Result: ", self.resultw
# else:
# print "Result according to W: ", self.resultw
# print "Result according to B: ", self.resultb
# FIXME: $self->writesgf($sgffile) if defined $sgffile;
if sgffile != "":
self.writesgf(sgffile)
def result(self):
return (self.resultw, self.resultb)
def cputime(self):
cputime = {}
cputime["white"] = self.whiteplayer.cputime()
cputime["black"] = self.blackplayer.cputime()
return cputime
def quit(self):
self.blackplayer.quit()
self.whiteplayer.quit()
|
openai/pachi-py | pachi_py/pachi/tools/twogtp.py | GTP_game.play | python | def play(self, sgffile):
"Play a game"
global verbose
if verbose >= 1:
print "Setting boardsize and komi for black\n"
self.blackplayer.boardsize(self.size)
self.blackplayer.komi(self.komi)
if verbose >= 1:
print "Setting boardsize and komi for white\n"
self.whiteplayer.boardsize(self.size)
self.whiteplayer.komi(self.komi)
self.handicap_stones = []
if self.endgamefile == "":
if self.handicap < 2:
self.first_to_play = "B"
else:
self.handicap_stones = self.blackplayer.handicap(self.handicap, self.handicap_type)
for stone in self.handicap_stones:
self.whiteplayer.black(stone)
self.first_to_play = "W"
else:
self.blackplayer.loadsgf(self.endgamefile, self.endgame_start)
self.blackplayer.set_random_seed("0")
self.whiteplayer.loadsgf(self.endgamefile, self.endgame_start)
self.whiteplayer.set_random_seed("0")
if self.blackplayer.is_known_command("list_stones"):
self.get_position_from_engine(self.blackplayer)
elif self.whiteplayer.is_known_command("list_stones"):
self.get_position_from_engine(self.whiteplayer)
to_play = self.first_to_play
self.moves = []
passes = 0
won_by_resignation = ""
while passes < 2:
if to_play == "B":
move = self.blackplayer.genmove("black")
if move[:5] == "ERROR":
# FIXME: write_sgf
sys.exit(1)
if move[:6] == "resign":
if verbose >= 1:
print "Black resigns"
won_by_resignation = "W+Resign"
break
else:
self.moves.append(move)
if string.lower(move[:4]) == "pass":
passes = passes + 1
if verbose >= 1:
print "Black passes"
else:
passes = 0
self.whiteplayer.black(move)
if verbose >= 1:
print "Black plays " + move
to_play = "W"
else:
move = self.whiteplayer.genmove("white")
if move[:5] == "ERROR":
# FIXME: write_sgf
sys.exit(1)
if move[:6] == "resign":
if verbose >= 1:
print "White resigns"
won_by_resignation = "B+Resign"
break
else:
self.moves.append(move)
if string.lower(move[:4]) == "pass":
passes = passes + 1
if verbose >= 1:
print "White passes"
else:
passes = 0
self.blackplayer.white(move)
if verbose >= 1:
print "White plays " + move
to_play = "B"
if verbose >= 2:
print self.whiteplayer.showboard() + "\n"
if won_by_resignation == "":
self.resultw = self.whiteplayer.final_score()
self.resultb = self.blackplayer.final_score()
else:
self.resultw = won_by_resignation;
self.resultb = won_by_resignation;
# if self.resultb == self.resultw:
# print "Result: ", self.resultw
# else:
# print "Result according to W: ", self.resultw
# print "Result according to B: ", self.resultb
# FIXME: $self->writesgf($sgffile) if defined $sgffile;
if sgffile != "":
self.writesgf(sgffile) | Play a game | train | https://github.com/openai/pachi-py/blob/65f29fdd28747d34f2c3001f4016913e4aaeb8fc/pachi_py/pachi/tools/twogtp.py#L320-L424 | [
"def is_known_command(self, command):\n return self.connection.exec_cmd(\"known_command \" + command) == \"true\"\n",
"def genmove(self, color):\n if color[0] in [\"b\", \"B\"]:\n command = \"black\"\n elif color[0] in [\"w\", \"W\"]:\n command = \"white\"\n if self.protocol_version == \"1\":\n command = \"genmove_\" + command\n else:\n command = \"genmove \" + command\n\n return self.connection.exec_cmd(command)\n",
"def black(self, move):\n if self.protocol_version == \"1\":\n self.connection.exec_cmd(\"black \" + move)\n else:\n self.connection.exec_cmd(\"play black \" + move)\n",
"def komi(self, komi):\n self.connection.exec_cmd(\"komi \" + komi)\n",
"def boardsize(self, size):\n self.connection.exec_cmd(\"boardsize \" + size)\n if self.protocol_version != \"1\":\n self.connection.exec_cmd(\"clear_board\")\n",
"def handicap(self, handicap, handicap_type):\n if handicap_type == \"fixed\":\n result = self.connection.exec_cmd(\"fixed_handicap %d\" % (handicap))\n else:\n result = self.connection.exec_cmd(\"place_free_handicap %d\"\n % (handicap))\n\n return string.split(result, \" \")\n",
"def loadsgf(self, endgamefile, move_number):\n self.connection.exec_cmd(string.join([\"loadsgf\", endgamefile,\n str(move_number)]))\n",
"def set_random_seed(self, seed):\n self.connection.exec_cmd(\"set_random_seed \" + seed)\n",
"def writesgf(self, sgffilename):\n \"Write the game to an SGF file after a game\"\n\n size = self.size\n outfile = open(sgffilename, \"w\")\n if not outfile:\n print \"Couldn't create \" + sgffilename\n return\n black_name = self.blackplayer.get_program_name()\n white_name = self.whiteplayer.get_program_name()\n black_seed = self.blackplayer.get_random_seed()\n white_seed = self.whiteplayer.get_random_seed()\n handicap = self.handicap\n komi = self.komi\n result = self.resultw\n\n outfile.write(\"(;GM[1]FF[4]RU[Japanese]SZ[%s]HA[%s]KM[%s]RE[%s]\\n\" %\n (size, handicap, komi, result))\n outfile.write(\"PW[%s (random seed %s)]PB[%s (random seed %s)]\\n\" %\n (white_name, white_seed, black_name, black_seed))\n outfile.write(self.sgffilestart)\n\n if handicap > 1:\n outfile.write(\"AB\");\n for stone in self.handicap_stones:\n outfile.write(\"[%s]\" %(coords_to_sgf(size, stone)))\n outfile.write(\"PL[W]\\n\")\n\n to_play = self.first_to_play\n\n for move in self.moves:\n sgfmove = coords_to_sgf(size, move)\n outfile.write(\";%s[%s]\\n\" % (to_play, sgfmove))\n if to_play == \"B\":\n to_play = \"W\"\n else:\n to_play = \"B\"\n outfile.write(\")\\n\")\n outfile.close\n"
] | class GTP_game:
# Class members:
# whiteplayer GTP_player
# blackplayer GTP_player
# size int
# komi float
# handicap int
# handicap_type string
# handicap_stones int
# moves list of string
# resultw
# resultb
def __init__(self, whitecommand, blackcommand, size, komi, handicap,
handicap_type, endgamefile):
self.whiteplayer = GTP_player(whitecommand)
self.blackplayer = GTP_player(blackcommand)
self.size = size
self.komi = komi
self.handicap = handicap
self.handicap_type = handicap_type
self.endgamefile = endgamefile
self.sgffilestart = ""
if endgamefile != "":
self.init_endgame_contest_game()
else:
self.sgffilestart = ""
def init_endgame_contest_game(self):
infile = open(self.endgamefile)
if not infile:
print "Couldn't read " + self.endgamefile
sys.exit(2)
sgflines = infile.readlines()
infile.close
size = re.compile("SZ\[[0-9]+\]")
move = re.compile(";[BW]\[[a-z]{0,2}\]")
sgf_start = []
for line in sgflines:
match = size.search(line)
if match:
self.size = match.group()[3:-1]
match = move.search(line)
while match:
sgf_start.append("A" + match.group()[1:])
line = line[match.end():]
match = move.search(line)
self.endgame_start = len(sgf_start) - endgame_start_at
self.sgffilestart = ";" + string.join(
sgf_start[:self.endgame_start-1], "") + "\n"
if self.endgame_start % 2 == 0:
self.first_to_play = "W"
else:
self.first_to_play = "B"
def get_position_from_engine(self, engine):
black_stones = engine.list_stones("black")
white_stones = engine.list_stones("white")
self.sgffilestart = ";"
if len(black_stones) > 0:
self.sgffilestart += "AB"
for stone in black_stones:
self.sgffilestart += "[%s]" % coords_to_sgf(self.size, stone)
self.sgffilestart += "\n"
if len(white_stones) > 0:
self.sgffilestart += "AW"
for stone in white_stones:
self.sgffilestart += "[%s]" % coords_to_sgf(self.size, stone)
self.sgffilestart += "\n"
def writesgf(self, sgffilename):
"Write the game to an SGF file after a game"
size = self.size
outfile = open(sgffilename, "w")
if not outfile:
print "Couldn't create " + sgffilename
return
black_name = self.blackplayer.get_program_name()
white_name = self.whiteplayer.get_program_name()
black_seed = self.blackplayer.get_random_seed()
white_seed = self.whiteplayer.get_random_seed()
handicap = self.handicap
komi = self.komi
result = self.resultw
outfile.write("(;GM[1]FF[4]RU[Japanese]SZ[%s]HA[%s]KM[%s]RE[%s]\n" %
(size, handicap, komi, result))
outfile.write("PW[%s (random seed %s)]PB[%s (random seed %s)]\n" %
(white_name, white_seed, black_name, black_seed))
outfile.write(self.sgffilestart)
if handicap > 1:
outfile.write("AB");
for stone in self.handicap_stones:
outfile.write("[%s]" %(coords_to_sgf(size, stone)))
outfile.write("PL[W]\n")
to_play = self.first_to_play
for move in self.moves:
sgfmove = coords_to_sgf(size, move)
outfile.write(";%s[%s]\n" % (to_play, sgfmove))
if to_play == "B":
to_play = "W"
else:
to_play = "B"
outfile.write(")\n")
outfile.close
def set_handicap(self, handicap):
self.handicap = handicap
def swap_players(self):
temp = self.whiteplayer
self.whiteplayer = self.blackplayer
self.blackplayer = temp
def result(self):
return (self.resultw, self.resultb)
def cputime(self):
cputime = {}
cputime["white"] = self.whiteplayer.cputime()
cputime["black"] = self.blackplayer.cputime()
return cputime
def quit(self):
self.blackplayer.quit()
self.whiteplayer.quit()
|
openai/pachi-py | pachi_py/pachi/tools/sgflib/typelib.py | MutableSequence.sort | python | def sort(self, func=None):
if func:
self.data.sort(func)
else:
self.data.sort() | Sorts 'self.data' in-place. Argument:
- func : optional, default 'None' --
- If 'func' not given, sorting will be in ascending
order.
- If 'func' given, it will determine the sort order.
'func' must be a two-argument comparison function
which returns -1, 0, or 1, to mean before, same,
or after ordering. | train | https://github.com/openai/pachi-py/blob/65f29fdd28747d34f2c3001f4016913e4aaeb8fc/pachi_py/pachi/tools/sgflib/typelib.py#L459-L472 | null | class MutableSequence(Sequence, MutableMixin):
""" Superclass for classes which emulate mutable (modifyable in-place)
sequences ('List')."""
def __setslice__(self, low, high, seq):
""" On 'self[low:high]=seq'."""
self.data[low:high] = seq
def __delslice__(self, low, high):
""" On 'del self[low:high]'."""
del self.data[low:high]
def append(self, x):
""" Inserts object 'x' at the end of 'self.data' in-place."""
self.data.append(x)
def count(self, x):
""" Returns the number of occurrences of 'x' in 'self.data'."""
return self.data.count(x)
def extend(self, x):
""" Concatenates sequence 'x' to the end of 'self' in-place
(like 'self=self+x')."""
self.data.extend(x)
def index(self, x):
""" Returns the offset of the first occurrence of object 'x' in
'self.data'; raises an exception if not found."""
return self.data.index(x)
def insert(self, i, x):
""" Inserts object 'x' into 'self.data' at offset 'i'
(like 'self[i:i]=[x]')."""
self.data.insert(i, x)
def pop(self, i=-1):
""" Returns and deletes the last item of 'self.data' (or item
'self.data[i]' if 'i' given)."""
return self.data.pop(i)
def remove(self, x):
""" Deletes the first occurrence of object 'x' from 'self.data';
raise an exception if not found."""
self.data.remove(x)
def reverse(self):
""" Reverses items in 'self.data' in-place."""
self.data.reverse()
|
openai/pachi-py | pachi_py/pachi/tools/sgflib/sgflib.py | _escapeText | python | def _escapeText(text):
output = ""
index = 0
match = reCharsToEscape.search(text, index)
while match:
output = output + text[index:match.start()] + '\\' + text[match.start()]
index = match.end()
match = reCharsToEscape.search(text, index)
output = output + text[index:]
return output | Adds backslash-escapes to property value characters that need them. | train | https://github.com/openai/pachi-py/blob/65f29fdd28747d34f2c3001f4016913e4aaeb8fc/pachi_py/pachi/tools/sgflib/sgflib.py#L579-L589 | null | #!/usr/local/bin/python
# sgflib.py (Smart Game Format Parser Library)
# Copyright (C) 2000 David John Goodger (dgoodger@bigfoot.com)
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# (lgpl.txt) along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# The license is currently available on the Internet at:
# http://www.gnu.org/copyleft/lesser.html
"""
=============================================
Smart Game Format Parser Library: sgflib.py
=============================================
version 1.0 (2000-03-27)
Homepage: [[http://gotools.sourceforge.net]]
Copyright (C) 2000 David John Goodger ([[mailto:dgoodger@bigfoot.com]]; davidg
on NNGS, IGS, goclub.org). sgflib.py comes with ABSOLUTELY NO WARRANTY. This is
free software, and you are welcome to redistribute it and/or modify it under the
terms of the GNU Lesser General Public License; see the source code for details.
Description
===========
This library contains a parser and classes for SGF, the Smart Game Format. SGF
is a text only, tree based file format designed to store game records of board
games for two players, most commonly for the game of go. (See the official SGF
specification at [[http://www.POBoxes.com/sgf/]]).
Given a string containing a complete SGF data instance, the 'SGFParser' class
will create a 'Collection' object consisting of one or more 'GameTree''s (one
'GameTree' per game in the SGF file), each containing a sequence of 'Node''s and
(potentially) two or more variation 'GameTree''s (branches). Each 'Node'
contains an ordered dictionary of 'Property' ID/value pairs (note that values
are lists, and can have multiple entries).
Tree traversal methods are provided through the 'Cursor' class.
The default representation (using 'str()' or 'print') of each class of SGF
objects is the Smart Game Format itself."""
# Revision History:
#
# 1.0 (2000-03-27): First public release.
# - Ready for prime time.
#
# 0.1 (2000-01-16):
# - Initial idea & started coding.
import string, re
from typelib import List, Dictionary
# Parsing Exceptions
class EndOfDataParseError(Exception):
""" Raised by 'SGFParser.parseVariations()', 'SGFParser.parseNode()'."""
pass
class GameTreeParseError(Exception):
""" Raised by 'SGFParser.parseGameTree()'."""
pass
class NodePropertyParseError(Exception):
""" Raised by 'SGFParser.parseNode()'."""
pass
class PropertyValueParseError(Exception):
""" Raised by 'SGFParser.parsePropertyValue()'."""
pass
# Tree Construction Exceptions
class DirectAccessError(Exception):
""" Raised by 'Node.__setitem__()', 'Node.update()'."""
pass
class DuplicatePropertyError(Exception):
""" Raised by 'Node.addProperty()'."""
pass
# Tree Navigation Exceptions
class GameTreeNavigationError(Exception):
""" Raised by 'Cursor.next()'."""
pass
class GameTreeEndError(Exception):
""" Raised by 'Cursor.next()', 'Cursor.previous()'."""
pass
# for type checking
INT_TYPE = type(0) # constant
# miscellaneous constants
MAX_LINE_LEN = 76 # constant; for line breaks
class SGFParser:
"""
Parser for SGF data. Creates a tree structure based on the SGF standard
itself. 'SGFParser.parse()' will return a 'Collection' object for the entire
data.
Instance Attributes:
- self.data : string -- The complete SGF data instance.
- self.datalen : integer -- Length of 'self.data'.
- self.index : integer -- Current parsing position in 'self.data'.
Class Attributes:
- re* : re.RegexObject -- Regular expression text matching patterns.
- ctrltrans: string[256] -- Control character translation table for
string.translate(), used to remove all control characters from Property
values. May be overridden (preferably in instances)."""
# text matching patterns
reGameTreeStart = re.compile(r'\s*\(')
reGameTreeEnd = re.compile(r'\s*\)')
reGameTreeNext = re.compile(r'\s*(;|\(|\))')
reNodeContents = re.compile(r'\s*([A-Za-z]+(?=\s*\[))')
rePropertyStart = re.compile(r'\s*\[')
rePropertyEnd = re.compile(r'\]')
reEscape = re.compile(r'\\')
reLineBreak = re.compile(r'\r\n?|\n\r?') # CR, LF, CR/LF, LF/CR
# character translation tables
# for control characters (except LF \012 & CR \015): convert to spaces
ctrltrans = string.maketrans("\000\001\002\003\004\005\006\007" +
"\010\011\013\014\016\017\020\021\022\023\024\025\026\027" +
"\030\031\032\033\034\035\036\037", " "*30)
def __init__(self, data):
""" Initialize the instance attributes. See the class itself for info."""
self.data = data
self.datalen = len(data)
self.index = 0
def parse(self):
""" Parses the SGF data stored in 'self.data', and returns a 'Collection'."""
c = Collection()
while self.index < self.datalen:
g = self.parseOneGame()
if g:
c.append(g)
else:
break
return c
def parseOneGame(self):
""" Parses one game from 'self.data'. Returns a 'GameTree' containing
one game, or 'None' if the end of 'self.data' has been reached."""
if self.index < self.datalen:
match = self.reGameTreeStart.match(self.data, self.index)
if match:
self.index = match.end()
return self.parseGameTree()
return None
def parseGameTree(self):
""" Called when "(" encountered, ends when a matching ")" encountered.
Parses and returns one 'GameTree' from 'self.data'. Raises
'GameTreeParseError' if a problem is encountered."""
g = GameTree()
while self.index < self.datalen:
match = self.reGameTreeNext.match(self.data, self.index)
if match:
self.index = match.end()
if match.group(1) == ";": # found start of node
if g.variations:
raise GameTreeParseError(
"A node was encountered after a variation.")
g.append(g.makeNode(self.parseNode()))
elif match.group(1) == "(": # found start of variation
g.variations = self.parseVariations()
else: # found end of GameTree ")"
return g
else: # error
raise GameTreeParseError
return g
def parseVariations(self):
""" Called when "(" encountered inside a 'GameTree', ends when a
non-matching ")" encountered. Returns a list of variation
'GameTree''s. Raises 'EndOfDataParseError' if the end of 'self.data'
is reached before the end of the enclosing 'GameTree'."""
v = []
while self.index < self.datalen:
# check for ")" at end of GameTree, but don't consume it
match = self.reGameTreeEnd.match(self.data, self.index)
if match:
return v
g = self.parseGameTree()
if g:
v.append(g)
# check for next variation, and consume "("
match = self.reGameTreeStart.match(self.data, self.index)
if match:
self.index = match.end()
raise EndOfDataParseError
def parseNode(self):
""" Called when ";" encountered (& is consumed). Parses and returns one
'Node', which can be empty. Raises 'NodePropertyParseError' if no
property values are extracted. Raises 'EndOfDataParseError' if the
end of 'self.data' is reached before the end of the node (i.e., the
start of the next node, the start of a variation, or the end of the
enclosing game tree)."""
n = Node()
while self.index < self.datalen:
match = self.reNodeContents.match(self.data, self.index)
if match:
self.index = match.end()
pvlist = self.parsePropertyValue()
if pvlist:
n.addProperty(n.makeProperty(match.group(1), pvlist))
else:
raise NodePropertyParseError
else: # reached end of Node
return n
raise EndOfDataParseError
def parsePropertyValue(self):
""" Called when "[" encountered (but not consumed), ends when the next
property, node, or variation encountered. Parses and returns a list
of property values. Raises 'PropertyValueParseError' if there is a
problem."""
pvlist = []
while self.index < self.datalen:
match = self.rePropertyStart.match(self.data, self.index)
if match:
self.index = match.end()
v = "" # value
# scan for escaped characters (using '\'), unescape them (remove linebreaks)
mend = self.rePropertyEnd.search(self.data, self.index)
mesc = self.reEscape.search(self.data, self.index)
while mesc and mend and (mesc.end() < mend.end()):
# copy up to '\', but remove '\'
v = v + self.data[self.index:mesc.start()]
mbreak = self.reLineBreak.match(self.data, mesc.end())
if mbreak:
self.index = mbreak.end() # remove linebreak
else:
v = v + self.data[mesc.end()] # copy escaped character
self.index = mesc.end() + 1 # move to point after escaped char
mend = self.rePropertyEnd.search(self.data, self.index)
mesc = self.reEscape.search(self.data, self.index)
if mend:
v = v + self.data[self.index:mend.start()]
self.index = mend.end()
pvlist.append(self._convertControlChars(v))
else:
raise PropertyValueParseError
else: # reached end of Property
break
if len(pvlist) >= 1:
return pvlist
else:
raise PropertyValueParseError
def _convertControlChars(self, text):
""" Converts control characters in 'text' to spaces, using the
'self.ctrltrans' translation table. Override for variant
behaviour."""
return string.translate(text, self.ctrltrans)
class RootNodeSGFParser(SGFParser):
""" For parsing only the first 'GameTree''s root Node of an SGF file."""
def parseNode(self):
""" Calls 'SGFParser.parseNode()', sets 'self.index' to point to the end
of the data (effectively ending the 'GameTree' and 'Collection'),
and returns the single (root) 'Node' parsed."""
n = SGFParser.parseNode(self) # process one Node as usual
self.index = self.datalen # set end of data
return n # we're only interested in the root node
class Collection(List):
"""
An SGF collection: multiple 'GameTree''s. Instance atributes:
- self[.data] : list of 'GameTree' -- One 'GameTree' per game."""
def __str__(self):
""" SGF representation. Separates game trees with a blank line."""
return string.join(map(str, self.data), "\n"*2)
def cursor(self, gamenum=0):
""" Returns a 'Cursor' object for navigation of the given 'GameTree'."""
return Cursor(self[gamenum])
class GameTree(List):
	"""
	An SGF game tree: a game or variation. Instance attributes:
	- self[.data] : list of 'Node' -- game tree 'trunk'.
	- self.variations : list of 'GameTree' -- 0 or 2+ variations.
	'self.variations[0]' contains the main branch (sequence actually played)."""
	def __init__(self, nodelist=None, variations=None):
		"""
		Initialize the 'GameTree'. Arguments:
		- nodelist : 'GameTree' or list of 'Node' -- Stored in 'self.data'.
		- variations : list of 'GameTree' -- Stored in 'self.variations'."""
		List.__init__(self, nodelist)
		self.variations = variations or []
	def __str__(self):
		""" SGF representation, with proper line breaks between nodes.
			'l' tracks the length of the output's current (last) line so a
			break can be inserted before any node that would exceed
			MAX_LINE_LEN."""
		if len(self):
			s = "(" + str(self[0])	# append the first Node automatically
			l = len(string.split(s, "\n")[-1])	# accounts for line breaks within Nodes
			for n in map(str, self[1:]):
				if l + len(string.split(n, "\n")[0]) > MAX_LINE_LEN:
					s = s + "\n"
					l = 0
				s = s + n
				l = len(string.split(s, "\n")[-1])
			# variations are appended each on a fresh line (the [""] prefix
			# forces a leading "\n" before the first variation)
			return s + string.join(map(str, [""] + self.variations), "\n") + ")"
		else:
			return ""	# empty GameTree illegal; "()" illegal
	def mainline(self):
		""" Returns the main line of the game (variation A) as a 'GameTree'.
			Recursively follows 'variations[0]' and flattens the trunks."""
		if self.variations:
			return GameTree(self.data + self.variations[0].mainline())
		else:
			return self
	def makeNode(self, plist):
		"""
		Create a new 'Node' containing the properties contained in 'plist'.
		Override/extend to create 'Node' subclass instances (move, setup).
		Argument:
		- plist : 'Node' or list of 'Property'"""
		return Node(plist)
	def cursor(self):
		""" Returns a 'Cursor' object for navigation of this 'GameTree'."""
		return Cursor(self)
	def propertySearch(self, pid, getall=0):
		"""
		Searches this 'GameTree' for nodes containing matching properties.
		Returns a 'GameTree' containing the matched node(s). Arguments:
		- pid : string -- ID of properties to search for.
		- getall : boolean -- Set to true (1) to return all 'Node''s that
		match, or to false (0) to return only the first match."""
		matches = []
		for n in self:
			if n.has_key(pid):
				matches.append(n)
				if not getall:
					break
		# for/else: the else clause runs only when the trunk loop above was
		# NOT broken out of -- i.e. when getall is set, or no trunk node
		# matched -- and then recurses into the variations.
		else:	# getall or not matches:
			for v in self.variations:
				matches = matches + v.propertySearch(pid, getall)
				if not getall and matches:
					break
		return GameTree(matches)
class Node(Dictionary):
	"""
	An SGF node. Instance Attributes:
	- self[.data] : ordered dictionary -- '{Property.id:Property}' mapping.
	(Ordered dictionary: allows offset-indexed retrieval). Properties *must*
	be added using 'self.addProperty()'.
	Example: Let 'n' be a 'Node' parsed from ';B[aa]BL[250]C[comment]':
	- 'str(n["BL"])' => '"BL[250]"'
	- 'str(n[0])' => '"B[aa]"'
	- 'map(str, n)' => '["B[aa]","BL[250]","C[comment]"]'"""
	def __init__(self, plist=None):
		"""
		Initializer. Argument:
		- plist: 'Node' or list of 'Property' (optional; defaults to empty).
		Note: a 'None' sentinel replaces the former mutable default ('[]'),
		which was a single list object shared between all calls -- harmless
		while only iterated, but a latent bug had it ever been mutated."""
		Dictionary.__init__(self)
		self.order = []		# 'Property' objects in insertion order (offset indexing)
		if plist is None:
			plist = []
		for p in plist:
			self.addProperty(p)
	def __getitem__(self, key):
		""" On 'self[key]', 'x in self', 'for x in self'. Implements all
			indexing-related operations. Allows both key- and offset-indexed
			retrieval. Membership and iteration ('in', 'for') repeatedly index
			from 0 until 'IndexError'."""
		if type(key) is INT_TYPE:
			return self.order[key]	# offset-indexed: nth Property added
		else:
			return self.data[key]	# key-indexed: Property by SGF id
	def __setitem__(self, key, x):
		""" On 'self[key]=x'. Allows assignment to existing items only. Raises
			'DirectAccessError' on new item assignment."""
		if self.has_key(key):
			self.order[self.order.index(self[key])] = x	# keep order list in sync
			Dictionary.__setitem__(self, key, x)
		else:
			raise DirectAccessError(
				"Properties may not be added directly; use addProperty() instead.")
	def __delitem__(self, key):
		""" On 'del self[key]'. Updates 'self.order' to maintain consistency."""
		self.order.remove(self[key])
		Dictionary.__delitem__(self, key)
	def __getslice__(self, low, high):
		""" On 'self[low:high]'. Slices by insertion order (Python 2 only;
			ignored by Python 3)."""
		return self.order[low:high]
	def __str__(self):
		""" SGF representation, with proper line breaks between properties.
			'l' tracks the length of the output's last line so a break can be
			inserted before a property that would exceed MAX_LINE_LEN."""
		if len(self):
			s = ";" + str(self[0])
			l = len(string.split(s, "\n")[-1])	# accounts for line breaks within Properties
			for p in map(str, self[1:]):
				if l + len(string.split(p, "\n")[0]) > MAX_LINE_LEN:
					s = s + "\n"
					l = 0
				s = s + p
				l = len(string.split(s, "\n")[-1])
			return s
		else:
			return ";"	# an empty node is legal SGF
	def update(self, dict):
		""" 'Dictionary' method not applicable to 'Node'. Raises
			'DirectAccessError'."""
		raise DirectAccessError(
			"The update() method is not supported by Node; use addProperty() instead.")
	def addProperty(self, property):
		"""
		Adds a 'Property' to this 'Node'. Checks for duplicate properties
		(illegal), and maintains the property order. Argument:
		- property : 'Property'"""
		if self.has_key(property.id):
			raise DuplicatePropertyError
		else:
			self.data[property.id] = property
			self.order.append(property)
	def makeProperty(self, id, valuelist):
		"""
		Create a new 'Property'. Override/extend to create 'Property'
		subclass instances (move, setup, game-info, etc.). Arguments:
		- id : string
		- valuelist : 'Property' or list of values"""
		return Property(id, valuelist)
class Property(List):
	"""
	An SGF property: a set of label and value(s). Instance attributes:
	- self[.data] : list -- property values.
	- self.id : string -- SGF standard property label.
	- self.name : string -- actual label used in the SGF data. For example, the
	property 'CoPyright[...]' has name 'CoPyright' and id 'CP'."""
	def __init__(self, id, values, name=None):
		"""
		Initialize the 'Property'. Arguments:
		- id : string -- SGF standard property label.
		- values : 'Property' or list of values -- stored in 'self.data'.
		- name : string (optional) -- actual label used in the SGF data;
		defaults to 'id' when not given."""
		List.__init__(self, values) # XXX will _convert work here?
		self.id = id
		self.name = name or id
	def __str__(self):
		""" SGF representation: the label followed by each value in its own
			'[]' bracket pair, values escaped via '_escapeText'."""
		return self.name + "[" + string.join(map(_escapeText, self), "][") + "]"
class Cursor:
	"""
	'GameTree' navigation tool. Instance attributes:
	- self.game : 'GameTree' -- The root 'GameTree'.
	- self.gametree : 'GameTree' -- The current 'GameTree'.
	- self.node : 'Node' -- The current Node.
	- self.nodenum : integer -- The offset of 'self.node' from the root of
	'self.game'. The nodenum of the root node is 0.
	- self.index : integer -- The offset of 'self.node' within 'self.gametree'.
	- self.stack : list of 'GameTree' -- A record of 'GameTree''s traversed.
	- self.children : list of 'Node' -- All child nodes of the current node.
	- self.atEnd : boolean -- Flags if we are at the end of a branch.
	- self.atStart : boolean -- Flags if we are at the start of the game."""
	def __init__(self, gametree):
		""" Initialize root 'GameTree' and instance variables."""
		self.game = gametree # root GameTree
		self.reset()
	def reset(self):
		""" Set 'Cursor' to point to the start of the root 'GameTree', 'self.game'."""
		self.gametree = self.game
		self.nodenum = 0
		self.index = 0
		self.stack = []
		self.node = self.gametree[self.index]
		self._setChildren()
		self._setFlags()
	def next(self, varnum=0):
		"""
		Moves the 'Cursor' to & returns the next 'Node'. Raises
		'GameTreeEndError' if the end of a branch is exceeded. Raises
		'GameTreeNavigationError' if a non-existent variation is accessed.
		Argument:
		- varnum : integer, default 0 -- Variation number. Non-zero only
		valid at a branching, where variations exist."""
		if self.index + 1 < len(self.gametree): # more main line?
			if varnum != 0:
				raise GameTreeNavigationError("Nonexistent variation.")
			self.index = self.index + 1
		elif self.gametree.variations: # variations exist?
			if varnum < len(self.gametree.variations):
				# descend into the chosen variation; remember where we branched
				self.stack.append(self.gametree)
				self.gametree = self.gametree.variations[varnum]
				self.index = 0
			else:
				raise GameTreeNavigationError("Nonexistent variation.")
		else:
			raise GameTreeEndError
		self.node = self.gametree[self.index]
		self.nodenum = self.nodenum + 1
		self._setChildren()
		self._setFlags()
		return self.node
	def previous(self):
		""" Moves the 'Cursor' to & returns the previous 'Node'. Raises
			'GameTreeEndError' if the start of a branch is exceeded."""
		if self.index - 1 >= 0: # more main line?
			self.index = self.index - 1
		elif self.stack: # were we in a variation?
			# pop back to the parent tree; variations only branch at the end
			# of a trunk, so the previous node is the parent's last node
			self.gametree = self.stack.pop()
			self.index = len(self.gametree) - 1
		else:
			raise GameTreeEndError
		self.node = self.gametree[self.index]
		self.nodenum = self.nodenum - 1
		self._setChildren()
		self._setFlags()
		return self.node
	def _setChildren(self):
		""" Sets up 'self.children': the next trunk node if one exists,
			otherwise the first node of each variation."""
		if self.index + 1 < len(self.gametree):
			self.children = [self.gametree[self.index+1]]
		else:
			self.children = map(lambda list: list[0], self.gametree.variations)
	def _setFlags(self):
		""" Sets up the flags 'self.atEnd' and 'self.atStart'."""
		self.atEnd = not self.gametree.variations and (self.index + 1 == len(self.gametree))
		self.atStart = not self.stack and (self.index == 0)
# Matches ']' or '\' -- the two characters that must be backslash-escaped
# inside SGF property values (presumably consumed by '_escapeText'; the
# helper is defined elsewhere in this file -- confirm).
reCharsToEscape = re.compile(r'\]|\\') # characters that need to be \escaped
def selfTest1(onConsole=0):
	""" Canned data test case. Parses a fixed SGF string, then exercises
		str(), mainline(), forward/backward cursor traversal, and
		propertySearch(). Output goes to stdout (Python 2 print)."""
	# Raw SGF test data exercising tricky cases: an escaped linebreak inside
	# CoPyright[], escaped ']' and '\' inside a comment, and two variations.
	sgfdata = r""" (;GM [1]US[someone]CoPyright[\
Permission to reproduce this game is given.]GN[a-b]EV[None]RE[B+Resign]
PW[a]WR[2k*]PB[b]BR[4k*]PC[somewhere]DT[2000-01-16]SZ[19]TM[300]KM[4.5]
HA[3]AB[pd][dp][dd];W[pp];B[nq];W[oq]C[ x started observation.
](;B[qc]C[ [b\]: \\ hi x! ;-) \\];W[kc])(;B[hc];W[oe])) """
	print "\n\n********** Self-Test 1 **********\n"
	print "Input data:\n"
	print sgfdata
	print "\n\nParsed data: "
	col = SGFParser(sgfdata).parse()
	print "done\n"
	cstr = str(col)
	print cstr, "\n"
	print "Mainline:\n"
	m = col[0].mainline()
	print m, "\n"
	##print "as GameTree:\n"
	##print GameTree(m), "\n"
	print "Tree traversal (forward):\n"
	c = col.cursor()
	while 1:
		print "nodenum: %s; index: %s; children: %s; node: %s" % (c.nodenum, c.index, len(c.children), c.node)
		if c.atEnd: break
		c.next()
	print "\nTree traversal (backward):\n"
	while 1:
		print "nodenum: %s; index: %s; children: %s; node: %s" % (c.nodenum, c.index, len(c.children), c.node)
		if c.atStart: break
		c.previous()
	print "\nSearch for property 'B':"
	print col[0].propertySearch("B", 1)
	print "\nSearch for property 'C':"
	print col[0].propertySearch("C", 1)
	pass
def selfTest2(onConsole=0):
	""" Macintosh-based SGF file test: prompts for a file, parses it, and
		prints the round-tripped SGF representation."""
	# NOTE(review): depends on the classic-MacOS-only 'macfs' module (removed
	# in Python 2.6); this test is dead code on all other platforms.
	import macfs
	print "\n\n********** Self-Test 2 (Mac) **********\n"
	thefile = macfs.PromptGetFile("Please choose an SGF file:")
	if not thefile[1]: # user cancelled the file dialog
		return
	srcpath = thefile[0].as_pathname()
	src = open(srcpath, 'r')
	sgfdata = src.read()
	print "Input data:\n"
	print sgfdata
	print "\n\nParsed data:"
	col = SGFParser(sgfdata).parse()
	print "done\n"
	print str(col)
# Script entry point: run the canned self-test, plus the interactive file
# test on classic MacOS.
if __name__ == '__main__':
	print __doc__ # show module's documentation string
	selfTest1()
	import os
	if os.name == 'mac': # classic MacOS only; never true on modern platforms
		selfTest2()
|
openai/pachi-py | pachi_py/pachi/tools/sgflib/sgflib.py | SGFParser.parse | python | def parse(self):
c = Collection()
while self.index < self.datalen:
g = self.parseOneGame()
if g:
c.append(g)
else:
break
return c | Parses the SGF data stored in 'self.data', and returns a 'Collection'. | train | https://github.com/openai/pachi-py/blob/65f29fdd28747d34f2c3001f4016913e4aaeb8fc/pachi_py/pachi/tools/sgflib/sgflib.py#L153-L162 | [
"def append(self, x):\n\t\"\"\" Inserts object 'x' at the end of 'self.data' in-place.\"\"\"\n\tself.data.append(x)\n",
"def parseOneGame(self):\n\t\"\"\" Parses one game from 'self.data'. Returns a 'GameTree' containing\n\t\tone game, or 'None' if the end of 'self.data' has been reached.\"\"\"\n\tif self.index < self.datalen:\n\t\tmatch = self.reGameTreeStart.match(self.data, self.index)\n\t\tif match:\n\t\t\tself.index = match.end()\n\t\t\treturn self.parseGameTree()\n\treturn None\n"
] | class SGFParser:
"""
Parser for SGF data. Creates a tree structure based on the SGF standard
itself. 'SGFParser.parse()' will return a 'Collection' object for the entire
data.
Instance Attributes:
- self.data : string -- The complete SGF data instance.
- self.datalen : integer -- Length of 'self.data'.
- self.index : integer -- Current parsing position in 'self.data'.
Class Attributes:
- re* : re.RegexObject -- Regular expression text matching patterns.
- ctrltrans: string[256] -- Control character translation table for
string.translate(), used to remove all control characters from Property
values. May be overridden (preferably in instances)."""
# text matching patterns
reGameTreeStart = re.compile(r'\s*\(')
reGameTreeEnd = re.compile(r'\s*\)')
reGameTreeNext = re.compile(r'\s*(;|\(|\))')
reNodeContents = re.compile(r'\s*([A-Za-z]+(?=\s*\[))')
rePropertyStart = re.compile(r'\s*\[')
rePropertyEnd = re.compile(r'\]')
reEscape = re.compile(r'\\')
reLineBreak = re.compile(r'\r\n?|\n\r?') # CR, LF, CR/LF, LF/CR
# character translation tables
# for control characters (except LF \012 & CR \015): convert to spaces
ctrltrans = string.maketrans("\000\001\002\003\004\005\006\007" +
"\010\011\013\014\016\017\020\021\022\023\024\025\026\027" +
"\030\031\032\033\034\035\036\037", " "*30)
def __init__(self, data):
""" Initialize the instance attributes. See the class itself for info."""
self.data = data
self.datalen = len(data)
self.index = 0
def parseOneGame(self):
""" Parses one game from 'self.data'. Returns a 'GameTree' containing
one game, or 'None' if the end of 'self.data' has been reached."""
if self.index < self.datalen:
match = self.reGameTreeStart.match(self.data, self.index)
if match:
self.index = match.end()
return self.parseGameTree()
return None
def parseGameTree(self):
""" Called when "(" encountered, ends when a matching ")" encountered.
Parses and returns one 'GameTree' from 'self.data'. Raises
'GameTreeParseError' if a problem is encountered."""
g = GameTree()
while self.index < self.datalen:
match = self.reGameTreeNext.match(self.data, self.index)
if match:
self.index = match.end()
if match.group(1) == ";": # found start of node
if g.variations:
raise GameTreeParseError(
"A node was encountered after a variation.")
g.append(g.makeNode(self.parseNode()))
elif match.group(1) == "(": # found start of variation
g.variations = self.parseVariations()
else: # found end of GameTree ")"
return g
else: # error
raise GameTreeParseError
return g
def parseVariations(self):
""" Called when "(" encountered inside a 'GameTree', ends when a
non-matching ")" encountered. Returns a list of variation
'GameTree''s. Raises 'EndOfDataParseError' if the end of 'self.data'
is reached before the end of the enclosing 'GameTree'."""
v = []
while self.index < self.datalen:
# check for ")" at end of GameTree, but don't consume it
match = self.reGameTreeEnd.match(self.data, self.index)
if match:
return v
g = self.parseGameTree()
if g:
v.append(g)
# check for next variation, and consume "("
match = self.reGameTreeStart.match(self.data, self.index)
if match:
self.index = match.end()
raise EndOfDataParseError
def parseNode(self):
""" Called when ";" encountered (& is consumed). Parses and returns one
'Node', which can be empty. Raises 'NodePropertyParseError' if no
property values are extracted. Raises 'EndOfDataParseError' if the
end of 'self.data' is reached before the end of the node (i.e., the
start of the next node, the start of a variation, or the end of the
enclosing game tree)."""
n = Node()
while self.index < self.datalen:
match = self.reNodeContents.match(self.data, self.index)
if match:
self.index = match.end()
pvlist = self.parsePropertyValue()
if pvlist:
n.addProperty(n.makeProperty(match.group(1), pvlist))
else:
raise NodePropertyParseError
else: # reached end of Node
return n
raise EndOfDataParseError
def parsePropertyValue(self):
""" Called when "[" encountered (but not consumed), ends when the next
property, node, or variation encountered. Parses and returns a list
of property values. Raises 'PropertyValueParseError' if there is a
problem."""
pvlist = []
while self.index < self.datalen:
match = self.rePropertyStart.match(self.data, self.index)
if match:
self.index = match.end()
v = "" # value
# scan for escaped characters (using '\'), unescape them (remove linebreaks)
mend = self.rePropertyEnd.search(self.data, self.index)
mesc = self.reEscape.search(self.data, self.index)
while mesc and mend and (mesc.end() < mend.end()):
# copy up to '\', but remove '\'
v = v + self.data[self.index:mesc.start()]
mbreak = self.reLineBreak.match(self.data, mesc.end())
if mbreak:
self.index = mbreak.end() # remove linebreak
else:
v = v + self.data[mesc.end()] # copy escaped character
self.index = mesc.end() + 1 # move to point after escaped char
mend = self.rePropertyEnd.search(self.data, self.index)
mesc = self.reEscape.search(self.data, self.index)
if mend:
v = v + self.data[self.index:mend.start()]
self.index = mend.end()
pvlist.append(self._convertControlChars(v))
else:
raise PropertyValueParseError
else: # reached end of Property
break
if len(pvlist) >= 1:
return pvlist
else:
raise PropertyValueParseError
def _convertControlChars(self, text):
""" Converts control characters in 'text' to spaces, using the
'self.ctrltrans' translation table. Override for variant
behaviour."""
return string.translate(text, self.ctrltrans)
|
openai/pachi-py | pachi_py/pachi/tools/sgflib/sgflib.py | SGFParser.parseOneGame | python | def parseOneGame(self):
if self.index < self.datalen:
match = self.reGameTreeStart.match(self.data, self.index)
if match:
self.index = match.end()
return self.parseGameTree()
return None | Parses one game from 'self.data'. Returns a 'GameTree' containing
one game, or 'None' if the end of 'self.data' has been reached. | train | https://github.com/openai/pachi-py/blob/65f29fdd28747d34f2c3001f4016913e4aaeb8fc/pachi_py/pachi/tools/sgflib/sgflib.py#L164-L172 | [
"def parseGameTree(self):\n\t\"\"\" Called when \"(\" encountered, ends when a matching \")\" encountered.\n\t\tParses and returns one 'GameTree' from 'self.data'. Raises\n\t\t'GameTreeParseError' if a problem is encountered.\"\"\"\n\tg = GameTree()\n\twhile self.index < self.datalen:\n\t\tmatch = self.reGameTreeNext.match(self.data, self.index)\n\t\tif match:\n\t\t\tself.index = match.end()\n\t\t\tif match.group(1) == \";\":\t\t\t\t# found start of node\n\t\t\t\tif g.variations:\n\t\t\t\t\traise GameTreeParseError(\n\t\t\t\t\t\t\t\t\"A node was encountered after a variation.\")\n\t\t\t\tg.append(g.makeNode(self.parseNode()))\n\t\t\telif match.group(1) == \"(\":\t\t\t\t# found start of variation\n\t\t\t\tg.variations = self.parseVariations()\n\t\t\telse:\t\t\t\t\t\t\t\t\t# found end of GameTree \")\"\n\t\t\t\treturn g\n\t\telse:\t\t\t\t\t\t\t\t\t\t# error\n\t\t\traise GameTreeParseError\n\treturn g\n"
] | class SGFParser:
"""
Parser for SGF data. Creates a tree structure based on the SGF standard
itself. 'SGFParser.parse()' will return a 'Collection' object for the entire
data.
Instance Attributes:
- self.data : string -- The complete SGF data instance.
- self.datalen : integer -- Length of 'self.data'.
- self.index : integer -- Current parsing position in 'self.data'.
Class Attributes:
- re* : re.RegexObject -- Regular expression text matching patterns.
- ctrltrans: string[256] -- Control character translation table for
string.translate(), used to remove all control characters from Property
values. May be overridden (preferably in instances)."""
# text matching patterns
reGameTreeStart = re.compile(r'\s*\(')
reGameTreeEnd = re.compile(r'\s*\)')
reGameTreeNext = re.compile(r'\s*(;|\(|\))')
reNodeContents = re.compile(r'\s*([A-Za-z]+(?=\s*\[))')
rePropertyStart = re.compile(r'\s*\[')
rePropertyEnd = re.compile(r'\]')
reEscape = re.compile(r'\\')
reLineBreak = re.compile(r'\r\n?|\n\r?') # CR, LF, CR/LF, LF/CR
# character translation tables
# for control characters (except LF \012 & CR \015): convert to spaces
ctrltrans = string.maketrans("\000\001\002\003\004\005\006\007" +
"\010\011\013\014\016\017\020\021\022\023\024\025\026\027" +
"\030\031\032\033\034\035\036\037", " "*30)
def __init__(self, data):
""" Initialize the instance attributes. See the class itself for info."""
self.data = data
self.datalen = len(data)
self.index = 0
def parse(self):
""" Parses the SGF data stored in 'self.data', and returns a 'Collection'."""
c = Collection()
while self.index < self.datalen:
g = self.parseOneGame()
if g:
c.append(g)
else:
break
return c
def parseGameTree(self):
""" Called when "(" encountered, ends when a matching ")" encountered.
Parses and returns one 'GameTree' from 'self.data'. Raises
'GameTreeParseError' if a problem is encountered."""
g = GameTree()
while self.index < self.datalen:
match = self.reGameTreeNext.match(self.data, self.index)
if match:
self.index = match.end()
if match.group(1) == ";": # found start of node
if g.variations:
raise GameTreeParseError(
"A node was encountered after a variation.")
g.append(g.makeNode(self.parseNode()))
elif match.group(1) == "(": # found start of variation
g.variations = self.parseVariations()
else: # found end of GameTree ")"
return g
else: # error
raise GameTreeParseError
return g
def parseVariations(self):
""" Called when "(" encountered inside a 'GameTree', ends when a
non-matching ")" encountered. Returns a list of variation
'GameTree''s. Raises 'EndOfDataParseError' if the end of 'self.data'
is reached before the end of the enclosing 'GameTree'."""
v = []
while self.index < self.datalen:
# check for ")" at end of GameTree, but don't consume it
match = self.reGameTreeEnd.match(self.data, self.index)
if match:
return v
g = self.parseGameTree()
if g:
v.append(g)
# check for next variation, and consume "("
match = self.reGameTreeStart.match(self.data, self.index)
if match:
self.index = match.end()
raise EndOfDataParseError
def parseNode(self):
""" Called when ";" encountered (& is consumed). Parses and returns one
'Node', which can be empty. Raises 'NodePropertyParseError' if no
property values are extracted. Raises 'EndOfDataParseError' if the
end of 'self.data' is reached before the end of the node (i.e., the
start of the next node, the start of a variation, or the end of the
enclosing game tree)."""
n = Node()
while self.index < self.datalen:
match = self.reNodeContents.match(self.data, self.index)
if match:
self.index = match.end()
pvlist = self.parsePropertyValue()
if pvlist:
n.addProperty(n.makeProperty(match.group(1), pvlist))
else:
raise NodePropertyParseError
else: # reached end of Node
return n
raise EndOfDataParseError
def parsePropertyValue(self):
""" Called when "[" encountered (but not consumed), ends when the next
property, node, or variation encountered. Parses and returns a list
of property values. Raises 'PropertyValueParseError' if there is a
problem."""
pvlist = []
while self.index < self.datalen:
match = self.rePropertyStart.match(self.data, self.index)
if match:
self.index = match.end()
v = "" # value
# scan for escaped characters (using '\'), unescape them (remove linebreaks)
mend = self.rePropertyEnd.search(self.data, self.index)
mesc = self.reEscape.search(self.data, self.index)
while mesc and mend and (mesc.end() < mend.end()):
# copy up to '\', but remove '\'
v = v + self.data[self.index:mesc.start()]
mbreak = self.reLineBreak.match(self.data, mesc.end())
if mbreak:
self.index = mbreak.end() # remove linebreak
else:
v = v + self.data[mesc.end()] # copy escaped character
self.index = mesc.end() + 1 # move to point after escaped char
mend = self.rePropertyEnd.search(self.data, self.index)
mesc = self.reEscape.search(self.data, self.index)
if mend:
v = v + self.data[self.index:mend.start()]
self.index = mend.end()
pvlist.append(self._convertControlChars(v))
else:
raise PropertyValueParseError
else: # reached end of Property
break
if len(pvlist) >= 1:
return pvlist
else:
raise PropertyValueParseError
def _convertControlChars(self, text):
""" Converts control characters in 'text' to spaces, using the
'self.ctrltrans' translation table. Override for variant
behaviour."""
return string.translate(text, self.ctrltrans)
|
openai/pachi-py | pachi_py/pachi/tools/sgflib/sgflib.py | SGFParser.parseGameTree | python | def parseGameTree(self):
g = GameTree()
while self.index < self.datalen:
match = self.reGameTreeNext.match(self.data, self.index)
if match:
self.index = match.end()
if match.group(1) == ";": # found start of node
if g.variations:
raise GameTreeParseError(
"A node was encountered after a variation.")
g.append(g.makeNode(self.parseNode()))
elif match.group(1) == "(": # found start of variation
g.variations = self.parseVariations()
else: # found end of GameTree ")"
return g
else: # error
raise GameTreeParseError
return g | Called when "(" encountered, ends when a matching ")" encountered.
Parses and returns one 'GameTree' from 'self.data'. Raises
'GameTreeParseError' if a problem is encountered. | train | https://github.com/openai/pachi-py/blob/65f29fdd28747d34f2c3001f4016913e4aaeb8fc/pachi_py/pachi/tools/sgflib/sgflib.py#L174-L194 | [
"def append(self, x):\n\t\"\"\" Inserts object 'x' at the end of 'self.data' in-place.\"\"\"\n\tself.data.append(x)\n",
"def parseNode(self):\n\t\"\"\" Called when \";\" encountered (& is consumed). Parses and returns one\n\t\t'Node', which can be empty. Raises 'NodePropertyParseError' if no\n\t\tproperty values are extracted. Raises 'EndOfDataParseError' if the\n\t\tend of 'self.data' is reached before the end of the node (i.e., the\n\t\tstart of the next node, the start of a variation, or the end of the\n\t\tenclosing game tree).\"\"\"\n\tn = Node()\n\twhile self.index < self.datalen:\n\t\tmatch = self.reNodeContents.match(self.data, self.index)\n\t\tif match:\n\t\t\tself.index = match.end()\n\t\t\tpvlist = self.parsePropertyValue()\n\t\t\tif pvlist:\n\t\t\t\tn.addProperty(n.makeProperty(match.group(1), pvlist))\n\t\t\telse:\n\t\t\t\traise NodePropertyParseError\n\t\telse:\t\t\t\t\t\t\t\t\t\t# reached end of Node\n\t\t\treturn n\n\traise EndOfDataParseError\n",
"def makeNode(self, plist):\n\t\"\"\"\n\t\tCreate a new 'Node' containing the properties contained in 'plist'.\n\t\tOverride/extend to create 'Node' subclass instances (move, setup).\n\t\tArgument:\n\t\t- plist : 'Node' or list of 'Property'\"\"\"\n\treturn Node(plist)\n"
] | class SGFParser:
"""
Parser for SGF data. Creates a tree structure based on the SGF standard
itself. 'SGFParser.parse()' will return a 'Collection' object for the entire
data.
Instance Attributes:
- self.data : string -- The complete SGF data instance.
- self.datalen : integer -- Length of 'self.data'.
- self.index : integer -- Current parsing position in 'self.data'.
Class Attributes:
- re* : re.RegexObject -- Regular expression text matching patterns.
- ctrltrans: string[256] -- Control character translation table for
string.translate(), used to remove all control characters from Property
values. May be overridden (preferably in instances)."""
# text matching patterns
reGameTreeStart = re.compile(r'\s*\(')
reGameTreeEnd = re.compile(r'\s*\)')
reGameTreeNext = re.compile(r'\s*(;|\(|\))')
reNodeContents = re.compile(r'\s*([A-Za-z]+(?=\s*\[))')
rePropertyStart = re.compile(r'\s*\[')
rePropertyEnd = re.compile(r'\]')
reEscape = re.compile(r'\\')
reLineBreak = re.compile(r'\r\n?|\n\r?') # CR, LF, CR/LF, LF/CR
# character translation tables
# for control characters (except LF \012 & CR \015): convert to spaces
ctrltrans = string.maketrans("\000\001\002\003\004\005\006\007" +
"\010\011\013\014\016\017\020\021\022\023\024\025\026\027" +
"\030\031\032\033\034\035\036\037", " "*30)
def __init__(self, data):
""" Initialize the instance attributes. See the class itself for info."""
self.data = data
self.datalen = len(data)
self.index = 0
def parse(self):
""" Parses the SGF data stored in 'self.data', and returns a 'Collection'."""
c = Collection()
while self.index < self.datalen:
g = self.parseOneGame()
if g:
c.append(g)
else:
break
return c
def parseOneGame(self):
""" Parses one game from 'self.data'. Returns a 'GameTree' containing
one game, or 'None' if the end of 'self.data' has been reached."""
if self.index < self.datalen:
match = self.reGameTreeStart.match(self.data, self.index)
if match:
self.index = match.end()
return self.parseGameTree()
return None
def parseVariations(self):
""" Called when "(" encountered inside a 'GameTree', ends when a
non-matching ")" encountered. Returns a list of variation
'GameTree''s. Raises 'EndOfDataParseError' if the end of 'self.data'
is reached before the end of the enclosing 'GameTree'."""
v = []
while self.index < self.datalen:
# check for ")" at end of GameTree, but don't consume it
match = self.reGameTreeEnd.match(self.data, self.index)
if match:
return v
g = self.parseGameTree()
if g:
v.append(g)
# check for next variation, and consume "("
match = self.reGameTreeStart.match(self.data, self.index)
if match:
self.index = match.end()
raise EndOfDataParseError
def parseNode(self):
""" Called when ";" encountered (& is consumed). Parses and returns one
'Node', which can be empty. Raises 'NodePropertyParseError' if no
property values are extracted. Raises 'EndOfDataParseError' if the
end of 'self.data' is reached before the end of the node (i.e., the
start of the next node, the start of a variation, or the end of the
enclosing game tree)."""
n = Node()
while self.index < self.datalen:
match = self.reNodeContents.match(self.data, self.index)
if match:
self.index = match.end()
pvlist = self.parsePropertyValue()
if pvlist:
n.addProperty(n.makeProperty(match.group(1), pvlist))
else:
raise NodePropertyParseError
else: # reached end of Node
return n
raise EndOfDataParseError
def parsePropertyValue(self):
""" Called when "[" encountered (but not consumed), ends when the next
property, node, or variation encountered. Parses and returns a list
of property values. Raises 'PropertyValueParseError' if there is a
problem."""
pvlist = []
while self.index < self.datalen:
match = self.rePropertyStart.match(self.data, self.index)
if match:
self.index = match.end()
v = "" # value
# scan for escaped characters (using '\'), unescape them (remove linebreaks)
mend = self.rePropertyEnd.search(self.data, self.index)
mesc = self.reEscape.search(self.data, self.index)
while mesc and mend and (mesc.end() < mend.end()):
# copy up to '\', but remove '\'
v = v + self.data[self.index:mesc.start()]
mbreak = self.reLineBreak.match(self.data, mesc.end())
if mbreak:
self.index = mbreak.end() # remove linebreak
else:
v = v + self.data[mesc.end()] # copy escaped character
self.index = mesc.end() + 1 # move to point after escaped char
mend = self.rePropertyEnd.search(self.data, self.index)
mesc = self.reEscape.search(self.data, self.index)
if mend:
v = v + self.data[self.index:mend.start()]
self.index = mend.end()
pvlist.append(self._convertControlChars(v))
else:
raise PropertyValueParseError
else: # reached end of Property
break
if len(pvlist) >= 1:
return pvlist
else:
raise PropertyValueParseError
def _convertControlChars(self, text):
""" Converts control characters in 'text' to spaces, using the
'self.ctrltrans' translation table. Override for variant
behaviour."""
return string.translate(text, self.ctrltrans)
|
openai/pachi-py | pachi_py/pachi/tools/sgflib/sgflib.py | SGFParser.parseVariations | python | def parseVariations(self):
v = []
while self.index < self.datalen:
# check for ")" at end of GameTree, but don't consume it
match = self.reGameTreeEnd.match(self.data, self.index)
if match:
return v
g = self.parseGameTree()
if g:
v.append(g)
# check for next variation, and consume "("
match = self.reGameTreeStart.match(self.data, self.index)
if match:
self.index = match.end()
raise EndOfDataParseError | Called when "(" encountered inside a 'GameTree', ends when a
non-matching ")" encountered. Returns a list of variation
'GameTree''s. Raises 'EndOfDataParseError' if the end of 'self.data'
is reached before the end of the enclosing 'GameTree'. | train | https://github.com/openai/pachi-py/blob/65f29fdd28747d34f2c3001f4016913e4aaeb8fc/pachi_py/pachi/tools/sgflib/sgflib.py#L196-L214 | [
"def parseGameTree(self):\n\t\"\"\" Called when \"(\" encountered, ends when a matching \")\" encountered.\n\t\tParses and returns one 'GameTree' from 'self.data'. Raises\n\t\t'GameTreeParseError' if a problem is encountered.\"\"\"\n\tg = GameTree()\n\twhile self.index < self.datalen:\n\t\tmatch = self.reGameTreeNext.match(self.data, self.index)\n\t\tif match:\n\t\t\tself.index = match.end()\n\t\t\tif match.group(1) == \";\":\t\t\t\t# found start of node\n\t\t\t\tif g.variations:\n\t\t\t\t\traise GameTreeParseError(\n\t\t\t\t\t\t\t\t\"A node was encountered after a variation.\")\n\t\t\t\tg.append(g.makeNode(self.parseNode()))\n\t\t\telif match.group(1) == \"(\":\t\t\t\t# found start of variation\n\t\t\t\tg.variations = self.parseVariations()\n\t\t\telse:\t\t\t\t\t\t\t\t\t# found end of GameTree \")\"\n\t\t\t\treturn g\n\t\telse:\t\t\t\t\t\t\t\t\t\t# error\n\t\t\traise GameTreeParseError\n\treturn g\n"
] | class SGFParser:
"""
Parser for SGF data. Creates a tree structure based on the SGF standard
itself. 'SGFParser.parse()' will return a 'Collection' object for the entire
data.
Instance Attributes:
- self.data : string -- The complete SGF data instance.
- self.datalen : integer -- Length of 'self.data'.
- self.index : integer -- Current parsing position in 'self.data'.
Class Attributes:
- re* : re.RegexObject -- Regular expression text matching patterns.
- ctrltrans: string[256] -- Control character translation table for
string.translate(), used to remove all control characters from Property
values. May be overridden (preferably in instances)."""
# text matching patterns
reGameTreeStart = re.compile(r'\s*\(')
reGameTreeEnd = re.compile(r'\s*\)')
reGameTreeNext = re.compile(r'\s*(;|\(|\))')
reNodeContents = re.compile(r'\s*([A-Za-z]+(?=\s*\[))')
rePropertyStart = re.compile(r'\s*\[')
rePropertyEnd = re.compile(r'\]')
reEscape = re.compile(r'\\')
reLineBreak = re.compile(r'\r\n?|\n\r?') # CR, LF, CR/LF, LF/CR
# character translation tables
# for control characters (except LF \012 & CR \015): convert to spaces
ctrltrans = string.maketrans("\000\001\002\003\004\005\006\007" +
"\010\011\013\014\016\017\020\021\022\023\024\025\026\027" +
"\030\031\032\033\034\035\036\037", " "*30)
def __init__(self, data):
""" Initialize the instance attributes. See the class itself for info."""
self.data = data
self.datalen = len(data)
self.index = 0
def parse(self):
""" Parses the SGF data stored in 'self.data', and returns a 'Collection'."""
c = Collection()
while self.index < self.datalen:
g = self.parseOneGame()
if g:
c.append(g)
else:
break
return c
def parseOneGame(self):
""" Parses one game from 'self.data'. Returns a 'GameTree' containing
one game, or 'None' if the end of 'self.data' has been reached."""
if self.index < self.datalen:
match = self.reGameTreeStart.match(self.data, self.index)
if match:
self.index = match.end()
return self.parseGameTree()
return None
def parseGameTree(self):
""" Called when "(" encountered, ends when a matching ")" encountered.
Parses and returns one 'GameTree' from 'self.data'. Raises
'GameTreeParseError' if a problem is encountered."""
g = GameTree()
while self.index < self.datalen:
match = self.reGameTreeNext.match(self.data, self.index)
if match:
self.index = match.end()
if match.group(1) == ";": # found start of node
if g.variations:
raise GameTreeParseError(
"A node was encountered after a variation.")
g.append(g.makeNode(self.parseNode()))
elif match.group(1) == "(": # found start of variation
g.variations = self.parseVariations()
else: # found end of GameTree ")"
return g
else: # error
raise GameTreeParseError
return g
def parseNode(self):
""" Called when ";" encountered (& is consumed). Parses and returns one
'Node', which can be empty. Raises 'NodePropertyParseError' if no
property values are extracted. Raises 'EndOfDataParseError' if the
end of 'self.data' is reached before the end of the node (i.e., the
start of the next node, the start of a variation, or the end of the
enclosing game tree)."""
n = Node()
while self.index < self.datalen:
match = self.reNodeContents.match(self.data, self.index)
if match:
self.index = match.end()
pvlist = self.parsePropertyValue()
if pvlist:
n.addProperty(n.makeProperty(match.group(1), pvlist))
else:
raise NodePropertyParseError
else: # reached end of Node
return n
raise EndOfDataParseError
def parsePropertyValue(self):
""" Called when "[" encountered (but not consumed), ends when the next
property, node, or variation encountered. Parses and returns a list
of property values. Raises 'PropertyValueParseError' if there is a
problem."""
pvlist = []
while self.index < self.datalen:
match = self.rePropertyStart.match(self.data, self.index)
if match:
self.index = match.end()
v = "" # value
# scan for escaped characters (using '\'), unescape them (remove linebreaks)
mend = self.rePropertyEnd.search(self.data, self.index)
mesc = self.reEscape.search(self.data, self.index)
while mesc and mend and (mesc.end() < mend.end()):
# copy up to '\', but remove '\'
v = v + self.data[self.index:mesc.start()]
mbreak = self.reLineBreak.match(self.data, mesc.end())
if mbreak:
self.index = mbreak.end() # remove linebreak
else:
v = v + self.data[mesc.end()] # copy escaped character
self.index = mesc.end() + 1 # move to point after escaped char
mend = self.rePropertyEnd.search(self.data, self.index)
mesc = self.reEscape.search(self.data, self.index)
if mend:
v = v + self.data[self.index:mend.start()]
self.index = mend.end()
pvlist.append(self._convertControlChars(v))
else:
raise PropertyValueParseError
else: # reached end of Property
break
if len(pvlist) >= 1:
return pvlist
else:
raise PropertyValueParseError
def _convertControlChars(self, text):
""" Converts control characters in 'text' to spaces, using the
'self.ctrltrans' translation table. Override for variant
behaviour."""
return string.translate(text, self.ctrltrans)
|
openai/pachi-py | pachi_py/pachi/tools/sgflib/sgflib.py | SGFParser.parseNode | python | def parseNode(self):
n = Node()
while self.index < self.datalen:
match = self.reNodeContents.match(self.data, self.index)
if match:
self.index = match.end()
pvlist = self.parsePropertyValue()
if pvlist:
n.addProperty(n.makeProperty(match.group(1), pvlist))
else:
raise NodePropertyParseError
else: # reached end of Node
return n
raise EndOfDataParseError | Called when ";" encountered (& is consumed). Parses and returns one
'Node', which can be empty. Raises 'NodePropertyParseError' if no
property values are extracted. Raises 'EndOfDataParseError' if the
end of 'self.data' is reached before the end of the node (i.e., the
start of the next node, the start of a variation, or the end of the
enclosing game tree). | train | https://github.com/openai/pachi-py/blob/65f29fdd28747d34f2c3001f4016913e4aaeb8fc/pachi_py/pachi/tools/sgflib/sgflib.py#L216-L235 | [
"def parsePropertyValue(self):\n\t\"\"\" Called when \"[\" encountered (but not consumed), ends when the next\n\t\tproperty, node, or variation encountered. Parses and returns a list\n\t\tof property values. Raises 'PropertyValueParseError' if there is a\n\t\tproblem.\"\"\"\n\tpvlist = []\n\twhile self.index < self.datalen:\n\t\tmatch = self.rePropertyStart.match(self.data, self.index)\n\t\tif match:\n\t\t\tself.index = match.end()\n\t\t\tv = \"\"\t\t\t\t\t\t\t\t\t# value\n\t\t\t# scan for escaped characters (using '\\'), unescape them (remove linebreaks)\n\t\t\tmend = self.rePropertyEnd.search(self.data, self.index)\n\t\t\tmesc = self.reEscape.search(self.data, self.index)\n\t\t\twhile mesc and mend and (mesc.end() < mend.end()):\n\t\t\t\t\t\t\t\t\t\t\t\t\t# copy up to '\\', but remove '\\'\n\t\t\t\tv = v + self.data[self.index:mesc.start()]\n\t\t\t\tmbreak = self.reLineBreak.match(self.data, mesc.end())\n\t\t\t\tif mbreak:\n\t\t\t\t\tself.index = mbreak.end()\t\t# remove linebreak\n\t\t\t\telse:\n\t\t\t\t\tv = v + self.data[mesc.end()]\t# copy escaped character\n\t\t\t\t\tself.index = mesc.end() + 1\t\t# move to point after escaped char\n\t\t\t\tmend = self.rePropertyEnd.search(self.data, self.index)\n\t\t\t\tmesc = self.reEscape.search(self.data, self.index)\n\t\t\tif mend:\n\t\t\t\tv = v + self.data[self.index:mend.start()]\n\t\t\t\tself.index = mend.end()\n\t\t\t\tpvlist.append(self._convertControlChars(v))\n\t\t\telse:\n\t\t\t\traise PropertyValueParseError\n\t\telse:\t\t\t\t\t\t\t\t\t\t# reached end of Property\n\t\t\tbreak\n\tif len(pvlist) >= 1:\n\t\treturn pvlist\n\telse:\n\t\traise PropertyValueParseError\n",
"def addProperty(self, property):\n\t\"\"\"\n\t\tAdds a 'Property' to this 'Node'. Checks for duplicate properties\n\t\t(illegal), and maintains the property order. Argument:\n\t\t- property : 'Property'\"\"\"\n\tif self.has_key(property.id):\n\t\traise DuplicatePropertyError\n\telse:\n\t\tself.data[property.id] = property\n\t\tself.order.append(property)\n",
"def makeProperty(self, id, valuelist):\n\t\"\"\"\n\t\tCreate a new 'Property'. Override/extend to create 'Property'\n\t\tsubclass instances (move, setup, game-info, etc.). Arguments:\n\t\t- id : string\n\t\t- valuelist : 'Property' or list of values\"\"\"\n\treturn Property(id, valuelist)\n"
] | class SGFParser:
"""
Parser for SGF data. Creates a tree structure based on the SGF standard
itself. 'SGFParser.parse()' will return a 'Collection' object for the entire
data.
Instance Attributes:
- self.data : string -- The complete SGF data instance.
- self.datalen : integer -- Length of 'self.data'.
- self.index : integer -- Current parsing position in 'self.data'.
Class Attributes:
- re* : re.RegexObject -- Regular expression text matching patterns.
- ctrltrans: string[256] -- Control character translation table for
string.translate(), used to remove all control characters from Property
values. May be overridden (preferably in instances)."""
# text matching patterns
reGameTreeStart = re.compile(r'\s*\(')
reGameTreeEnd = re.compile(r'\s*\)')
reGameTreeNext = re.compile(r'\s*(;|\(|\))')
reNodeContents = re.compile(r'\s*([A-Za-z]+(?=\s*\[))')
rePropertyStart = re.compile(r'\s*\[')
rePropertyEnd = re.compile(r'\]')
reEscape = re.compile(r'\\')
reLineBreak = re.compile(r'\r\n?|\n\r?') # CR, LF, CR/LF, LF/CR
# character translation tables
# for control characters (except LF \012 & CR \015): convert to spaces
ctrltrans = string.maketrans("\000\001\002\003\004\005\006\007" +
"\010\011\013\014\016\017\020\021\022\023\024\025\026\027" +
"\030\031\032\033\034\035\036\037", " "*30)
def __init__(self, data):
""" Initialize the instance attributes. See the class itself for info."""
self.data = data
self.datalen = len(data)
self.index = 0
def parse(self):
""" Parses the SGF data stored in 'self.data', and returns a 'Collection'."""
c = Collection()
while self.index < self.datalen:
g = self.parseOneGame()
if g:
c.append(g)
else:
break
return c
def parseOneGame(self):
""" Parses one game from 'self.data'. Returns a 'GameTree' containing
one game, or 'None' if the end of 'self.data' has been reached."""
if self.index < self.datalen:
match = self.reGameTreeStart.match(self.data, self.index)
if match:
self.index = match.end()
return self.parseGameTree()
return None
def parseGameTree(self):
""" Called when "(" encountered, ends when a matching ")" encountered.
Parses and returns one 'GameTree' from 'self.data'. Raises
'GameTreeParseError' if a problem is encountered."""
g = GameTree()
while self.index < self.datalen:
match = self.reGameTreeNext.match(self.data, self.index)
if match:
self.index = match.end()
if match.group(1) == ";": # found start of node
if g.variations:
raise GameTreeParseError(
"A node was encountered after a variation.")
g.append(g.makeNode(self.parseNode()))
elif match.group(1) == "(": # found start of variation
g.variations = self.parseVariations()
else: # found end of GameTree ")"
return g
else: # error
raise GameTreeParseError
return g
def parseVariations(self):
""" Called when "(" encountered inside a 'GameTree', ends when a
non-matching ")" encountered. Returns a list of variation
'GameTree''s. Raises 'EndOfDataParseError' if the end of 'self.data'
is reached before the end of the enclosing 'GameTree'."""
v = []
while self.index < self.datalen:
# check for ")" at end of GameTree, but don't consume it
match = self.reGameTreeEnd.match(self.data, self.index)
if match:
return v
g = self.parseGameTree()
if g:
v.append(g)
# check for next variation, and consume "("
match = self.reGameTreeStart.match(self.data, self.index)
if match:
self.index = match.end()
raise EndOfDataParseError
def parsePropertyValue(self):
""" Called when "[" encountered (but not consumed), ends when the next
property, node, or variation encountered. Parses and returns a list
of property values. Raises 'PropertyValueParseError' if there is a
problem."""
pvlist = []
while self.index < self.datalen:
match = self.rePropertyStart.match(self.data, self.index)
if match:
self.index = match.end()
v = "" # value
# scan for escaped characters (using '\'), unescape them (remove linebreaks)
mend = self.rePropertyEnd.search(self.data, self.index)
mesc = self.reEscape.search(self.data, self.index)
while mesc and mend and (mesc.end() < mend.end()):
# copy up to '\', but remove '\'
v = v + self.data[self.index:mesc.start()]
mbreak = self.reLineBreak.match(self.data, mesc.end())
if mbreak:
self.index = mbreak.end() # remove linebreak
else:
v = v + self.data[mesc.end()] # copy escaped character
self.index = mesc.end() + 1 # move to point after escaped char
mend = self.rePropertyEnd.search(self.data, self.index)
mesc = self.reEscape.search(self.data, self.index)
if mend:
v = v + self.data[self.index:mend.start()]
self.index = mend.end()
pvlist.append(self._convertControlChars(v))
else:
raise PropertyValueParseError
else: # reached end of Property
break
if len(pvlist) >= 1:
return pvlist
else:
raise PropertyValueParseError
def _convertControlChars(self, text):
""" Converts control characters in 'text' to spaces, using the
'self.ctrltrans' translation table. Override for variant
behaviour."""
return string.translate(text, self.ctrltrans)
|
openai/pachi-py | pachi_py/pachi/tools/sgflib/sgflib.py | SGFParser.parsePropertyValue | python | def parsePropertyValue(self):
pvlist = []
while self.index < self.datalen:
match = self.rePropertyStart.match(self.data, self.index)
if match:
self.index = match.end()
v = "" # value
# scan for escaped characters (using '\'), unescape them (remove linebreaks)
mend = self.rePropertyEnd.search(self.data, self.index)
mesc = self.reEscape.search(self.data, self.index)
while mesc and mend and (mesc.end() < mend.end()):
# copy up to '\', but remove '\'
v = v + self.data[self.index:mesc.start()]
mbreak = self.reLineBreak.match(self.data, mesc.end())
if mbreak:
self.index = mbreak.end() # remove linebreak
else:
v = v + self.data[mesc.end()] # copy escaped character
self.index = mesc.end() + 1 # move to point after escaped char
mend = self.rePropertyEnd.search(self.data, self.index)
mesc = self.reEscape.search(self.data, self.index)
if mend:
v = v + self.data[self.index:mend.start()]
self.index = mend.end()
pvlist.append(self._convertControlChars(v))
else:
raise PropertyValueParseError
else: # reached end of Property
break
if len(pvlist) >= 1:
return pvlist
else:
raise PropertyValueParseError | Called when "[" encountered (but not consumed), ends when the next
property, node, or variation encountered. Parses and returns a list
of property values. Raises 'PropertyValueParseError' if there is a
problem. | train | https://github.com/openai/pachi-py/blob/65f29fdd28747d34f2c3001f4016913e4aaeb8fc/pachi_py/pachi/tools/sgflib/sgflib.py#L237-L273 | [
"def _convertControlChars(self, text):\n\t\"\"\" Converts control characters in 'text' to spaces, using the\n\t\t'self.ctrltrans' translation table. Override for variant\n\t\tbehaviour.\"\"\"\n\treturn string.translate(text, self.ctrltrans)\n"
] | class SGFParser:
"""
Parser for SGF data. Creates a tree structure based on the SGF standard
itself. 'SGFParser.parse()' will return a 'Collection' object for the entire
data.
Instance Attributes:
- self.data : string -- The complete SGF data instance.
- self.datalen : integer -- Length of 'self.data'.
- self.index : integer -- Current parsing position in 'self.data'.
Class Attributes:
- re* : re.RegexObject -- Regular expression text matching patterns.
- ctrltrans: string[256] -- Control character translation table for
string.translate(), used to remove all control characters from Property
values. May be overridden (preferably in instances)."""
# text matching patterns
reGameTreeStart = re.compile(r'\s*\(')
reGameTreeEnd = re.compile(r'\s*\)')
reGameTreeNext = re.compile(r'\s*(;|\(|\))')
reNodeContents = re.compile(r'\s*([A-Za-z]+(?=\s*\[))')
rePropertyStart = re.compile(r'\s*\[')
rePropertyEnd = re.compile(r'\]')
reEscape = re.compile(r'\\')
reLineBreak = re.compile(r'\r\n?|\n\r?') # CR, LF, CR/LF, LF/CR
# character translation tables
# for control characters (except LF \012 & CR \015): convert to spaces
ctrltrans = string.maketrans("\000\001\002\003\004\005\006\007" +
"\010\011\013\014\016\017\020\021\022\023\024\025\026\027" +
"\030\031\032\033\034\035\036\037", " "*30)
def __init__(self, data):
""" Initialize the instance attributes. See the class itself for info."""
self.data = data
self.datalen = len(data)
self.index = 0
def parse(self):
""" Parses the SGF data stored in 'self.data', and returns a 'Collection'."""
c = Collection()
while self.index < self.datalen:
g = self.parseOneGame()
if g:
c.append(g)
else:
break
return c
def parseOneGame(self):
""" Parses one game from 'self.data'. Returns a 'GameTree' containing
one game, or 'None' if the end of 'self.data' has been reached."""
if self.index < self.datalen:
match = self.reGameTreeStart.match(self.data, self.index)
if match:
self.index = match.end()
return self.parseGameTree()
return None
def parseGameTree(self):
""" Called when "(" encountered, ends when a matching ")" encountered.
Parses and returns one 'GameTree' from 'self.data'. Raises
'GameTreeParseError' if a problem is encountered."""
g = GameTree()
while self.index < self.datalen:
match = self.reGameTreeNext.match(self.data, self.index)
if match:
self.index = match.end()
if match.group(1) == ";": # found start of node
if g.variations:
raise GameTreeParseError(
"A node was encountered after a variation.")
g.append(g.makeNode(self.parseNode()))
elif match.group(1) == "(": # found start of variation
g.variations = self.parseVariations()
else: # found end of GameTree ")"
return g
else: # error
raise GameTreeParseError
return g
def parseVariations(self):
""" Called when "(" encountered inside a 'GameTree', ends when a
non-matching ")" encountered. Returns a list of variation
'GameTree''s. Raises 'EndOfDataParseError' if the end of 'self.data'
is reached before the end of the enclosing 'GameTree'."""
v = []
while self.index < self.datalen:
# check for ")" at end of GameTree, but don't consume it
match = self.reGameTreeEnd.match(self.data, self.index)
if match:
return v
g = self.parseGameTree()
if g:
v.append(g)
# check for next variation, and consume "("
match = self.reGameTreeStart.match(self.data, self.index)
if match:
self.index = match.end()
raise EndOfDataParseError
def parseNode(self):
""" Called when ";" encountered (& is consumed). Parses and returns one
'Node', which can be empty. Raises 'NodePropertyParseError' if no
property values are extracted. Raises 'EndOfDataParseError' if the
end of 'self.data' is reached before the end of the node (i.e., the
start of the next node, the start of a variation, or the end of the
enclosing game tree)."""
n = Node()
while self.index < self.datalen:
match = self.reNodeContents.match(self.data, self.index)
if match:
self.index = match.end()
pvlist = self.parsePropertyValue()
if pvlist:
n.addProperty(n.makeProperty(match.group(1), pvlist))
else:
raise NodePropertyParseError
else: # reached end of Node
return n
raise EndOfDataParseError
def _convertControlChars(self, text):
""" Converts control characters in 'text' to spaces, using the
'self.ctrltrans' translation table. Override for variant
behaviour."""
return string.translate(text, self.ctrltrans)
|
openai/pachi-py | pachi_py/pachi/tools/sgflib/sgflib.py | RootNodeSGFParser.parseNode | python | def parseNode(self):
n = SGFParser.parseNode(self) # process one Node as usual
self.index = self.datalen # set end of data
return n | Calls 'SGFParser.parseNode()', sets 'self.index' to point to the end
of the data (effectively ending the 'GameTree' and 'Collection'),
and returns the single (root) 'Node' parsed. | train | https://github.com/openai/pachi-py/blob/65f29fdd28747d34f2c3001f4016913e4aaeb8fc/pachi_py/pachi/tools/sgflib/sgflib.py#L285-L291 | [
"def parseNode(self):\n\t\"\"\" Called when \";\" encountered (& is consumed). Parses and returns one\n\t\t'Node', which can be empty. Raises 'NodePropertyParseError' if no\n\t\tproperty values are extracted. Raises 'EndOfDataParseError' if the\n\t\tend of 'self.data' is reached before the end of the node (i.e., the\n\t\tstart of the next node, the start of a variation, or the end of the\n\t\tenclosing game tree).\"\"\"\n\tn = Node()\n\twhile self.index < self.datalen:\n\t\tmatch = self.reNodeContents.match(self.data, self.index)\n\t\tif match:\n\t\t\tself.index = match.end()\n\t\t\tpvlist = self.parsePropertyValue()\n\t\t\tif pvlist:\n\t\t\t\tn.addProperty(n.makeProperty(match.group(1), pvlist))\n\t\t\telse:\n\t\t\t\traise NodePropertyParseError\n\t\telse:\t\t\t\t\t\t\t\t\t\t# reached end of Node\n\t\t\treturn n\n\traise EndOfDataParseError\n"
] | class RootNodeSGFParser(SGFParser):
""" For parsing only the first 'GameTree''s root Node of an SGF file."""
# we're only interested in the root node
|
openai/pachi-py | pachi_py/pachi/tools/sgflib/sgflib.py | GameTree.mainline | python | def mainline(self):
if self.variations:
return GameTree(self.data + self.variations[0].mainline())
else:
return self | Returns the main line of the game (variation A) as a 'GameTree'. | train | https://github.com/openai/pachi-py/blob/65f29fdd28747d34f2c3001f4016913e4aaeb8fc/pachi_py/pachi/tools/sgflib/sgflib.py#L338-L343 | null | class GameTree(List):
"""
An SGF game tree: a game or variation. Instance attributes:
- self[.data] : list of 'Node' -- game tree 'trunk'.
- self.variations : list of 'GameTree' -- 0 or 2+ variations.
'self.variations[0]' contains the main branch (sequence actually played)."""
def __init__(self, nodelist=None, variations=None):
"""
Initialize the 'GameTree'. Arguments:
- nodelist : 'GameTree' or list of 'Node' -- Stored in 'self.data'.
- variations : list of 'GameTree' -- Stored in 'self.variations'."""
List.__init__(self, nodelist)
self.variations = variations or []
def __str__(self):
""" SGF representation, with proper line breaks between nodes."""
if len(self):
s = "(" + str(self[0]) # append the first Node automatically
l = len(string.split(s, "\n")[-1]) # accounts for line breaks within Nodes
for n in map(str, self[1:]):
if l + len(string.split(n, "\n")[0]) > MAX_LINE_LEN:
s = s + "\n"
l = 0
s = s + n
l = len(string.split(s, "\n")[-1])
return s + string.join(map(str, [""] + self.variations), "\n") + ")"
else:
return "" # empty GameTree illegal; "()" illegal
def makeNode(self, plist):
"""
Create a new 'Node' containing the properties contained in 'plist'.
Override/extend to create 'Node' subclass instances (move, setup).
Argument:
- plist : 'Node' or list of 'Property'"""
return Node(plist)
def cursor(self):
""" Returns a 'Cursor' object for navigation of this 'GameTree'."""
return Cursor(self)
def propertySearch(self, pid, getall=0):
"""
Searches this 'GameTree' for nodes containing matching properties.
Returns a 'GameTree' containing the matched node(s). Arguments:
- pid : string -- ID of properties to search for.
- getall : boolean -- Set to true (1) to return all 'Node''s that
match, or to false (0) to return only the first match."""
matches = []
for n in self:
if n.has_key(pid):
matches.append(n)
if not getall:
break
else: # getall or not matches:
for v in self.variations:
matches = matches + v.propertySearch(pid, getall)
if not getall and matches:
break
return GameTree(matches)
|
openai/pachi-py | pachi_py/pachi/tools/sgflib/sgflib.py | GameTree.propertySearch | python | def propertySearch(self, pid, getall=0):
matches = []
for n in self:
if n.has_key(pid):
matches.append(n)
if not getall:
break
else: # getall or not matches:
for v in self.variations:
matches = matches + v.propertySearch(pid, getall)
if not getall and matches:
break
return GameTree(matches) | Searches this 'GameTree' for nodes containing matching properties.
Returns a 'GameTree' containing the matched node(s). Arguments:
- pid : string -- ID of properties to search for.
- getall : boolean -- Set to true (1) to return all 'Node''s that
match, or to false (0) to return only the first match. | train | https://github.com/openai/pachi-py/blob/65f29fdd28747d34f2c3001f4016913e4aaeb8fc/pachi_py/pachi/tools/sgflib/sgflib.py#L357-L375 | null | class GameTree(List):
"""
An SGF game tree: a game or variation. Instance attributes:
- self[.data] : list of 'Node' -- game tree 'trunk'.
- self.variations : list of 'GameTree' -- 0 or 2+ variations.
'self.variations[0]' contains the main branch (sequence actually played)."""
def __init__(self, nodelist=None, variations=None):
"""
Initialize the 'GameTree'. Arguments:
- nodelist : 'GameTree' or list of 'Node' -- Stored in 'self.data'.
- variations : list of 'GameTree' -- Stored in 'self.variations'."""
List.__init__(self, nodelist)
self.variations = variations or []
def __str__(self):
""" SGF representation, with proper line breaks between nodes."""
if len(self):
s = "(" + str(self[0]) # append the first Node automatically
l = len(string.split(s, "\n")[-1]) # accounts for line breaks within Nodes
for n in map(str, self[1:]):
if l + len(string.split(n, "\n")[0]) > MAX_LINE_LEN:
s = s + "\n"
l = 0
s = s + n
l = len(string.split(s, "\n")[-1])
return s + string.join(map(str, [""] + self.variations), "\n") + ")"
else:
return "" # empty GameTree illegal; "()" illegal
def mainline(self):
""" Returns the main line of the game (variation A) as a 'GameTree'."""
if self.variations:
return GameTree(self.data + self.variations[0].mainline())
else:
return self
def makeNode(self, plist):
"""
Create a new 'Node' containing the properties contained in 'plist'.
Override/extend to create 'Node' subclass instances (move, setup).
Argument:
- plist : 'Node' or list of 'Property'"""
return Node(plist)
def cursor(self):
""" Returns a 'Cursor' object for navigation of this 'GameTree'."""
return Cursor(self)
|
openai/pachi-py | pachi_py/pachi/tools/sgflib/sgflib.py | Node.addProperty | python | def addProperty(self, property):
"""
Adds a 'Property' to this 'Node'. Checks for duplicate properties
(illegal), and maintains the property order. Argument:
- property : 'Property'"""
if self.has_key(property.id):
raise DuplicatePropertyError
else:
self.data[property.id] = property
self.order.append(property) | Adds a 'Property' to this 'Node'. Checks for duplicate properties
(illegal), and maintains the property order. Argument:
- property : 'Property | train | https://github.com/openai/pachi-py/blob/65f29fdd28747d34f2c3001f4016913e4aaeb8fc/pachi_py/pachi/tools/sgflib/sgflib.py#L449-L458 | [
"def has_key(self, key):\n\t\"\"\" Returns 1 (true) if 'self.data' has a key 'key', or 0 otherwise.\"\"\"\n\treturn self.data.has_key(key)\n"
] | class Node(Dictionary):
"""
An SGF node. Instance Attributes:
- self[.data] : ordered dictionary -- '{Property.id:Property}' mapping.
(Ordered dictionary: allows offset-indexed retrieval). Properties *must*
be added using 'self.addProperty()'.
Example: Let 'n' be a 'Node' parsed from ';B[aa]BL[250]C[comment]':
- 'str(n["BL"])' => '"BL[250]"'
- 'str(n[0])' => '"B[aa]"'
- 'map(str, n)' => '["B[aa]","BL[250]","C[comment]"]'"""
def __init__(self, plist=[]):
"""
Initializer. Argument:
- plist: Node or list of 'Property'."""
Dictionary.__init__(self)
self.order = []
for p in plist:
self.addProperty(p)
def __getitem__(self, key):
""" On 'self[key]', 'x in self', 'for x in self'. Implements all
indexing-related operations. Allows both key- and offset-indexed
retrieval. Membership and iteration ('in', 'for') repeatedly index
from 0 until 'IndexError'."""
if type(key) is INT_TYPE:
return self.order[key]
else:
return self.data[key]
def __setitem__(self, key, x):
""" On 'self[key]=x'. Allows assignment to existing items only. Raises
'DirectAccessError' on new item assignment."""
if self.has_key(key):
self.order[self.order.index(self[key])] = x
Dictionary.__setitem__(self, key, x)
else:
raise DirectAccessError(
"Properties may not be added directly; use addProperty() instead.")
def __delitem__(self, key):
""" On 'del self[key]'. Updates 'self.order' to maintain consistency."""
self.order.remove(self[key])
Dictionary.__delitem__(self, key)
def __getslice__(self, low, high):
""" On 'self[low:high]'."""
return self.order[low:high]
def __str__(self):
""" SGF representation, with proper line breaks between properties."""
if len(self):
s = ";" + str(self[0])
l = len(string.split(s, "\n")[-1]) # accounts for line breaks within Properties
for p in map(str, self[1:]):
if l + len(string.split(p, "\n")[0]) > MAX_LINE_LEN:
s = s + "\n"
l = 0
s = s + p
l = len(string.split(s, "\n")[-1])
return s
else:
return ";"
def update(self, dict):
""" 'Dictionary' method not applicable to 'Node'. Raises
'DirectAccessError'."""
raise DirectAccessError(
"The update() method is not supported by Node; use addProperty() instead.")
def makeProperty(self, id, valuelist):
"""
Create a new 'Property'. Override/extend to create 'Property'
subclass instances (move, setup, game-info, etc.). Arguments:
- id : string
- valuelist : 'Property' or list of values"""
return Property(id, valuelist)
|
openai/pachi-py | pachi_py/pachi/tools/sgflib/sgflib.py | Cursor.reset | python | def reset(self):
self.gametree = self.game
self.nodenum = 0
self.index = 0
self.stack = []
self.node = self.gametree[self.index]
self._setChildren()
self._setFlags() | Set 'Cursor' to point to the start of the root 'GameTree', 'self.game'. | train | https://github.com/openai/pachi-py/blob/65f29fdd28747d34f2c3001f4016913e4aaeb8fc/pachi_py/pachi/tools/sgflib/sgflib.py#L511-L519 | [
"def _setChildren(self):\n\t\"\"\" Sets up 'self.children'.\"\"\"\n\tif self.index + 1 < len(self.gametree):\n\t\tself.children = [self.gametree[self.index+1]]\n\telse:\n\t\tself.children = map(lambda list: list[0], self.gametree.variations)\n",
"def _setFlags(self):\n\t\"\"\" Sets up the flags 'self.atEnd' and 'self.atStart'.\"\"\"\n\tself.atEnd = not self.gametree.variations and (self.index + 1 == len(self.gametree))\n\tself.atStart = not self.stack and (self.index == 0)\n"
] | class Cursor:
"""
'GameTree' navigation tool. Instance attributes:
- self.game : 'GameTree' -- The root 'GameTree'.
- self.gametree : 'GameTree' -- The current 'GameTree'.
- self.node : 'Node' -- The current Node.
- self.nodenum : integer -- The offset of 'self.node' from the root of
'self.game'. The nodenum of the root node is 0.
- self.index : integer -- The offset of 'self.node' within 'self.gametree'.
- self.stack : list of 'GameTree' -- A record of 'GameTree''s traversed.
- self.children : list of 'Node' -- All child nodes of the current node.
- self.atEnd : boolean -- Flags if we are at the end of a branch.
- self.atStart : boolean -- Flags if we are at the start of the game."""
def __init__(self, gametree):
""" Initialize root 'GameTree' and instance variables."""
self.game = gametree # root GameTree
self.reset()
def next(self, varnum=0):
"""
Moves the 'Cursor' to & returns the next 'Node'. Raises
'GameTreeEndError' if the end of a branch is exceeded. Raises
'GameTreeNavigationError' if a non-existent variation is accessed.
Argument:
- varnum : integer, default 0 -- Variation number. Non-zero only
valid at a branching, where variations exist."""
if self.index + 1 < len(self.gametree): # more main line?
if varnum != 0:
raise GameTreeNavigationError("Nonexistent variation.")
self.index = self.index + 1
elif self.gametree.variations: # variations exist?
if varnum < len(self.gametree.variations):
self.stack.append(self.gametree)
self.gametree = self.gametree.variations[varnum]
self.index = 0
else:
raise GameTreeNavigationError("Nonexistent variation.")
else:
raise GameTreeEndError
self.node = self.gametree[self.index]
self.nodenum = self.nodenum + 1
self._setChildren()
self._setFlags()
return self.node
def previous(self):
""" Moves the 'Cursor' to & returns the previous 'Node'. Raises
'GameTreeEndError' if the start of a branch is exceeded."""
if self.index - 1 >= 0: # more main line?
self.index = self.index - 1
elif self.stack: # were we in a variation?
self.gametree = self.stack.pop()
self.index = len(self.gametree) - 1
else:
raise GameTreeEndError
self.node = self.gametree[self.index]
self.nodenum = self.nodenum - 1
self._setChildren()
self._setFlags()
return self.node
def _setChildren(self):
""" Sets up 'self.children'."""
if self.index + 1 < len(self.gametree):
self.children = [self.gametree[self.index+1]]
else:
self.children = map(lambda list: list[0], self.gametree.variations)
def _setFlags(self):
""" Sets up the flags 'self.atEnd' and 'self.atStart'."""
self.atEnd = not self.gametree.variations and (self.index + 1 == len(self.gametree))
self.atStart = not self.stack and (self.index == 0)
|
openai/pachi-py | pachi_py/pachi/tools/sgflib/sgflib.py | Cursor.next | python | def next(self, varnum=0):
if self.index + 1 < len(self.gametree): # more main line?
if varnum != 0:
raise GameTreeNavigationError("Nonexistent variation.")
self.index = self.index + 1
elif self.gametree.variations: # variations exist?
if varnum < len(self.gametree.variations):
self.stack.append(self.gametree)
self.gametree = self.gametree.variations[varnum]
self.index = 0
else:
raise GameTreeNavigationError("Nonexistent variation.")
else:
raise GameTreeEndError
self.node = self.gametree[self.index]
self.nodenum = self.nodenum + 1
self._setChildren()
self._setFlags()
return self.node | Moves the 'Cursor' to & returns the next 'Node'. Raises
'GameTreeEndError' if the end of a branch is exceeded. Raises
'GameTreeNavigationError' if a non-existent variation is accessed.
Argument:
- varnum : integer, default 0 -- Variation number. Non-zero only
valid at a branching, where variations exist. | train | https://github.com/openai/pachi-py/blob/65f29fdd28747d34f2c3001f4016913e4aaeb8fc/pachi_py/pachi/tools/sgflib/sgflib.py#L521-L546 | [
"def _setChildren(self):\n\t\"\"\" Sets up 'self.children'.\"\"\"\n\tif self.index + 1 < len(self.gametree):\n\t\tself.children = [self.gametree[self.index+1]]\n\telse:\n\t\tself.children = map(lambda list: list[0], self.gametree.variations)\n",
"def _setFlags(self):\n\t\"\"\" Sets up the flags 'self.atEnd' and 'self.atStart'.\"\"\"\n\tself.atEnd = not self.gametree.variations and (self.index + 1 == len(self.gametree))\n\tself.atStart = not self.stack and (self.index == 0)\n"
] | class Cursor:
"""
'GameTree' navigation tool. Instance attributes:
- self.game : 'GameTree' -- The root 'GameTree'.
- self.gametree : 'GameTree' -- The current 'GameTree'.
- self.node : 'Node' -- The current Node.
- self.nodenum : integer -- The offset of 'self.node' from the root of
'self.game'. The nodenum of the root node is 0.
- self.index : integer -- The offset of 'self.node' within 'self.gametree'.
- self.stack : list of 'GameTree' -- A record of 'GameTree''s traversed.
- self.children : list of 'Node' -- All child nodes of the current node.
- self.atEnd : boolean -- Flags if we are at the end of a branch.
- self.atStart : boolean -- Flags if we are at the start of the game."""
def __init__(self, gametree):
""" Initialize root 'GameTree' and instance variables."""
self.game = gametree # root GameTree
self.reset()
def reset(self):
""" Set 'Cursor' to point to the start of the root 'GameTree', 'self.game'."""
self.gametree = self.game
self.nodenum = 0
self.index = 0
self.stack = []
self.node = self.gametree[self.index]
self._setChildren()
self._setFlags()
def previous(self):
""" Moves the 'Cursor' to & returns the previous 'Node'. Raises
'GameTreeEndError' if the start of a branch is exceeded."""
if self.index - 1 >= 0: # more main line?
self.index = self.index - 1
elif self.stack: # were we in a variation?
self.gametree = self.stack.pop()
self.index = len(self.gametree) - 1
else:
raise GameTreeEndError
self.node = self.gametree[self.index]
self.nodenum = self.nodenum - 1
self._setChildren()
self._setFlags()
return self.node
def _setChildren(self):
""" Sets up 'self.children'."""
if self.index + 1 < len(self.gametree):
self.children = [self.gametree[self.index+1]]
else:
self.children = map(lambda list: list[0], self.gametree.variations)
def _setFlags(self):
""" Sets up the flags 'self.atEnd' and 'self.atStart'."""
self.atEnd = not self.gametree.variations and (self.index + 1 == len(self.gametree))
self.atStart = not self.stack and (self.index == 0)
|
openai/pachi-py | pachi_py/pachi/tools/sgflib/sgflib.py | Cursor.previous | python | def previous(self):
if self.index - 1 >= 0: # more main line?
self.index = self.index - 1
elif self.stack: # were we in a variation?
self.gametree = self.stack.pop()
self.index = len(self.gametree) - 1
else:
raise GameTreeEndError
self.node = self.gametree[self.index]
self.nodenum = self.nodenum - 1
self._setChildren()
self._setFlags()
return self.node | Moves the 'Cursor' to & returns the previous 'Node'. Raises
'GameTreeEndError' if the start of a branch is exceeded. | train | https://github.com/openai/pachi-py/blob/65f29fdd28747d34f2c3001f4016913e4aaeb8fc/pachi_py/pachi/tools/sgflib/sgflib.py#L548-L562 | [
"def _setChildren(self):\n\t\"\"\" Sets up 'self.children'.\"\"\"\n\tif self.index + 1 < len(self.gametree):\n\t\tself.children = [self.gametree[self.index+1]]\n\telse:\n\t\tself.children = map(lambda list: list[0], self.gametree.variations)\n",
"def _setFlags(self):\n\t\"\"\" Sets up the flags 'self.atEnd' and 'self.atStart'.\"\"\"\n\tself.atEnd = not self.gametree.variations and (self.index + 1 == len(self.gametree))\n\tself.atStart = not self.stack and (self.index == 0)\n"
] | class Cursor:
"""
'GameTree' navigation tool. Instance attributes:
- self.game : 'GameTree' -- The root 'GameTree'.
- self.gametree : 'GameTree' -- The current 'GameTree'.
- self.node : 'Node' -- The current Node.
- self.nodenum : integer -- The offset of 'self.node' from the root of
'self.game'. The nodenum of the root node is 0.
- self.index : integer -- The offset of 'self.node' within 'self.gametree'.
- self.stack : list of 'GameTree' -- A record of 'GameTree''s traversed.
- self.children : list of 'Node' -- All child nodes of the current node.
- self.atEnd : boolean -- Flags if we are at the end of a branch.
- self.atStart : boolean -- Flags if we are at the start of the game."""
def __init__(self, gametree):
""" Initialize root 'GameTree' and instance variables."""
self.game = gametree # root GameTree
self.reset()
def reset(self):
""" Set 'Cursor' to point to the start of the root 'GameTree', 'self.game'."""
self.gametree = self.game
self.nodenum = 0
self.index = 0
self.stack = []
self.node = self.gametree[self.index]
self._setChildren()
self._setFlags()
def next(self, varnum=0):
"""
Moves the 'Cursor' to & returns the next 'Node'. Raises
'GameTreeEndError' if the end of a branch is exceeded. Raises
'GameTreeNavigationError' if a non-existent variation is accessed.
Argument:
- varnum : integer, default 0 -- Variation number. Non-zero only
valid at a branching, where variations exist."""
if self.index + 1 < len(self.gametree): # more main line?
if varnum != 0:
raise GameTreeNavigationError("Nonexistent variation.")
self.index = self.index + 1
elif self.gametree.variations: # variations exist?
if varnum < len(self.gametree.variations):
self.stack.append(self.gametree)
self.gametree = self.gametree.variations[varnum]
self.index = 0
else:
raise GameTreeNavigationError("Nonexistent variation.")
else:
raise GameTreeEndError
self.node = self.gametree[self.index]
self.nodenum = self.nodenum + 1
self._setChildren()
self._setFlags()
return self.node
def _setChildren(self):
""" Sets up 'self.children'."""
if self.index + 1 < len(self.gametree):
self.children = [self.gametree[self.index+1]]
else:
self.children = map(lambda list: list[0], self.gametree.variations)
def _setFlags(self):
""" Sets up the flags 'self.atEnd' and 'self.atStart'."""
self.atEnd = not self.gametree.variations and (self.index + 1 == len(self.gametree))
self.atStart = not self.stack and (self.index == 0)
|
openai/pachi-py | pachi_py/pachi/tools/sgflib/sgflib.py | Cursor._setChildren | python | def _setChildren(self):
if self.index + 1 < len(self.gametree):
self.children = [self.gametree[self.index+1]]
else:
self.children = map(lambda list: list[0], self.gametree.variations) | Sets up 'self.children'. | train | https://github.com/openai/pachi-py/blob/65f29fdd28747d34f2c3001f4016913e4aaeb8fc/pachi_py/pachi/tools/sgflib/sgflib.py#L564-L569 | null | class Cursor:
"""
'GameTree' navigation tool. Instance attributes:
- self.game : 'GameTree' -- The root 'GameTree'.
- self.gametree : 'GameTree' -- The current 'GameTree'.
- self.node : 'Node' -- The current Node.
- self.nodenum : integer -- The offset of 'self.node' from the root of
'self.game'. The nodenum of the root node is 0.
- self.index : integer -- The offset of 'self.node' within 'self.gametree'.
- self.stack : list of 'GameTree' -- A record of 'GameTree''s traversed.
- self.children : list of 'Node' -- All child nodes of the current node.
- self.atEnd : boolean -- Flags if we are at the end of a branch.
- self.atStart : boolean -- Flags if we are at the start of the game."""
def __init__(self, gametree):
""" Initialize root 'GameTree' and instance variables."""
self.game = gametree # root GameTree
self.reset()
def reset(self):
""" Set 'Cursor' to point to the start of the root 'GameTree', 'self.game'."""
self.gametree = self.game
self.nodenum = 0
self.index = 0
self.stack = []
self.node = self.gametree[self.index]
self._setChildren()
self._setFlags()
def next(self, varnum=0):
"""
Moves the 'Cursor' to & returns the next 'Node'. Raises
'GameTreeEndError' if the end of a branch is exceeded. Raises
'GameTreeNavigationError' if a non-existent variation is accessed.
Argument:
- varnum : integer, default 0 -- Variation number. Non-zero only
valid at a branching, where variations exist."""
if self.index + 1 < len(self.gametree): # more main line?
if varnum != 0:
raise GameTreeNavigationError("Nonexistent variation.")
self.index = self.index + 1
elif self.gametree.variations: # variations exist?
if varnum < len(self.gametree.variations):
self.stack.append(self.gametree)
self.gametree = self.gametree.variations[varnum]
self.index = 0
else:
raise GameTreeNavigationError("Nonexistent variation.")
else:
raise GameTreeEndError
self.node = self.gametree[self.index]
self.nodenum = self.nodenum + 1
self._setChildren()
self._setFlags()
return self.node
def previous(self):
""" Moves the 'Cursor' to & returns the previous 'Node'. Raises
'GameTreeEndError' if the start of a branch is exceeded."""
if self.index - 1 >= 0: # more main line?
self.index = self.index - 1
elif self.stack: # were we in a variation?
self.gametree = self.stack.pop()
self.index = len(self.gametree) - 1
else:
raise GameTreeEndError
self.node = self.gametree[self.index]
self.nodenum = self.nodenum - 1
self._setChildren()
self._setFlags()
return self.node
def _setFlags(self):
""" Sets up the flags 'self.atEnd' and 'self.atStart'."""
self.atEnd = not self.gametree.variations and (self.index + 1 == len(self.gametree))
self.atStart = not self.stack and (self.index == 0)
|
openai/pachi-py | pachi_py/pachi/tools/sgflib/sgflib.py | Cursor._setFlags | python | def _setFlags(self):
self.atEnd = not self.gametree.variations and (self.index + 1 == len(self.gametree))
self.atStart = not self.stack and (self.index == 0) | Sets up the flags 'self.atEnd' and 'self.atStart'. | train | https://github.com/openai/pachi-py/blob/65f29fdd28747d34f2c3001f4016913e4aaeb8fc/pachi_py/pachi/tools/sgflib/sgflib.py#L571-L574 | null | class Cursor:
"""
'GameTree' navigation tool. Instance attributes:
- self.game : 'GameTree' -- The root 'GameTree'.
- self.gametree : 'GameTree' -- The current 'GameTree'.
- self.node : 'Node' -- The current Node.
- self.nodenum : integer -- The offset of 'self.node' from the root of
'self.game'. The nodenum of the root node is 0.
- self.index : integer -- The offset of 'self.node' within 'self.gametree'.
- self.stack : list of 'GameTree' -- A record of 'GameTree''s traversed.
- self.children : list of 'Node' -- All child nodes of the current node.
- self.atEnd : boolean -- Flags if we are at the end of a branch.
- self.atStart : boolean -- Flags if we are at the start of the game."""
def __init__(self, gametree):
""" Initialize root 'GameTree' and instance variables."""
self.game = gametree # root GameTree
self.reset()
def reset(self):
""" Set 'Cursor' to point to the start of the root 'GameTree', 'self.game'."""
self.gametree = self.game
self.nodenum = 0
self.index = 0
self.stack = []
self.node = self.gametree[self.index]
self._setChildren()
self._setFlags()
def next(self, varnum=0):
"""
Moves the 'Cursor' to & returns the next 'Node'. Raises
'GameTreeEndError' if the end of a branch is exceeded. Raises
'GameTreeNavigationError' if a non-existent variation is accessed.
Argument:
- varnum : integer, default 0 -- Variation number. Non-zero only
valid at a branching, where variations exist."""
if self.index + 1 < len(self.gametree): # more main line?
if varnum != 0:
raise GameTreeNavigationError("Nonexistent variation.")
self.index = self.index + 1
elif self.gametree.variations: # variations exist?
if varnum < len(self.gametree.variations):
self.stack.append(self.gametree)
self.gametree = self.gametree.variations[varnum]
self.index = 0
else:
raise GameTreeNavigationError("Nonexistent variation.")
else:
raise GameTreeEndError
self.node = self.gametree[self.index]
self.nodenum = self.nodenum + 1
self._setChildren()
self._setFlags()
return self.node
def previous(self):
""" Moves the 'Cursor' to & returns the previous 'Node'. Raises
'GameTreeEndError' if the start of a branch is exceeded."""
if self.index - 1 >= 0: # more main line?
self.index = self.index - 1
elif self.stack: # were we in a variation?
self.gametree = self.stack.pop()
self.index = len(self.gametree) - 1
else:
raise GameTreeEndError
self.node = self.gametree[self.index]
self.nodenum = self.nodenum - 1
self._setChildren()
self._setFlags()
return self.node
def _setChildren(self):
""" Sets up 'self.children'."""
if self.index + 1 < len(self.gametree):
self.children = [self.gametree[self.index+1]]
else:
self.children = map(lambda list: list[0], self.gametree.variations)
|
EpistasisLab/scikit-rebate | skrebate/relieff.py | ReliefF.fit | python | def fit(self, X, y):
self._X = X # matrix of predictive variables ('independent variables')
self._y = y # vector of values for outcome variable ('dependent variable')
# Set up the properties for ReliefF -------------------------------------------------------------------------------------
self._datalen = len(self._X) # Number of training instances ('n')
""""Below: Handles special case where user requests that a proportion of training instances be neighbors for
ReliefF rather than a specified 'k' number of neighbors. Note that if k is specified, then k 'hits' and k
'misses' will be used to update feature scores. Thus total number of neighbors is 2k. If instead a proportion
is specified (say 0.1 out of 1000 instances) this represents the total number of neighbors (e.g. 100). In this
case, k would be set to 50 (i.e. 50 hits and 50 misses). """
if hasattr(self, 'n_neighbors') and type(self.n_neighbors) is float:
# Halve the number of neighbors because ReliefF uses n_neighbors matches and n_neighbors misses
self.n_neighbors = int(self.n_neighbors * self._datalen * 0.5)
# Number of unique outcome (label) values (used to determine outcome variable type)
self._label_list = list(set(self._y))
# Determine if label is discrete
discrete_label = (len(self._label_list) <= self.discrete_threshold)
# Identify label type (binary, multiclass, or continuous)
if discrete_label:
if len(self._label_list) == 2:
self._class_type = 'binary'
self.mcmap = 0
elif len(self._label_list) > 2:
self._class_type = 'multiclass'
self.mcmap = self._getMultiClassMap()
else:
raise ValueError('All labels are of the same class.')
else:
self._class_type = 'continuous'
self.mcmap = 0
# Training labels standard deviation -- only used if the training labels are continuous
self._labels_std = 0.
if len(self._label_list) > self.discrete_threshold:
self._labels_std = np.std(self._y, ddof=1)
self._num_attributes = len(self._X[0]) # Number of features in training data
# Number of missing data values in predictor variable matrix.
self._missing_data_count = np.isnan(self._X).sum()
"""Assign internal headers for the features (scikit-learn does not accept external headers from dataset):
The pre_normalize() function relies on the headers being ordered, e.g., X01, X02, etc.
If this is changed, then the sort in the pre_normalize() function needs to be adapted as well. """
xlen = len(self._X[0])
mxlen = len(str(xlen + 1))
self._headers = ['X{}'.format(str(i).zfill(mxlen)) for i in range(1, xlen + 1)]
start = time.time() # Runtime tracking
# Determine data types for all features/attributes in training data (i.e. discrete or continuous)
C = D = False
# Examines each feature and applies discrete_threshold to determine variable type.
self.attr = self._get_attribute_info()
for key in self.attr.keys():
if self.attr[key][0] == 'discrete':
D = True
if self.attr[key][0] == 'continuous':
C = True
# For downstream computational efficiency, determine if dataset is comprised of all discrete, all continuous, or a mix of discrete/continuous features.
if C and D:
self.data_type = 'mixed'
elif D and not C:
self.data_type = 'discrete'
elif C and not D:
self.data_type = 'continuous'
else:
raise ValueError('Invalid data type in data set.')
#--------------------------------------------------------------------------------------------------------------------
# Compute the distance array between all data points ----------------------------------------------------------------
# For downstream efficiency, separate features in dataset by type (i.e. discrete/continuous)
diffs, cidx, didx = self._dtype_array()
cdiffs = diffs[cidx] # max/min continuous value difference for continuous features.
xc = self._X[:, cidx] # Subset of continuous-valued feature data
xd = self._X[:, didx] # Subset of discrete-valued feature data
""" For efficiency, the distance array is computed more efficiently for data with no missing values.
This distance array will only be used to identify nearest neighbors. """
if self._missing_data_count > 0:
self._distance_array = self._distarray_missing(xc, xd, cdiffs)
else:
self._distance_array = self._distarray_no_missing(xc, xd)
if self.verbose:
elapsed = time.time() - start
print('Created distance array in {} seconds.'.format(elapsed))
print('Feature scoring under way ...')
start = time.time()
#--------------------------------------------------------------------------------------------------------------------
# Run remainder of algorithm (i.e. identification of 'neighbors' for each instance, and feature scoring).------------
# Stores feature importance scores for ReliefF or respective Relief-based algorithm.
self.feature_importances_ = self._run_algorithm()
# Delete the internal distance array because it is no longer needed
del self._distance_array
if self.verbose:
elapsed = time.time() - start
print('Completed scoring in {} seconds.'.format(elapsed))
# Compute indices of top features
self.top_features_ = np.argsort(self.feature_importances_)[::-1]
return self | Scikit-learn required: Computes the feature importance scores from the training data.
Parameters
----------
X: array-like {n_samples, n_features}
Training instances to compute the feature importance scores from
y: array-like {n_samples}
Training labels
Returns
-------
Copy of the ReliefF instance | train | https://github.com/EpistasisLab/scikit-rebate/blob/67dab51a7525fa5d076b059f1e6f8cff7481c1ef/skrebate/relieff.py#L87-L214 | null | class ReliefF(BaseEstimator):
"""Feature selection using data-mined expert knowledge.
Based on the ReliefF algorithm as introduced in:
Igor et al. Overcoming the myopia of inductive learning
algorithms with RELIEFF (1997), Applied Intelligence, 7(1), p39-55"""
"""Note that ReliefF class establishes core functionality that is inherited by all other Relief-based algorithms.
Assumes: * There are no missing values in the label/outcome/dependent variable.
* For ReliefF, the setting of k is <= to the number of instances that have the least frequent class label
(binary and multiclass endpoint data. """
def __init__(self, n_features_to_select=10, n_neighbors=100, discrete_threshold=10, verbose=False, n_jobs=1):
"""Sets up ReliefF to perform feature selection. Note that an approximation of the original 'Relief'
algorithm may be run by setting 'n_features_to_select' to 1. Also note that the original Relief parameter 'm'
is not included in this software. 'm' specifies the number of random training instances out of 'n' (total
training instances) used to update feature scores. Since scores are most representative when m=n, all
available training instances are utilized in all Relief-based algorithm score updates here. If the user
wishes to utilize a smaller 'm' in Relief-based scoring, simply pass any of these algorithms a subset of the
original training dataset samples.
Parameters
----------
n_features_to_select: int (default: 10)
the number of top features (according to the relieff score) to
retain after feature selection is applied.
n_neighbors: int or float (default: 100)
The number of neighbors to consider when assigning feature
importance scores. If a float number is provided, that percentage of
training samples is used as the number of neighbors.
More neighbors results in more accurate scores, but takes longer.
discrete_threshold: int (default: 10)
Value used to determine if a feature is discrete or continuous.
If the number of unique levels in a feature is > discrete_threshold, then it is
considered continuous, or discrete otherwise.
verbose: bool (default: False)
If True, output timing of distance array and scoring
n_jobs: int (default: 1)
The number of cores to dedicate to computing the scores with joblib.
Assigning this parameter to -1 will dedicate as many cores as are available on your system.
We recommend setting this parameter to -1 to speed up the algorithm as much as possible.
"""
self.n_features_to_select = n_features_to_select
self.n_neighbors = n_neighbors
self.discrete_threshold = discrete_threshold
self.verbose = verbose
self.n_jobs = n_jobs
#=========================================================================#
def fit(self, X, y):
"""Scikit-learn required: Computes the feature importance scores from the training data.
Parameters
----------
X: array-like {n_samples, n_features}
Training instances to compute the feature importance scores from
y: array-like {n_samples}
Training labels
Returns
-------
Copy of the ReliefF instance
"""
self._X = X # matrix of predictive variables ('independent variables')
self._y = y # vector of values for outcome variable ('dependent variable')
# Set up the properties for ReliefF -------------------------------------------------------------------------------------
self._datalen = len(self._X) # Number of training instances ('n')
""""Below: Handles special case where user requests that a proportion of training instances be neighbors for
ReliefF rather than a specified 'k' number of neighbors. Note that if k is specified, then k 'hits' and k
'misses' will be used to update feature scores. Thus total number of neighbors is 2k. If instead a proportion
is specified (say 0.1 out of 1000 instances) this represents the total number of neighbors (e.g. 100). In this
case, k would be set to 50 (i.e. 50 hits and 50 misses). """
if hasattr(self, 'n_neighbors') and type(self.n_neighbors) is float:
# Halve the number of neighbors because ReliefF uses n_neighbors matches and n_neighbors misses
self.n_neighbors = int(self.n_neighbors * self._datalen * 0.5)
# Number of unique outcome (label) values (used to determine outcome variable type)
self._label_list = list(set(self._y))
# Determine if label is discrete
discrete_label = (len(self._label_list) <= self.discrete_threshold)
# Identify label type (binary, multiclass, or continuous)
if discrete_label:
if len(self._label_list) == 2:
self._class_type = 'binary'
self.mcmap = 0
elif len(self._label_list) > 2:
self._class_type = 'multiclass'
self.mcmap = self._getMultiClassMap()
else:
raise ValueError('All labels are of the same class.')
else:
self._class_type = 'continuous'
self.mcmap = 0
# Training labels standard deviation -- only used if the training labels are continuous
self._labels_std = 0.
if len(self._label_list) > self.discrete_threshold:
self._labels_std = np.std(self._y, ddof=1)
self._num_attributes = len(self._X[0]) # Number of features in training data
# Number of missing data values in predictor variable matrix.
self._missing_data_count = np.isnan(self._X).sum()
"""Assign internal headers for the features (scikit-learn does not accept external headers from dataset):
The pre_normalize() function relies on the headers being ordered, e.g., X01, X02, etc.
If this is changed, then the sort in the pre_normalize() function needs to be adapted as well. """
xlen = len(self._X[0])
mxlen = len(str(xlen + 1))
self._headers = ['X{}'.format(str(i).zfill(mxlen)) for i in range(1, xlen + 1)]
start = time.time() # Runtime tracking
# Determine data types for all features/attributes in training data (i.e. discrete or continuous)
C = D = False
# Examines each feature and applies discrete_threshold to determine variable type.
self.attr = self._get_attribute_info()
for key in self.attr.keys():
if self.attr[key][0] == 'discrete':
D = True
if self.attr[key][0] == 'continuous':
C = True
# For downstream computational efficiency, determine if dataset is comprised of all discrete, all continuous, or a mix of discrete/continuous features.
if C and D:
self.data_type = 'mixed'
elif D and not C:
self.data_type = 'discrete'
elif C and not D:
self.data_type = 'continuous'
else:
raise ValueError('Invalid data type in data set.')
#--------------------------------------------------------------------------------------------------------------------
# Compute the distance array between all data points ----------------------------------------------------------------
# For downstream efficiency, separate features in dataset by type (i.e. discrete/continuous)
diffs, cidx, didx = self._dtype_array()
cdiffs = diffs[cidx] # max/min continuous value difference for continuous features.
xc = self._X[:, cidx] # Subset of continuous-valued feature data
xd = self._X[:, didx] # Subset of discrete-valued feature data
""" For efficiency, the distance array is computed more efficiently for data with no missing values.
This distance array will only be used to identify nearest neighbors. """
if self._missing_data_count > 0:
self._distance_array = self._distarray_missing(xc, xd, cdiffs)
else:
self._distance_array = self._distarray_no_missing(xc, xd)
if self.verbose:
elapsed = time.time() - start
print('Created distance array in {} seconds.'.format(elapsed))
print('Feature scoring under way ...')
start = time.time()
#--------------------------------------------------------------------------------------------------------------------
# Run remainder of algorithm (i.e. identification of 'neighbors' for each instance, and feature scoring).------------
# Stores feature importance scores for ReliefF or respective Relief-based algorithm.
self.feature_importances_ = self._run_algorithm()
# Delete the internal distance array because it is no longer needed
del self._distance_array
if self.verbose:
elapsed = time.time() - start
print('Completed scoring in {} seconds.'.format(elapsed))
# Compute indices of top features
self.top_features_ = np.argsort(self.feature_importances_)[::-1]
return self
#=========================================================================#
def transform(self, X):
"""Scikit-learn required: Reduces the feature set down to the top `n_features_to_select` features.
Parameters
----------
X: array-like {n_samples, n_features}
Feature matrix to perform feature selection on
Returns
-------
X_reduced: array-like {n_samples, n_features_to_select}
Reduced feature matrix
"""
if self._num_attributes < self.n_features_to_select:
raise ValueError('Number of features to select is larger than the number of features in the dataset.')
return X[:, self.top_features_[:self.n_features_to_select]]
#=========================================================================#
def fit_transform(self, X, y):
"""Scikit-learn required: Computes the feature importance scores from the training data, then reduces the feature set down to the top `n_features_to_select` features.
Parameters
----------
X: array-like {n_samples, n_features}
Training instances to compute the feature importance scores from
y: array-like {n_samples}
Training labels
Returns
-------
X_reduced: array-like {n_samples, n_features_to_select}
Reduced feature matrix
"""
self.fit(X, y)
return self.transform(X)
######################### SUPPORTING FUNCTIONS ###########################
def _getMultiClassMap(self):
""" Relief algorithms handle the scoring updates a little differently for data with multiclass outcomes. In ReBATE we implement multiclass scoring in line with
the strategy described by Kononenko 1994 within the RELIEF-F variant which was suggested to outperform the RELIEF-E multiclass variant. This strategy weights
score updates derived from misses of different classes by the class frequency observed in the training data. 'The idea is that the algorithm should estimate the
ability of attributes to separate each pair of classes regardless of which two classes are closest to each other'. In this method we prepare for this normalization
by creating a class dictionary, and storing respective class frequencies. This is needed for ReliefF multiclass score update normalizations. """
mcmap = dict()
for i in range(self._datalen):
if(self._y[i] not in mcmap):
mcmap[self._y[i]] = 0
else:
mcmap[self._y[i]] += 1
for each in self._label_list:
mcmap[each] = mcmap[each]/float(self._datalen)
return mcmap
def _get_attribute_info(self):
""" Preprocess the training dataset to identify which features/attributes are discrete vs. continuous valued. Ignores missing values in this determination."""
attr = dict()
d = 0
limit = self.discrete_threshold
w = self._X.transpose()
for idx in range(len(w)):
h = self._headers[idx]
z = w[idx]
if self._missing_data_count > 0:
z = z[np.logical_not(np.isnan(z))] # Exclude any missing values from consideration
zlen = len(np.unique(z))
if zlen <= limit:
attr[h] = ('discrete', 0, 0, 0, 0)
d += 1
else:
mx = np.max(z)
mn = np.min(z)
sd = np.std(z)
attr[h] = ('continuous', mx, mn, mx - mn, sd)
# For each feature/attribute we store (type, max value, min value, max min difference, average, standard deviation) - the latter three values are set to zero if feature is discrete.
return attr
def _distarray_no_missing(self, xc, xd):
"""Distance array calculation for data with no missing values. The 'pdist() function outputs a condense distance array, and squareform() converts this vector-form
distance vector to a square-form, redundant distance matrix.
*This could be a target for saving memory in the future, by not needing to expand to the redundant square-form matrix. """
from scipy.spatial.distance import pdist, squareform
#------------------------------------------#
def pre_normalize(x):
"""Normalizes continuous features so they are in the same range (0 to 1)"""
idx = 0
# goes through all named features (doesn really need to) this method is only applied to continuous features
for i in sorted(self.attr.keys()):
if self.attr[i][0] == 'discrete':
continue
cmin = self.attr[i][2]
diff = self.attr[i][3]
x[:, idx] -= cmin
x[:, idx] /= diff
idx += 1
return x
#------------------------------------------#
if self.data_type == 'discrete': # discrete features only
return squareform(pdist(self._X, metric='hamming'))
elif self.data_type == 'mixed': # mix of discrete and continuous features
d_dist = squareform(pdist(xd, metric='hamming'))
# Cityblock is also known as Manhattan distance
c_dist = squareform(pdist(pre_normalize(xc), metric='cityblock'))
return np.add(d_dist, c_dist) / self._num_attributes
else: #continuous features only
#xc = pre_normalize(xc)
return squareform(pdist(pre_normalize(xc), metric='cityblock'))
#==================================================================#
def _dtype_array(self):
"""Return mask for discrete(0)/continuous(1) attributes and their indices. Return array of max/min diffs of attributes."""
attrtype = []
attrdiff = []
for key in self._headers:
if self.attr[key][0] == 'continuous':
attrtype.append(1)
else:
attrtype.append(0)
attrdiff.append(self.attr[key][3])
attrtype = np.array(attrtype)
cidx = np.where(attrtype == 1)[0]
didx = np.where(attrtype == 0)[0]
attrdiff = np.array(attrdiff)
return attrdiff, cidx, didx
#==================================================================#
def _distarray_missing(self, xc, xd, cdiffs):
"""Distance array calculation for data with missing values"""
cindices = []
dindices = []
# Get Boolean mask locating missing values for continuous and discrete features separately. These correspond to xc and xd respectively.
for i in range(self._datalen):
cindices.append(np.where(np.isnan(xc[i]))[0])
dindices.append(np.where(np.isnan(xd[i]))[0])
if self.n_jobs != 1:
dist_array = Parallel(n_jobs=self.n_jobs)(delayed(get_row_missing)(
xc, xd, cdiffs, index, cindices, dindices) for index in range(self._datalen))
else:
# For each instance calculate distance from all other instances (in non-redundant manner) (i.e. computes triangle, and puts zeros in for rest to form square).
dist_array = [get_row_missing(xc, xd, cdiffs, index, cindices, dindices)
for index in range(self._datalen)]
return np.array(dist_array)
#==================================================================#
############################# ReliefF ############################################
def _find_neighbors(self, inst):
    """Identify the k nearest hits and k nearest misses of a target instance.

    Neighbor selection depends on the endpoint type: binary and continuous
    endpoints collect k hits plus k misses, while multiclass endpoints
    collect k misses per non-target class (Kononenko's RELIEF-F strategy).
    Returns an array of neighbor instance indices.

    Fixes relative to the previous version:
    * the target instance itself could be appended as a 'hit' once the real
      candidates ran out (its sys.maxsize distance only sorts it last); it
      is now skipped explicitly, matching the stated intent.
    * the multiclass early-exit was unreachable because miss_count included
      the target's own class, whose count could never grow.
    """
    # Distance from the target to every instance.  The distance array only
    # stores the lower triangle, so always index it as [larger][smaller].
    # The target's own slot gets sys.maxsize so it sorts last.
    dist_vect = []
    for j in range(self._datalen):
        if inst == j:
            dist_vect.append(sys.maxsize)
        elif inst < j:
            dist_vect.append(self._distance_array[j][inst])
        else:
            dist_vect.append(self._distance_array[inst][j])
    order = np.argsort(np.array(dist_vect))

    nn_list = []
    match_count = 0
    if self._class_type == 'multiclass':
        # One miss quota per *other* class only.
        miss_count = {label: 0 for label in self._label_list if label != self._y[inst]}
        for nn_index in order:
            if nn_index == inst:
                continue  # never select the target as its own neighbor
            if self._y[inst] == self._y[nn_index]:  # hit
                if match_count < self.n_neighbors:
                    nn_list.append(nn_index)
                    match_count += 1
            else:  # miss belonging to some other class
                label = self._y[nn_index]
                if miss_count[label] < self.n_neighbors:
                    nn_list.append(nn_index)
                    miss_count[label] += 1
            if match_count >= self.n_neighbors and all(
                    v >= self.n_neighbors for v in miss_count.values()):
                break
    else:
        # Binary endpoints compare labels directly; continuous endpoints
        # treat labels closer than one standard deviation as hits.
        miss_count = 0
        for nn_index in order:
            if nn_index == inst:
                continue  # never select the target as its own neighbor
            if self._class_type == 'binary':
                is_hit = self._y[inst] == self._y[nn_index]
            else:
                is_hit = abs(self._y[inst] - self._y[nn_index]) < self._labels_std
            if is_hit:
                if match_count < self.n_neighbors:
                    nn_list.append(nn_index)
                    match_count += 1
            else:
                if miss_count < self.n_neighbors:
                    nn_list.append(nn_index)
                    miss_count += 1
            if match_count >= self.n_neighbors and miss_count >= self.n_neighbors:
                break
    return np.array(nn_list)
def _run_algorithm(self):
    """Score all features: locate each instance's neighbors, then sum the
    per-instance ReliefF score contributions over the whole data set."""
    # Lazy map: each neighbor list is computed as the scoring loop consumes it.
    neighbor_lists = map(self._find_neighbors, range(self._datalen))
    missing_mask = np.isnan(self._X)  # boolean mask of missing data values
    per_instance = Parallel(n_jobs=self.n_jobs)(
        delayed(ReliefF_compute_scores)(
            idx, self.attr, missing_mask, self._num_attributes, self.mcmap,
            nn, self._headers, self._class_type, self._X, self._y,
            self._labels_std, self.data_type)
        for idx, nn in enumerate(neighbor_lists))
    return np.array(np.sum(per_instance, axis=0))
|
EpistasisLab/scikit-rebate | skrebate/relieff.py | ReliefF.transform | python | def transform(self, X):
if self._num_attributes < self.n_features_to_select:
raise ValueError('Number of features to select is larger than the number of features in the dataset.')
return X[:, self.top_features_[:self.n_features_to_select]] | Scikit-learn required: Reduces the feature set down to the top `n_features_to_select` features.
Parameters
----------
X: array-like {n_samples, n_features}
Feature matrix to perform feature selection on
Returns
-------
X_reduced: array-like {n_samples, n_features_to_select}
Reduced feature matrix | train | https://github.com/EpistasisLab/scikit-rebate/blob/67dab51a7525fa5d076b059f1e6f8cff7481c1ef/skrebate/relieff.py#L217-L234 | null | class ReliefF(BaseEstimator):
"""Feature selection using data-mined expert knowledge.
Based on the ReliefF algorithm as introduced in:
Igor et al. Overcoming the myopia of inductive learning
algorithms with RELIEFF (1997), Applied Intelligence, 7(1), p39-55"""
"""Note that ReliefF class establishes core functionality that is inherited by all other Relief-based algorithms.
Assumes: * There are no missing values in the label/outcome/dependent variable.
* For ReliefF, the setting of k must be <= the number of instances that have the least frequent class label
(binary and multiclass endpoint data). """
def __init__(self, n_features_to_select=10, n_neighbors=100, discrete_threshold=10, verbose=False, n_jobs=1):
    """Configure a ReliefF feature selector.

    Setting n_features_to_select to 1 approximates the original Relief
    algorithm.  The classic Relief parameter 'm' (number of sampled
    training instances) is intentionally absent: every training instance
    contributes to the score updates, i.e. the m == n case.  To mimic a
    smaller m, fit on a subsample of the training data instead.

    Parameters
    ----------
    n_features_to_select : int (default 10)
        How many of the top-scoring features transform() keeps.
    n_neighbors : int or float (default 100)
        Number of neighbors used when scoring features.  A float is read
        as a fraction of the training set to use as the total neighbor
        count.  More neighbors gives more stable scores but runs slower.
    discrete_threshold : int (default 10)
        A feature with more than this many unique values is treated as
        continuous, otherwise as discrete.
    verbose : bool (default False)
        Print timing information for distance building and scoring.
    n_jobs : int (default 1)
        Core count handed to joblib; -1 uses every available core (the
        recommended setting for speed).
    """
    self.n_features_to_select = n_features_to_select
    self.n_neighbors = n_neighbors
    self.discrete_threshold = discrete_threshold
    self.verbose = verbose
    self.n_jobs = n_jobs
#=========================================================================#
def fit(self, X, y):
    """Scikit-learn required: Computes the feature importance scores from the training data.

    Parameters
    ----------
    X: array-like {n_samples, n_features}
        Training instances to compute the feature importance scores from
    y: array-like {n_samples}
        Training labels

    Returns
    -------
    Copy of the ReliefF instance (self, so calls can be chained)
    """
    self._X = X  # matrix of predictive variables ('independent variables')
    self._y = y  # vector of values for outcome variable ('dependent variable')

    # Set up the properties for ReliefF ---------------------------------
    self._datalen = len(self._X)  # Number of training instances ('n')
    """"Below: Handles special case where user requests that a proportion of training instances be neighbors for
    ReliefF rather than a specified 'k' number of neighbors. Note that if k is specified, then k 'hits' and k
    'misses' will be used to update feature scores. Thus total number of neighbors is 2k. If instead a proportion
    is specified (say 0.1 out of 1000 instances) this represents the total number of neighbors (e.g. 100). In this
    case, k would be set to 50 (i.e. 50 hits and 50 misses). """
    if hasattr(self, 'n_neighbors') and type(self.n_neighbors) is float:
        # Halve the number of neighbors because ReliefF uses n_neighbors matches and n_neighbors misses
        self.n_neighbors = int(self.n_neighbors * self._datalen * 0.5)

    # Number of unique outcome (label) values (used to determine outcome variable type)
    self._label_list = list(set(self._y))
    # Determine if label is discrete
    discrete_label = (len(self._label_list) <= self.discrete_threshold)

    # Identify label type (binary, multiclass, or continuous)
    if discrete_label:
        if len(self._label_list) == 2:
            self._class_type = 'binary'
            self.mcmap = 0
        elif len(self._label_list) > 2:
            self._class_type = 'multiclass'
            # Class-frequency map used to normalize multiclass miss updates.
            self.mcmap = self._getMultiClassMap()
        else:
            raise ValueError('All labels are of the same class.')
    else:
        self._class_type = 'continuous'
        self.mcmap = 0

    # Training labels standard deviation -- only used if the training labels are continuous
    self._labels_std = 0.
    if len(self._label_list) > self.discrete_threshold:
        self._labels_std = np.std(self._y, ddof=1)

    self._num_attributes = len(self._X[0])  # Number of features in training data
    # Number of missing data values in predictor variable matrix.
    # NOTE(review): np.isnan requires a float dtype X; integer/object
    # arrays will fail here — confirm callers pass float arrays.
    self._missing_data_count = np.isnan(self._X).sum()

    """Assign internal headers for the features (scikit-learn does not accept external headers from dataset):
    The pre_normalize() function relies on the headers being ordered, e.g., X01, X02, etc.
    If this is changed, then the sort in the pre_normalize() function needs to be adapted as well. """
    xlen = len(self._X[0])
    mxlen = len(str(xlen + 1))
    self._headers = ['X{}'.format(str(i).zfill(mxlen)) for i in range(1, xlen + 1)]

    start = time.time()  # Runtime tracking

    # Determine data types for all features/attributes in training data (i.e. discrete or continuous)
    C = D = False
    # Examines each feature and applies discrete_threshold to determine variable type.
    self.attr = self._get_attribute_info()
    for key in self.attr.keys():
        if self.attr[key][0] == 'discrete':
            D = True
        if self.attr[key][0] == 'continuous':
            C = True
    # For downstream computational efficiency, determine if dataset is comprised of all discrete, all continuous, or a mix of discrete/continuous features.
    if C and D:
        self.data_type = 'mixed'
    elif D and not C:
        self.data_type = 'discrete'
    elif C and not D:
        self.data_type = 'continuous'
    else:
        raise ValueError('Invalid data type in data set.')

    # Compute the distance array between all data points ----------------
    # For downstream efficiency, separate features in dataset by type (i.e. discrete/continuous)
    diffs, cidx, didx = self._dtype_array()
    cdiffs = diffs[cidx]  # max/min continuous value difference for continuous features.
    xc = self._X[:, cidx]  # Subset of continuous-valued feature data
    xd = self._X[:, didx]  # Subset of discrete-valued feature data

    """ For efficiency, the distance array is computed more efficiently for data with no missing values.
    This distance array will only be used to identify nearest neighbors. """
    if self._missing_data_count > 0:
        self._distance_array = self._distarray_missing(xc, xd, cdiffs)
    else:
        self._distance_array = self._distarray_no_missing(xc, xd)

    if self.verbose:
        elapsed = time.time() - start
        print('Created distance array in {} seconds.'.format(elapsed))
        print('Feature scoring under way ...')

    start = time.time()

    # Run remainder of algorithm (i.e. identification of 'neighbors' for each instance, and feature scoring).
    # Stores feature importance scores for ReliefF or respective Relief-based algorithm.
    self.feature_importances_ = self._run_algorithm()
    # Delete the internal distance array because it is no longer needed
    del self._distance_array

    if self.verbose:
        elapsed = time.time() - start
        print('Completed scoring in {} seconds.'.format(elapsed))

    # Compute indices of top features (best score first)
    self.top_features_ = np.argsort(self.feature_importances_)[::-1]
    return self
#=========================================================================#
def transform(self, X):
    """Scikit-learn required: keep only the n_features_to_select best features.

    Parameters
    ----------
    X: array-like {n_samples, n_features}
        Feature matrix to reduce; must have the same columns as the fit data.

    Returns
    -------
    array-like {n_samples, n_features_to_select}
        X restricted to the top-ranked feature columns.
    """
    # fit() must have been called first so top_features_ exists.
    if self.n_features_to_select > self._num_attributes:
        raise ValueError('Number of features to select is larger than the number of features in the dataset.')
    selected = self.top_features_[:self.n_features_to_select]
    return X[:, selected]
#=========================================================================#
def fit_transform(self, X, y):
    """Scikit-learn required: fit to (X, y), then return the reduced matrix.

    Parameters
    ----------
    X: array-like {n_samples, n_features}
        Training instances to score and then reduce
    y: array-like {n_samples}
        Training labels

    Returns
    -------
    array-like {n_samples, n_features_to_select}
        X restricted to the top-ranked feature columns.
    """
    # fit() returns self, so the two steps chain directly.
    return self.fit(X, y).transform(X)
######################### SUPPORTING FUNCTIONS ###########################
def _getMultiClassMap(self):
    """Build the class-frequency map used for multiclass score normalization.

    ReBATE follows Kononenko (1994)'s RELIEF-F multiclass strategy: score
    updates from misses of a given class are weighted by that class's
    observed frequency, so the algorithm estimates how well a feature
    separates every pair of classes, not just the closest pair.

    Returns a dict mapping each class label to its relative frequency in
    the training labels.

    Fix: the previous version initialized the first occurrence of each
    label to 0 instead of 1, so every frequency came out as (count-1)/n.
    """
    mcmap = dict()
    for i in range(self._datalen):
        label = self._y[i]
        mcmap[label] = mcmap.get(label, 0) + 1
    for each in self._label_list:
        mcmap[each] = mcmap[each] / float(self._datalen)
    return mcmap
def _get_attribute_info(self):
    """Classify every feature as discrete or continuous, ignoring NaNs.

    A feature with at most discrete_threshold unique (non-missing) values
    is discrete.  Returns a dict keyed by internal feature name; each
    value is a tuple (type, max, min, max-min difference, standard
    deviation), with the numeric entries all zero for discrete features.
    (The previous comment claimed an average was also stored — it is not;
    an unused counter variable was removed as well.)
    """
    attr = dict()
    columns = self._X.transpose()
    for col_idx in range(len(columns)):
        name = self._headers[col_idx]
        column = columns[col_idx]
        if self._missing_data_count > 0:
            # Missing values must not influence the unique-value count
            # or the summary statistics.
            column = column[np.logical_not(np.isnan(column))]
        if len(np.unique(column)) <= self.discrete_threshold:
            attr[name] = ('discrete', 0, 0, 0, 0)
        else:
            hi = np.max(column)
            lo = np.min(column)
            attr[name] = ('continuous', hi, lo, hi - lo, np.std(column))
    return attr
def _distarray_no_missing(self, xc, xd):
    """Distance array calculation for data with no missing values. The 'pdist() function outputs a condense distance array, and squareform() converts this vector-form
    distance vector to a square-form, redundant distance matrix.
    *This could be a target for saving memory in the future, by not needing to expand to the redundant square-form matrix. """
    from scipy.spatial.distance import pdist, squareform
    #------------------------------------------#

    def pre_normalize(x):
        """Normalizes continuous features so they are in the same range (0 to 1)"""
        idx = 0
        # Iterates over feature names in sorted order; the zero-padded
        # X01, X02, ... header scheme guarantees sorted order == column
        # order, so idx tracks the continuous columns of x correctly.
        for i in sorted(self.attr.keys()):
            if self.attr[i][0] == 'discrete':
                continue
            cmin = self.attr[i][2]
            diff = self.attr[i][3]
            # NOTE(review): rescales x in place; callers pass xc, so the
            # caller's continuous sub-matrix is mutated here.
            x[:, idx] -= cmin
            x[:, idx] /= diff
            idx += 1
        return x
    #------------------------------------------#
    if self.data_type == 'discrete':  # discrete features only
        return squareform(pdist(self._X, metric='hamming'))
    elif self.data_type == 'mixed':  # mix of discrete and continuous features
        d_dist = squareform(pdist(xd, metric='hamming'))
        # Cityblock is also known as Manhattan distance
        c_dist = squareform(pdist(pre_normalize(xc), metric='cityblock'))
        # Dividing by the total feature count puts the hamming and
        # cityblock contributions on a comparable per-feature scale.
        return np.add(d_dist, c_dist) / self._num_attributes
    else:  # continuous features only
        # xc = pre_normalize(xc)
        return squareform(pdist(pre_normalize(xc), metric='cityblock'))
#==================================================================#
def _dtype_array(self):
    """Partition feature indices by type and collect per-feature value ranges.

    Returns a tuple (attrdiff, cidx, didx) where attrdiff holds the
    max-min difference recorded for every feature (0 for discrete ones),
    cidx the indices of continuous features, and didx the indices of
    discrete features, all as numpy arrays.
    """
    type_flags = np.array(
        [1 if self.attr[name][0] == 'continuous' else 0 for name in self._headers])
    diffs = np.array([self.attr[name][3] for name in self._headers])
    continuous_idx = np.where(type_flags == 1)[0]
    discrete_idx = np.where(type_flags == 0)[0]
    return diffs, continuous_idx, discrete_idx
#==================================================================#
def _distarray_missing(self, xc, xd, cdiffs):
    """Build the pairwise distance matrix when the data contain missing values.

    xc/xd are the continuous/discrete feature sub-matrices and cdiffs the
    max-min ranges of the continuous features.  NaN positions are located
    per instance up front so that get_row_missing() can skip them.
    """
    # Per-instance column indices of NaN entries, split by feature type.
    cont_missing = [np.where(np.isnan(xc[i]))[0] for i in range(self._datalen)]
    disc_missing = [np.where(np.isnan(xd[i]))[0] for i in range(self._datalen)]
    # Each row holds distances to all lower-indexed instances (lower
    # triangle); remaining entries are zero, forming a square matrix.
    if self.n_jobs == 1:
        rows = [get_row_missing(xc, xd, cdiffs, i, cont_missing, disc_missing)
                for i in range(self._datalen)]
    else:
        rows = Parallel(n_jobs=self.n_jobs)(
            delayed(get_row_missing)(xc, xd, cdiffs, i, cont_missing, disc_missing)
            for i in range(self._datalen))
    return np.array(rows)
#==================================================================#
############################# ReliefF ############################################
def _find_neighbors(self, inst):
    """Identify the k nearest hits and k nearest misses of a target instance.

    Neighbor selection depends on the endpoint type: binary and continuous
    endpoints collect k hits plus k misses, while multiclass endpoints
    collect k misses per non-target class (Kononenko's RELIEF-F strategy).
    Returns an array of neighbor instance indices.

    Fixes relative to the previous version:
    * the target instance itself could be appended as a 'hit' once the real
      candidates ran out (its sys.maxsize distance only sorts it last); it
      is now skipped explicitly, matching the stated intent.
    * the multiclass early-exit was unreachable because miss_count included
      the target's own class, whose count could never grow.
    """
    # Distance from the target to every instance.  The distance array only
    # stores the lower triangle, so always index it as [larger][smaller].
    # The target's own slot gets sys.maxsize so it sorts last.
    dist_vect = []
    for j in range(self._datalen):
        if inst == j:
            dist_vect.append(sys.maxsize)
        elif inst < j:
            dist_vect.append(self._distance_array[j][inst])
        else:
            dist_vect.append(self._distance_array[inst][j])
    order = np.argsort(np.array(dist_vect))

    nn_list = []
    match_count = 0
    if self._class_type == 'multiclass':
        # One miss quota per *other* class only.
        miss_count = {label: 0 for label in self._label_list if label != self._y[inst]}
        for nn_index in order:
            if nn_index == inst:
                continue  # never select the target as its own neighbor
            if self._y[inst] == self._y[nn_index]:  # hit
                if match_count < self.n_neighbors:
                    nn_list.append(nn_index)
                    match_count += 1
            else:  # miss belonging to some other class
                label = self._y[nn_index]
                if miss_count[label] < self.n_neighbors:
                    nn_list.append(nn_index)
                    miss_count[label] += 1
            if match_count >= self.n_neighbors and all(
                    v >= self.n_neighbors for v in miss_count.values()):
                break
    else:
        # Binary endpoints compare labels directly; continuous endpoints
        # treat labels closer than one standard deviation as hits.
        miss_count = 0
        for nn_index in order:
            if nn_index == inst:
                continue  # never select the target as its own neighbor
            if self._class_type == 'binary':
                is_hit = self._y[inst] == self._y[nn_index]
            else:
                is_hit = abs(self._y[inst] - self._y[nn_index]) < self._labels_std
            if is_hit:
                if match_count < self.n_neighbors:
                    nn_list.append(nn_index)
                    match_count += 1
            else:
                if miss_count < self.n_neighbors:
                    nn_list.append(nn_index)
                    miss_count += 1
            if match_count >= self.n_neighbors and miss_count >= self.n_neighbors:
                break
    return np.array(nn_list)
def _run_algorithm(self):
    """Score all features: locate each instance's neighbors, then sum the
    per-instance ReliefF score contributions over the whole data set."""
    # Lazy map: each neighbor list is computed as the scoring loop consumes it.
    neighbor_lists = map(self._find_neighbors, range(self._datalen))
    missing_mask = np.isnan(self._X)  # boolean mask of missing data values
    per_instance = Parallel(n_jobs=self.n_jobs)(
        delayed(ReliefF_compute_scores)(
            idx, self.attr, missing_mask, self._num_attributes, self.mcmap,
            nn, self._headers, self._class_type, self._X, self._y,
            self._labels_std, self.data_type)
        for idx, nn in enumerate(neighbor_lists))
    return np.array(np.sum(per_instance, axis=0))
|
EpistasisLab/scikit-rebate | skrebate/relieff.py | ReliefF._getMultiClassMap | python | def _getMultiClassMap(self):
mcmap = dict()
for i in range(self._datalen):
if(self._y[i] not in mcmap):
mcmap[self._y[i]] = 0
else:
mcmap[self._y[i]] += 1
for each in self._label_list:
mcmap[each] = mcmap[each]/float(self._datalen)
return mcmap | Relief algorithms handle the scoring updates a little differently for data with multiclass outcomes. In ReBATE we implement multiclass scoring in line with
the strategy described by Kononenko 1994 within the RELIEF-F variant which was suggested to outperform the RELIEF-E multiclass variant. This strategy weights
score updates derived from misses of different classes by the class frequency observed in the training data. 'The idea is that the algorithm should estimate the
ability of attributes to separate each pair of classes regardless of which two classes are closest to each other'. In this method we prepare for this normalization
by creating a class dictionary, and storing respective class frequencies. This is needed for ReliefF multiclass score update normalizations. | train | https://github.com/EpistasisLab/scikit-rebate/blob/67dab51a7525fa5d076b059f1e6f8cff7481c1ef/skrebate/relieff.py#L258-L275 | null | class ReliefF(BaseEstimator):
"""Feature selection using data-mined expert knowledge.
Based on the ReliefF algorithm as introduced in:
Igor et al. Overcoming the myopia of inductive learning
algorithms with RELIEFF (1997), Applied Intelligence, 7(1), p39-55"""
"""Note that ReliefF class establishes core functionality that is inherited by all other Relief-based algorithms.
Assumes: * There are no missing values in the label/outcome/dependent variable.
* For ReliefF, the setting of k must be <= the number of instances that have the least frequent class label
(binary and multiclass endpoint data). """
def __init__(self, n_features_to_select=10, n_neighbors=100, discrete_threshold=10, verbose=False, n_jobs=1):
    """Configure a ReliefF feature selector.

    Setting n_features_to_select to 1 approximates the original Relief
    algorithm.  The classic Relief parameter 'm' (number of sampled
    training instances) is intentionally absent: every training instance
    contributes to the score updates, i.e. the m == n case.  To mimic a
    smaller m, fit on a subsample of the training data instead.

    Parameters
    ----------
    n_features_to_select : int (default 10)
        How many of the top-scoring features transform() keeps.
    n_neighbors : int or float (default 100)
        Number of neighbors used when scoring features.  A float is read
        as a fraction of the training set to use as the total neighbor
        count.  More neighbors gives more stable scores but runs slower.
    discrete_threshold : int (default 10)
        A feature with more than this many unique values is treated as
        continuous, otherwise as discrete.
    verbose : bool (default False)
        Print timing information for distance building and scoring.
    n_jobs : int (default 1)
        Core count handed to joblib; -1 uses every available core (the
        recommended setting for speed).
    """
    self.n_features_to_select = n_features_to_select
    self.n_neighbors = n_neighbors
    self.discrete_threshold = discrete_threshold
    self.verbose = verbose
    self.n_jobs = n_jobs
#=========================================================================#
def fit(self, X, y):
    """Scikit-learn required: Computes the feature importance scores from the training data.

    Parameters
    ----------
    X: array-like {n_samples, n_features}
        Training instances to compute the feature importance scores from
    y: array-like {n_samples}
        Training labels

    Returns
    -------
    Copy of the ReliefF instance (self, so calls can be chained)
    """
    self._X = X  # matrix of predictive variables ('independent variables')
    self._y = y  # vector of values for outcome variable ('dependent variable')

    # Set up the properties for ReliefF ---------------------------------
    self._datalen = len(self._X)  # Number of training instances ('n')
    """"Below: Handles special case where user requests that a proportion of training instances be neighbors for
    ReliefF rather than a specified 'k' number of neighbors. Note that if k is specified, then k 'hits' and k
    'misses' will be used to update feature scores. Thus total number of neighbors is 2k. If instead a proportion
    is specified (say 0.1 out of 1000 instances) this represents the total number of neighbors (e.g. 100). In this
    case, k would be set to 50 (i.e. 50 hits and 50 misses). """
    if hasattr(self, 'n_neighbors') and type(self.n_neighbors) is float:
        # Halve the number of neighbors because ReliefF uses n_neighbors matches and n_neighbors misses
        self.n_neighbors = int(self.n_neighbors * self._datalen * 0.5)

    # Number of unique outcome (label) values (used to determine outcome variable type)
    self._label_list = list(set(self._y))
    # Determine if label is discrete
    discrete_label = (len(self._label_list) <= self.discrete_threshold)

    # Identify label type (binary, multiclass, or continuous)
    if discrete_label:
        if len(self._label_list) == 2:
            self._class_type = 'binary'
            self.mcmap = 0
        elif len(self._label_list) > 2:
            self._class_type = 'multiclass'
            # Class-frequency map used to normalize multiclass miss updates.
            self.mcmap = self._getMultiClassMap()
        else:
            raise ValueError('All labels are of the same class.')
    else:
        self._class_type = 'continuous'
        self.mcmap = 0

    # Training labels standard deviation -- only used if the training labels are continuous
    self._labels_std = 0.
    if len(self._label_list) > self.discrete_threshold:
        self._labels_std = np.std(self._y, ddof=1)

    self._num_attributes = len(self._X[0])  # Number of features in training data
    # Number of missing data values in predictor variable matrix.
    # NOTE(review): np.isnan requires a float dtype X; integer/object
    # arrays will fail here — confirm callers pass float arrays.
    self._missing_data_count = np.isnan(self._X).sum()

    """Assign internal headers for the features (scikit-learn does not accept external headers from dataset):
    The pre_normalize() function relies on the headers being ordered, e.g., X01, X02, etc.
    If this is changed, then the sort in the pre_normalize() function needs to be adapted as well. """
    xlen = len(self._X[0])
    mxlen = len(str(xlen + 1))
    self._headers = ['X{}'.format(str(i).zfill(mxlen)) for i in range(1, xlen + 1)]

    start = time.time()  # Runtime tracking

    # Determine data types for all features/attributes in training data (i.e. discrete or continuous)
    C = D = False
    # Examines each feature and applies discrete_threshold to determine variable type.
    self.attr = self._get_attribute_info()
    for key in self.attr.keys():
        if self.attr[key][0] == 'discrete':
            D = True
        if self.attr[key][0] == 'continuous':
            C = True
    # For downstream computational efficiency, determine if dataset is comprised of all discrete, all continuous, or a mix of discrete/continuous features.
    if C and D:
        self.data_type = 'mixed'
    elif D and not C:
        self.data_type = 'discrete'
    elif C and not D:
        self.data_type = 'continuous'
    else:
        raise ValueError('Invalid data type in data set.')

    # Compute the distance array between all data points ----------------
    # For downstream efficiency, separate features in dataset by type (i.e. discrete/continuous)
    diffs, cidx, didx = self._dtype_array()
    cdiffs = diffs[cidx]  # max/min continuous value difference for continuous features.
    xc = self._X[:, cidx]  # Subset of continuous-valued feature data
    xd = self._X[:, didx]  # Subset of discrete-valued feature data

    """ For efficiency, the distance array is computed more efficiently for data with no missing values.
    This distance array will only be used to identify nearest neighbors. """
    if self._missing_data_count > 0:
        self._distance_array = self._distarray_missing(xc, xd, cdiffs)
    else:
        self._distance_array = self._distarray_no_missing(xc, xd)

    if self.verbose:
        elapsed = time.time() - start
        print('Created distance array in {} seconds.'.format(elapsed))
        print('Feature scoring under way ...')

    start = time.time()

    # Run remainder of algorithm (i.e. identification of 'neighbors' for each instance, and feature scoring).
    # Stores feature importance scores for ReliefF or respective Relief-based algorithm.
    self.feature_importances_ = self._run_algorithm()
    # Delete the internal distance array because it is no longer needed
    del self._distance_array

    if self.verbose:
        elapsed = time.time() - start
        print('Completed scoring in {} seconds.'.format(elapsed))

    # Compute indices of top features (best score first)
    self.top_features_ = np.argsort(self.feature_importances_)[::-1]
    return self
#=========================================================================#
def transform(self, X):
    """Scikit-learn required: keep only the n_features_to_select best features.

    Parameters
    ----------
    X: array-like {n_samples, n_features}
        Feature matrix to reduce; must have the same columns as the fit data.

    Returns
    -------
    array-like {n_samples, n_features_to_select}
        X restricted to the top-ranked feature columns.
    """
    # fit() must have been called first so top_features_ exists.
    if self.n_features_to_select > self._num_attributes:
        raise ValueError('Number of features to select is larger than the number of features in the dataset.')
    selected = self.top_features_[:self.n_features_to_select]
    return X[:, selected]
#=========================================================================#
def fit_transform(self, X, y):
    """Scikit-learn required: fit to (X, y), then return the reduced matrix.

    Parameters
    ----------
    X: array-like {n_samples, n_features}
        Training instances to score and then reduce
    y: array-like {n_samples}
        Training labels

    Returns
    -------
    array-like {n_samples, n_features_to_select}
        X restricted to the top-ranked feature columns.
    """
    # fit() returns self, so the two steps chain directly.
    return self.fit(X, y).transform(X)
######################### SUPPORTING FUNCTIONS ###########################
def _get_attribute_info(self):
    """Classify every feature as discrete or continuous, ignoring NaNs.

    A feature with at most discrete_threshold unique (non-missing) values
    is discrete.  Returns a dict keyed by internal feature name; each
    value is a tuple (type, max, min, max-min difference, standard
    deviation), with the numeric entries all zero for discrete features.
    (The previous comment claimed an average was also stored — it is not;
    an unused counter variable was removed as well.)
    """
    attr = dict()
    columns = self._X.transpose()
    for col_idx in range(len(columns)):
        name = self._headers[col_idx]
        column = columns[col_idx]
        if self._missing_data_count > 0:
            # Missing values must not influence the unique-value count
            # or the summary statistics.
            column = column[np.logical_not(np.isnan(column))]
        if len(np.unique(column)) <= self.discrete_threshold:
            attr[name] = ('discrete', 0, 0, 0, 0)
        else:
            hi = np.max(column)
            lo = np.min(column)
            attr[name] = ('continuous', hi, lo, hi - lo, np.std(column))
    return attr
def _distarray_no_missing(self, xc, xd):
    """Compute the pairwise instance distance matrix for data with no missing
    values. pdist() produces a condensed distance vector; squareform() expands
    it to the redundant square matrix used later for neighbor lookups.
    *This could be a target for saving memory in the future, by working with
    the condensed form instead of the redundant square matrix."""
    from scipy.spatial.distance import pdist, squareform

    # ------------------------------------------------------------------ #
    def rescale_continuous(mat):
        """Scale each continuous feature column of `mat` into [0, 1] in place."""
        col = 0
        # Headers sort in the same order as the continuous columns of `mat`;
        # discrete features are skipped and occupy no column here.
        for header in sorted(self.attr.keys()):
            if self.attr[header][0] == 'discrete':
                continue
            mat[:, col] -= self.attr[header][2]  # shift by the feature minimum
            mat[:, col] /= self.attr[header][3]  # divide by the max-min range
            col += 1
        return mat
    # ------------------------------------------------------------------ #

    if self.data_type == 'discrete':
        # All features discrete: hamming distance over the full matrix.
        return squareform(pdist(self._X, metric='hamming'))
    if self.data_type == 'mixed':
        # Combine hamming (discrete part) with normalized Manhattan
        # ("cityblock") distance over the continuous part.
        d_dist = squareform(pdist(xd, metric='hamming'))
        c_dist = squareform(pdist(rescale_continuous(xc), metric='cityblock'))
        return np.add(d_dist, c_dist) / self._num_attributes
    # All features continuous: normalized Manhattan distance.
    return squareform(pdist(rescale_continuous(xc), metric='cityblock'))
#==================================================================#
def _dtype_array(self):
    """Split features by type for downstream distance computation.

    Returns
    -------
    attrdiff: np.ndarray
        Max-min difference of every feature, in header order (0 for discrete).
    cidx: np.ndarray
        Column indices of the continuous features.
    didx: np.ndarray
        Column indices of the discrete features.
    """
    type_flags = []  # 1 = continuous, 0 = discrete, in header order
    diffs = []
    for header in self._headers:
        type_flags.append(1 if self.attr[header][0] == 'continuous' else 0)
        diffs.append(self.attr[header][3])
    type_flags = np.array(type_flags)
    cidx = np.where(type_flags == 1)[0]
    didx = np.where(type_flags == 0)[0]
    return np.array(diffs), cidx, didx
#==================================================================#
def _distarray_missing(self, xc, xd, cdiffs):
    """Compute the pairwise instance distance matrix for data that contains
    missing values. Delegates the per-row work to `get_row_missing`, optionally
    in parallel via joblib."""
    # Per instance, record which continuous (cindices) and discrete (dindices)
    # feature columns are missing; these correspond to xc and xd respectively.
    cindices = []
    dindices = []
    for row in range(self._datalen):
        cindices.append(np.where(np.isnan(xc[row]))[0])
        dindices.append(np.where(np.isnan(xd[row]))[0])
    if self.n_jobs != 1:
        dist_array = Parallel(n_jobs=self.n_jobs)(
            delayed(get_row_missing)(xc, xd, cdiffs, row, cindices, dindices)
            for row in range(self._datalen))
    else:
        # Serial path: each call computes one row of the lower triangle
        # (zeros elsewhere), together forming the square distance matrix.
        dist_array = [get_row_missing(xc, xd, cdiffs, row, cindices, dindices)
                      for row in range(self._datalen)]
    return np.array(dist_array)
#==================================================================#
############################# ReliefF ############################################
def _find_neighbors(self, inst):
    """ Identify k nearest hits and k nearest misses for given instance. This is accomplished differently based on the type of endpoint (i.e. binary, multiclass, and continuous).

    Parameters
    ----------
    inst: int
        Row index of the target instance in the training data.

    Returns
    -------
    np.ndarray of int
        Indices of the selected hit and miss neighbors, interleaved in
        ascending distance order (nearest first).
    """
    # Make a vector of distances between target instance (inst) and all others
    dist_vect = []
    for j in range(self._datalen):
        if inst != j:
            # Only one triangle of the distance matrix is populated, so order
            # the index pair before the lookup.
            locator = [inst, j]
            if inst < j:
                locator.reverse()
            dist_vect.append(self._distance_array[locator[0]][locator[1]])
        else:
            # Sentinel distance ensures that target instance is never selected as its own neighbor.
            dist_vect.append(sys.maxsize)
    dist_vect = np.array(dist_vect)
    # Identify neighbors-------------------------------------------------------
    """ NN for Binary Endpoints: """
    if self._class_type == 'binary':
        nn_list = []
        match_count = 0  # hits collected so far
        miss_count = 0   # misses collected so far
        # Walk candidates from nearest to farthest, keeping up to
        # n_neighbors hits and n_neighbors misses.
        for nn_index in np.argsort(dist_vect):
            if self._y[inst] == self._y[nn_index]:  # Hit neighbor identified
                if match_count >= self.n_neighbors:
                    continue
                nn_list.append(nn_index)
                match_count += 1
            else:  # Miss neighbor identified
                if miss_count >= self.n_neighbors:
                    continue
                nn_list.append(nn_index)
                miss_count += 1
            # Stop early once both quotas are filled.
            if match_count >= self.n_neighbors and miss_count >= self.n_neighbors:
                break
    elif self._class_type == 'multiclass':
        # NN for multiclass endpoints: keep n_neighbors hits plus up to
        # n_neighbors misses from EACH other class.
        nn_list = []
        match_count = 0
        miss_count = dict.fromkeys(self._label_list, 0)  # per-class miss quota
        for nn_index in np.argsort(dist_vect):
            if self._y[inst] == self._y[nn_index]:  # Hit neighbor identified
                if match_count >= self.n_neighbors:
                    continue
                nn_list.append(nn_index)
                match_count += 1
            else:
                # Charge this miss against the quota of its own class; only
                # the matching label's branch runs.
                for label in self._label_list:
                    if self._y[nn_index] == label:
                        if miss_count[label] >= self.n_neighbors:
                            continue
                        nn_list.append(nn_index)
                        miss_count[label] += 1
            # Stop once the hit quota and every class's miss quota are filled.
            if match_count >= self.n_neighbors and all(v >= self.n_neighbors for v in miss_count.values()):
                break
    else:
        # NN for continuous endpoints: a "hit" is an instance whose label lies
        # within one label standard deviation of the target's label.
        nn_list = []
        match_count = 0
        miss_count = 0
        for nn_index in np.argsort(dist_vect):
            if abs(self._y[inst]-self._y[nn_index]) < self._labels_std:  # Hit neighbor identified
                if match_count >= self.n_neighbors:
                    continue
                nn_list.append(nn_index)
                match_count += 1
            else:  # Miss neighbor identified
                if miss_count >= self.n_neighbors:
                    continue
                nn_list.append(nn_index)
                miss_count += 1
            if match_count >= self.n_neighbors and miss_count >= self.n_neighbors:
                break
    return np.array(nn_list)
def _run_algorithm(self):
    """Identify nearest neighbors for every training instance and score all
    features, yielding the ReliefF feature importance scores."""
    # Lazily map each instance index to its hit/miss neighbor list.
    neighbor_lists = map(self._find_neighbors, range(self._datalen))
    # Boolean mask flagging missing (NaN) values in the feature matrix.
    nan_entries = np.isnan(self._X)
    # Score every feature by summing the per-instance contributions computed
    # (possibly in parallel) by ReliefF_compute_scores.
    totals = np.sum(Parallel(n_jobs=self.n_jobs)(
        delayed(ReliefF_compute_scores)(
            idx, self.attr, nan_entries, self._num_attributes, self.mcmap,
            neighbors, self._headers, self._class_type, self._X, self._y,
            self._labels_std, self.data_type)
        for idx, neighbors in enumerate(neighbor_lists)), axis=0)
    return np.array(totals)
|
EpistasisLab/scikit-rebate | skrebate/relieff.py | ReliefF._get_attribute_info | python | def _get_attribute_info(self):
attr = dict()
d = 0
limit = self.discrete_threshold
w = self._X.transpose()
for idx in range(len(w)):
h = self._headers[idx]
z = w[idx]
if self._missing_data_count > 0:
z = z[np.logical_not(np.isnan(z))] # Exclude any missing values from consideration
zlen = len(np.unique(z))
if zlen <= limit:
attr[h] = ('discrete', 0, 0, 0, 0)
d += 1
else:
mx = np.max(z)
mn = np.min(z)
sd = np.std(z)
attr[h] = ('continuous', mx, mn, mx - mn, sd)
# For each feature/attribute we store (type, max value, min value, max min difference, average, standard deviation) - the latter three values are set to zero if feature is discrete.
return attr | Preprocess the training dataset to identify which features/attributes are discrete vs. continuous valued. Ignores missing values in this determination. | train | https://github.com/EpistasisLab/scikit-rebate/blob/67dab51a7525fa5d076b059f1e6f8cff7481c1ef/skrebate/relieff.py#L277-L299 | null | class ReliefF(BaseEstimator):
"""Feature selection using data-mined expert knowledge.
Based on the ReliefF algorithm as introduced in:
Igor et al. Overcoming the myopia of inductive learning
algorithms with RELIEFF (1997), Applied Intelligence, 7(1), p39-55"""
"""Note that ReliefF class establishes core functionality that is inherited by all other Relief-based algorithms.
Assumes: * There are no missing values in the label/outcome/dependent variable.
* For ReliefF, the setting of k is <= to the number of instances that have the least frequent class label
(binary and multiclass endpoint data. """
def __init__(self, n_features_to_select=10, n_neighbors=100, discrete_threshold=10, verbose=False, n_jobs=1):
"""Sets up ReliefF to perform feature selection. Note that an approximation of the original 'Relief'
algorithm may be run by setting 'n_features_to_select' to 1. Also note that the original Relief parameter 'm'
is not included in this software. 'm' specifies the number of random training instances out of 'n' (total
training instances) used to update feature scores. Since scores are most representative when m=n, all
available training instances are utilized in all Relief-based algorithm score updates here. If the user
wishes to utilize a smaller 'm' in Relief-based scoring, simply pass any of these algorithms a subset of the
original training dataset samples.
Parameters
----------
n_features_to_select: int (default: 10)
the number of top features (according to the relieff score) to
retain after feature selection is applied.
n_neighbors: int or float (default: 100)
The number of neighbors to consider when assigning feature
importance scores. If a float number is provided, that percentage of
training samples is used as the number of neighbors.
More neighbors results in more accurate scores, but takes longer.
discrete_threshold: int (default: 10)
Value used to determine if a feature is discrete or continuous.
If the number of unique levels in a feature is > discrete_threshold, then it is
considered continuous, or discrete otherwise.
verbose: bool (default: False)
If True, output timing of distance array and scoring
n_jobs: int (default: 1)
The number of cores to dedicate to computing the scores with joblib.
Assigning this parameter to -1 will dedicate as many cores as are available on your system.
We recommend setting this parameter to -1 to speed up the algorithm as much as possible.
"""
self.n_features_to_select = n_features_to_select
self.n_neighbors = n_neighbors
self.discrete_threshold = discrete_threshold
self.verbose = verbose
self.n_jobs = n_jobs
#=========================================================================#
def fit(self, X, y):
"""Scikit-learn required: Computes the feature importance scores from the training data.
Parameters
----------
X: array-like {n_samples, n_features}
Training instances to compute the feature importance scores from
y: array-like {n_samples}
Training labels
Returns
-------
Copy of the ReliefF instance
"""
self._X = X # matrix of predictive variables ('independent variables')
self._y = y # vector of values for outcome variable ('dependent variable')
# Set up the properties for ReliefF -------------------------------------------------------------------------------------
self._datalen = len(self._X) # Number of training instances ('n')
""""Below: Handles special case where user requests that a proportion of training instances be neighbors for
ReliefF rather than a specified 'k' number of neighbors. Note that if k is specified, then k 'hits' and k
'misses' will be used to update feature scores. Thus total number of neighbors is 2k. If instead a proportion
is specified (say 0.1 out of 1000 instances) this represents the total number of neighbors (e.g. 100). In this
case, k would be set to 50 (i.e. 50 hits and 50 misses). """
if hasattr(self, 'n_neighbors') and type(self.n_neighbors) is float:
# Halve the number of neighbors because ReliefF uses n_neighbors matches and n_neighbors misses
self.n_neighbors = int(self.n_neighbors * self._datalen * 0.5)
# Number of unique outcome (label) values (used to determine outcome variable type)
self._label_list = list(set(self._y))
# Determine if label is discrete
discrete_label = (len(self._label_list) <= self.discrete_threshold)
# Identify label type (binary, multiclass, or continuous)
if discrete_label:
if len(self._label_list) == 2:
self._class_type = 'binary'
self.mcmap = 0
elif len(self._label_list) > 2:
self._class_type = 'multiclass'
self.mcmap = self._getMultiClassMap()
else:
raise ValueError('All labels are of the same class.')
else:
self._class_type = 'continuous'
self.mcmap = 0
# Training labels standard deviation -- only used if the training labels are continuous
self._labels_std = 0.
if len(self._label_list) > self.discrete_threshold:
self._labels_std = np.std(self._y, ddof=1)
self._num_attributes = len(self._X[0]) # Number of features in training data
# Number of missing data values in predictor variable matrix.
self._missing_data_count = np.isnan(self._X).sum()
"""Assign internal headers for the features (scikit-learn does not accept external headers from dataset):
The pre_normalize() function relies on the headers being ordered, e.g., X01, X02, etc.
If this is changed, then the sort in the pre_normalize() function needs to be adapted as well. """
xlen = len(self._X[0])
mxlen = len(str(xlen + 1))
self._headers = ['X{}'.format(str(i).zfill(mxlen)) for i in range(1, xlen + 1)]
start = time.time() # Runtime tracking
# Determine data types for all features/attributes in training data (i.e. discrete or continuous)
C = D = False
# Examines each feature and applies discrete_threshold to determine variable type.
self.attr = self._get_attribute_info()
for key in self.attr.keys():
if self.attr[key][0] == 'discrete':
D = True
if self.attr[key][0] == 'continuous':
C = True
# For downstream computational efficiency, determine if dataset is comprised of all discrete, all continuous, or a mix of discrete/continuous features.
if C and D:
self.data_type = 'mixed'
elif D and not C:
self.data_type = 'discrete'
elif C and not D:
self.data_type = 'continuous'
else:
raise ValueError('Invalid data type in data set.')
#--------------------------------------------------------------------------------------------------------------------
# Compute the distance array between all data points ----------------------------------------------------------------
# For downstream efficiency, separate features in dataset by type (i.e. discrete/continuous)
diffs, cidx, didx = self._dtype_array()
cdiffs = diffs[cidx] # max/min continuous value difference for continuous features.
xc = self._X[:, cidx] # Subset of continuous-valued feature data
xd = self._X[:, didx] # Subset of discrete-valued feature data
""" For efficiency, the distance array is computed more efficiently for data with no missing values.
This distance array will only be used to identify nearest neighbors. """
if self._missing_data_count > 0:
self._distance_array = self._distarray_missing(xc, xd, cdiffs)
else:
self._distance_array = self._distarray_no_missing(xc, xd)
if self.verbose:
elapsed = time.time() - start
print('Created distance array in {} seconds.'.format(elapsed))
print('Feature scoring under way ...')
start = time.time()
#--------------------------------------------------------------------------------------------------------------------
# Run remainder of algorithm (i.e. identification of 'neighbors' for each instance, and feature scoring).------------
# Stores feature importance scores for ReliefF or respective Relief-based algorithm.
self.feature_importances_ = self._run_algorithm()
# Delete the internal distance array because it is no longer needed
del self._distance_array
if self.verbose:
elapsed = time.time() - start
print('Completed scoring in {} seconds.'.format(elapsed))
# Compute indices of top features
self.top_features_ = np.argsort(self.feature_importances_)[::-1]
return self
#=========================================================================#
def transform(self, X):
"""Scikit-learn required: Reduces the feature set down to the top `n_features_to_select` features.
Parameters
----------
X: array-like {n_samples, n_features}
Feature matrix to perform feature selection on
Returns
-------
X_reduced: array-like {n_samples, n_features_to_select}
Reduced feature matrix
"""
if self._num_attributes < self.n_features_to_select:
raise ValueError('Number of features to select is larger than the number of features in the dataset.')
return X[:, self.top_features_[:self.n_features_to_select]]
#=========================================================================#
def fit_transform(self, X, y):
"""Scikit-learn required: Computes the feature importance scores from the training data, then reduces the feature set down to the top `n_features_to_select` features.
Parameters
----------
X: array-like {n_samples, n_features}
Training instances to compute the feature importance scores from
y: array-like {n_samples}
Training labels
Returns
-------
X_reduced: array-like {n_samples, n_features_to_select}
Reduced feature matrix
"""
self.fit(X, y)
return self.transform(X)
######################### SUPPORTING FUNCTIONS ###########################
def _getMultiClassMap(self):
""" Relief algorithms handle the scoring updates a little differently for data with multiclass outcomes. In ReBATE we implement multiclass scoring in line with
the strategy described by Kononenko 1994 within the RELIEF-F variant which was suggested to outperform the RELIEF-E multiclass variant. This strategy weights
score updates derived from misses of different classes by the class frequency observed in the training data. 'The idea is that the algorithm should estimate the
ability of attributes to separate each pair of classes regardless of which two classes are closest to each other'. In this method we prepare for this normalization
by creating a class dictionary, and storing respective class frequencies. This is needed for ReliefF multiclass score update normalizations. """
mcmap = dict()
for i in range(self._datalen):
if(self._y[i] not in mcmap):
mcmap[self._y[i]] = 0
else:
mcmap[self._y[i]] += 1
for each in self._label_list:
mcmap[each] = mcmap[each]/float(self._datalen)
return mcmap
def _distarray_no_missing(self, xc, xd):
"""Distance array calculation for data with no missing values. The 'pdist() function outputs a condense distance array, and squareform() converts this vector-form
distance vector to a square-form, redundant distance matrix.
*This could be a target for saving memory in the future, by not needing to expand to the redundant square-form matrix. """
from scipy.spatial.distance import pdist, squareform
#------------------------------------------#
def pre_normalize(x):
"""Normalizes continuous features so they are in the same range (0 to 1)"""
idx = 0
# goes through all named features (doesn really need to) this method is only applied to continuous features
for i in sorted(self.attr.keys()):
if self.attr[i][0] == 'discrete':
continue
cmin = self.attr[i][2]
diff = self.attr[i][3]
x[:, idx] -= cmin
x[:, idx] /= diff
idx += 1
return x
#------------------------------------------#
if self.data_type == 'discrete': # discrete features only
return squareform(pdist(self._X, metric='hamming'))
elif self.data_type == 'mixed': # mix of discrete and continuous features
d_dist = squareform(pdist(xd, metric='hamming'))
# Cityblock is also known as Manhattan distance
c_dist = squareform(pdist(pre_normalize(xc), metric='cityblock'))
return np.add(d_dist, c_dist) / self._num_attributes
else: #continuous features only
#xc = pre_normalize(xc)
return squareform(pdist(pre_normalize(xc), metric='cityblock'))
#==================================================================#
def _dtype_array(self):
"""Return mask for discrete(0)/continuous(1) attributes and their indices. Return array of max/min diffs of attributes."""
attrtype = []
attrdiff = []
for key in self._headers:
if self.attr[key][0] == 'continuous':
attrtype.append(1)
else:
attrtype.append(0)
attrdiff.append(self.attr[key][3])
attrtype = np.array(attrtype)
cidx = np.where(attrtype == 1)[0]
didx = np.where(attrtype == 0)[0]
attrdiff = np.array(attrdiff)
return attrdiff, cidx, didx
#==================================================================#
def _distarray_missing(self, xc, xd, cdiffs):
"""Distance array calculation for data with missing values"""
cindices = []
dindices = []
# Get Boolean mask locating missing values for continuous and discrete features separately. These correspond to xc and xd respectively.
for i in range(self._datalen):
cindices.append(np.where(np.isnan(xc[i]))[0])
dindices.append(np.where(np.isnan(xd[i]))[0])
if self.n_jobs != 1:
dist_array = Parallel(n_jobs=self.n_jobs)(delayed(get_row_missing)(
xc, xd, cdiffs, index, cindices, dindices) for index in range(self._datalen))
else:
# For each instance calculate distance from all other instances (in non-redundant manner) (i.e. computes triangle, and puts zeros in for rest to form square).
dist_array = [get_row_missing(xc, xd, cdiffs, index, cindices, dindices)
for index in range(self._datalen)]
return np.array(dist_array)
#==================================================================#
############################# ReliefF ############################################
def _find_neighbors(self, inst):
""" Identify k nearest hits and k nearest misses for given instance. This is accomplished differently based on the type of endpoint (i.e. binary, multiclass, and continuous). """
# Make a vector of distances between target instance (inst) and all others
dist_vect = []
for j in range(self._datalen):
if inst != j:
locator = [inst, j]
if inst < j:
locator.reverse()
dist_vect.append(self._distance_array[locator[0]][locator[1]])
else:
# Ensures that target instance is never selected as neighbor.
dist_vect.append(sys.maxsize)
dist_vect = np.array(dist_vect)
# Identify neighbors-------------------------------------------------------
""" NN for Binary Endpoints: """
if self._class_type == 'binary':
nn_list = []
match_count = 0
miss_count = 0
for nn_index in np.argsort(dist_vect):
if self._y[inst] == self._y[nn_index]: # Hit neighbor identified
if match_count >= self.n_neighbors:
continue
nn_list.append(nn_index)
match_count += 1
else: # Miss neighbor identified
if miss_count >= self.n_neighbors:
continue
nn_list.append(nn_index)
miss_count += 1
if match_count >= self.n_neighbors and miss_count >= self.n_neighbors:
break
elif self._class_type == 'multiclass':
nn_list = []
match_count = 0
miss_count = dict.fromkeys(self._label_list, 0)
for nn_index in np.argsort(dist_vect):
if self._y[inst] == self._y[nn_index]: # Hit neighbor identified
if match_count >= self.n_neighbors:
continue
nn_list.append(nn_index)
match_count += 1
else:
for label in self._label_list:
if self._y[nn_index] == label:
if miss_count[label] >= self.n_neighbors:
continue
nn_list.append(nn_index)
miss_count[label] += 1
if match_count >= self.n_neighbors and all(v >= self.n_neighbors for v in miss_count.values()):
break
else:
nn_list = []
match_count = 0
miss_count = 0
for nn_index in np.argsort(dist_vect):
if abs(self._y[inst]-self._y[nn_index]) < self._labels_std: # Hit neighbor identified
if match_count >= self.n_neighbors:
continue
nn_list.append(nn_index)
match_count += 1
else: # Miss neighbor identified
if miss_count >= self.n_neighbors:
continue
nn_list.append(nn_index)
miss_count += 1
if match_count >= self.n_neighbors and miss_count >= self.n_neighbors:
break
return np.array(nn_list)
def _run_algorithm(self):
""" Runs nearest neighbor (NN) identification and feature scoring to yield ReliefF scores. """
# Find nearest neighbors
NNlist = map(self._find_neighbors, range(self._datalen))
# Feature scoring - using identified nearest neighbors
nan_entries = np.isnan(self._X) # boolean mask for missing data values
# Call the scoring method for the ReliefF algorithm
scores = np.sum(Parallel(n_jobs=self.n_jobs)(delayed(
ReliefF_compute_scores)(instance_num, self.attr, nan_entries, self._num_attributes, self.mcmap,
NN, self._headers, self._class_type, self._X, self._y, self._labels_std, self.data_type)
for instance_num, NN in zip(range(self._datalen), NNlist)), axis=0)
return np.array(scores)
|
EpistasisLab/scikit-rebate | skrebate/relieff.py | ReliefF._distarray_no_missing | python | def _distarray_no_missing(self, xc, xd):
from scipy.spatial.distance import pdist, squareform
#------------------------------------------#
def pre_normalize(x):
"""Normalizes continuous features so they are in the same range (0 to 1)"""
idx = 0
# goes through all named features (doesn really need to) this method is only applied to continuous features
for i in sorted(self.attr.keys()):
if self.attr[i][0] == 'discrete':
continue
cmin = self.attr[i][2]
diff = self.attr[i][3]
x[:, idx] -= cmin
x[:, idx] /= diff
idx += 1
return x
#------------------------------------------#
if self.data_type == 'discrete': # discrete features only
return squareform(pdist(self._X, metric='hamming'))
elif self.data_type == 'mixed': # mix of discrete and continuous features
d_dist = squareform(pdist(xd, metric='hamming'))
# Cityblock is also known as Manhattan distance
c_dist = squareform(pdist(pre_normalize(xc), metric='cityblock'))
return np.add(d_dist, c_dist) / self._num_attributes
else: #continuous features only
#xc = pre_normalize(xc)
return squareform(pdist(pre_normalize(xc), metric='cityblock')) | Distance array calculation for data with no missing values. The 'pdist() function outputs a condense distance array, and squareform() converts this vector-form
distance vector to a square-form, redundant distance matrix.
*This could be a target for saving memory in the future, by not needing to expand to the redundant square-form matrix. | train | https://github.com/EpistasisLab/scikit-rebate/blob/67dab51a7525fa5d076b059f1e6f8cff7481c1ef/skrebate/relieff.py#L301-L333 | null | class ReliefF(BaseEstimator):
"""Feature selection using data-mined expert knowledge.
Based on the ReliefF algorithm as introduced in:
Igor et al. Overcoming the myopia of inductive learning
algorithms with RELIEFF (1997), Applied Intelligence, 7(1), p39-55"""
"""Note that ReliefF class establishes core functionality that is inherited by all other Relief-based algorithms.
Assumes: * There are no missing values in the label/outcome/dependent variable.
* For ReliefF, the setting of k is <= to the number of instances that have the least frequent class label
(binary and multiclass endpoint data. """
def __init__(self, n_features_to_select=10, n_neighbors=100, discrete_threshold=10, verbose=False, n_jobs=1):
"""Sets up ReliefF to perform feature selection. Note that an approximation of the original 'Relief'
algorithm may be run by setting 'n_features_to_select' to 1. Also note that the original Relief parameter 'm'
is not included in this software. 'm' specifies the number of random training instances out of 'n' (total
training instances) used to update feature scores. Since scores are most representative when m=n, all
available training instances are utilized in all Relief-based algorithm score updates here. If the user
wishes to utilize a smaller 'm' in Relief-based scoring, simply pass any of these algorithms a subset of the
original training dataset samples.
Parameters
----------
n_features_to_select: int (default: 10)
the number of top features (according to the relieff score) to
retain after feature selection is applied.
n_neighbors: int or float (default: 100)
The number of neighbors to consider when assigning feature
importance scores. If a float number is provided, that percentage of
training samples is used as the number of neighbors.
More neighbors results in more accurate scores, but takes longer.
discrete_threshold: int (default: 10)
Value used to determine if a feature is discrete or continuous.
If the number of unique levels in a feature is > discrete_threshold, then it is
considered continuous, or discrete otherwise.
verbose: bool (default: False)
If True, output timing of distance array and scoring
n_jobs: int (default: 1)
The number of cores to dedicate to computing the scores with joblib.
Assigning this parameter to -1 will dedicate as many cores as are available on your system.
We recommend setting this parameter to -1 to speed up the algorithm as much as possible.
"""
self.n_features_to_select = n_features_to_select
self.n_neighbors = n_neighbors
self.discrete_threshold = discrete_threshold
self.verbose = verbose
self.n_jobs = n_jobs
#=========================================================================#
def fit(self, X, y):
"""Scikit-learn required: Computes the feature importance scores from the training data.
Parameters
----------
X: array-like {n_samples, n_features}
Training instances to compute the feature importance scores from
y: array-like {n_samples}
Training labels
Returns
-------
Copy of the ReliefF instance
"""
self._X = X # matrix of predictive variables ('independent variables')
self._y = y # vector of values for outcome variable ('dependent variable')
# Set up the properties for ReliefF -------------------------------------------------------------------------------------
self._datalen = len(self._X) # Number of training instances ('n')
""""Below: Handles special case where user requests that a proportion of training instances be neighbors for
ReliefF rather than a specified 'k' number of neighbors. Note that if k is specified, then k 'hits' and k
'misses' will be used to update feature scores. Thus total number of neighbors is 2k. If instead a proportion
is specified (say 0.1 out of 1000 instances) this represents the total number of neighbors (e.g. 100). In this
case, k would be set to 50 (i.e. 50 hits and 50 misses). """
if hasattr(self, 'n_neighbors') and type(self.n_neighbors) is float:
# Halve the number of neighbors because ReliefF uses n_neighbors matches and n_neighbors misses
self.n_neighbors = int(self.n_neighbors * self._datalen * 0.5)
# Number of unique outcome (label) values (used to determine outcome variable type)
self._label_list = list(set(self._y))
# Determine if label is discrete
discrete_label = (len(self._label_list) <= self.discrete_threshold)
# Identify label type (binary, multiclass, or continuous)
if discrete_label:
if len(self._label_list) == 2:
self._class_type = 'binary'
self.mcmap = 0
elif len(self._label_list) > 2:
self._class_type = 'multiclass'
self.mcmap = self._getMultiClassMap()
else:
raise ValueError('All labels are of the same class.')
else:
self._class_type = 'continuous'
self.mcmap = 0
# Training labels standard deviation -- only used if the training labels are continuous
self._labels_std = 0.
if len(self._label_list) > self.discrete_threshold:
self._labels_std = np.std(self._y, ddof=1)
self._num_attributes = len(self._X[0]) # Number of features in training data
# Number of missing data values in predictor variable matrix.
self._missing_data_count = np.isnan(self._X).sum()
"""Assign internal headers for the features (scikit-learn does not accept external headers from dataset):
The pre_normalize() function relies on the headers being ordered, e.g., X01, X02, etc.
If this is changed, then the sort in the pre_normalize() function needs to be adapted as well. """
xlen = len(self._X[0])
mxlen = len(str(xlen + 1))
self._headers = ['X{}'.format(str(i).zfill(mxlen)) for i in range(1, xlen + 1)]
start = time.time() # Runtime tracking
# Determine data types for all features/attributes in training data (i.e. discrete or continuous)
C = D = False
# Examines each feature and applies discrete_threshold to determine variable type.
self.attr = self._get_attribute_info()
for key in self.attr.keys():
if self.attr[key][0] == 'discrete':
D = True
if self.attr[key][0] == 'continuous':
C = True
# For downstream computational efficiency, determine if dataset is comprised of all discrete, all continuous, or a mix of discrete/continuous features.
if C and D:
self.data_type = 'mixed'
elif D and not C:
self.data_type = 'discrete'
elif C and not D:
self.data_type = 'continuous'
else:
raise ValueError('Invalid data type in data set.')
#--------------------------------------------------------------------------------------------------------------------
# Compute the distance array between all data points ----------------------------------------------------------------
# For downstream efficiency, separate features in dataset by type (i.e. discrete/continuous)
diffs, cidx, didx = self._dtype_array()
cdiffs = diffs[cidx] # max/min continuous value difference for continuous features.
xc = self._X[:, cidx] # Subset of continuous-valued feature data
xd = self._X[:, didx] # Subset of discrete-valued feature data
""" For efficiency, the distance array is computed more efficiently for data with no missing values.
This distance array will only be used to identify nearest neighbors. """
if self._missing_data_count > 0:
self._distance_array = self._distarray_missing(xc, xd, cdiffs)
else:
self._distance_array = self._distarray_no_missing(xc, xd)
if self.verbose:
elapsed = time.time() - start
print('Created distance array in {} seconds.'.format(elapsed))
print('Feature scoring under way ...')
start = time.time()
#--------------------------------------------------------------------------------------------------------------------
# Run remainder of algorithm (i.e. identification of 'neighbors' for each instance, and feature scoring).------------
# Stores feature importance scores for ReliefF or respective Relief-based algorithm.
self.feature_importances_ = self._run_algorithm()
# Delete the internal distance array because it is no longer needed
del self._distance_array
if self.verbose:
elapsed = time.time() - start
print('Completed scoring in {} seconds.'.format(elapsed))
# Compute indices of top features
self.top_features_ = np.argsort(self.feature_importances_)[::-1]
return self
#=========================================================================#
def transform(self, X):
"""Scikit-learn required: Reduces the feature set down to the top `n_features_to_select` features.
Parameters
----------
X: array-like {n_samples, n_features}
Feature matrix to perform feature selection on
Returns
-------
X_reduced: array-like {n_samples, n_features_to_select}
Reduced feature matrix
"""
if self._num_attributes < self.n_features_to_select:
raise ValueError('Number of features to select is larger than the number of features in the dataset.')
return X[:, self.top_features_[:self.n_features_to_select]]
#=========================================================================#
def fit_transform(self, X, y):
"""Scikit-learn required: Computes the feature importance scores from the training data, then reduces the feature set down to the top `n_features_to_select` features.
Parameters
----------
X: array-like {n_samples, n_features}
Training instances to compute the feature importance scores from
y: array-like {n_samples}
Training labels
Returns
-------
X_reduced: array-like {n_samples, n_features_to_select}
Reduced feature matrix
"""
self.fit(X, y)
return self.transform(X)
######################### SUPPORTING FUNCTIONS ###########################
def _getMultiClassMap(self):
""" Relief algorithms handle the scoring updates a little differently for data with multiclass outcomes. In ReBATE we implement multiclass scoring in line with
the strategy described by Kononenko 1994 within the RELIEF-F variant which was suggested to outperform the RELIEF-E multiclass variant. This strategy weights
score updates derived from misses of different classes by the class frequency observed in the training data. 'The idea is that the algorithm should estimate the
ability of attributes to separate each pair of classes regardless of which two classes are closest to each other'. In this method we prepare for this normalization
by creating a class dictionary, and storing respective class frequencies. This is needed for ReliefF multiclass score update normalizations. """
mcmap = dict()
for i in range(self._datalen):
if(self._y[i] not in mcmap):
mcmap[self._y[i]] = 0
else:
mcmap[self._y[i]] += 1
for each in self._label_list:
mcmap[each] = mcmap[each]/float(self._datalen)
return mcmap
def _get_attribute_info(self):
""" Preprocess the training dataset to identify which features/attributes are discrete vs. continuous valued. Ignores missing values in this determination."""
attr = dict()
d = 0
limit = self.discrete_threshold
w = self._X.transpose()
for idx in range(len(w)):
h = self._headers[idx]
z = w[idx]
if self._missing_data_count > 0:
z = z[np.logical_not(np.isnan(z))] # Exclude any missing values from consideration
zlen = len(np.unique(z))
if zlen <= limit:
attr[h] = ('discrete', 0, 0, 0, 0)
d += 1
else:
mx = np.max(z)
mn = np.min(z)
sd = np.std(z)
attr[h] = ('continuous', mx, mn, mx - mn, sd)
# For each feature/attribute we store (type, max value, min value, max min difference, average, standard deviation) - the latter three values are set to zero if feature is discrete.
return attr
#==================================================================#
def _dtype_array(self):
"""Return mask for discrete(0)/continuous(1) attributes and their indices. Return array of max/min diffs of attributes."""
attrtype = []
attrdiff = []
for key in self._headers:
if self.attr[key][0] == 'continuous':
attrtype.append(1)
else:
attrtype.append(0)
attrdiff.append(self.attr[key][3])
attrtype = np.array(attrtype)
cidx = np.where(attrtype == 1)[0]
didx = np.where(attrtype == 0)[0]
attrdiff = np.array(attrdiff)
return attrdiff, cidx, didx
#==================================================================#
def _distarray_missing(self, xc, xd, cdiffs):
"""Distance array calculation for data with missing values"""
cindices = []
dindices = []
# Get Boolean mask locating missing values for continuous and discrete features separately. These correspond to xc and xd respectively.
for i in range(self._datalen):
cindices.append(np.where(np.isnan(xc[i]))[0])
dindices.append(np.where(np.isnan(xd[i]))[0])
if self.n_jobs != 1:
dist_array = Parallel(n_jobs=self.n_jobs)(delayed(get_row_missing)(
xc, xd, cdiffs, index, cindices, dindices) for index in range(self._datalen))
else:
# For each instance calculate distance from all other instances (in non-redundant manner) (i.e. computes triangle, and puts zeros in for rest to form square).
dist_array = [get_row_missing(xc, xd, cdiffs, index, cindices, dindices)
for index in range(self._datalen)]
return np.array(dist_array)
#==================================================================#
############################# ReliefF ############################################
    def _find_neighbors(self, inst):
        """Identify k nearest hits and k nearest misses for the given instance.

        Neighbor selection depends on the endpoint type (binary, multiclass, or
        continuous). With binary/continuous endpoints up to k hits and k misses are
        collected; with multiclass endpoints up to k misses are collected per
        non-target class.

        Parameters
        ----------
        inst: int
            Index of the target training instance.

        Returns
        -------
        np.ndarray
            Indices of the selected neighbor instances (hits and misses mixed,
            in increasing-distance order).
        """
        # Make a vector of distances between target instance (inst) and all others.
        # dist_vect[j] corresponds to instance j because the target's own slot is
        # filled with sys.maxsize below.
        dist_vect = []
        for j in range(self._datalen):
            if inst != j:
                locator = [inst, j]
                if inst < j:
                    # The distance array is stored triangle-style: the larger
                    # index must come first when looking up a pair distance.
                    locator.reverse()
                dist_vect.append(self._distance_array[locator[0]][locator[1]])
            else:
                # Ensures that target instance is never selected as neighbor.
                dist_vect.append(sys.maxsize)
        dist_vect = np.array(dist_vect)
        # Identify neighbors-------------------------------------------------------
        """ NN for Binary Endpoints: """
        if self._class_type == 'binary':
            nn_list = []
            match_count = 0
            miss_count = 0
            # Scan instances in increasing-distance order, keeping up to k hits
            # (same label) and up to k misses (other label).
            for nn_index in np.argsort(dist_vect):
                if self._y[inst] == self._y[nn_index]:  # Hit neighbor identified
                    if match_count >= self.n_neighbors:
                        continue
                    nn_list.append(nn_index)
                    match_count += 1
                else:  # Miss neighbor identified
                    if miss_count >= self.n_neighbors:
                        continue
                    nn_list.append(nn_index)
                    miss_count += 1
                if match_count >= self.n_neighbors and miss_count >= self.n_neighbors:
                    break
        elif self._class_type == 'multiclass':
            nn_list = []
            match_count = 0
            # One miss counter per class label; up to k misses are kept for each
            # non-target class (RELIEF-F multiclass strategy).
            miss_count = dict.fromkeys(self._label_list, 0)
            for nn_index in np.argsort(dist_vect):
                if self._y[inst] == self._y[nn_index]:  # Hit neighbor identified
                    if match_count >= self.n_neighbors:
                        continue
                    nn_list.append(nn_index)
                    match_count += 1
                else:
                    for label in self._label_list:
                        if self._y[nn_index] == label:
                            if miss_count[label] >= self.n_neighbors:
                                continue
                            nn_list.append(nn_index)
                            miss_count[label] += 1
                if match_count >= self.n_neighbors and all(v >= self.n_neighbors for v in miss_count.values()):
                    break
        else:
            # Continuous endpoint: a neighbor counts as a 'hit' when its label
            # differs from the target's by less than the labels' standard deviation.
            nn_list = []
            match_count = 0
            miss_count = 0
            for nn_index in np.argsort(dist_vect):
                if abs(self._y[inst]-self._y[nn_index]) < self._labels_std:  # Hit neighbor identified
                    if match_count >= self.n_neighbors:
                        continue
                    nn_list.append(nn_index)
                    match_count += 1
                else:  # Miss neighbor identified
                    if miss_count >= self.n_neighbors:
                        continue
                    nn_list.append(nn_index)
                    miss_count += 1
                if match_count >= self.n_neighbors and miss_count >= self.n_neighbors:
                    break
        return np.array(nn_list)
def _run_algorithm(self):
""" Runs nearest neighbor (NN) identification and feature scoring to yield ReliefF scores. """
# Find nearest neighbors
NNlist = map(self._find_neighbors, range(self._datalen))
# Feature scoring - using identified nearest neighbors
nan_entries = np.isnan(self._X) # boolean mask for missing data values
# Call the scoring method for the ReliefF algorithm
scores = np.sum(Parallel(n_jobs=self.n_jobs)(delayed(
ReliefF_compute_scores)(instance_num, self.attr, nan_entries, self._num_attributes, self.mcmap,
NN, self._headers, self._class_type, self._X, self._y, self._labels_std, self.data_type)
for instance_num, NN in zip(range(self._datalen), NNlist)), axis=0)
return np.array(scores)
|
EpistasisLab/scikit-rebate | skrebate/relieff.py | ReliefF._dtype_array | python | def _dtype_array(self):
attrtype = []
attrdiff = []
for key in self._headers:
if self.attr[key][0] == 'continuous':
attrtype.append(1)
else:
attrtype.append(0)
attrdiff.append(self.attr[key][3])
attrtype = np.array(attrtype)
cidx = np.where(attrtype == 1)[0]
didx = np.where(attrtype == 0)[0]
attrdiff = np.array(attrdiff)
return attrdiff, cidx, didx | Return mask for discrete(0)/continuous(1) attributes and their indices. Return array of max/min diffs of attributes. | train | https://github.com/EpistasisLab/scikit-rebate/blob/67dab51a7525fa5d076b059f1e6f8cff7481c1ef/skrebate/relieff.py#L336-L354 | null | class ReliefF(BaseEstimator):
"""Feature selection using data-mined expert knowledge.
Based on the ReliefF algorithm as introduced in:
Igor et al. Overcoming the myopia of inductive learning
algorithms with RELIEFF (1997), Applied Intelligence, 7(1), p39-55"""
"""Note that ReliefF class establishes core functionality that is inherited by all other Relief-based algorithms.
Assumes: * There are no missing values in the label/outcome/dependent variable.
* For ReliefF, the setting of k is <= to the number of instances that have the least frequent class label
(binary and multiclass endpoint data. """
def __init__(self, n_features_to_select=10, n_neighbors=100, discrete_threshold=10, verbose=False, n_jobs=1):
"""Sets up ReliefF to perform feature selection. Note that an approximation of the original 'Relief'
algorithm may be run by setting 'n_features_to_select' to 1. Also note that the original Relief parameter 'm'
is not included in this software. 'm' specifies the number of random training instances out of 'n' (total
training instances) used to update feature scores. Since scores are most representative when m=n, all
available training instances are utilized in all Relief-based algorithm score updates here. If the user
wishes to utilize a smaller 'm' in Relief-based scoring, simply pass any of these algorithms a subset of the
original training dataset samples.
Parameters
----------
n_features_to_select: int (default: 10)
the number of top features (according to the relieff score) to
retain after feature selection is applied.
n_neighbors: int or float (default: 100)
The number of neighbors to consider when assigning feature
importance scores. If a float number is provided, that percentage of
training samples is used as the number of neighbors.
More neighbors results in more accurate scores, but takes longer.
discrete_threshold: int (default: 10)
Value used to determine if a feature is discrete or continuous.
If the number of unique levels in a feature is > discrete_threshold, then it is
considered continuous, or discrete otherwise.
verbose: bool (default: False)
If True, output timing of distance array and scoring
n_jobs: int (default: 1)
The number of cores to dedicate to computing the scores with joblib.
Assigning this parameter to -1 will dedicate as many cores as are available on your system.
We recommend setting this parameter to -1 to speed up the algorithm as much as possible.
"""
self.n_features_to_select = n_features_to_select
self.n_neighbors = n_neighbors
self.discrete_threshold = discrete_threshold
self.verbose = verbose
self.n_jobs = n_jobs
#=========================================================================#
    def fit(self, X, y):
        """Scikit-learn required: Computes the feature importance scores from the training data.

        Parameters
        ----------
        X: array-like {n_samples, n_features}
            Training instances to compute the feature importance scores from
        y: array-like {n_samples}
            Training labels

        Returns
        -------
        Copy of the ReliefF instance (self), with `feature_importances_` and
        `top_features_` populated.
        """
        self._X = X  # matrix of predictive variables ('independent variables')
        self._y = y  # vector of values for outcome variable ('dependent variable')
        # Set up the properties for ReliefF -------------------------------------------------------------------------------------
        self._datalen = len(self._X)  # Number of training instances ('n')
        """"Below: Handles special case where user requests that a proportion of training instances be neighbors for
        ReliefF rather than a specified 'k' number of neighbors. Note that if k is specified, then k 'hits' and k
        'misses' will be used to update feature scores. Thus total number of neighbors is 2k. If instead a proportion
        is specified (say 0.1 out of 1000 instances) this represents the total number of neighbors (e.g. 100). In this
        case, k would be set to 50 (i.e. 50 hits and 50 misses). """
        if hasattr(self, 'n_neighbors') and type(self.n_neighbors) is float:
            # Halve the number of neighbors because ReliefF uses n_neighbors matches and n_neighbors misses
            self.n_neighbors = int(self.n_neighbors * self._datalen * 0.5)
        # Number of unique outcome (label) values (used to determine outcome variable type)
        self._label_list = list(set(self._y))
        # Determine if label is discrete
        discrete_label = (len(self._label_list) <= self.discrete_threshold)
        # Identify label type (binary, multiclass, or continuous)
        if discrete_label:
            if len(self._label_list) == 2:
                self._class_type = 'binary'
                self.mcmap = 0
            elif len(self._label_list) > 2:
                self._class_type = 'multiclass'
                self.mcmap = self._getMultiClassMap()
            else:
                raise ValueError('All labels are of the same class.')
        else:
            self._class_type = 'continuous'
            self.mcmap = 0
        # Training labels standard deviation -- only used if the training labels are continuous
        # (sample standard deviation, ddof=1)
        self._labels_std = 0.
        if len(self._label_list) > self.discrete_threshold:
            self._labels_std = np.std(self._y, ddof=1)
        self._num_attributes = len(self._X[0])  # Number of features in training data
        # Number of missing data values in predictor variable matrix.
        self._missing_data_count = np.isnan(self._X).sum()
        """Assign internal headers for the features (scikit-learn does not accept external headers from dataset):
        The pre_normalize() function relies on the headers being ordered, e.g., X01, X02, etc.
        If this is changed, then the sort in the pre_normalize() function needs to be adapted as well. """
        xlen = len(self._X[0])
        mxlen = len(str(xlen + 1))
        # Zero-padded names (X01, X02, ...) keep lexicographic sort == column order.
        self._headers = ['X{}'.format(str(i).zfill(mxlen)) for i in range(1, xlen + 1)]
        start = time.time()  # Runtime tracking
        # Determine data types for all features/attributes in training data (i.e. discrete or continuous)
        C = D = False
        # Examines each feature and applies discrete_threshold to determine variable type.
        self.attr = self._get_attribute_info()
        for key in self.attr.keys():
            if self.attr[key][0] == 'discrete':
                D = True
            if self.attr[key][0] == 'continuous':
                C = True
        # For downstream computational efficiency, determine if dataset is comprised of all discrete, all continuous, or a mix of discrete/continuous features.
        if C and D:
            self.data_type = 'mixed'
        elif D and not C:
            self.data_type = 'discrete'
        elif C and not D:
            self.data_type = 'continuous'
        else:
            raise ValueError('Invalid data type in data set.')
        #--------------------------------------------------------------------------------------------------------------------
        # Compute the distance array between all data points ----------------------------------------------------------------
        # For downstream efficiency, separate features in dataset by type (i.e. discrete/continuous)
        diffs, cidx, didx = self._dtype_array()
        cdiffs = diffs[cidx]  # max/min continuous value difference for continuous features.
        xc = self._X[:, cidx]  # Subset of continuous-valued feature data
        xd = self._X[:, didx]  # Subset of discrete-valued feature data
        """ For efficiency, the distance array is computed more efficiently for data with no missing values.
        This distance array will only be used to identify nearest neighbors. """
        if self._missing_data_count > 0:
            self._distance_array = self._distarray_missing(xc, xd, cdiffs)
        else:
            self._distance_array = self._distarray_no_missing(xc, xd)
        if self.verbose:
            elapsed = time.time() - start
            print('Created distance array in {} seconds.'.format(elapsed))
            print('Feature scoring under way ...')
            start = time.time()
        #--------------------------------------------------------------------------------------------------------------------
        # Run remainder of algorithm (i.e. identification of 'neighbors' for each instance, and feature scoring).------------
        # Stores feature importance scores for ReliefF or respective Relief-based algorithm.
        self.feature_importances_ = self._run_algorithm()
        # Delete the internal distance array because it is no longer needed
        del self._distance_array
        if self.verbose:
            elapsed = time.time() - start
            print('Completed scoring in {} seconds.'.format(elapsed))
        # Compute indices of top features, best score first
        self.top_features_ = np.argsort(self.feature_importances_)[::-1]
        return self
#=========================================================================#
def transform(self, X):
"""Scikit-learn required: Reduces the feature set down to the top `n_features_to_select` features.
Parameters
----------
X: array-like {n_samples, n_features}
Feature matrix to perform feature selection on
Returns
-------
X_reduced: array-like {n_samples, n_features_to_select}
Reduced feature matrix
"""
if self._num_attributes < self.n_features_to_select:
raise ValueError('Number of features to select is larger than the number of features in the dataset.')
return X[:, self.top_features_[:self.n_features_to_select]]
#=========================================================================#
def fit_transform(self, X, y):
"""Scikit-learn required: Computes the feature importance scores from the training data, then reduces the feature set down to the top `n_features_to_select` features.
Parameters
----------
X: array-like {n_samples, n_features}
Training instances to compute the feature importance scores from
y: array-like {n_samples}
Training labels
Returns
-------
X_reduced: array-like {n_samples, n_features_to_select}
Reduced feature matrix
"""
self.fit(X, y)
return self.transform(X)
######################### SUPPORTING FUNCTIONS ###########################
def _getMultiClassMap(self):
""" Relief algorithms handle the scoring updates a little differently for data with multiclass outcomes. In ReBATE we implement multiclass scoring in line with
the strategy described by Kononenko 1994 within the RELIEF-F variant which was suggested to outperform the RELIEF-E multiclass variant. This strategy weights
score updates derived from misses of different classes by the class frequency observed in the training data. 'The idea is that the algorithm should estimate the
ability of attributes to separate each pair of classes regardless of which two classes are closest to each other'. In this method we prepare for this normalization
by creating a class dictionary, and storing respective class frequencies. This is needed for ReliefF multiclass score update normalizations. """
mcmap = dict()
for i in range(self._datalen):
if(self._y[i] not in mcmap):
mcmap[self._y[i]] = 0
else:
mcmap[self._y[i]] += 1
for each in self._label_list:
mcmap[each] = mcmap[each]/float(self._datalen)
return mcmap
def _get_attribute_info(self):
""" Preprocess the training dataset to identify which features/attributes are discrete vs. continuous valued. Ignores missing values in this determination."""
attr = dict()
d = 0
limit = self.discrete_threshold
w = self._X.transpose()
for idx in range(len(w)):
h = self._headers[idx]
z = w[idx]
if self._missing_data_count > 0:
z = z[np.logical_not(np.isnan(z))] # Exclude any missing values from consideration
zlen = len(np.unique(z))
if zlen <= limit:
attr[h] = ('discrete', 0, 0, 0, 0)
d += 1
else:
mx = np.max(z)
mn = np.min(z)
sd = np.std(z)
attr[h] = ('continuous', mx, mn, mx - mn, sd)
# For each feature/attribute we store (type, max value, min value, max min difference, average, standard deviation) - the latter three values are set to zero if feature is discrete.
return attr
    def _distarray_no_missing(self, xc, xd):
        """Distance array calculation for data with no missing values.

        pdist() outputs a condensed distance array and squareform() expands that
        vector form into a square, redundant distance matrix.
        NOTE: this could be a target for saving memory in the future, by not
        needing to expand to the redundant square-form matrix.
        """
        from scipy.spatial.distance import pdist, squareform
        #------------------------------------------#

        def pre_normalize(x):
            """Normalizes continuous features so they are in the same range (0 to 1).
            Mutates x in place and also returns it."""
            idx = 0
            # Only continuous columns are present in x, so discrete headers are
            # skipped. Iterating sorted headers relies on the zero-padded
            # X01, X02, ... names assigned in fit() matching column order.
            for i in sorted(self.attr.keys()):
                if self.attr[i][0] == 'discrete':
                    continue
                cmin = self.attr[i][2]
                diff = self.attr[i][3]
                x[:, idx] -= cmin
                x[:, idx] /= diff
                idx += 1
            return x
        #------------------------------------------#
        if self.data_type == 'discrete':  # discrete features only
            return squareform(pdist(self._X, metric='hamming'))
        elif self.data_type == 'mixed':  # mix of discrete and continuous features
            d_dist = squareform(pdist(xd, metric='hamming'))
            # Cityblock is also known as Manhattan distance
            c_dist = squareform(pdist(pre_normalize(xc), metric='cityblock'))
            return np.add(d_dist, c_dist) / self._num_attributes
        else:  # continuous features only
            return squareform(pdist(pre_normalize(xc), metric='cityblock'))
#==================================================================#
#==================================================================#
def _distarray_missing(self, xc, xd, cdiffs):
"""Distance array calculation for data with missing values"""
cindices = []
dindices = []
# Get Boolean mask locating missing values for continuous and discrete features separately. These correspond to xc and xd respectively.
for i in range(self._datalen):
cindices.append(np.where(np.isnan(xc[i]))[0])
dindices.append(np.where(np.isnan(xd[i]))[0])
if self.n_jobs != 1:
dist_array = Parallel(n_jobs=self.n_jobs)(delayed(get_row_missing)(
xc, xd, cdiffs, index, cindices, dindices) for index in range(self._datalen))
else:
# For each instance calculate distance from all other instances (in non-redundant manner) (i.e. computes triangle, and puts zeros in for rest to form square).
dist_array = [get_row_missing(xc, xd, cdiffs, index, cindices, dindices)
for index in range(self._datalen)]
return np.array(dist_array)
#==================================================================#
############################# ReliefF ############################################
    def _find_neighbors(self, inst):
        """Identify k nearest hits and k nearest misses for the given instance.

        Neighbor selection depends on the endpoint type (binary, multiclass, or
        continuous). With binary/continuous endpoints up to k hits and k misses are
        collected; with multiclass endpoints up to k misses are collected per
        non-target class.

        Parameters
        ----------
        inst: int
            Index of the target training instance.

        Returns
        -------
        np.ndarray
            Indices of the selected neighbor instances (hits and misses mixed,
            in increasing-distance order).
        """
        # Make a vector of distances between target instance (inst) and all others.
        # dist_vect[j] corresponds to instance j because the target's own slot is
        # filled with sys.maxsize below.
        dist_vect = []
        for j in range(self._datalen):
            if inst != j:
                locator = [inst, j]
                if inst < j:
                    # The distance array is stored triangle-style: the larger
                    # index must come first when looking up a pair distance.
                    locator.reverse()
                dist_vect.append(self._distance_array[locator[0]][locator[1]])
            else:
                # Ensures that target instance is never selected as neighbor.
                dist_vect.append(sys.maxsize)
        dist_vect = np.array(dist_vect)
        # Identify neighbors-------------------------------------------------------
        """ NN for Binary Endpoints: """
        if self._class_type == 'binary':
            nn_list = []
            match_count = 0
            miss_count = 0
            # Scan instances in increasing-distance order, keeping up to k hits
            # (same label) and up to k misses (other label).
            for nn_index in np.argsort(dist_vect):
                if self._y[inst] == self._y[nn_index]:  # Hit neighbor identified
                    if match_count >= self.n_neighbors:
                        continue
                    nn_list.append(nn_index)
                    match_count += 1
                else:  # Miss neighbor identified
                    if miss_count >= self.n_neighbors:
                        continue
                    nn_list.append(nn_index)
                    miss_count += 1
                if match_count >= self.n_neighbors and miss_count >= self.n_neighbors:
                    break
        elif self._class_type == 'multiclass':
            nn_list = []
            match_count = 0
            # One miss counter per class label; up to k misses are kept for each
            # non-target class (RELIEF-F multiclass strategy).
            miss_count = dict.fromkeys(self._label_list, 0)
            for nn_index in np.argsort(dist_vect):
                if self._y[inst] == self._y[nn_index]:  # Hit neighbor identified
                    if match_count >= self.n_neighbors:
                        continue
                    nn_list.append(nn_index)
                    match_count += 1
                else:
                    for label in self._label_list:
                        if self._y[nn_index] == label:
                            if miss_count[label] >= self.n_neighbors:
                                continue
                            nn_list.append(nn_index)
                            miss_count[label] += 1
                if match_count >= self.n_neighbors and all(v >= self.n_neighbors for v in miss_count.values()):
                    break
        else:
            # Continuous endpoint: a neighbor counts as a 'hit' when its label
            # differs from the target's by less than the labels' standard deviation.
            nn_list = []
            match_count = 0
            miss_count = 0
            for nn_index in np.argsort(dist_vect):
                if abs(self._y[inst]-self._y[nn_index]) < self._labels_std:  # Hit neighbor identified
                    if match_count >= self.n_neighbors:
                        continue
                    nn_list.append(nn_index)
                    match_count += 1
                else:  # Miss neighbor identified
                    if miss_count >= self.n_neighbors:
                        continue
                    nn_list.append(nn_index)
                    miss_count += 1
                if match_count >= self.n_neighbors and miss_count >= self.n_neighbors:
                    break
        return np.array(nn_list)
def _run_algorithm(self):
""" Runs nearest neighbor (NN) identification and feature scoring to yield ReliefF scores. """
# Find nearest neighbors
NNlist = map(self._find_neighbors, range(self._datalen))
# Feature scoring - using identified nearest neighbors
nan_entries = np.isnan(self._X) # boolean mask for missing data values
# Call the scoring method for the ReliefF algorithm
scores = np.sum(Parallel(n_jobs=self.n_jobs)(delayed(
ReliefF_compute_scores)(instance_num, self.attr, nan_entries, self._num_attributes, self.mcmap,
NN, self._headers, self._class_type, self._X, self._y, self._labels_std, self.data_type)
for instance_num, NN in zip(range(self._datalen), NNlist)), axis=0)
return np.array(scores)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.