import os
import shutil
import platform
# Handle packages
try:
from pynput.keyboard import Key, Listener, Controller
except ImportError:
print("-- Warning: pynput is not installed on this machine, auto completion will not be supported --")
print("-- You can type 'pip install pynput' to get pynput and then type 'refresh' to play with auto completion --")
Key = Listener = Controller = None
# Handle cross-platform path separators
_bad_slash, _tar_slash = "/", "\\"
if platform.system() != "Windows":
_bad_slash, _tar_slash = "\\", "/"
class Util:
@staticmethod
def msg(dt, *args):
if dt == "undefined_error":
return Util.msg(
"block_msg", "Error\n",
"Undefined command '{}', type 'help' for more information\n".format(args[0]))
if dt == "root_path_error":
return Util.msg(
"block_msg", "Error\n",
"Current path '{}' is the root path\n".format(args[0]))
if dt == "valid_path_error":
return Util.msg(
"block_msg", "Error\n",
"'{}' is not a valid {}\n".format(args[0], args[1]))
if dt == "block_msg":
return "=" * 30 + "\n" + args[0] + "-" * 30 + "\n" + args[1] + "-" * 30
if dt == "show_ls_message":
title, folder_lst, file_lst = args
body = (
"\n".join([" folder - {}".format(_f) for _f in folder_lst]) + "\n" +
"\n".join([" file - {}".format(_f) for _f in file_lst]) + "\n"
) if file_lst or folder_lst else "-- Empty --\n"
return Util.msg("block_msg", title, body)
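    # Illustrative output of the "block_msg" layout built above:
    #   Util.msg("block_msg", "Error\n", "something went wrong\n")
    # renders as:
    #   ==============================
    #   Error
    #   ------------------------------
    #   something went wrong
    #   ------------------------------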
@staticmethod
def get_cmd(msg):
if Listener is not None:
with Listener(on_press=CmdTool.on_press, on_release=CmdTool.on_release) as listener:
cmd = input(msg).strip()
listener.join()
return cmd.replace(_bad_slash, _tar_slash)
else:
cmd = input(msg).strip()
return cmd.replace(_bad_slash, _tar_slash)
@staticmethod
def get_formatted_error(err):
return "-- {} --\n".format(err)
@staticmethod
def get_clean_path(path, user_platform):
if user_platform == "Windows":
while path.find("\\\\") >= 0:
path = path.replace("\\\\", "\\")
else:
while path.find("//") >= 0:
path = path.replace("//", "/")
while len(path) > 3 and path[-1] == _tar_slash:
path = path[:-1]
return path
@staticmethod
def get_short_path(path, length):
if len(path) >= length:
return "..{}".format(path[len(path) - length:len(path)])
return path
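    # Hedged usage sketch for the two path helpers above (assuming a Windows host,
    # where _tar_slash is "\\"):
    #   Util.get_clean_path("C:\\a\\\\b\\", "Windows")  ->  "C:\\a\\b"
    #   Util.get_short_path("/very/long/path", 6)       ->  "..g/path"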
@staticmethod
def get_two_paths(cmd_tool, msgs, args):
cmd = args[0].split()
first_flag, second_flag = False, False
first_arg, second_arg = "", ""
if len(cmd) == 1:
first_flag = second_flag = True
elif len(cmd) == 2:
first_arg = cmd[1]
second_flag = True
elif len(cmd) == 3:
first_arg, second_arg = cmd[1:]
if first_flag:
first_arg = Util.get_cmd(msgs[0])
if second_flag:
second_arg = Util.get_cmd(msgs[1])
        first_path = cmd_tool.file_path if first_arg == "." else CmdTool.get_path(first_arg)
        second_path = cmd_tool.file_path if second_arg == "." else CmdTool.get_path(second_arg)
return first_path, second_path
@staticmethod
def show_help_msg(dt):
# Basic
if dt == "help":
print("Help (help) -> type 'help' to see available commands,\n"
" type 'help **' to see the function of **")
elif dt == "cd":
print("Help (cd) -> used to get into a folder")
elif dt == "ls":
print("Help (ls) -> used to view files & folders")
elif dt == "rm":
print("Help (rm) -> used to remove files or folders")
elif dt == "mk":
print("Help (mk) -> used to make a file or a folder\n"
" it is recommended to type 'mk (dir) (type) (name)'\n"
" dir == '.' -> direct to current folder\n"
" assert(type in ('folder', 'file'))")
elif dt == "mv":
print("Help (mv) -> used to move a file or a folder\n"
" it is recommended to type 'mv (old_dir) (new_dir)'\n")
elif dt == "refresh":
print("Help (refresh) -> used to refresh this command line tool.\n"
" you'll be sent back to 'Root' status")
elif dt == "exit":
print("Help (exit) -> used to exit current status.\n"
" used to exit this command line tool when you're under 'Root' status")
elif dt == "config":
print("Help (config) -> type 'config' to change configurations of all available tools,\n"
" type 'config rename' to change configurations of 'Rename' tool")
# Root
elif dt == "rename":
print("Help (rename) -> get use of 'Rename' tool")
elif dt == "python":
print("Help (python) -> get use of Python")
# Rename
elif dt == "folder":
print("Help (folder) -> used to rename a folder in current folder\n"
" it is recommended to type 'folder (old_name) (new_name)'\n")
elif dt == "file":
print("Help (file) -> used to rename a file in current folder\n"
" it is recommended to type 'file (old_name) (new_name)'\n")
else:
raise NotImplementedError
@staticmethod
def do_system_default(cmd):
print(Util.msg("block_msg", "Caution\n", "Running default command in Windows command line\n"))
os.system(cmd)
class CmdTool:
_platform = platform.system()
_file_path = os.getcwd()
    _self_path = "\"{}{}pycmd.py\"".format(_file_path, _tar_slash)  # quote the full script path
_current_command = []
_do_auto_complete = True
_auto_complete = ""
_auto_complete_lst = []
_auto_complete_track = []
_auto_complete_flag = False
_auto_complete_cursor = 0
_auto_complete_finish = True
if Controller is not None:
_keyboard = Controller()
else:
_keyboard = None
@classmethod
def clear_cache(cls):
cls._current_command = []
cls._do_auto_complete = True
cls._auto_complete = ""
cls._auto_complete_lst = []
cls._auto_complete_track = []
@classmethod
def click(cls, key):
cls._keyboard.press(key)
cls._keyboard.release(key)
@classmethod
def on_press(cls, key):
if key == Key.tab and cls._current_command and cls._do_auto_complete:
cls._auto_complete_flag = True
cls._auto_complete_finish = False
full_cmd = "".join(cls._current_command).split()
cmd = full_cmd[0]
if not cls._auto_complete_track:
pth_head = full_cmd[-1]
pth_head_len = len(pth_head)
else:
pth_head = "".join(cls._current_command).split(_tar_slash)[-1]
pth_head_len = len(pth_head)
track = _tar_slash.join(cls._auto_complete_track)
cls._auto_complete_lst = [_f for _f in os.listdir(cls.get_path(track)) if _f[:pth_head_len] == pth_head]
add_back_slash = False
if cmd in ("cd", "ls"):
cls._auto_complete_lst = [
_f for _f in cls._auto_complete_lst if os.path.isdir(cls.get_path(track + _tar_slash + _f))]
add_back_slash = True
if len(cls._auto_complete_lst) == 1:
cls._auto_complete = cls._auto_complete_lst[0][pth_head_len:]
else:
cls._auto_complete = ""
cls._current_command.append("\t")
cls.click(Key.backspace)
if cls._auto_complete:
cls._auto_complete_track.append(pth_head + cls._auto_complete)
if add_back_slash:
cls._auto_complete += _tar_slash
cls._keyboard.type(cls._auto_complete)
cls._auto_complete_flag = False
if not cls._auto_complete:
cls._auto_complete_finish = True
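    # Rough flow of the Tab handler above: the keystrokes tracked so far are split
    # into command + path head, matching directory entries are listed, and a unique
    # match is typed back through the virtual keyboard (the synthetic Tab itself is
    # cancelled with a Backspace).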
@classmethod
def on_release(cls, key):
if not cls._auto_complete_flag:
if key == Key.enter:
cls.clear_cache()
return False
if key == Key.left or key == Key.right or key == Key.up:
cls._do_auto_complete = False
elif key == Key.backspace:
if cls._auto_complete_finish:
cls._do_auto_complete = False
if cls._current_command:
cls._current_command.pop()
elif key == Key.space and cls._auto_complete_finish:
cls._auto_complete_track = []
cls._current_command.append(" ")
elif key == Key.tab:
pass
else:
if cls._auto_complete_finish:
char = str(key)[1:-1].strip()
if len(char) == 1:
cls._current_command.append(char)
elif char == "\\\\":
cls._current_command.append("\\")
elif char == "/":
cls._current_command.append("/")
else:
if cls._auto_complete_cursor < len(cls._auto_complete):
cls._current_command.append(cls._auto_complete[cls._auto_complete_cursor])
cls._auto_complete_cursor += 1
if cls._auto_complete_cursor == len(cls._auto_complete):
cls._auto_complete_cursor = 0
cls._auto_complete_finish = True
@classmethod
def get_path(cls, ad):
if not ad or ad == ".":
return cls._file_path
return cls._file_path + _tar_slash + ad
@classmethod
def get_path_from_cmd(cls, raw_cmd, cmd_type, tar_type, base_path=None, allow_no_param=True, custom_msg=""):
if base_path is None:
tmp_path = Util.get_clean_path(cls._file_path, CmdTool._platform)
else:
tmp_path = Util.get_clean_path(base_path, CmdTool._platform)
if cmd_type and not allow_no_param:
if raw_cmd == cmd_type:
if not custom_msg:
raw_cmd = cmd_type + " " + Util.get_cmd("{} -> ".format(cmd_type))
else:
raw_cmd = cmd_type + " " + Util.get_cmd(custom_msg)
if raw_cmd[2] != " ":
print(Util.msg("undefined_error", raw_cmd))
return tmp_path
if not raw_cmd or raw_cmd == ".":
return tmp_path
elif (not raw_cmd and not cmd_type) or (allow_no_param and raw_cmd == cmd_type):
raw_cmd = "** ."
if len(raw_cmd) > 3:
raw_cmd = raw_cmd[3:]
if raw_cmd[0] == ".":
dot_counter = 1
while dot_counter < len(raw_cmd) and raw_cmd[dot_counter] == ".":
dot_counter += 1
pth_length = tmp_path.count(_tar_slash) + 1
if dot_counter > pth_length:
print(Util.msg("block_msg", "Path Error\n",
"-- Too many '.': {} which exceed {} --".format(dot_counter, pth_length)))
return tmp_path
if dot_counter == len(raw_cmd):
while dot_counter > 1:
tmp_path = tmp_path[:tmp_path.rfind(_tar_slash)]
dot_counter -= 1
else:
addition_path = raw_cmd[dot_counter:]
while dot_counter > 1:
tmp_path = tmp_path[:tmp_path.rfind(_tar_slash)]
dot_counter -= 1
tmp_path += _tar_slash + addition_path
tmp_path = Util.get_clean_path(tmp_path, CmdTool._platform)
else:
tmp_path = Util.get_clean_path(CmdTool.get_path(raw_cmd), CmdTool._platform)
if tar_type != "all" and (
(tar_type.find("folder") >= 0 and not os.path.isdir(tmp_path)) or
(tar_type.find("file") >= 0and not os.path.isfile(tmp_path))
):
print(Util.msg("valid_path_error", tmp_path, tar_type))
return tmp_path
return tmp_path
@property
def file_path(self):
return self._file_path
def __init__(self, file_path=None):
self._status = "root"
if file_path is None:
self._file_path = CmdTool._file_path
else:
self._file_path = CmdTool._file_path = file_path
self._common_command = ("help", "config", "refresh", "exit")
self._advance_command = ("cd", "ls", "rm", "mk", "mv", "cp")
self._break_command = ("refresh", "exit")
def _get_cmd(self, msg, format_path=False):
def _cmd():
if format_path:
return Util.get_cmd(msg.format(self._file_path))
return Util.get_cmd(msg)
cmd = _cmd()
while self._do_common_work(cmd):
if self._status == "exit":
return "exit"
cmd = _cmd()
return cmd
def _cd(self, args):
self._file_path = CmdTool.get_path_from_cmd(args[0], "cd", "folder", allow_no_param=False)
self._update_path()
return
@classmethod
def _ls(cls, args):
pth = cls.get_path_from_cmd(args[0], "ls", "folder")
folder_lst, file_lst = [], []
for _f in os.listdir(pth):
_p = pth + _tar_slash + _f
if os.path.isdir(_p):
folder_lst.append(_f)
elif os.path.isfile(_p):
file_lst.append(_f)
try:
print(Util.msg("show_ls_message", "Files & Folders in '{}':\n".format(pth), folder_lst, file_lst))
except UnicodeEncodeError as err:
print(Util.msg("block_msg", "Encoding Error\n", Util.get_formatted_error(err)))
def _rm(self, args):
pth = CmdTool.get_path_from_cmd(args[0], "rm", "all")
if not os.path.isdir(pth):
if not os.path.isfile(pth):
print(Util.msg("block_msg", "Path Error\n", "-- '{}' not exists\n --".format(pth)))
else:
print(Util.msg("block_msg", "Removing\n", pth + "\n"))
if self._get_cmd("(Root) Sure to proceed ? (y/n) -> ").lower() == "y":
try:
os.remove(pth)
except PermissionError as err:
print(Util.msg("block_msg", "Permission Error\n", Util.get_formatted_error(err)))
return
folder_lst, file_lst = [], []
for _f in os.listdir(pth):
_p = pth + _tar_slash + _f
if os.path.isdir(_p):
folder_lst.append(_f)
elif os.path.isfile(_p):
file_lst.append(_f)
try:
print(Util.msg("show_ls_message", "Removing '{}'\nWhich contains\n".format(pth), folder_lst, file_lst))
except UnicodeEncodeError as err:
print(Util.msg("block_msg", "Encoding Error\n", Util.get_formatted_error(err)))
if self._get_cmd("(Root) Sure to proceed ? (y/n) -> ").lower() == "y":
try:
if os.path.isfile(pth):
os.remove(pth)
else:
if pth == self._file_path:
self._do_common("cd", "cd ..")
shutil.rmtree(pth)
except PermissionError as err:
print(Util.msg("block_msg", "Permission Error\n", Util.get_formatted_error(err)))
else:
print("(Root) Caution -> Nothing happened")
def _mk(self, args):
mk = args[0].split()
path_flag, type_flag, name_flag = False, False, False
mk_path, mk_type, mk_name = "", "", ""
if len(mk) == 1:
path_flag = type_flag = name_flag = True
elif len(mk) == 2:
mk_path = mk[1]
type_flag = name_flag = True
elif len(mk) == 3:
mk_path, mk_type = mk[1:]
name_flag = True
else:
mk_path, mk_type = mk[1:3]
mk_name = " ".join(mk[3:])
if path_flag:
mk_path = CmdTool.get_path(Util.get_cmd("(mk) dir -> "))
else:
mk_path = self._file_path if mk_path == "." else CmdTool.get_path(mk_path)
if type_flag:
mk_type = Util.get_cmd("(mk) type (folder or file) -> ")
if name_flag:
mk_name = Util.get_cmd("(mk) name -> ")
if not os.path.isdir(mk_path):
print(Util.msg("valid_path_error", mk_path, "folder"))
return
if not mk_type or mk_type not in ("folder", "file"):
print("(Root) Caution -> Nothing happened")
return
mk_dir = mk_path + _tar_slash + mk_name
if mk_type == "folder":
try:
os.mkdir(mk_dir)
except FileExistsError as err:
print(Util.msg("block_msg", "File Exists Error\n", Util.get_formatted_error(err)))
elif mk_type == "file":
if os.path.isfile(mk_dir):
print(Util.msg("block_msg", "Error\n", "'{}' already exists\n".format(mk_dir)))
else:
try:
with open(mk_dir, "w"):
pass
except PermissionError as err:
print(Util.msg("block_msg", "Permission Error\n", Util.get_formatted_error(err)))
def _mv(self, args):
old_path, new_path = Util.get_two_paths(
self, ("(mv) Old dir -> ", "(mv) New dir -> "), args
)
        if not os.path.isfile(old_path) and not os.path.isdir(old_path):
            print(Util.msg("valid_path_error", old_path, "dir"))
            return
try:
shutil.move(old_path, new_path)
except FileExistsError as err:
print(Util.msg("block_msg", "File Exists Error\n", Util.get_formatted_error(err)))
def _cp(self, args):
old_path, new_path = Util.get_two_paths(
self, ("(cp) Old dir -> ", "(cp) New dir -> "), args
)
        if not os.path.isfile(old_path) and not os.path.isdir(old_path):
            print(Util.msg("valid_path_error", old_path, "dir"))
            return
try:
if os.path.isfile(old_path):
shutil.copyfile(old_path, new_path)
else:
shutil.copytree(old_path, new_path)
except FileExistsError as err:
print(Util.msg("block_msg", "File Exists Error\n", Util.get_formatted_error(err)))
def _help(self, args):
pass
def _config(self, args):
pass
def _check_path(self, cmd):
if len(cmd) >= 2 and cmd[1] == ":":
if len(cmd) == 2:
cmd += _tar_slash
if os.path.isdir(cmd):
self._file_path = cmd
self._update_path()
return True
return False
def _do_common(self, dt, *args):
try:
if dt in self._break_command:
self._status = "exit"
if dt == "refresh":
os.system("python {}".format(CmdTool._self_path))
return False
if dt == "help":
self._help(args)
elif dt == "config":
self._config(args)
elif dt == "cd":
self._cd(args)
elif dt == "ls":
self._ls(args)
elif dt == "rm":
self._rm(args)
elif dt == "mk":
self._mk(args)
elif dt == "mv":
self._mv(args)
elif dt == "cp":
self._cp(args)
except Exception as err:
print(Util.msg("block_msg", "-- Special Error --\n", Util.get_formatted_error(err)))
return True
def _do_common_work(self, cmd):
if self._status == "exit":
return False
if self._check_path(cmd):
return True
if cmd in self._common_command:
return self._do_common(cmd)
if cmd[:2] in self._advance_command:
self._do_common(cmd[:2], cmd)
elif cmd[:4] == "help":
self._do_common("help", cmd[4:].strip())
elif cmd[:6] == "config":
self._do_common("config", cmd[6:].strip())
else:
return False
return True
def _renew_path(self, parent):
parent._file_path = self._file_path
def _update_path(self):
os.chdir(self._file_path)
CmdTool._file_path = self._file_path
class Rename(CmdTool):
def __init__(self, file_path, pl, fl, whether_preview_detail, parent):
CmdTool.__init__(self, file_path)
self._parent = parent
self._basic_batch_command = ("folders", "files")
self._basic_command = ("folder", "file")
self._special_command = ("batch", "seq")
self._root_commands = [
self._common_command, self._advance_command,
self._basic_batch_command, self._basic_command,
self._special_command
]
self._batch_commands = [
self._common_command, self._advance_command,
self._basic_batch_command, self._basic_command
]
self._batch_lst = []
self._log_lst = []
self._err_lst = []
self._default_pl = pl
self._default_fl = fl
self._prev_detail = whether_preview_detail
self._path_length = pl
self._fn_length = fl
self._fn_max = 10 ** self._fn_length - 1
def _rename(self, args):
old_path, new_path = Util.get_two_paths(
self, (
"{} (Rename {}) Old name -> ".format(args[1], args[0]),
"{} (Rename {}) New name -> ".format(args[1], args[0])
), args
)
if (
(args[0] == "file" and not os.path.isfile(old_path)) or
(args[0] == "folder" and not os.path.isdir(old_path))
):
print(Util.msg("valid_path_error", old_path, args[0]))
return
try:
os.rename(old_path, new_path)
except FileExistsError as err:
print(Util.msg("block_msg", "File Exists Error\n", Util.get_formatted_error(err)))
def _rename_batch(self, tar_type, tar_path=None, show_preview=True):
tar_path = self._file_path if tar_path is None else tar_path
if tar_type not in self._basic_batch_command:
first_space = tar_type.find(" ")
tar_type, addition_dir = tar_type[:first_space], tar_type[first_space + 1:]
try:
file_lst = os.listdir(tar_path)
except FileNotFoundError as err:
self._err_lst.append(Util.get_formatted_error(err))
return
counter = 0
pipeline = []
for i, file in enumerate(file_lst):
old_dir = os.path.join(tar_path, file)
if tar_type == "files" and os.path.isdir(old_dir):
continue
if tar_type == "folders" and os.path.isfile(old_dir):
continue
new_name = str(counter).zfill(self._fn_length)
extension = os.path.splitext(file)[1]
new_dir = os.path.join(tar_path, new_name + extension)
pipeline.append((old_dir, new_dir))
counter += 1
if not pipeline:
return
flag = True
if self._prev_detail and show_preview:
self._preview_batch_detail(pipeline)
_proceed = self._get_cmd("(Rename Batch) Sure to proceed ? (y/n) (Default: y) -> ")
if _proceed and _proceed.lower() != "y":
flag = False
if not flag:
return
for old_dir, new_dir in pipeline:
try:
os.rename(old_dir, new_dir)
self._log_lst.append("{} -> {}".format(
Util.get_short_path(old_dir, self._path_length),
Util.get_short_path(new_dir, self._path_length)
))
except FileExistsError as err:
self._err_lst.append(Util.get_formatted_error(err))
def _do(self, dt, *args):
if dt == "batch":
self._batch()
self._status = "root"
elif dt == "finish_batch":
self._finish_batch(args)
elif dt == "handle_batch_result":
self._handle_batch_result()
elif dt == "seq":
self._seq()
self._status = "root"
elif dt == "rename":
self._rename(args)
else:
raise NotImplementedError
def _help(self, args):
if not args or not args[0]:
if self._status == "root":
rs = "\n".join([", ".join(_cmd) for _cmd in self._root_commands]) + "\n"
elif self._status in self._special_command:
rs = "\n".join([", ".join(_cmd) for _cmd in self._batch_commands]) + "\n"
else:
raise NotImplementedError
print(Util.msg("block_msg", "Available commands:\n", rs))
else:
dt = args[0]
if dt in self._common_command or dt in self._advance_command:
Util.show_help_msg(dt)
elif dt == "folders":
print("Help (folders) -> used to rename folders in current folder.\n"
" By default, they will be like '{}', '{}' and so on".format(
"0".zfill(self._fn_length), "1".zfill(self._fn_length)))
elif dt == "files":
print("Help (files) -> used to rename files in current folder.\n"
" They will be like '{}.***', '{}.***' and so on.\n"
" If you want to rename the files on your own, "
"please enter 'Batch' status".format(
"0".zfill(self._fn_length), "1".zfill(self._fn_length)))
elif dt in self._basic_command:
Util.show_help_msg(dt)
elif dt == "batch":
print("Help (batch) -> only available under 'Rename Root' status, used to enter 'Batch' status.\n"
" You can build a 'pipeline' under 'Batch' status")
elif dt == "seq":
print("Help (seq) -> only available under 'Rename Root' status, "
"used to enter 'Sequence' ('Seq') status.\n"
" You can use it to do specific sequential work. Useful only if:\n"
" 1) you want to rename files or folders in folders naming '0000', '0001', ...\n"
" 2) you want to rename them to "
"'{}.***', '{}.***', ... or "
"'{}', '{}', ...".format(
"0".zfill(self._fn_length), "1".zfill(self._fn_length),
"0".zfill(self._fn_length), "1".zfill(self._fn_length)
))
else:
print("Help (error) -> '{}' is not a valid command".format(dt))
def _config(self, args):
if not args or not args[0]:
try:
self._fn_length = int(Util.get_cmd("(Rename Config) (file_name_length) -> "))
self._path_length = int(Util.get_cmd("(Rename Config) (path_shown_length) -> "))
except ValueError as err:
print(Util.msg("block_msg", "Value Error\n", Util.get_formatted_error(err)))
self._fn_length, self._path_length = self._default_fl, self._default_pl
self._fn_max = 10 ** self._fn_length - 1
else:
dt = args[0]
if dt == "fl":
try:
self._fn_length = int(Util.get_cmd("(Rename Config) (file_name_length) -> "))
except ValueError as err:
print(Util.msg("block_msg", "Value Error\n", Util.get_formatted_error(err)))
self._fn_length = self._default_fl
self._fn_max = 10 ** self._fn_length - 1
elif dt == "pl":
try:
self._path_length = int(Util.get_cmd("(Rename Config) (path_shown_length) -> "))
except ValueError as err:
print(Util.msg("block_msg", "Value Error\n", Util.get_formatted_error(err)))
self._path_length = self._default_pl
def _batch(self):
self._status = "batch"
while True:
new_cmd = self._get_cmd("(Rename Batch) {} -> ", True)
if self._status == "exit":
break
if new_cmd == "end":
if self._batch_lst:
self._preview_batch()
self._do("finish_batch")
break
elif new_cmd in self._basic_batch_command:
addition_dir = self._get_cmd("(Rename Batch) (Rename {}) dir -> ".format(new_cmd))
tmp_path = CmdTool.get_path_from_cmd(addition_dir, "", "folder")
if not os.path.isdir(tmp_path):
print(Util.msg("valid_path_error", tmp_path, "folder"))
continue
self._batch_lst.append((new_cmd, tmp_path))
elif new_cmd in self._basic_command:
while True:
self._do("rename", new_cmd, "(Rename Batch)")
_continue = self._get_cmd("(Rename Batch) Continue ? (y/n) (default: y) -> ")
if _continue and _continue.lower() != "y":
break
else:
print(Util.msg("undefined_error", new_cmd))
def _preview_batch_detail(self, pipeline):
rs = "\n".join([
Util.get_short_path(_old, self._path_length) + " -> " + Util.get_short_path(_new, self._path_length)
for _old, _new in pipeline
]) + "\n"
print(Util.msg("block_msg", "Batch Detail\n", rs))
def _preview_batch(self):
rs = "\n".join([
Util.get_short_path(_p, self._path_length) + " -> {:>8s}".format(_c)
for _c, _p in self._batch_lst
]) + "\n"
print(Util.msg("block_msg", "Batch Preview\n", rs))
def _finish_batch(self, args):
args = True if not args else args[0]
if len(self._batch_lst) != 0:
for _c, _p in self._batch_lst:
self._rename_batch(_c, _p, args)
self._do("handle_batch_result")
self._batch_lst = []
def _handle_batch_result(self):
if self._log_lst:
print(Util.msg("block_msg", "Batch Results\n", "\n".join(self._log_lst) + "\n"))
self._log_lst = []
else:
print(Util.msg("block_msg", "Batch Results\n", "None\n"))
if self._err_lst:
print(Util.msg("block_msg", "Batch Errors\n", "\n".join(self._err_lst) + "\n"))
self._err_lst = []
def _seq(self):
self._status = "seq"
while True:
seq_cmd = self._get_cmd("(Rename Seq) {} -> ", True)
if self._status == "exit":
break
if seq_cmd in self._basic_command:
seq_cmd += "s"
if seq_cmd not in self._basic_batch_command:
print(Util.msg("undefined_error", seq_cmd))
continue
try:
seq_start = int(self._get_cmd("(Rename Seq) start point -> "))
seq_end = int(self._get_cmd("(Rename Seq) end point -> "))
except ValueError as err:
print(Util.msg("block_msg", "Value Error\n", Util.get_formatted_error(err)))
continue
if seq_end < seq_start:
print(Util.msg("block_msg", "Error\n", "start point '{}' exceeded end point '{}'\n".format(
seq_start, seq_end)))
continue
if seq_end >= self._fn_max:
print(Util.msg("block_msg", "Error\n", "end point '{}' exceeded ceiling '{}'\n".format(
seq_end, self._fn_max)))
continue
name_lst = [
Util.get_short_path(CmdTool.get_path(str(i).zfill(self._fn_length)), self._path_length)
for i in range(seq_start, seq_end + 1)
]
self._batch_lst = [
(seq_cmd, CmdTool.get_path(str(i).zfill(self._fn_length)))
for i in range(seq_start, seq_end + 1)
]
print(Util.msg("block_msg", "Sequential Task ({})\n".format(seq_cmd), "\n".join(name_lst) + "\n"))
_proceed = self._get_cmd("(Rename Seq) Sure to proceed ? (y/n) (Default: y) -> ")
if _proceed and _proceed.lower() == "y":
self._do("finish_batch", False)
def _cmd_tool(self):
while True:
new_cmd = self._get_cmd("(Rename Root) {} -> ", True)
if self._do_common_work(new_cmd):
continue
if self._status == "exit":
self._renew_path(self._parent)
break
if new_cmd in self._special_command:
self._do(new_cmd)
elif new_cmd:
cmd_lst = new_cmd.split()
if cmd_lst:
if cmd_lst[0] in self._basic_batch_command:
self._rename_batch(new_cmd)
self._do("handle_batch_result")
elif cmd_lst[0] in self._basic_command:
self._do("rename", new_cmd, "(Rename Root)")
else:
Util.do_system_default(new_cmd)
def run(self, dt="cmd"):
if dt == "cmd":
self._cmd_tool()
else:
raise NotImplementedError
class PyCmd(CmdTool):
def __init__(self):
CmdTool.__init__(self)
print("-- Welcome to pycmd, a light & extensible command line tool --")
print("-- Your platform: {} --".format(CmdTool._platform))
self._special_command = ("rename", )
self._commands = [
self._common_command, self._advance_command,
self._special_command
]
self._config = {
"rename": (16, 4, True)
}
def _help(self, args):
if not args or not args[0]:
rs = "\n".join([", ".join(_cmd) for _cmd in self._commands]) + "\n"
print(Util.msg("block_msg", "Available commands:\n", rs))
else:
dt = args[0]
if dt in self._common_command or dt in self._advance_command or dt in self._special_command:
Util.show_help_msg(dt)
else:
print("Help (error) -> '{}' is not a valid command".format(dt))
def _config(self, args):
if not args or not args[0]:
self._config_rename()
else:
dt = args[0]
if dt == "rename":
self._config_rename()
def _config_rename(self):
try:
tmp_fl = int(Util.get_cmd("Config 'Rename' (file_name_length) -> "))
tmp_pl = int(Util.get_cmd("Config 'Rename' (path_shown_length) -> "))
            tmp_pd = Util.get_cmd("Config 'Rename' (preview_batch_detail) (y/n) -> ").lower() in ("y", "yes", "true", "1")
self._config["rename"] = (tmp_fl, tmp_pl, tmp_pd)
except ValueError as err:
print(Util.msg("block_msg", "Value Error\n", Util.get_formatted_error(err)))
def _do_special(self, cmd):
self._status = cmd
if cmd == "rename":
pl, fl, pd = self._config["rename"]
rename = Rename(self._file_path, pl, fl, pd, self)
rename.run()
def run(self):
while True:
new_cmd = self._get_cmd("(Root) {} -> ", True)
if self._do_common_work(new_cmd):
continue
if self._status == "exit":
break
if new_cmd in self._special_command:
self._do_special(new_cmd)
elif new_cmd:
Util.do_system_default(new_cmd)
if __name__ == "__main__":
tool = PyCmd()
tool.run()
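# Hedged usage note: run this file directly; the 'refresh' command re-invokes it
# via os.system, so the script is assumed to be saved as 'pycmd.py' (see _self_path):
#   $ python pycmd.py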
# Copyright 2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test AsyncIOMotorCollection."""
import asyncio
import sys
import traceback
import unittest
from test.asyncio_tests import AsyncIOTestCase, asyncio_test
from test.utils import ignore_deprecations
from bson import CodecOptions
from bson.binary import JAVA_LEGACY
from pymongo import ReadPreference, WriteConcern
from pymongo.errors import BulkWriteError, DuplicateKeyError, OperationFailure
from pymongo.read_concern import ReadConcern
from pymongo.read_preferences import Secondary
from motor.motor_asyncio import AsyncIOMotorCollection
class TestAsyncIOCollection(AsyncIOTestCase):
@asyncio_test
async def test_collection(self):
# Test that we can create a collection directly, not just from
# database accessors.
collection = AsyncIOMotorCollection(self.db, "test_collection")
# Make sure we got the right collection and it can do an operation
self.assertEqual("test_collection", collection.name)
await collection.delete_many({})
await collection.insert_one({"_id": 1})
doc = await collection.find_one({"_id": 1})
self.assertEqual(1, doc["_id"])
# If you pass kwargs to PyMongo's Collection(), it calls
# db.create_collection(). Motor can't do I/O in a constructor
# so this is prohibited.
self.assertRaises(
TypeError, AsyncIOMotorCollection, self.db, "test_collection", capped=True
)
@asyncio_test
async def test_dotted_collection_name(self):
# Ensure that remove, insert, and find work on collections with dots
# in their names.
for coll in (self.db.foo.bar, self.db.foo.bar.baz):
await coll.delete_many({})
result = await coll.insert_one({"_id": "xyzzy"})
self.assertEqual("xyzzy", result.inserted_id)
result = await coll.find_one({"_id": "xyzzy"})
self.assertEqual(result["_id"], "xyzzy")
await coll.delete_many({})
resp = await coll.find_one({"_id": "xyzzy"})
self.assertEqual(None, resp)
def test_call(self):
# Prevents user error with nice message.
try:
self.db.foo()
except TypeError as e:
self.assertTrue("no such method exists" in str(e))
else:
self.fail("Expected TypeError")
@ignore_deprecations
@asyncio_test
async def test_update(self):
await self.collection.insert_one({"_id": 1})
result = await self.collection.update_one({"_id": 1}, {"$set": {"foo": "bar"}})
self.assertIsNone(result.upserted_id)
self.assertEqual(1, result.modified_count)
@ignore_deprecations
@asyncio_test
async def test_update_bad(self):
# Violate a unique index, make sure we handle error well
coll = self.db.unique_collection
await coll.create_index("s", unique=True)
try:
await coll.insert_many([{"s": 1}, {"s": 2}])
with self.assertRaises(DuplicateKeyError):
await coll.update_one({"s": 2}, {"$set": {"s": 1}})
finally:
await coll.drop()
@asyncio_test
async def test_insert_one(self):
collection = self.collection
result = await collection.insert_one({"_id": 201})
self.assertEqual(201, result.inserted_id)
@ignore_deprecations
@asyncio_test
async def test_insert_many_one_bad(self):
collection = self.collection
await collection.insert_one({"_id": 2})
# Violate a unique index in one of many updates, handle error.
with self.assertRaises(BulkWriteError):
await collection.insert_many([{"_id": 1}, {"_id": 2}, {"_id": 3}]) # Already exists
# First insert should have succeeded, but not second or third.
self.assertEqual(set([1, 2]), set((await collection.distinct("_id"))))
@asyncio_test
async def test_delete_one(self):
        # Remove a document twice; check that we get success responses
        # and n = 0 the second time.
await self.collection.insert_one({"_id": 1})
result = await self.collection.delete_one({"_id": 1})
# First time we remove, n = 1
self.assertEqual(1, result.raw_result["n"])
self.assertEqual(1, result.raw_result["ok"])
self.assertEqual(None, result.raw_result.get("err"))
result = await self.collection.delete_one({"_id": 1})
# Second time, document is already gone, n = 0
self.assertEqual(0, result.raw_result["n"])
self.assertEqual(1, result.raw_result["ok"])
self.assertEqual(None, result.raw_result.get("err"))
@ignore_deprecations
@asyncio_test
async def test_unacknowledged_insert(self):
coll = self.db.test_unacknowledged_insert
await coll.with_options(write_concern=WriteConcern(0)).insert_one({"_id": 1})
# The insert is eventually executed.
while not (await coll.count_documents({})):
await asyncio.sleep(0.1)
@ignore_deprecations
@asyncio_test
async def test_unacknowledged_update(self):
coll = self.collection
await coll.insert_one({"_id": 1})
await coll.with_options(write_concern=WriteConcern(0)).update_one(
{"_id": 1}, {"$set": {"a": 1}}
)
while not (await coll.find_one({"a": 1})):
await asyncio.sleep(0.1)
@ignore_deprecations
@asyncio_test
async def test_indexes(self):
test_collection = self.collection
# Create an index
idx_name = await test_collection.create_index([("foo", 1)])
index_info = await test_collection.index_information()
self.assertEqual([("foo", 1)], index_info[idx_name]["key"])
# Don't test drop_index or drop_indexes -- Synchro tests them
async def _make_test_data(self, n):
await self.db.drop_collection("test")
await self.db.test.insert_many([{"_id": i} for i in range(n)])
expected_sum = sum(range(n))
return expected_sum
pipeline = [{"$project": {"_id": "$_id"}}]
@asyncio_test(timeout=30)
async def test_aggregation_cursor(self):
db = self.db
# A small collection which returns only an initial batch,
# and a larger one that requires a getMore.
for collection_size in (10, 1000):
expected_sum = await self._make_test_data(collection_size)
cursor = db.test.aggregate(self.pipeline)
docs = await cursor.to_list(collection_size)
self.assertEqual(expected_sum, sum(doc["_id"] for doc in docs))
@asyncio_test
async def test_aggregation_cursor_exc_info(self):
await self._make_test_data(200)
cursor = self.db.test.aggregate(self.pipeline)
await cursor.to_list(length=10)
await self.db.test.drop()
try:
await cursor.to_list(length=None)
except OperationFailure:
_, _, tb = sys.exc_info()
# The call tree should include PyMongo code we ran on a thread.
formatted = "\n".join(traceback.format_tb(tb))
self.assertTrue(
"_unpack_response" in formatted or "_check_command_response" in formatted
)
@asyncio_test
async def test_aggregate_cursor_del(self):
cursor = self.db.test.aggregate(self.pipeline)
del cursor
cursor = self.db.test.aggregate(self.pipeline)
await cursor.close()
del cursor
def test_with_options(self):
coll = self.db.test
codec_options = CodecOptions(tz_aware=True, uuid_representation=JAVA_LEGACY)
write_concern = WriteConcern(w=2, j=True)
coll2 = coll.with_options(codec_options, ReadPreference.SECONDARY, write_concern)
self.assertTrue(isinstance(coll2, AsyncIOMotorCollection))
self.assertEqual(codec_options, coll2.codec_options)
self.assertEqual(Secondary(), coll2.read_preference)
self.assertEqual(write_concern, coll2.write_concern)
pref = Secondary([{"dc": "sf"}])
coll2 = coll.with_options(read_preference=pref)
self.assertEqual(pref, coll2.read_preference)
self.assertEqual(coll.codec_options, coll2.codec_options)
self.assertEqual(coll.write_concern, coll2.write_concern)
def test_sub_collection(self):
# Verify that a collection with a dotted name inherits options from its
# parent collection.
write_concern = WriteConcern(w=2, j=True)
read_concern = ReadConcern("majority")
read_preference = Secondary([{"dc": "sf"}])
codec_options = CodecOptions(tz_aware=True, uuid_representation=JAVA_LEGACY)
coll1 = self.db.get_collection(
"test",
write_concern=write_concern,
read_concern=read_concern,
read_preference=read_preference,
codec_options=codec_options,
)
coll2 = coll1.subcollection
coll3 = coll1["subcollection"]
for c in [coll1, coll2, coll3]:
self.assertEqual(write_concern, c.write_concern)
self.assertEqual(read_concern, c.read_concern)
self.assertEqual(read_preference, c.read_preference)
self.assertEqual(codec_options, c.codec_options)
if __name__ == "__main__":
unittest.main()
'''
Created on 05/06/2014
@author: nikolay
'''
from __builtin__ import map
import sys
import time
import collections
import logging
import os
from sys import maxint
from autoscale.FANNWrapper import FANNWrapper
from autoscale.VMType import VMType
from workload.Workload import Workload
from workload.ClientFactory import ClientFactory
from autoscale.Util import convertMem, sigmoid, nextEpoch, getMk, getLrk, formatCurrTime, statHeader, statLine, printTest
log = logging.getLogger(__name__)
##== Get command line arguments
inputUnits = int(sys.argv[1]) if len(sys.argv) > 1 else 1
hiddenUnits = int(sys.argv[2]) if len(sys.argv) > 2 else 250
lr = float(sys.argv[3]) if len(sys.argv) > 3 else 0.001
epochCode = int(sys.argv[4]) if len(sys.argv) > 4 else 1
trainingStatFile = os.path.expanduser("~/RESULTS-NN(%d-%d-%d) LR(%.4f) EpCode(%d)-%s.txt" % (inputUnits, hiddenUnits, 2, lr, epochCode, formatCurrTime(fmt='%d-%m-%Y %H:%M:%S')))
trainingResFile = os.path.expanduser("~/NN(%d-%d-%d) LR(%.4f) EpCode(%d)-%s.txt" % (inputUnits, hiddenUnits, 2, lr, epochCode, formatCurrTime(fmt='%d-%m-%Y %H:%M:%S')))
scalingStatFile = os.path.expanduser("~/Scale-%s.txt" % (formatCurrTime(fmt='%d-%m-%Y %H:%M:%S')))
##== Set up auxiliary result files
printTest(trainingStatFile, None, overwrite=True)
statHeader(trainingResFile, overwrite=True)
##== Configure the level, handler and format for the loggers
rootLogger = logging.getLogger()
rootLogger.setLevel(logging.DEBUG)
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s', datefmt="%H:%M:%S")
ch.setFormatter(formatter)
rootLogger.addHandler(ch)
logging.getLogger('paramiko').setLevel(logging.ERROR)
##== AWS Access variables - used to create a new VM
providerId = "aws-ec2"
accesskeyid = "AKIAILRWRBMXXTCFZAYA"
secretkey = "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"
imageOwnerId = "575249362288"
locationId = "ap-southeast-2a"
imageId = "ap-southeast-2/ami-59264163"
hardwareId = "t1.micro"
securityGroupName = "CloudStone"
keyPairName = "CloudStone"
groupName = "cloudstone-as" # Must be lower case
mavenPrjPath = "/home/nikolay/Dropbox/CloudStoneSetupOnUbuntuAdvanced/AutoScaler/provisionvm/pom.xml"
##== Initialising common variables - addresses, access keys
pemFile = "/home/nikolay/Dropbox/CloudStoneSetupOnUbuntuAdvanced/CloudStone.pem"
monitoringScript = "/home/nikolay/Dropbox/CloudStoneSetupOnUbuntuAdvanced/AutoScaler/monitor.sh"
runConfig = "/home/nikolay/Dropbox/CloudStoneSetupOnUbuntuAdvanced/AutoScaler/run.xml"
userName = "ubuntu"
loadBalancerAddress = "ec2-54-79-203-150.ap-southeast-2.compute.amazonaws.com"
firstAppServerAddress = "ec2-54-253-205-116.ap-southeast-2.compute.amazonaws.com"#"ec2-54-253-144-28.ap-southeast-2.compute.amazonaws.com"
clientAddress = "ec2-54-79-149-247.ap-southeast-2.compute.amazonaws.com"
##== Factory for creating objects that manage VMs, Load Balancer and Clients
factory = ClientFactory(providerId, accesskeyid, secretkey, imageOwnerId, locationId, imageId, securityGroupName, keyPairName,\
groupName, mavenPrjPath, pemFile, monitoringScript, userName, runConfig)
##== VM types
t1Micro = VMType(code="t1.micro", declaredCpuCapacity=0.5, declaredRAMCapacityKB=convertMem(0.615, "GB", "KB"), costPerTimeUnit=0.02)
m1Small = VMType(code="m1.small", declaredCpuCapacity=1, declaredRAMCapacityKB=convertMem(1.7, "GB", "KB"), costPerTimeUnit=0.058)
m1Medium = VMType(code="m1.medium", declaredCpuCapacity=2, declaredRAMCapacityKB=convertMem(3.75, "GB", "KB"), costPerTimeUnit=0.117)
m3Medium = VMType(code="m3.medium", declaredCpuCapacity=3, declaredRAMCapacityKB=convertMem(3.75, "GB", "KB"), costPerTimeUnit=0.098)
types = [m1Small, m1Medium, m3Medium]
types.sort(key=lambda t: t.declaredCpuCapacity)
##== Initialise client, first AS servers and Load Balancers
firstAppServer = factory.createVM(readableName="App 1", vmType=m3Medium, address=firstAppServerAddress)
serverFarm = factory.createServerFarm(address=loadBalancerAddress)
serverFarm.addServers(firstAppServer)
##==
fann = FANNWrapper(topology=(inputUnits, hiddenUnits, 2))
# # ===
#nextAppServer = factory.createVM(readableName="App 2", vmType = t1Micro, address = None)
#log.info("------------->" + nextAppServer.address)
#serverFarm.addServers(nextAppServer)
client = factory.createClient(address=clientAddress)
workload = Workload(readableName = "Main Workload", client = client)
for w in range(0, 94):
workload.addRun(loadScale=w*10 + 30, rampUp=60, steadyState=300 , rampDown=10)
workload.start()
time.sleep(90)
inputMomentum = 0.0
# Sleep between measurements
defSleepPeriod=5
sleepPeriod=defSleepPeriod
# Used to determine lrk
annomalies = collections.deque(maxlen=10)
rmses = collections.deque(maxlen=10)
# The time in GST seconds of the last autoscaling event and the cool-down period
lastEventTime = int(time.time())
coolDownPeriod = 600
# How many overloaded sequences before autoscaling
conseqTrig = 2
# The last CPU/RAM measurements, used to decide whether to autoscale or not
lastCPUUtils = collections.deque(maxlen=conseqTrig)
lastRAMUtils = collections.deque(maxlen=conseqTrig)
# Min/Max users encountered so far
minUsers, maxUsers = (maxint , 0)
# Counts the number of ANN samples
k=1
initMeasurements = []
for iteration in range(1, 50001):
if iteration % 50 == 0:
printTest(trainingStatFile, fann)
serverMeasurements = []
for server in serverFarm.servers:
log.info("Receiving:[" + str(iteration) + "]")
if workload.getCurrentNumUsers() > 490:
raise StopIteration
injectVariance = (workload.getCurrentNumUsers() >= 300)
if injectVariance:
log.info("Injecting variance ...")
measurement = server.fetchData(inputMomentum = inputMomentum, injectVariance = injectVariance)
ank = measurement.anomaly if measurement is not None and iteration > 105 else 0
cpu = measurement.normaliseCpuUtil() if measurement is not None else 0
mem = measurement.normaliseRAMUtil() if measurement is not None else 0
if measurement is not None:
annomalies.append(ank)
serverMeasurements.append(measurement)
if measurement is not None and measurement.isValid() :
minUsers, maxUsers = (min(measurement.numberOfUsers(), minUsers), max(measurement.numberOfUsers(), maxUsers))
if k % 50 == 0:
statHeader(trainingResFile, overwrite=False);
rmsePre = fann.rmse(measurement.numberOfUsers(), (cpu, mem))
avgRMSE = rmsePre if len(rmses) == 0 else sum(rmses) / len(rmses)
if rmsePre < 0.01:
log.info("Predicted well!!! RMSE: %.5f" % (rmsePre))
time.sleep(sleepPeriod)
continue
rmses.append(rmsePre)
run = fann.run(measurement.numberOfUsers())
annPart = 2**len(annomalies) * reduce(lambda x, y : x * y, map(lambda x : sigmoid(x), annomalies))
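            # Worked check on the line above: sigmoid(0) = 0.5, so if every recorded
            # anomaly is 0 the product is 0.5**len(annomalies), and annPart is exactly
            # 2**n * 0.5**n = 1, i.e. no anomaly signal leaves the learning rate unscaled.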
rmsePart = max(1.0, rmsePre / avgRMSE)
lrk = getLrk(k=k, lr=lr, rmsePart=rmsePart, annPart=annPart, epochCode = epochCode)
mk = getMk(k=k, lr=lr, lrk = lrk, epochCode = epochCode)
ek = nextEpoch(lrk=lrk, lr=lr, epochCode=epochCode)
# sleepPeriod = defSleepPeriod if ek < 2 else defSleepPeriod / 2.0
# Speculated run ...
fann.config(momentum = mk, learning_rate = lrk)
rmsePost=fann.train(measurement.numberOfUsers(), (cpu, mem), trainTimes=1, revert = True)
lrkBuff = lrk
if rmsePost > rmsePre:
lrk=lr
fann.config(momentum = mk, learning_rate = lrk)
fann.train(measurement.numberOfUsers(), (cpu, mem), trainTimes=ek, revert = False, maxRMSE=None)
values = (formatCurrTime(), server.readableName, server.vmType.readableName, iteration, k, measurement.numberOfUsers(),
cpu, mem, ank, rmsePre, run[0], run[1], ek, mk, lrk, lrkBuff, annPart, rmsePart, annPart*rmsePart, avgRMSE )
statLine(trainingResFile, values)
k = k + 1
else :
if measurement is not None:
txt = "SKIP: %s: [%.3d] %3d, %.5f, %.5f" % (firstAppServer.readableName, iteration, measurement.numberOfUsers(), measurement.cpuUtil(), measurement.ramUtil())
log.info(txt)
else:
log.info("SKIP: is null:")
cpuUtils = map(lambda sm : sm.cpuUtil() , serverMeasurements)
ramUtils = map(lambda sm : sm.ramUtil() , serverMeasurements)
avgCpuUtil = sum(cpuUtils) / len(cpuUtils) if len(cpuUtils) > 0 else 0.0
avgRamUtil= sum(ramUtils) / len(ramUtils) if len(ramUtils) > 0 else 0.0
lastCPUUtils.append(avgCpuUtil)
lastRAMUtils.append(avgRamUtil)
scaleUpCPU = (len(lastCPUUtils) >= conseqTrig and reduce(lambda x, y: x and y, map(lambda x: x > 0.7, lastCPUUtils)))
scaleUpRAM = (len(lastRAMUtils) >= conseqTrig and reduce(lambda x, y: x and y, map(lambda x: x > 0.7, lastRAMUtils)))
timeForScaling = ((int(time.time()) - lastEventTime) > coolDownPeriod)
if timeForScaling and (scaleUpCPU or scaleUpRAM):
log.info("Autoscaling reason: CPU(%s), RAM(%s)" % (str(scaleUpCPU), str(scaleUpRAM)))
readableName="App "+str(len(serverFarm.servers) + 1)
newtype=m3Medium
#newtype=VMType.selectVMType(fann, types, scalingStatFile = scalingStatFile, minUsers=minUsers, maxUsers=maxUsers, serverName=readableName)
nextAppServer = factory.createVM(readableName=readableName, vmType = newtype, address = None, htm=firstAppServer.htm.clone())
log.info("\n\n------------->" + nextAppServer.address)
serverFarm.addServers(nextAppServer)
lastEventTime = int(time.time())
time.sleep(sleepPeriod)
printTest(trainingStatFile, fann)
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import base64
import datetime
import functools
import hashlib
import io
import json
import os.path
import random
import sqlite3
import typing
import uuid
import PIL.Image
import PIL.ImageOps
try:
import networkx
except ImportError:
networkx = None
PIXEL_ART_CSS = (
'image-rendering: -moz-crisp-edges; '
'image-rendering: crisp-edges; '
'image-rendering: pixelated; '
)
class PikovError(Exception):
pass
class NotFound(PikovError):
pass
class Image:
"""An (immutable) image.
Args:
connection (sqlite3.Connection):
A connection to the SQLite database this image belongs to.
key (str): The image identifier.
"""
def __init__(self, connection: sqlite3.Connection, key: str):
self._connection = connection
self._key = key
self._content_type = None
self._contents = None
@property
def key(self) -> str:
return self._key
@property
def content_type(self) -> str:
if self._content_type is not None:
return self._content_type
with self._connection:
cursor = self._connection.cursor()
cursor.execute(
'SELECT content_type FROM image WHERE key = ?;', (self._key,))
row = cursor.fetchone()
self._content_type = row[0]
return self._content_type
@property
def contents(self) -> bytes:
if self._contents is not None:
return self._contents
with self._connection:
cursor = self._connection.cursor()
cursor.execute(
'SELECT contents FROM image WHERE key = ?;', (self._key,))
row = cursor.fetchone()
self._contents = row[0]
return self._contents
def _to_html(self):
return (
'<table>'
'<tr><th>Image</th><th></th></tr>'
f'<tr><td>key</td><td>{self.key}</td></tr>'
f'<tr><td>content_type</td><td>{self.content_type}</td></tr>'
f'<tr><td>contents</td><td>{self._to_img()}</td></tr>'
'</table>'
)
def _to_data_url(self):
contents_base64 = base64.b64encode(self.contents).decode('utf-8')
return f'data:{self.content_type};base64,{contents_base64}'
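    # For a PNG image the URL above looks like 'data:image/png;base64,iVBORw0...'
    # ('iVBORw0' is just the base64 encoding of the PNG magic bytes).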
def _to_img(self):
return (
f'<img alt="image with key {self.key}" '
f'src="{self._to_data_url()}" '
f'style="width: 5em; {PIXEL_ART_CSS}">'
)
def __repr__(self):
return f"Image(key='{self._key}')"
def _repr_mimebundle_(self, include=None, exclude=None, **kwargs):
data = {}
should_include = functools.partial(
_should_include, include=include, exclude=exclude)
if should_include(self.content_type) and self.contents:
data[self.content_type] = self.contents
if should_include('text/html'):
data['text/html'] = self._to_html()
return data
class Frame:
"""An animation frame.
Args:
connection (sqlite3.Connection):
A connection to the SQLite database this frame belongs to.
id (str): The primary key of this frame.
"""
def __init__(self, connection: sqlite3.Connection, id: int):
self._connection = connection
self._id = id
@property
def id(self) -> str:
return self._id
@property
def image(self) -> Image:
"""image (Image): Image content on the frame."""
with self._connection:
cursor = self._connection.cursor()
cursor.execute(
'SELECT image_key FROM frame '
'WHERE id = ?;',
(self._id,))
row = cursor.fetchone()
image_key = row[0]
return Image(self._connection, image_key)
@property
def duration(self) -> datetime.timedelta:
"""duration (datetime.timedelta): Time duration to display the
animation frame.
"""
with self._connection:
cursor = self._connection.cursor()
cursor.execute(
'SELECT duration_microseconds FROM frame '
'WHERE id = ?;',
(self._id,))
row = cursor.fetchone()
duration_microseconds = row[0]
return datetime.timedelta(microseconds=duration_microseconds)
@property
def transitions(self) -> typing.Tuple['Transition', ...]:
with self._connection:
cursor = self._connection.cursor()
cursor.execute(
'SELECT id '
'FROM transition '
'WHERE source_frame_id = ? '
'ORDER BY target_frame_id ASC;',
(self._id,))
return tuple((
Transition(self._connection, row[0])
for row in cursor.fetchall()
))
def get_random_next(self) -> 'Frame':
"""Transition randomly to a next frame.
The next frame is one of the target frames from the transitions. If
no such frame exists, this method returns this frame.
"""
target_frames = tuple(
(transition.target for transition in self.transitions))
# No frames to transition to? Return this frame.
if not target_frames:
return self
return target_frames[random.randrange(len(target_frames))]
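    # Hedged usage sketch of a random walk over the frame graph, assuming an opened
    # Pikov file whose start frame is set:
    #   frame = pikov.start_frame
    #   for _ in range(10):
    #       frame = frame.get_random_next()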
def _get_properties(self, cursor) -> typing.Mapping[str, typing.Any]:
cursor.execute(
'SELECT properties '
'FROM frame '
'WHERE id = ? ',
(self._id,))
row = cursor.fetchone()
if not row[0]:
return {}
return json.loads(row[0])
def get_property(self, key: str) -> typing.Any:
with self._connection:
cursor = self._connection.cursor()
properties = self._get_properties(cursor)
return properties.get(key)
def set_property(self, key: str, value: typing.Any):
with self._connection:
cursor = self._connection.cursor()
try:
cursor.execute('BEGIN')
properties = self._get_properties(cursor)
properties[key] = value
properties_json = json.dumps(properties)
cursor.execute(
'UPDATE frame '
'SET properties = ? '
'WHERE id = ?;',
(properties_json, self._id,))
cursor.execute('COMMIT')
except Exception:
cursor.execute('ROLLBACK')
raise
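    # Illustrative round-trip (sketch; 'label' is an arbitrary example key):
    #   frame.set_property('label', 'walk-cycle')
    #   frame.get_property('label')  # -> 'walk-cycle'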
def transition_to(self, target: 'Frame') -> 'Transition':
with self._connection:
cursor = self._connection.cursor()
cursor.execute(
'INSERT INTO transition '
'(source_frame_id, target_frame_id) '
'VALUES (?, ?);',
(self.id, target.id))
return Transition(self._connection, cursor.lastrowid)
def __add__(self, other):
if hasattr(other, 'frames'):
return Clip((self,) + other.frames)
elif isinstance(other, Frame):
return Clip((self, other))
else:
raise TypeError(f'Cannot add Frame and {str(type(other))}')
def _to_html(self):
image_key = None
image_html = None
if self.image is not None:
image_key = self.image.key
image_html = self.image._to_img()
with self._connection:
properties = self._get_properties(self._connection.cursor())
return (
'<table>'
f'<tr><th>Frame</th><th></th></tr>'
f'<tr><td>id</td><td>{self._id}</td></tr>'
'<tr><td>duration</td>'
f'<td>{self.duration.total_seconds()} seconds</td></tr>'
f'<tr><td>image.key</td><td>{image_key}</td></tr>'
f'<tr><td>image.contents</td><td>{image_html}</td></tr>'
'<tr><td>properties</td><td style="text-align: left;">'
f'<pre>{json.dumps(properties, indent=2)}</pre></td></tr>'
'</table>'
)
def __eq__(self, other):
return isinstance(other, Frame) and self.id == other.id
def __hash__(self):
return self._id.__hash__()
def __repr__(self):
return f"Frame(id='{self._id}')"
def _repr_mimebundle_(self, include=None, exclude=None, **kwargs):
data = {}
should_include = functools.partial(
_should_include, include=include, exclude=exclude)
# Frame can be represented by just its image if the image content-type
# is desired.
if self.image and should_include(self.image.content_type) \
and self.image.contents:
data[self.image.content_type] = self.image.contents
if should_include('text/html'):
data['text/html'] = self._to_html()
return data
class Clip:
"""An animation clip, which is a collection of frames."""
def __init__(self, frames, is_loop=False):
self._frames = tuple(frames)
self._is_loop = is_loop
@property
def frames(self) -> typing.Tuple[Frame, ...]:
"""Tuple[Frame, ...]: A collection of frames in this clip."""
return self._frames
@property
def is_loop(self) -> bool:
"""Does the clip loop on itself?"""
return self._is_loop
def save_gif(self, fp):
"""Save this clip as a GIF to file pointer ``fp``."""
if not self.frames:
raise NotFound('No frames to write to GIF.')
imgs = []
durations = []
previous_image = None
for frame in self.frames:
duration = frame.duration.total_seconds() * 1000
# Increase the duration if the image is the same as the previous.
if previous_image == frame.image.key:
durations[-1] = durations[-1] + duration
continue
# New image, add it to the list.
previous_image = frame.image.key
img_file = io.BytesIO(frame.image.contents)
imgs.append(PIL.Image.open(img_file))
durations.append(duration)
imgs[0].save(
fp, format='gif', save_all=(len(imgs) > 1), append_images=imgs[1:],
duration=durations if len(durations) > 1 else durations[0],
            # Always loop, since the GIF is used to preview the clip.
loop=0)
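    # Hedged usage sketch ('preview.gif' is an arbitrary example path):
    #   with open('preview.gif', 'wb') as fp:
    #       clip.save_gif(fp)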
def add_missing_transitions(self) -> typing.Iterable['Transition']:
"""Add missing transitions between consecutive frames in the clip.
TODO: Only add missing transitions. Method currently adds all
transitions.
Returns:
Iterable[Transition]: Any transitions that were added.
"""
added_transitions = []
previous_frame = self.frames[0]
for frame in self.frames[1:]:
# TODO: Transition only if the transition isn't already present.
transition = previous_frame.transition_to(frame)
added_transitions.append(transition)
previous_frame = frame
if self.is_loop: # TODO: And there isn't a transition already.
transition = self.frames[-1].transition_to(self.frames[0])
added_transitions.append(transition)
return added_transitions
def transition_to(self, target: 'Clip') -> 'Transition':
"""Add a transition from this clip to the beginning of a target clip.
Arguments:
target (Clip): Destination clip for a transition.
Returns:
Transition:
A transition from the last frame of this clip to the first
frame of the ``target`` clip.
"""
return self.frames[-1].transition_to(target.frames[0])
def __add__(self, other):
if hasattr(other, 'frames'):
return Clip(self.frames + other.frames)
elif isinstance(other, Frame):
return Clip(self.frames + (other,))
else:
raise TypeError(f'Cannot add Clip and {str(type(other))}')
def _to_gif(self) -> typing.Optional[bytes]:
"""Write a sequence of frames to a GIF (requires Pillow).
Returns:
Optional[bytes]:
Contents of a GIF rendering of the clip or ``None`` if the clip
contains no image frames.
"""
if not self.frames:
return None
output = io.BytesIO()
self.save_gif(output)
return output.getvalue()
def _to_img(self):
gif_contents = self._to_gif()
if not gif_contents:
return None
contents_base64 = base64.b64encode(gif_contents).decode('utf-8')
return (
f'<img alt="clip preview" '
f'src="data:image/gif;base64,{contents_base64}" '
f'style="width: 5em; {PIXEL_ART_CSS}">'
)
def _to_html(self):
frames_repr = ', '.join((repr(frame) for frame in self._frames))
return (
'<table>'
f'<tr><th>Clip</th><th></th></tr>'
f'<tr><td>frames</td><td>{frames_repr}</td></tr>'
f'<tr><td>preview</td><td>{self._to_img()}</td></tr>'
'</table>'
)
def __repr__(self):
return f"Clip(frames={repr(self._frames)}')"
def _repr_mimebundle_(self, include=None, exclude=None, **kwargs):
data = {}
should_include = functools.partial(
_should_include, include=include, exclude=exclude)
# Clip can be represented by just a GIF.
if should_include('image/gif'):
gif_contents = self._to_gif()
if gif_contents:
data['image/gif'] = gif_contents
if should_include('text/html'):
data['text/html'] = self._to_html()
return data
class Transition:
def __init__(self, connection: sqlite3.Connection, id: int):
self._connection = connection
self._id = id
self._deleted = False
# Source and Target are immutable for a Transition,
# so we can cache the objects for them.
self._source = None
self._target = None
@property
def id(self) -> int:
return self._id
@property
def source(self) -> Frame:
if self._deleted:
raise ValueError('Cannot fetch source on deleted transition.')
if self._source is not None:
return self._source
with self._connection:
cursor = self._connection.cursor()
cursor.execute(
'SELECT source_frame_id FROM transition WHERE id = ?;',
(self._id,))
row = cursor.fetchone()
self._source = Frame(self._connection, row[0])
return self._source
@property
def target(self) -> Frame:
if self._deleted:
raise ValueError('Cannot fetch target on deleted transition.')
if self._target is not None:
return self._target
with self._connection:
cursor = self._connection.cursor()
cursor.execute(
'SELECT target_frame_id FROM transition WHERE id = ?;',
(self._id,))
row = cursor.fetchone()
self._target = Frame(self._connection, row[0])
return self._target
def delete(self):
if self._deleted:
raise ValueError('Cannot delete. Transition already deleted.')
with self._connection:
cursor = self._connection.cursor()
cursor.execute(
'DELETE FROM transition WHERE id = ?;',
(self._id,))
self._deleted = True
def _to_clip(self) -> Clip:
return Clip((self.source, self.target))
def _to_gif(self) -> typing.Optional[bytes]:
return self._to_clip()._to_gif()
def _to_img(self) -> str:
return self._to_clip()._to_img()
def _to_html(self) -> str:
source_img = self.source.image._to_img()
target_img = self.target.image._to_img()
return (
'<table>'
'<tr><th>Transition</th><th></th></tr>'
f'<tr><td>id</td><td>{self.id}</td></tr>'
f'<tr><td>source.id</td><td>{self.source.id}</td></tr>'
f'<tr><td>source.image</td><td>{source_img}</td></tr>'
f'<tr><td>target.id</td><td>{self.target.id}</td></tr>'
f'<tr><td>target.image</td><td>{target_img}</td></tr>'
f'<tr><td>preview</td><td>{self._to_img()}</td></tr>'
'</table>'
)
def __repr__(self) -> str:
return (
'Transition('
f'id={repr(self.id)}, '
f'source={repr(self.source)}, '
            f'target={repr(self.target)})'
)
def _repr_mimebundle_(self, include=None, exclude=None, **kwargs):
data = {}
should_include = functools.partial(
_should_include, include=include, exclude=exclude)
        # Transition can be represented by just a GIF.
if should_include('image/gif'):
gif_contents = self._to_gif()
if gif_contents:
data['image/gif'] = gif_contents
if should_include('text/html'):
data['text/html'] = self._to_html()
return data
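# Usage sketch for Transition (hedged: the frame ids are hypothetical, in
# the '<clip_id>_<sequence>' style produced by import_clip below):
#
#     a = pkv.get_frame('walk_0')
#     b = pkv.get_frame('walk_1')
#     t = a.transition_to(b)  # inserts a row into the transition table
#     assert t.source.id == a.id and t.target.id == b.id
#     t.delete()              # later attribute access raises ValueError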
class Pikov:
def __init__(self, connection):
self._connection = connection
@classmethod
def open(cls, path):
connection = sqlite3.connect(path)
        # Allow for manual BEGIN/END transactions.
# https://stackoverflow.com/a/24374430/101923
connection.isolation_level = None
cursor = connection.cursor()
cursor.execute('PRAGMA foreign_keys = ON;')
return cls(connection)
@classmethod
def create(cls, path):
pikov = cls.open(path)
cursor = pikov._connection.cursor()
cursor.execute(
'CREATE TABLE image ('
'key TEXT PRIMARY KEY, '
'contents BLOB, '
'content_type TEXT);')
cursor.execute(
'CREATE TABLE frame ('
'id TEXT PRIMARY KEY, '
'image_key TEXT, '
'duration_microseconds INTEGER, '
'properties TEXT, '
'FOREIGN KEY(image_key) REFERENCES image(key));')
cursor.execute(
'CREATE TABLE pikov ('
'id STRING PRIMARY KEY, '
'start_frame_id TEXT, '
'FOREIGN KEY(start_frame_id) REFERENCES frame(id));')
cursor.execute(
'CREATE TABLE transition ('
'id INTEGER PRIMARY KEY, '
'source_frame_id TEXT, '
'target_frame_id TEXT, '
# TODO: add weights (inversely proportional to probabilities)
'FOREIGN KEY(source_frame_id) REFERENCES frame(id), '
'FOREIGN KEY(target_frame_id) REFERENCES frame(id));')
cursor.execute('INSERT INTO pikov (id) VALUES (1);')
pikov._connection.commit()
return pikov
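    # Usage sketch (hedged: 'example.pikov' is illustrative). create() also
    # opens the file, so open() is only needed on subsequent runs:
    #
    #     pkv = Pikov.create('example.pikov')  # first run: builds tables
    #     pkv = Pikov.open('example.pikov')    # later runs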
@property
def start_frame(self) -> typing.Optional[Frame]:
"""Optional[Frame]: The starting animation frame.
Returns:
Optional[Frame]: The starting frame, if one is set.
"""
with self._connection:
cursor = self._connection.cursor()
cursor.execute('SELECT start_frame_id FROM pikov WHERE id = 1')
row = cursor.fetchone()
if not row:
return None
frame_id = row[0]
if frame_id is None:
return None
return Frame(self._connection, frame_id)
@start_frame.setter
def start_frame(self, frame: typing.Optional[Frame]):
frame_id = None
if frame is not None:
frame_id = frame.id
with self._connection:
cursor = self._connection.cursor()
cursor.execute(
'UPDATE pikov SET start_frame_id = ? WHERE id = 1',
(frame_id,))
def add_image(self, image):
"""Add an image to the Pikov file.
Args:
image (PIL.Image.Image):
An image to add to the Pikov file.
Returns:
Tuple[str, bool]:
The content-based address to the image and a boolean
indicating True if the image was added or False for
duplicates.
"""
image_fp = io.BytesIO()
image.save(image_fp, format='PNG')
image_fp.seek(0)
image_hash = hash_image(image)
try:
with self._connection:
cursor = self._connection.cursor()
cursor.execute(
'INSERT INTO image (key, contents, content_type) '
'VALUES (?, ?, ?)',
(image_hash, image_fp.read(), 'image/png'))
except sqlite3.IntegrityError:
return image_hash, False # Frame already exists
return image_hash, True
def get_image(self, key):
"""Add an image to the Pikov file.
Args:
key (str): Content-based key for image file.
Returns:
Image: The image loaded from the Pikov file.
Raises:
NotFound: If image with ``key`` is not found.
"""
with self._connection:
cursor = self._connection.cursor()
cursor.execute('SELECT key FROM image WHERE key = ?', (key,))
image_row = cursor.fetchone()
if not image_row:
raise NotFound(
'Could not find image with key "{}"'.format(key))
return Image(self._connection, key)
def add_frame(self, image_key, duration=None, frame_id=None):
"""Add an animation frame to the Pikov file.
Args:
image_key (str):
An image to use as a frame in a clip.
duration (datetime.timedelta, optional):
Duration to display the frame within a clip. Defaults to
100,000 microseconds (10 frames per second).
            frame_id (str, optional):
                ID to use to refer to this frame. Defaults to a random UUID.
Returns:
Frame: The frame added.
"""
if frame_id is None:
frame_id = str(uuid.uuid4())
if duration is None:
duration = datetime.timedelta(microseconds=100000)
duration_microseconds = int(duration.total_seconds() * 1000000)
with self._connection:
cursor = self._connection.cursor()
cursor.execute(
'INSERT INTO frame (id, image_key, duration_microseconds) '
'VALUES (?, ?, ?)',
(frame_id, image_key, duration_microseconds))
            # Set as the start frame if one hasn't been set yet.
cursor.execute(
'UPDATE pikov SET start_frame_id = ? '
'WHERE id = 1 AND start_frame_id IS NULL',
(frame_id,))
return Frame(self._connection, frame_id)
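    # Sketch of the image-to-frame flow (hedged: requires Pillow; the path
    # and duration are illustrative):
    #
    #     img = PIL.Image.open('sprite.png')
    #     key, added = pkv.add_image(img)  # content-addressed, deduplicated
    #     frame = pkv.add_frame(
    #         key, duration=datetime.timedelta(microseconds=83333))  # ~12 fps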
def get_frame(self, frame_id):
"""Get the animation frame with a specific ID.
Args:
frame_id (str): Identifier of the animation frame to load.
Returns:
Frame: A frame, if found.
Raises:
NotFound: If frame with ``frame_id`` is not found.
"""
with self._connection:
cursor = self._connection.cursor()
cursor.execute(
'SELECT id FROM frame WHERE id = ?', (frame_id,))
row = cursor.fetchone()
if not row:
raise NotFound(
'Could not find frame with frame_id "{}"'.format(frame_id))
return Frame(self._connection, frame_id)
def list_frames(self):
"""List all the animation frames in no particular order
Returns:
Iterable[Frame]: A collection of frames
"""
with self._connection:
cursor = self._connection.cursor()
cursor.execute('SELECT id FROM frame')
rows = cursor.fetchall()
return (Frame(self._connection, row[0]) for row in rows)
def list_transitions(self):
"""List all the transitions in no particular order
Returns:
Iterable[Transition]: A collection of transitions
"""
with self._connection:
cursor = self._connection.cursor()
cursor.execute('SELECT id FROM transition')
rows = cursor.fetchall()
return (Transition(self._connection, row[0]) for row in rows)
def find_absorbing_frames(self) -> typing.Tuple[Frame, ...]:
"""Return all frames which are 'absorbing'.
        An absorbing frame is one with no outgoing connections to
any other frame except itself. Once an animation reaches such a
frame, it will be stuck there until reset.
"""
with self._connection:
cursor = self._connection.cursor()
cursor.execute(
'SELECT frame.id AS frame_id FROM frame '
# Join with transitions to find frames with no
# transitions out.
'LEFT OUTER JOIN transition'
' ON frame.id = transition.source_frame_id'
' AND frame.id != transition.target_frame_id '
# No transitions out.
'WHERE transition.source_frame_id IS NULL '
'ORDER BY frame_id ASC;')
frame_ids = cursor.fetchall()
return tuple((
Frame(self._connection, frame_id[0])
for frame_id in frame_ids
))
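    # Sketch: one way to "unstick" the animation graph is to route every
    # absorbing frame back to the start frame (hedged: assumes start_frame
    # has been set on this Pikov file):
    #
    #     for frame in pkv.find_absorbing_frames():
    #         frame.transition_to(pkv.start_frame)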
def _preview_clip(
self,
start_frame: Frame,
min_duration: datetime.timedelta=datetime.timedelta(seconds=10),
max_duration: datetime.timedelta=datetime.timedelta(seconds=30)):
current_frame = None
preview_frames = []
total_duration = datetime.timedelta(seconds=0)
next_frame = start_frame
# Haven't gotten long enough or haven't looped yet.
while total_duration < min_duration or next_frame != start_frame:
current_frame = next_frame
preview_frames.append(current_frame)
total_duration = total_duration + current_frame.duration
next_frame = current_frame.get_random_next()
            # Would adding the next frame exceed the maximum length?
if total_duration + next_frame.duration > max_duration:
break
return Clip(frames=preview_frames)
def save_gif(
self,
fp: typing.BinaryIO,
start_frame: typing.Optional[Frame]=None,
min_duration: datetime.timedelta=datetime.timedelta(seconds=10),
max_duration: datetime.timedelta=datetime.timedelta(seconds=30)):
"""Save a preview GIF to file pointer ``fp``.
Arguments:
fp (BinaryIO): File-like object to write GIF to.
start_frame (Optional[Frame]):
Frame to start the animation at. Defaults to
:attr:`Pikov.start_frame`.
min_duration (Optional[datetime.timedelta]):
The minimum duration of the GIF animation (inclusive).
Defaults to 10 seconds.
max_duration (Optional[datetime.timedelta]):
The maximum duration of the GIF animation (inclusive).
Defaults to 30 seconds. Once the animation exceeds
``min_duration`` in length, it ends once it creates a looping
animation or reaches ``max_duration`` in length.
Raises:
ValueError: If could not get a ``start_frame``.
"""
if start_frame is None:
start_frame = self.start_frame
if start_frame is None:
raise ValueError('missing start_frame.')
preview_clip = self._preview_clip(
start_frame=start_frame,
min_duration=min_duration,
max_duration=max_duration)
preview_clip.save_gif(fp)
def to_networkx(self) -> 'networkx.DiGraph':
"""Convert pikov object to networkx directed graph."""
# TODO: raise error if networkx not installed
graph = networkx.DiGraph()
graph.add_nodes_from(self.list_frames())
with self._connection:
cursor = self._connection.cursor()
cursor.execute(
'SELECT source_frame_id, target_frame_id '
'FROM transition '
'GROUP BY source_frame_id, target_frame_id')
rows = cursor.fetchall()
edges = (
(
Frame(self._connection, row[0]),
Frame(self._connection, row[1]),
) for row in rows
)
graph.add_edges_from(edges)
return graph
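    # Sketch: once exported, standard networkx queries apply (hedged:
    # requires networkx; e.g. finding frames unreachable from the start):
    #
    #     graph = pkv.to_networkx()
    #     reachable = networkx.descendants(graph, pkv.start_frame)
    #     orphans = set(graph.nodes) - reachable - {pkv.start_frame}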
def _to_gif(self) -> typing.Optional[bytes]:
"""Write a sequence of frames to a GIF (requires Pillow).
Returns:
Optional[bytes]:
Contents of a GIF rendering of the clip or ``None`` if
there is no start frame.
"""
start_frame = self.start_frame
if start_frame is None:
return None
output = io.BytesIO()
self.save_gif(output, start_frame=start_frame)
return output.getvalue()
def _to_img(self):
gif_contents = self._to_gif()
if not gif_contents:
return None
contents_base64 = base64.b64encode(gif_contents).decode('utf-8')
return (
f'<img alt="clip preview" '
f'src="data:image/gif;base64,{contents_base64}" '
f'style="width: 5em; {PIXEL_ART_CSS}">'
)
def _to_html(self):
return (
'<table>'
f'<tr><th>Pikov</th><th></th></tr>'
f'<tr><td>preview</td><td>{self._to_img()}</td></tr>'
'</table>'
)
def _repr_mimebundle_(self, include=None, exclude=None, **kwargs):
data = {}
should_include = functools.partial(
_should_include, include=include, exclude=exclude)
        # Pikov can be represented by just a GIF.
if should_include('image/gif'):
gif_contents = self._to_gif()
if gif_contents:
data['image/gif'] = gif_contents
if should_include('text/html'):
data['text/html'] = self._to_html()
return data
def _should_include(mime, include=None, exclude=None):
if not mime:
return False
included = not include or mime in include
not_excluded = not exclude or mime not in exclude
return included and not_excluded
def hash_image(image):
"""Encode pixels as bytes and take the MD5 hash.
Note: this is meant for de-duplication, not security purposes.
"""
# Convert to common format for deterministic encoding.
if image.getbands() != ('R', 'G', 'B', 'A'):
image = image.convert(mode='RGBA')
assert image.getbands() == ('R', 'G', 'B', 'A')
md5 = hashlib.md5()
for x in range(image.size[0]):
for y in range(image.size[1]):
# Format each pixel as a 4-byte string.
# https://stackoverflow.com/a/31761722/101923
md5.update(b"%c%c%c%c" % image.getpixel((x, y)))
return 'md5-{}'.format(md5.hexdigest())
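# Sketch: the hash is pixel-based, so re-encoded but visually identical
# images deduplicate (hedged: requires Pillow):
#
#     a = PIL.Image.new('RGB', (2, 2), color=(255, 0, 0))
#     b = a.convert('RGBA')
#     assert hash_image(a) == hash_image(b)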
def import_clip(
pikov_path, clip_id, spritesheet_path, frame_width, frame_height,
fps, frames, flip_x=False):
# Normalize the paths for file relative path comparison.
pikov_path = os.path.abspath(pikov_path)
pikov_dir = os.path.dirname(pikov_path)
spritesheet_path = os.path.abspath(spritesheet_path)
relative_spritesheet_path = os.path.relpath(
spritesheet_path, start=pikov_dir)
# Convert FPS input into a per-frame duration.
duration = datetime.timedelta(seconds=1) / fps
# Read the Pikov file.
pkv = Pikov.open(pikov_path)
# Chop the sprite sheet into frames.
sheet = PIL.Image.open(spritesheet_path)
sheet_width, _ = sheet.size
cols = sheet_width // frame_width
# Add images to Pikov.
images = {}
added = 0
duplicates = 0
frames_set = frozenset(frames)
for spritesheet_frame in frames_set:
row = spritesheet_frame // cols
col = spritesheet_frame % cols
x = col * frame_width
y = row * frame_height
frame = sheet.crop(box=(x, y, x + frame_width, y + frame_height,))
if flip_x:
frame = PIL.ImageOps.mirror(frame)
image_key, image_added = pkv.add_image(frame)
if image_added:
added += 1
else:
duplicates += 1
images[spritesheet_frame] = (
image_key,
{
'path': relative_spritesheet_path,
'x': x,
'y': y,
'width': frame_width,
'height': frame_height,
'flipX': flip_x,
})
# Create clip
start_frame = None
clip_frames = []
for frame_sequence, spritesheet_frame in enumerate(frames):
image_key, original_image = images[spritesheet_frame]
frame = pkv.add_frame(
image_key,
duration=duration,
frame_id=f'{clip_id}_{frame_sequence}')
frame.set_property('originalImage', original_image)
frame.set_property('clipId', clip_id)
if start_frame is None:
start_frame = frame
clip_frames.append(frame)
clip = Clip(clip_frames) # TODO: is_loop?
transitions = clip.add_missing_transitions()
print('Added {} of {} images ({} duplicates)'.format(
added, len(frames_set), duplicates))
print((
'Created clip {} starting at frame {} with {} frames and {} '
'transitions.').format(
clip_id, start_frame.id, len(frames), len(transitions)))
def create(pikov_path):
Pikov.create(pikov_path)
def main():
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(title='Actions', dest='action')
create_parser = subparsers.add_parser(
'create', help='Create a new .pikov file.')
create_parser.add_argument('pikov_path', help='Path to .pikov file.')
import_clip_parser = subparsers.add_parser(
'import-clip',
help='Import a sprite sheet animation as a clip.')
import_clip_parser.add_argument(
'--fps', help='Frames per second.', type=int, default=12)
import_clip_parser.add_argument(
        '--flip_x', help='Flip frames horizontally.', action='store_true')
import_clip_parser.add_argument('pikov_path', help='Path to .pikov file.')
import_clip_parser.add_argument(
'clip_id', help='Unique identifier for the new clip.')
import_clip_parser.add_argument(
'spritesheet_path', help='Path to sprite sheet.')
import_clip_parser.add_argument(
'frame_size', help='Size of frame. WIDTHxHEIGHT. Example: 8x8')
import_clip_parser.add_argument(
'frames',
help=(
'List of comma-separated frame IDs to include in clip. '
'Frames are 0-indexed from left-to-right, top-to-bottom.'
))
args = parser.parse_args()
if args.action == 'create':
create(args.pikov_path)
elif args.action == 'import-clip':
frame_width, frame_height = map(int, args.frame_size.split('x'))
frames = list(map(int, args.frames.split(',')))
import_clip(
args.pikov_path, args.clip_id, args.spritesheet_path,
frame_width, frame_height, args.fps, frames, flip_x=args.flip_x)
elif args.action is not None:
raise NotImplementedError(
'Got unknown action: {}'.format(args.action))
if __name__ == '__main__':
main()
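# Example CLI usage (hedged: the script name, file names, clip id, and frame
# list are illustrative; frame_size is WIDTHxHEIGHT, frames are 0-indexed):
#
#     python pikov.py create sprites.pikov
#     python pikov.py import-clip --fps 12 sprites.pikov walk \
#         spritesheet.png 8x8 0,1,2,3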
|
|
# -*- coding: UTF-8 -*-
from pandasticsearch.client import RestClient
from pandasticsearch.queries import Agg, ScrollSelect
from pandasticsearch.operators import *
from pandasticsearch.types import Column, Row
from pandasticsearch.errors import DataFrameException
import json
import six
import sys
import copy
_unbound_index_err = DataFrameException('DataFrame is not bound to ES index')
_count_aggregator = MetricAggregator('_index', 'value_count', alias='count').build()
class DataFrame(object):
"""
    A :class:`DataFrame` treats an Elasticsearch index as named columns and its documents as rows.
>>> from pandasticsearch import DataFrame
>>> df = DataFrame.from_es('http://host:port', index='people')
    Customizing the Elasticsearch endpoint:
>>> from pandasticsearch import DataFrame
>>> from pandasticsearch.client import RestClient
>>> df = DataFrame(client=RestClient('http://host:port',), index='people')
It can be converted to Pandas object for subsequent analysis:
>>> df.to_pandas()
"""
def __init__(self, **kwargs):
self._client = kwargs.get('client', None)
self._mapping = kwargs.get('mapping', None)
self._doc_type = kwargs.get('doc_type', None)
self._index = kwargs.get('index', None)
self._compat = kwargs.get('compat', 2)
self._filter = kwargs.get('filter', None)
self._groupby = kwargs.get('groupby', None)
self._aggregation = kwargs.get('aggregation', None)
self._sort = kwargs.get('sort', None)
self._projection = kwargs.get('projection', None)
self._limit = kwargs.get('limit', 100)
self._last_query = None
@property
def index(self):
"""
Returns the index name.
:return: string as the name
>>> df.index
people/children
"""
if self._index is None:
return None
return self._index + '/' + self._doc_type if self._doc_type else self._index
@property
def columns(self):
"""
Returns all column names as a list.
:return: column names as a list
>>> df.columns
['age', 'name']
"""
return sorted(self._get_cols(self._mapping)) if self._mapping else None
@property
def schema(self):
"""
Returns the schema(mapping) of the index/type as a dictionary.
"""
return self._mapping
@staticmethod
def from_es(**kwargs):
"""
        Creates a :class:`DataFrame <DataFrame>` object by providing the URL of an Elasticsearch node and the name of the index.
:param str url: URL of the node connected to (default: 'http://localhost:9200')
:param str index: The name of the index
:param str doc_type: The type of the document
        :param int compat: The compatible major ES version
:return: DataFrame object for accessing
:rtype: DataFrame
>>> from pandasticsearch import DataFrame
>>> df = DataFrame.from_es('http://host:port', index='people')
"""
doc_type = kwargs.get('doc_type', None)
index = kwargs.get('index', None)
url = kwargs.get('url', 'http://localhost:9200')
compat = kwargs.get('compat', 2)
username = kwargs.get('username', None)
password = kwargs.get('password', None)
verify_ssl = kwargs.get('verify_ssl', True)
if index is None:
raise ValueError('Index name must be specified')
if doc_type is None:
path = index
else:
path = index + '/' + doc_type
client = RestClient(url, username, password, verify_ssl)
mapping = client.get(path)
return DataFrame(client=client, mapping=mapping, index=index, doc_type=doc_type, compat=compat)
def __getattr__(self, name):
"""
Returns a :class:`types.Column <pandasticsearch.types.Column>` object denoted by ``name``.
"""
if name not in self.columns:
raise AttributeError(
"'%s' object has no attribute '%s'" % (self.__class__.__name__, name))
return Column(name)
def __getitem__(self, item):
if isinstance(item, six.string_types):
if item not in self.columns:
raise TypeError('Column does not exist: [{0}]'.format(item))
return Column(item)
elif isinstance(item, BooleanFilter):
self._filter = item
return self
else:
raise TypeError('Unsupported expr: [{0}]'.format(item))
def filter(self, condition):
"""
Filters rows using a given condition.
where() is an alias for filter().
:param condition: :class:`BooleanFilter <pandasticsearch.operators.BooleanFilter>` object or a string
>>> df.filter(df['age'] < 13).collect()
[Row(age=12,gender='female',name='Alice'), Row(age=11,gender='male',name='Bob')]
"""
if isinstance(condition, six.string_types):
_filter = ScriptFilter(condition)
elif isinstance(condition, BooleanFilter):
_filter = condition
else:
raise TypeError('{0} is supposed to be str or BooleanFilter'.format(condition))
# chaining filter treated as AND
if self._filter is not None:
_filter = (self._filter & _filter)
return DataFrame(client=self._client,
index=self._index,
doc_type=self._doc_type,
mapping=self._mapping,
filter=_filter,
groupby=self._groupby,
aggregation=self._aggregation,
projection=self._projection,
sort=self._sort,
limit=self._limit,
compat=self._compat)
where = filter
def select(self, *cols):
"""
Projects a set of columns and returns a new :class:`DataFrame <DataFrame>`
:param cols: list of column names or :class:`Column <pandasticsearch.types.Column>`.
>>> df.filter(df['age'] < 25).select('name', 'age').collect()
[Row(age=12,name='Alice'), Row(age=11,name='Bob'), Row(age=13,name='Leo')]
"""
projection = []
for col in cols:
if isinstance(col, six.string_types):
projection.append(getattr(self, col))
elif isinstance(col, Column):
projection.append(col)
else:
raise TypeError('{0} is supposed to be str or Column'.format(col))
return DataFrame(client=self._client,
index=self._index,
doc_type=self._doc_type,
mapping=self._mapping,
filter=self._filter,
groupby=self._groupby,
aggregation=self._aggregation,
projection=projection,
sort=self._sort,
limit=self._limit,
compat=self._compat)
def limit(self, num):
"""
Limits the result count to the number specified.
"""
assert isinstance(num, int)
assert num >= 1
return DataFrame(client=self._client,
index=self._index,
doc_type=self._doc_type,
mapping=self._mapping,
filter=self._filter,
groupby=self._groupby,
aggregation=self._aggregation,
projection=self._projection,
sort=self._sort,
limit=num,
compat=self._compat)
def groupby(self, *cols):
"""
Returns a new :class:`DataFrame <DataFrame>` object grouped by the specified column(s).
:param cols: A list of column names, :class:`Column <pandasticsearch.types.Column>` or :class:`Grouper <pandasticsearch.operators.Grouper>` objects
"""
columns = []
if len(cols) == 1 and isinstance(cols[0], Grouper):
groupby = cols[0].build()
else:
for col in cols:
if isinstance(col, six.string_types):
columns.append(getattr(self, col))
elif isinstance(col, Column):
columns.append(col)
else:
raise TypeError('{0} is supposed to be str or Column'.format(col))
names = [col.field_name() for col in columns]
groupby = Grouper.from_list(names).build()
return DataFrame(client=self._client,
index=self._index,
doc_type=self._doc_type,
mapping=self._mapping,
filter=self._filter,
groupby=groupby,
aggregation=self._aggregation,
projection=self._projection,
sort=self._sort,
                         limit=self._limit,
compat=self._compat)
def agg(self, *aggs):
"""
Aggregate on the entire DataFrame without groups.
:param aggs: a list of :class:`Aggregator <pandasticsearch.operators.Aggregator>` objects
>>> df[df['gender'] == 'male'].agg(df['age'].avg).collect()
[Row(avg(age)=12)]
"""
aggregation = {}
for agg in aggs:
assert isinstance(agg, Aggregator)
aggregation.update(agg.build())
return DataFrame(client=self._client,
index=self._index,
doc_type=self._doc_type,
mapping=self._mapping,
filter=self._filter,
groupby=self._groupby,
aggregation=aggregation,
projection=self._projection,
sort=self._sort,
limit=self._limit,
compat=self._compat)
def sort(self, *cols):
"""
Returns a new :class:`DataFrame <DataFrame>` object sorted by the specified column(s).
:param cols: A list of column names, :class:`Column <pandasticsearch.types.Column>` or :class:`Sorter <pandasticsearch.operators.Sorter>`.
orderby() is an alias for sort().
>>> df.sort(df['age'].asc).collect()
[Row(age=11,name='Bob'), Row(age=12,name='Alice'), Row(age=13,name='Leo')]
"""
sorts = []
for col in cols:
if isinstance(col, six.string_types):
sorts.append(ScriptSorter(col).build())
elif isinstance(col, Sorter):
sorts.append(col.build())
else:
raise TypeError('{0} is supposed to be str or Sorter'.format(col))
return DataFrame(client=self._client,
index=self._index,
doc_type=self._doc_type,
mapping=self._mapping,
filter=self._filter,
groupby=self._groupby,
aggregation=self._aggregation,
projection=self._projection,
sort=sorts,
limit=self._limit,
compat=self._compat)
orderby = sort
def _execute(self):
if self._client is None:
raise _unbound_index_err
if self._doc_type is None:
path = self._index + '/_search'
else:
path = self._index + '/' + self._doc_type + '/_search'
if self._aggregation is None and self._groupby is None:
def _scroll():
row_counter = 0
_query = self._build_query()
resp = self._client.post(path, params={"scroll": "10s"}, data=_query)
scroll_id = resp.get("_scroll_id")
try:
while scroll_id and resp["hits"]["hits"]:
if row_counter >= self._limit:
break
for hit in resp["hits"]["hits"]:
if row_counter >= self._limit:
break
row_counter += 1
yield hit
resp = self._client.post('_search/scroll',
data={"scroll_id": scroll_id, "scroll": "10s"})
scroll_id = resp.get("_scroll_id")
finally:
# TODO(onesuper): Delete the scroll resource anyway
pass
return ScrollSelect(_scroll)
else:
res_dict = self._client.post(path, data=self._build_query())
return Agg.from_dict(res_dict)
def collect(self):
"""
Returns all the records as a list of Row.
:return: list of :class:`Row <pandasticsearch.types.Row>`
>>> df.collect()
[Row(age=2, name='Alice'), Row(age=5, name='Bob')]
"""
query = self._execute()
return [Row(**v) for v in query.result]
def to_pandas(self):
"""
Export to a Pandas DataFrame object.
:return: The DataFrame representing the query result
>>> df[df['gender'] == 'male'].agg(Avg('age')).to_pandas()
avg(age)
0 12
"""
query = self._execute()
return query.to_pandas()
def count(self):
"""
        Returns a new DataFrame with a per-group count aggregation applied;
        call collect() or to_pandas() on the result to retrieve the numbers.
>>> df.groupby(df.gender).count()
[2, 1]
"""
df = DataFrame(client=self._client,
index=self._index,
doc_type=self._doc_type,
mapping=self._mapping,
filter=self._filter,
groupby=self._groupby,
aggregation=_count_aggregator,
projection=self._projection,
sort=self._sort,
limit=self._limit,
compat=self._compat)
return df
def show(self, n=200, truncate=15):
"""
Prints the first ``n`` rows to the console.
:param n: Number of rows to show.
        :param truncate: Maximum number of characters shown per column before truncation.
>>> df.filter(df['age'] < 25).select('name').show(3)
+------+
| name |
+------+
| Alice|
| Bob |
| Leo |
+------+
"""
assert n > 0
if self._aggregation:
            raise DataFrameException('show() is not allowed for aggregation; use collect() instead')
query = self._execute()
if self._projection:
cols = [col.field_name() for col in self._projection]
else:
cols = self.columns
if cols is None:
raise _unbound_index_err
sys.stdout.write(query.result_as_tabular(cols, n, truncate))
def __repr__(self):
if self.columns is None:
return "DataFrame(Unbound)"
return "DataFrame[%s]" % (", ".join("%s" % c for c in self.columns))
def print_debug(self):
"""
        Posts the query to the Elasticsearch server and prints out the result it returned.
"""
if self._client is None:
raise _unbound_index_err
sys.stdout.write(json.dumps(self._client.post(data=self._build_query()), indent=4))
def to_dict(self):
"""
Converts the current :class:`DataFrame <DataFrame>` object to Elasticsearch search dictionary.
:return: a dictionary which obeys the Elasticsearch RESTful protocol
"""
return self._build_query()
def print_schema(self):
"""
Prints out the schema in the tree format.
>>> df.print_schema()
index_name
|-- type_name
|-- experience : {'type': 'integer'}
|-- id : {'type': 'string'}
|-- mobile : {'index': 'not_analyzed', 'type': 'string'}
|-- regions : {'index': 'not_analyzed', 'type': 'string'}
"""
if self._index is None:
return
sys.stdout.write('{0}\n'.format(self._index))
index_name = list(self._mapping.keys())[0]
if self._compat >= 7:
json_obj = self._mapping[index_name]["mappings"]["properties"]
sys.stdout.write(self.resolve_schema(json_obj))
else:
if self._doc_type is not None:
json_obj = self._mapping[index_name]["mappings"][self._doc_type]["properties"]
sys.stdout.write(self.resolve_schema(json_obj))
else:
                raise DataFrameException('Please specify doc_type for ES version under 7')
def resolve_schema(self, json_prop, res_schema="", depth=1):
for field in json_prop:
if "properties" in json_prop[field]:
res_schema += "{}|--{}:\n".format(' ' * depth, field)
res_schema = self.resolve_schema(json_prop[field]["properties"],
res_schema, depth=depth+1)
else:
res_schema += "{}|--{}: {}\n".format(' ' * depth, field, json_prop[field])
return res_schema
def _build_query(self):
query = dict()
query['size'] = 20 # batch size for scroll search
if self._groupby and not self._aggregation:
query['aggregations'] = self._groupby
query['size'] = 0
if self._aggregation:
if self._groupby is None:
query['aggregations'] = self._aggregation
query['size'] = 0
else:
agg = copy.deepcopy(self._groupby)
# insert aggregator to the inner-most grouper
inner_most = agg
while True:
key = list(inner_most.keys())[0]
if 'aggregations' in inner_most[key]:
inner_most = inner_most[key]['aggregations']
else:
break
key = list(inner_most.keys())[0]
inner_most[key]['aggregations'] = self._aggregation
query['aggregations'] = agg
query['size'] = 0
if self._filter:
assert isinstance(self._filter, BooleanFilter)
if self._compat >= 5:
query['query'] = {'bool': {'filter': self._filter.build()}}
else:
query['query'] = {'filtered': {'filter': self._filter.build()}}
if self._projection:
query['_source'] = {"includes": [col.field_name() for col in self._projection], "excludes": []}
if self._sort:
query['sort'] = self._sort
self._last_query = query
return query
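    # Sketch of the dict produced for a filtered, sorted scroll search
    # (hedged: the exact filter/sorter payloads come from
    # pandasticsearch.operators, so the shapes below are indicative only;
    # compat >= 5 assumed):
    #
    #     df.filter(df['age'] < 13).sort(df['age'].asc)._build_query()
    #     # -> {'size': 20,
    #     #     'query': {'bool': {'filter': {'range': {'age': {'lt': 13}}}}},
    #     #     'sort': [{'age': {'order': 'asc'}}]}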
def _get_cols(self, mapping):
cols = self._get_mappings(mapping)
if len(cols) == 0:
raise DataFrameException('0 columns found in mapping')
return cols
@classmethod
def resolve_mappings(cls, json_map):
prop = []
for field in json_map:
nested_props = []
if "properties" in json_map[field]:
nested_props = cls.resolve_mappings(json_map[field]["properties"])
if len(nested_props) == 0:
prop.append(field)
else:
for nested_prop in nested_props:
prop.append("{}.{}".format(field, nested_prop))
return prop
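    # Sketch: nested properties flatten to dotted column names (hedged: the
    # mapping fragment is illustrative; output order follows the mapping's
    # iteration order):
    #
    #     DataFrame.resolve_mappings({
    #         'name': {'properties': {'first': {'type': 'keyword'}}},
    #         'age': {'type': 'integer'},
    #     })
    #     # -> ['name.first', 'age']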
def _get_mappings(self, json_map):
index_name = list(self._mapping.keys())[0]
if self._compat >= 7:
return DataFrame.resolve_mappings(json_map[index_name]["mappings"]["properties"])
else:
if self._doc_type is not None:
return DataFrame.resolve_mappings(json_map[index_name]["mappings"][self._doc_type]["properties"])
else:
raise DataFrameException('Please specify doc_type for ES version under 7')
|
|
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cryptography import exceptions as crypto_exception
import glance_store as store
import mock
from oslo_config import cfg
from six.moves import urllib
from glance.common import exception
from glance.common import store_utils
from glance.common import wsgi
import glance.context
import glance.db.simple.api as simple_db
CONF = cfg.CONF
UUID1 = 'c80a1a6c-bd1f-41c5-90ee-81afedb1d58d'
UUID2 = '971ec09a-8067-4bc8-a91f-ae3557f1c4c7'
TENANT1 = '6838eb7b-6ded-434a-882c-b344c77fe8df'
TENANT2 = '2c014f32-55eb-467d-8fcb-4bd706012f81'
USER1 = '54492ba0-f4df-4e4e-be62-27f4d76b29cf'
USER2 = '0b3b3006-cb76-4517-ae32-51397e22c754'
USER3 = '2hss8dkl-d8jh-88yd-uhs9-879sdjsd8skd'
BASE_URI = 'http://storeurl.com/container'
def sort_url_by_qs_keys(url):
# NOTE(kragniz): this only sorts the keys of the query string of a url.
# For example, an input of '/v2/tasks?sort_key=id&sort_dir=asc&limit=10'
# returns '/v2/tasks?limit=10&sort_dir=asc&sort_key=id'. This is to prevent
# non-deterministic ordering of the query string causing problems with unit
# tests.
parsed = urllib.parse.urlparse(url)
queries = urllib.parse.parse_qsl(parsed.query, True)
sorted_query = sorted(queries, key=lambda x: x[0])
encoded_sorted_query = urllib.parse.urlencode(sorted_query, True)
url_parts = (parsed.scheme, parsed.netloc, parsed.path,
parsed.params, encoded_sorted_query,
parsed.fragment)
return urllib.parse.urlunparse(url_parts)
def get_fake_request(path='', method='POST', is_admin=False, user=USER1,
roles=None, tenant=TENANT1):
if roles is None:
roles = ['member']
req = wsgi.Request.blank(path)
req.method = method
kwargs = {
'user': user,
'tenant': tenant,
'roles': roles,
'is_admin': is_admin,
}
req.context = glance.context.RequestContext(**kwargs)
return req
def fake_get_size_from_backend(uri, context=None):
return 1
def fake_get_verifier(context, img_signature_certificate_uuid,
img_signature_hash_method, img_signature,
img_signature_key_type):
verifier = mock.Mock()
if (img_signature is not None and img_signature == 'VALID'):
verifier.verify.return_value = None
else:
ex = crypto_exception.InvalidSignature()
verifier.verify.side_effect = ex
return verifier
class FakeDB(object):
def __init__(self, initialize=True):
self.reset()
if initialize:
self.init_db()
@staticmethod
def init_db():
images = [
{'id': UUID1, 'owner': TENANT1, 'status': 'queued',
'locations': [{'url': '%s/%s' % (BASE_URI, UUID1),
'metadata': {}, 'status': 'queued'}]},
{'id': UUID2, 'owner': TENANT1, 'status': 'queued'},
]
[simple_db.image_create(None, image) for image in images]
members = [
{'image_id': UUID1, 'member': TENANT1, 'can_share': True},
{'image_id': UUID1, 'member': TENANT2, 'can_share': False},
]
[simple_db.image_member_create(None, member) for member in members]
simple_db.image_tag_set_all(None, UUID1, ['ping', 'pong'])
@staticmethod
def reset():
simple_db.reset()
def __getattr__(self, key):
return getattr(simple_db, key)
class FakeStoreUtils(object):
def __init__(self, store_api):
self.store_api = store_api
def safe_delete_from_backend(self, context, id, location):
try:
del self.store_api.data[location['url']]
except KeyError:
pass
def schedule_delayed_delete_from_backend(self, context, id, location):
pass
def delete_image_location_from_backend(self, context,
image_id, location):
if CONF.delayed_delete:
self.schedule_delayed_delete_from_backend(context, image_id,
location)
else:
self.safe_delete_from_backend(context, image_id, location)
def validate_external_location(self, uri):
if uri and urllib.parse.urlparse(uri).scheme:
return store_utils.validate_external_location(uri)
else:
return True
class FakeStoreAPI(object):
def __init__(self, store_metadata=None):
self.data = {
'%s/%s' % (BASE_URI, UUID1): ('XXX', 3),
'%s/fake_location' % (BASE_URI): ('YYY', 3)
}
self.acls = {}
if store_metadata is None:
self.store_metadata = {}
else:
self.store_metadata = store_metadata
def create_stores(self):
pass
def set_acls(self, uri, public=False, read_tenants=None,
write_tenants=None, context=None):
if read_tenants is None:
read_tenants = []
if write_tenants is None:
write_tenants = []
self.acls[uri] = {
'public': public,
'read': read_tenants,
'write': write_tenants,
}
def get_from_backend(self, location, offset=0,
chunk_size=None, context=None):
try:
scheme = location[:location.find('/') - 1]
if scheme == 'unknown':
raise store.UnknownScheme(scheme=scheme)
return self.data[location]
except KeyError:
raise store.NotFound(image=location)
def get_size_from_backend(self, location, context=None):
return self.get_from_backend(location, context=context)[1]
def add_to_backend(self, conf, image_id, data, size,
scheme=None, context=None, verifier=None):
store_max_size = 7
current_store_size = 2
for location in self.data.keys():
if image_id in location:
raise exception.Duplicate()
if not size:
# 'data' is a string wrapped in a LimitingReader|CooperativeReader
# pipeline, so peek under the hood of those objects to get at the
# string itself.
size = len(data.data.fd)
if (current_store_size + size) > store_max_size:
raise exception.StorageFull()
if context.user == USER2:
raise exception.Forbidden()
if context.user == USER3:
raise exception.StorageWriteDenied()
self.data[image_id] = (data, size)
checksum = 'Z'
return (image_id, size, checksum, self.store_metadata)
def check_location_metadata(self, val, key=''):
store.check_location_metadata(val)
class FakePolicyEnforcer(object):
def __init__(self, *_args, **kwargs):
self.rules = {}
def enforce(self, _ctxt, action, target=None, **kwargs):
"""Raise Forbidden if a rule for given action is set to false."""
if self.rules.get(action) is False:
raise exception.Forbidden()
def set_rules(self, rules):
self.rules = rules
class FakeNotifier(object):
def __init__(self, *_args, **kwargs):
self.log = []
def _notify(self, event_type, payload, level):
log = {
'notification_type': level,
'event_type': event_type,
'payload': payload
}
self.log.append(log)
def warn(self, event_type, payload):
self._notify(event_type, payload, 'WARN')
def info(self, event_type, payload):
self._notify(event_type, payload, 'INFO')
def error(self, event_type, payload):
self._notify(event_type, payload, 'ERROR')
def debug(self, event_type, payload):
self._notify(event_type, payload, 'DEBUG')
def critical(self, event_type, payload):
self._notify(event_type, payload, 'CRITICAL')
def get_logs(self):
return self.log
class FakeGateway(object):
def __init__(self, image_factory=None, image_member_factory=None,
image_repo=None, task_factory=None, task_repo=None):
self.image_factory = image_factory
self.image_member_factory = image_member_factory
self.image_repo = image_repo
self.task_factory = task_factory
self.task_repo = task_repo
def get_image_factory(self, context):
return self.image_factory
def get_image_member_factory(self, context):
return self.image_member_factory
def get_repo(self, context):
return self.image_repo
def get_task_factory(self, context):
return self.task_factory
def get_task_repo(self, context):
return self.task_repo
class FakeTask(object):
def __init__(self, task_id, type=None, status=None):
self.task_id = task_id
self.type = type
self.message = None
self.input = None
self._status = status
self._executor = None
def success(self, result):
self.result = result
self._status = 'success'
def fail(self, message):
self.message = message
self._status = 'failure'
|
|
from __future__ import unicode_literals
import json
try:
from urllib import unquote
except ImportError:
from urllib.parse import unquote
from moto.core.utils import amz_crc32, amzn_request_id
from moto.core.responses import BaseResponse
from .models import lambda_backends
class LambdaResponse(BaseResponse):
@property
def json_body(self):
"""
:return: JSON
:rtype: dict
"""
return json.loads(self.body)
@property
def lambda_backend(self):
"""
Get backend
:return: Lambda Backend
:rtype: moto.awslambda.models.LambdaBackend
"""
return lambda_backends[self.region]
def root(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
if request.method == 'GET':
return self._list_functions(request, full_url, headers)
elif request.method == 'POST':
return self._create_function(request, full_url, headers)
else:
raise ValueError("Cannot handle request")
def function(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
if request.method == 'GET':
return self._get_function(request, full_url, headers)
elif request.method == 'DELETE':
return self._delete_function(request, full_url, headers)
else:
raise ValueError("Cannot handle request")
def versions(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
if request.method == 'GET':
# This is ListVersionByFunction
raise ValueError("Cannot handle request")
elif request.method == 'POST':
return self._publish_function(request, full_url, headers)
else:
raise ValueError("Cannot handle request")
@amz_crc32
@amzn_request_id
def invoke(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
if request.method == 'POST':
return self._invoke(request, full_url)
else:
raise ValueError("Cannot handle request")
@amz_crc32
@amzn_request_id
def invoke_async(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
if request.method == 'POST':
return self._invoke_async(request, full_url)
else:
raise ValueError("Cannot handle request")
def tag(self, request, full_url, headers):
self.setup_class(request, full_url, headers)
if request.method == 'GET':
return self._list_tags(request, full_url)
elif request.method == 'POST':
return self._tag_resource(request, full_url)
elif request.method == 'DELETE':
return self._untag_resource(request, full_url)
else:
raise ValueError("Cannot handle {0} request".format(request.method))
def policy(self, request, full_url, headers):
if request.method == 'GET':
return self._get_policy(request, full_url, headers)
if request.method == 'POST':
return self._add_policy(request, full_url, headers)
def _add_policy(self, request, full_url, headers):
lambda_backend = self.get_lambda_backend(full_url)
path = request.path if hasattr(request, 'path') else request.path_url
function_name = path.split('/')[-2]
if lambda_backend.has_function(function_name):
policy = request.body.decode('utf8')
lambda_backend.add_policy(function_name, policy)
return 200, {}, json.dumps(dict(Statement=policy))
else:
return 404, {}, "{}"
def _get_policy(self, request, full_url, headers):
lambda_backend = self.get_lambda_backend(full_url)
path = request.path if hasattr(request, 'path') else request.path_url
function_name = path.split('/')[-2]
if lambda_backend.has_function(function_name):
function = lambda_backend.get_function(function_name)
return 200, {}, json.dumps(dict(Policy="{\"Statement\":[" + function.policy + "]}"))
else:
return 404, {}, "{}"
def _invoke(self, request, full_url):
response_headers = {}
function_name = self.path.rsplit('/', 2)[-2]
qualifier = self._get_param('qualifier')
fn = self.lambda_backend.get_function(function_name, qualifier)
if fn:
payload = fn.invoke(self.body, self.headers, response_headers)
response_headers['Content-Length'] = str(len(payload))
return 202, response_headers, payload
else:
return 404, response_headers, "{}"
def _invoke_async(self, request, full_url):
response_headers = {}
function_name = self.path.rsplit('/', 3)[-3]
fn = self.lambda_backend.get_function(function_name, None)
if fn:
payload = fn.invoke(self.body, self.headers, response_headers)
response_headers['Content-Length'] = str(len(payload))
return 202, response_headers, payload
else:
return 404, response_headers, "{}"
def _list_functions(self, request, full_url, headers):
result = {
'Functions': []
}
for fn in self.lambda_backend.list_functions():
json_data = fn.get_configuration()
result['Functions'].append(json_data)
return 200, {}, json.dumps(result)
def _create_function(self, request, full_url, headers):
try:
fn = self.lambda_backend.create_function(self.json_body)
except ValueError as e:
return 400, {}, json.dumps({"Error": {"Code": e.args[0], "Message": e.args[1]}})
else:
config = fn.get_configuration()
return 201, {}, json.dumps(config)
def _publish_function(self, request, full_url, headers):
function_name = self.path.rsplit('/', 2)[-2]
fn = self.lambda_backend.publish_function(function_name)
if fn:
config = fn.get_configuration()
return 200, {}, json.dumps(config)
else:
return 404, {}, "{}"
def _delete_function(self, request, full_url, headers):
function_name = self.path.rsplit('/', 1)[-1]
qualifier = self._get_param('Qualifier', None)
if self.lambda_backend.delete_function(function_name, qualifier):
return 204, {}, ""
else:
return 404, {}, "{}"
def _get_function(self, request, full_url, headers):
function_name = self.path.rsplit('/', 1)[-1]
qualifier = self._get_param('Qualifier', None)
fn = self.lambda_backend.get_function(function_name, qualifier)
if fn:
code = fn.get_code()
return 200, {}, json.dumps(code)
else:
return 404, {}, "{}"
def _get_aws_region(self, full_url):
region = self.region_regex.search(full_url)
if region:
return region.group(1)
else:
return self.default_region
def _list_tags(self, request, full_url):
function_arn = unquote(self.path.rsplit('/', 1)[-1])
fn = self.lambda_backend.get_function_by_arn(function_arn)
if fn:
return 200, {}, json.dumps({'Tags': fn.tags})
else:
return 404, {}, "{}"
def _tag_resource(self, request, full_url):
function_arn = unquote(self.path.rsplit('/', 1)[-1])
if self.lambda_backend.tag_resource(function_arn, self.json_body['Tags']):
return 200, {}, "{}"
else:
return 404, {}, "{}"
def _untag_resource(self, request, full_url):
function_arn = unquote(self.path.rsplit('/', 1)[-1])
tag_keys = self.querystring['tagKeys']
if self.lambda_backend.untag_resource(function_arn, tag_keys):
return 204, {}, "{}"
else:
return 404, {}, "{}"
|
|
#!/usr/bin/env python
#
# Copyright (c) Greenplum Inc 2008. All Rights Reserved.
#
'''
Greenplum logging facilities.
This module contains some helper functions for setting up the
python builtin logging module. Tools and libraries are expected
to centralize configuration of logging through these functions.
Typical usage:
from gppylib import gplog
logger = gplog.setup_tool_logging(EXECNAME, hostname, username, logdir)
if options.verbose:
gplog.enable_verbose_logging()
if options.quiet:
gplog.quiet_stdout_logging()
logger.info("Start myTool")
...
'''
import datetime
import logging
import os
import sys
#------------------------------- Public Interface --------------------------------
def get_default_logger():
"""
Return the singleton default logger.
If a logger has not yet been established it creates one that:
- Logs output to stdout
- Does not setup file logging.
    Typical usage would be to call one of the setup_*_logging() functions
    at the beginning of a script in order to establish the exact type of
    logging desired, after which later calls to get_default_logger() can be
used to return a reference to the logger.
"""
global _LOGGER, _SOUT_HANDLER
if _LOGGER is None:
_LOGGER = logging.getLogger('default')
f = _get_default_formatter()
_SOUT_HANDLER = EncodingStreamHandler(sys.stdout)
_SOUT_HANDLER.setFormatter(f)
_LOGGER.addHandler(_SOUT_HANDLER)
_LOGGER.setLevel(logging.INFO)
return _LOGGER
def get_unittest_logger():
"""
Returns a singleton logger for use by gppylib unittests:
- Does not setup stdout logging
- Logs output to a file named "unittest.log" in the current directory.
Much like get_default_logger, except that the default logger it creates
(if one does not already exist) is different.
Note: perhaps the interface for this should be cleaned up. It would be
    more consistent to have a single get_default_logger() method and supply
a setup_unittest_logging() function.
"""
global _LOGGER, _SOUT_HANDLER
if _LOGGER is None:
_LOGGER = logging.getLogger('default')
filename="unittest.log"
_set_file_logging(filename)
return _LOGGER
def setup_helper_tool_logging(appName,hostname,userName):
"""
Returns a singleton logger for use by helper tools:
- Logs output to stdout
- Does not log output to a file
"""
logger = get_default_logger()
logger.name="%s:%s" % (hostname,userName)
return logger
def setup_tool_logging(appName,hostname,userName,logdir=None,nonuser=False):
"""
Returns a singleton logger for standard Greenplum tools:
- Logs output to stdout
- Logs output to a file, typically in ~/gpAdminLogs
"""
global _DEFAULT_FORMATTER
global _APP_NAME_FOR_DEFAULT_FORMAT
loggerName ="%s:%s" % (hostname,userName)
if nonuser:
appName=appName + "_" + loggerName
_APP_NAME_FOR_DEFAULT_FORMAT = appName
_enable_gpadmin_logging(appName,logdir)
#
# now reset the default formatter (someone may have called get_default_logger before calling setup_tool_logging)
#
logger = get_default_logger()
logger.name = loggerName
_DEFAULT_FORMATTER = None
f = _get_default_formatter()
_SOUT_HANDLER.setFormatter(f)
_FILE_HANDLER.setFormatter(f)
return logger
def enable_verbose_logging():
"""
Increases the log level to be verbose.
- Applies to all logging handlers (stdout/file).
"""
_LOGGER.setLevel(logging.DEBUG)
def quiet_stdout_logging():
"""
Reduce log level for stdout logging
"""
global _SOUT_HANDLER
_SOUT_HANDLER.setLevel(logging.WARN)
def very_quiet_stdout_logging():
"""
Reduce log level to critical for stdout logging
"""
global _SOUT_HANDLER
_SOUT_HANDLER.setLevel(logging.CRITICAL)
def logging_is_verbose():
"""
Returns true if the logging level has been set to verbose
"""
return _LOGGER.getEffectiveLevel() == logging.DEBUG
def logging_is_quiet():
"""
Returns true if the logging level has been set to quiet.
"""
# Todo: Currently this checks the default LOGGER, the
# quiet_stdout_logging() function only sets it on the stdout
# logging handler. So typical usage will never return true.
return _LOGGER.getEffectiveLevel() == logging.WARN
def get_logfile():
"""
Returns the name of the file we are logging to, if any.
"""
global _FILENAME
return _FILENAME
def log_literal(logger, lvl, msg):
"""
Logs a message to a specified logger bypassing the normal formatter
and writing the message exactly as passed.
The intended purpose of this is for logging messages returned from
remote backends that have already been formatted.
"""
# We assume the logger is using the two global handlers
global _SOUT_HANDLER
global _FILE_HANDLER
# Switch to the literal formatter
#
# Note: the logger may or may not actually make use of both formatters,
# but it is safe to always set both even if only one of them is used.
f = _get_literal_formatter()
_SOUT_HANDLER.setFormatter(f)
_FILE_HANDLER.setFormatter(f)
# Log the message
logger.log(lvl, msg)
# Restore default formatter
f = _get_default_formatter()
_SOUT_HANDLER.setFormatter(f)
_FILE_HANDLER.setFormatter(f)
return
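# Usage sketch (hedged: the pre-formatted message is illustrative) -- emit a
# line from a remote backend exactly as received:
#
#     log_literal(get_default_logger(), logging.INFO,
#                 '20240101:12:00:00:000042 gpstart:seg1:gpadmin-[INFO]:-done')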
def get_logger_if_verbose():
if logging_is_verbose():
return get_default_logger()
return None
#------------------------------- Private --------------------------------
#evil global
_LOGGER=None
_FILENAME=None
_DEFAULT_FORMATTER=None
_LITERAL_FORMATTER=None
_SOUT_HANDLER=None
_FILE_HANDLER=None
_APP_NAME_FOR_DEFAULT_FORMAT=os.path.split(sys.argv[0])[-1]
def _set_file_logging(filename):
"""
    Establishes a file output handler for the default formatter.
NOTE: internal use only
"""
global _LOGGER, _SOUT_HANDLER, _FILENAME, _FILE_HANDLER
_FILENAME=filename
_FILE_HANDLER = EncodingFileHandler( filename, 'a')
_FILE_HANDLER.setFormatter(_get_default_formatter())
_LOGGER.addHandler(_FILE_HANDLER)
def _get_default_formatter():
"""
Returns the default formatter, constructing it if needed.
The default formatter formats things using Greenplum standard logging:
<date>:<pid> <programname>:<hostname>:<username>:[LEVEL]:-message
NOTE: internal use only
"""
global _DEFAULT_FORMATTER
global _APP_NAME_FOR_DEFAULT_FORMAT
    if _DEFAULT_FORMATTER is None:
formatStr = "%(asctime)s:%(programname)s:%(name)s-[%(levelname)-s]:-%(message)s"
appName = _APP_NAME_FOR_DEFAULT_FORMAT.replace("%", "") # to make sure we don't produce a format string
formatStr = formatStr.replace("%(programname)s", "%06d %s" % (os.getpid(), appName))
_DEFAULT_FORMATTER = logging.Formatter(formatStr,"%Y%m%d:%H:%M:%S")
return _DEFAULT_FORMATTER
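# A record formatted this way looks like (hedged: pid, host, and user are
# illustrative):
#
#     20240101:12:00:00:001234 gpstart:mdw:gpadmin-[INFO]:-Start myTool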
def _get_literal_formatter():
"""
Returns the literal formatter, constructing it if needed.
The literal formatter formats the input string exactly as it was received.
It is only used by the log_literal() function.
NOTE: internal use only
"""
global _LITERAL_FORMATTER
    if _LITERAL_FORMATTER is None:
_LITERAL_FORMATTER = logging.Formatter()
return _LITERAL_FORMATTER
def _enable_gpadmin_logging(name,logdir=None):
"""
Sets up the file output handler for the default logger.
- if logdir is not specified it uses ~/gpAdminLogs
    - the log file is named "<logdir>/<name>_<date>.log" and opened in append mode
NOTE: internal use only
"""
global _FILE_HANDLER
get_default_logger()
now = datetime.date.today()
if logdir is None:
homeDir=os.path.expanduser("~")
gpadmin_logs_dir=homeDir + "/gpAdminLogs"
else:
gpadmin_logs_dir=logdir
if not os.path.exists(gpadmin_logs_dir):
os.mkdir(gpadmin_logs_dir)
filename = "%s/%s_%s.log" % (gpadmin_logs_dir,name, now.strftime("%Y%m%d"))
_set_file_logging(filename)
class EncodingFileHandler(logging.FileHandler):
"""This handler makes sure that the encoding of the message is utf-8 before
passing it along to the FileHandler. This will prevent encode/decode
errors later on."""
def __init__(self, filename, mode='a', encoding=None, delay=0):
logging.FileHandler.__init__(self, filename, mode, encoding, delay)
def emit(self, record):
if not isinstance(record.msg, str) and not isinstance(record.msg, unicode):
record.msg = str(record.msg)
if not isinstance(record.msg, unicode):
record.msg = unicode(record.msg, 'utf-8')
logging.FileHandler.emit(self, record)
class EncodingStreamHandler(logging.StreamHandler):
"""This handler makes sure that the encoding of the message is utf-8 before
passing it along to the StreamHandler. This will prevent encode/decode
errors later on."""
def __init__(self, strm=None):
logging.StreamHandler.__init__(self, strm)
def emit(self, record):
if not isinstance(record.msg, str) and not isinstance(record.msg, unicode):
record.msg = str(record.msg)
if not isinstance(record.msg, unicode):
record.msg = unicode(record.msg, 'utf-8')
logging.StreamHandler.emit(self, record)
|
|
# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at:
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
# OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the
# License.
# Python 2/3 compatibility
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from functools import partial
from io import BytesIO
from pytest import raises
from tests import is_exception, parametrize
from tests.event_aliases import e_int
from tests.trampoline_util import always_self
from tests.trampoline_util import trampoline_scaffold
from amazon.ion.core import Transition
from amazon.ion.core import ION_VERSION_MARKER_EVENT
from amazon.ion.core import ION_STREAM_INCOMPLETE_EVENT, ION_STREAM_END_EVENT
from amazon.ion.reader import read_data_event, reader_trampoline, blocking_reader
from amazon.ion.reader import NEXT_EVENT, SKIP_EVENT
from amazon.ion.util import coroutine, record
_TRIVIAL_ION_EVENT = e_int(0)
_ivm_transition = partial(Transition, ION_VERSION_MARKER_EVENT)
_incomplete_transition = partial(Transition, ION_STREAM_INCOMPLETE_EVENT)
_end_transition = partial(Transition, ION_STREAM_END_EVENT)
_event_transition = partial(Transition, _TRIVIAL_ION_EVENT)
class ReaderTrampolineParameters(record('desc', 'coroutine', 'input', 'expected', ('allow_flush', False))):
def __str__(self):
return self.desc
_P = ReaderTrampolineParameters
_TRIVIAL_DATA_EVENT = read_data_event(b'DATA')
_EMPTY_DATA_EVENT = read_data_event(b'')
@parametrize(
_P(
desc='START WITH NONE',
coroutine=always_self(_ivm_transition),
input=[None],
expected=[TypeError],
),
_P(
desc='START WITH SKIP',
coroutine=always_self(_ivm_transition),
input=[SKIP_EVENT],
expected=[TypeError],
),
_P(
desc='ALWAYS IVM NEXT',
coroutine=always_self(_ivm_transition),
input=[NEXT_EVENT] * 4,
expected=[ION_VERSION_MARKER_EVENT] * 4,
),
_P(
desc='ALWAYS IVM NEXT THEN SKIP',
coroutine=always_self(_ivm_transition),
input=[NEXT_EVENT, SKIP_EVENT],
expected=[ION_VERSION_MARKER_EVENT, TypeError],
),
_P(
desc='ALWAYS INCOMPLETE NEXT EOF',
coroutine=always_self(_incomplete_transition),
input=[NEXT_EVENT, NEXT_EVENT],
expected=[ION_STREAM_INCOMPLETE_EVENT, TypeError],
allow_flush=False
),
_P(
desc='ALWAYS INCOMPLETE NEXT FLUSH',
coroutine=always_self(_incomplete_transition),
input=[NEXT_EVENT, NEXT_EVENT],
expected=[ION_STREAM_INCOMPLETE_EVENT, ION_STREAM_INCOMPLETE_EVENT],
allow_flush=True
),
_P(
desc='ALWAYS INCOMPLETE SKIP',
coroutine=always_self(_incomplete_transition),
input=[NEXT_EVENT, SKIP_EVENT],
expected=[ION_STREAM_INCOMPLETE_EVENT, TypeError],
),
_P(
desc='ALWAYS INCOMPLETE DATA',
coroutine=always_self(_incomplete_transition),
input=[NEXT_EVENT] + [_TRIVIAL_DATA_EVENT] * 4,
expected=[ION_STREAM_INCOMPLETE_EVENT] * 5,
),
_P(
desc='ALWAYS END NEXT',
coroutine=always_self(_end_transition),
input=[NEXT_EVENT, NEXT_EVENT],
expected=[ION_STREAM_END_EVENT, TypeError],
),
_P(
desc='ALWAYS END SKIP',
coroutine=always_self(_end_transition),
input=[NEXT_EVENT, SKIP_EVENT],
expected=[ION_STREAM_END_EVENT, TypeError],
),
_P(
desc='ALWAYS END DATA',
coroutine=always_self(_end_transition),
input=[NEXT_EVENT] + [_TRIVIAL_DATA_EVENT] * 4,
expected=[ION_STREAM_END_EVENT] * 5,
),
_P(
desc='ALWAYS END EMPTY DATA',
coroutine=always_self(_end_transition),
input=[NEXT_EVENT] + [_EMPTY_DATA_EVENT],
expected=[ION_STREAM_END_EVENT, ValueError],
),
_P(
desc='ALWAYS EVENT DATA',
coroutine=always_self(_event_transition),
input=[NEXT_EVENT] + [_TRIVIAL_DATA_EVENT],
expected=[_TRIVIAL_ION_EVENT, TypeError]
)
)
def test_trampoline(p):
trampoline_scaffold(reader_trampoline, p, p.allow_flush)
class _P(record('desc', 'coroutine', 'data', 'input', 'expected')):
def __str__(self):
return self.desc
@coroutine
def _asserts_events(expecteds, outputs, allow_flush=False):
output = None
for expected, next_output in zip(expecteds, outputs):
actual = yield output
assert expected == actual
output = next_output
yield output
if not allow_flush:
raise EOFError()
yield ION_STREAM_END_EVENT
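# _asserts_events doubles as a scripted mock reader: each event it receives
# is checked against `expecteds` while it replays `outputs` in order; once
# exhausted it raises EOFError, unless allow_flush permits one final
# ION_STREAM_END_EVENT.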
@parametrize(
_P(
desc='ALWAYS COMPLETE',
coroutine=_asserts_events(
[NEXT_EVENT, SKIP_EVENT],
[ION_VERSION_MARKER_EVENT] * 2),
data=b'',
input=[NEXT_EVENT, SKIP_EVENT],
expected=[ION_VERSION_MARKER_EVENT] * 2
),
_P(
desc='FIRST INCOMPLETE, THEN COMPLETE',
coroutine=_asserts_events(
[NEXT_EVENT, read_data_event(b'a'), SKIP_EVENT],
[ION_STREAM_INCOMPLETE_EVENT] + ([ION_VERSION_MARKER_EVENT] * 2)
),
data=b'a',
input=[NEXT_EVENT, SKIP_EVENT],
expected=[ION_VERSION_MARKER_EVENT] * 2
),
_P(
desc='FIRST STREAM END, THEN COMPLETE',
coroutine=_asserts_events(
[NEXT_EVENT, read_data_event(b'a'), SKIP_EVENT],
[ION_STREAM_END_EVENT] + ([ION_VERSION_MARKER_EVENT] * 2)
),
data=b'a',
input=[NEXT_EVENT, SKIP_EVENT],
expected=[ION_VERSION_MARKER_EVENT] * 2
),
_P(
desc='PREMATURE EOF',
coroutine=_asserts_events(
[NEXT_EVENT, read_data_event(b'a'), read_data_event(b'b')],
[ION_STREAM_END_EVENT, ION_STREAM_INCOMPLETE_EVENT, ION_STREAM_INCOMPLETE_EVENT],
allow_flush=False
),
data=b'ab',
input=[NEXT_EVENT],
expected=[EOFError]
),
_P(
desc='FLUSH',
coroutine=_asserts_events(
[NEXT_EVENT, read_data_event(b'a'), read_data_event(b'b')],
[ION_STREAM_END_EVENT, ION_STREAM_INCOMPLETE_EVENT, ION_STREAM_INCOMPLETE_EVENT],
allow_flush=True
),
data=b'ab',
input=[NEXT_EVENT],
expected=[ION_STREAM_END_EVENT]
),
_P(
desc='SINGLE EVENT, THEN NATURAL EOF',
coroutine=_asserts_events(
[NEXT_EVENT, read_data_event(b'a'), NEXT_EVENT],
[ION_STREAM_END_EVENT, ION_VERSION_MARKER_EVENT, ION_STREAM_END_EVENT]
),
data=b'a',
input=[NEXT_EVENT, NEXT_EVENT],
expected=[ION_VERSION_MARKER_EVENT, ION_STREAM_END_EVENT]
),
)
def test_blocking_reader(p):
buf = BytesIO(p.data)
reader = blocking_reader(p.coroutine, buf, buffer_size=1)
for input, expected in zip(p.input, p.expected):
if is_exception(expected):
with raises(expected):
reader.send(input)
else:
actual = reader.send(input)
assert expected == actual
|
|
# -*- coding: utf-8 -*-
from __future__ import with_statement, print_function, absolute_import
import json
import requests
from requests_oauthlib import OAuth1
from trello.board import Board
from trello.card import Card
from trello.trellolist import List
from trello.organization import Organization
from trello.member import Member
from trello.webhook import WebHook
from trello.exceptions import *
try:
# PyOpenSSL works around some issues in python ssl modules
# In particular in python < 2.7.9 and python < 3.2
# It is not a hard requirement, so it's not listed in requirements.txt
# More info https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning
import urllib3.contrib.pyopenssl
urllib3.contrib.pyopenssl.inject_into_urllib3()
except ImportError:
    pass
class TrelloClient(object):
""" Base class for Trello API access """
def __init__(self, api_key, api_secret=None, token=None, token_secret=None):
"""
Constructor
:api_key: API key generated at https://trello.com/1/appKey/generate
:api_secret: the secret component of api_key
        :token: OAuth token generated by the user in
trello.util.create_oauth_token
:token_secret: the OAuth client secret for the given OAuth token
"""
# client key and secret for oauth1 session
if api_key or token:
self.oauth = OAuth1(client_key=api_key, client_secret=api_secret,
resource_owner_key=token, resource_owner_secret=token_secret)
else:
self.oauth = None
self.public_only = token is None
self.api_key = api_key
self.api_secret = api_secret
self.resource_owner_key = token
self.resource_owner_secret = token_secret
def info_for_all_boards(self, actions):
"""
        Use this to retrieve info for all your boards in one swoop; the
        result is stored on self.all_info.
"""
if self.public_only:
return None
else:
json_obj = self.fetch_json(
'/members/me/boards/all',
query_params={'actions': actions})
self.all_info = json_obj
def logout(self):
"""Log out of Trello."""
# TODO: This function.
raise NotImplementedError()
def list_boards(self):
"""
Returns all boards for your Trello user
:return: a list of Python objects representing the Trello boards.
:rtype: Board
Each board has the following noteworthy attributes:
- id: the board's identifier
- name: Name of the board
- desc: Description of the board (optional - may be missing from the
returned JSON)
- closed: Boolean representing whether this board is closed or not
- url: URL to the board
"""
json_obj = self.fetch_json('/members/me/boards')
return [Board.from_json(self, json_obj=obj) for obj in json_obj]
def list_organizations(self):
"""
Returns all organizations for your Trello user
:return: a list of Python objects representing the Trello organizations.
:rtype: Organization
Each organization has the following noteworthy attributes:
- id: the organization's identifier
- name: Name of the organization
- desc: Description of the organization (optional - may be missing from the
returned JSON)
- closed: Boolean representing whether this organization is closed or not
- url: URL to the organization
"""
        json_obj = self.fetch_json('/members/me/organizations')
return [Organization.from_json(self, obj) for obj in json_obj]
def get_organization(self, organization_id):
'''Get organization
:rtype: Organization
'''
obj = self.fetch_json('/organizations/' + organization_id)
return Organization.from_json(self, obj)
def get_board(self, board_id):
'''Get board
:rtype: Board
'''
obj = self.fetch_json('/boards/' + board_id)
return Board.from_json(self, json_obj=obj)
def add_board(self, board_name, organization_id=None):
'''Create board
:rtype: Board
'''
post_args = {'name': board_name}
if organization_id:
post_args['idOrganization'] = organization_id
obj = self.fetch_json('/boards', http_method='POST', post_args=post_args)
return Board.from_json(self, json_obj=obj)
def get_member(self, member_id):
'''Get member
:rtype: Member
'''
return Member(self, member_id).fetch()
def get_card(self, card_id):
'''Get card
:rtype: Card
'''
card_json = self.fetch_json('/cards/' + card_id)
list_json = self.fetch_json('/lists/' + card_json['idList'])
board = self.get_board(card_json['idBoard'])
return Card.from_json(List.from_json(board, list_json), card_json)
def fetch_json(
self,
uri_path,
http_method='GET',
headers=None,
query_params=None,
post_args=None,
files=None):
""" Fetch some JSON from Trello """
# explicit values here to avoid mutable default values
if headers is None:
headers = {}
if query_params is None:
query_params = {}
if post_args is None:
post_args = {}
# if files specified, we don't want any data
data = None
if files is None:
data = json.dumps(post_args)
# set content type and accept headers to handle JSON
if http_method in ("POST", "PUT", "DELETE") and not files:
headers['Content-Type'] = 'application/json; charset=utf-8'
headers['Accept'] = 'application/json'
# construct the full URL without query parameters
if uri_path[0] == '/':
uri_path = uri_path[1:]
url = 'https://api.trello.com/1/%s' % uri_path
# perform the HTTP requests, if possible uses OAuth authentication
response = requests.request(http_method, url, params=query_params,
headers=headers, data=data,
auth=self.oauth, files=files)
if response.status_code == 401:
raise Unauthorized("%s at %s" % (response.text, url), response)
if response.status_code != 200:
raise ResourceUnavailable("%s at %s" % (response.text, url), response)
return response.json()
def list_hooks(self, token=None):
"""
Returns a list of all hooks associated with a specific token. If you don't pass in a token,
        it tries to use the token associated with the TrelloClient object (if it exists).
"""
token = token or self.resource_owner_key
if token is None:
raise TokenError("You need to pass an auth token in to list hooks.")
else:
url = "/tokens/%s/webhooks" % token
return self._existing_hook_objs(self.fetch_json(url), token)
def _existing_hook_objs(self, hooks, token):
"""
Given a list of hook dicts passed from list_hooks, creates
the hook objects
"""
all_hooks = []
for hook in hooks:
new_hook = WebHook(self, token, hook['id'], hook['description'],
hook['idModel'],
hook['callbackURL'], hook['active'])
all_hooks.append(new_hook)
return all_hooks
def create_hook(self, callback_url, id_model, desc=None, token=None):
"""
        Creates a new webhook and returns the WebHook object created on
        success, or False if Trello rejects the request.
"""
token = token or self.resource_owner_key
if token is None:
raise TokenError("You need to pass an auth token in to create a hook.")
url = "https://trello.com/1/tokens/%s/webhooks/" % token
data = {'callbackURL': callback_url, 'idModel': id_model,
'description': desc}
response = requests.post(url, data=data, auth=self.oauth)
if response.status_code == 200:
hook_id = response.json()['id']
return WebHook(self, token, hook_id, desc, id_model, callback_url, True)
else:
return False
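# A minimal usage sketch of the client above (the credential strings are
# placeholders, not working values; see https://trello.com/1/appKey/generate):
if __name__ == '__main__':
    client = TrelloClient(
        api_key='your-api-key',
        api_secret='your-api-secret',
        token='your-oauth-token',
        token_secret='your-oauth-token-secret',
    )
    # Print the name of every board visible to the authenticated user.
    for board in client.list_boards():
        print(board.name)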
|
|
"""Compatibility for older pythons."""
# OrderedDict from http://code.activestate.com/recipes/576693/
# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6,
# 2.7 and pypy. Passes Python2.7's test suite and incorporates all
# the latest updates.
try:
from thread import get_ident as _get_ident
except ImportError:
from dummy_thread import get_ident as _get_ident
try:
from _abcoll import KeysView, ValuesView, ItemsView
except ImportError:
pass
class OrderedDict(dict):
'Dictionary that remembers insertion order'
# An inherited dict maps keys to values.
# The inherited dict provides __getitem__, __len__, __contains__, and get.
# The remaining methods are order-aware.
# Big-O running times for all methods are the same as for regular dictionaries.
# The internal self.__map dictionary maps keys to links in a doubly linked list.
# The circular doubly linked list starts and ends with a sentinel element.
# The sentinel element never gets deleted (this simplifies the algorithm).
# Each link is stored as a list of length three: [PREV, NEXT, KEY].
def __init__(self, *args, **kwds):
'''Initialize an ordered dictionary. Signature is the same as for
regular dictionaries, but keyword arguments are not recommended
because their insertion order is arbitrary.
'''
if len(args) > 1:
            raise TypeError('expected at most 1 argument, got %d' % len(args))
try:
self.__root
except AttributeError:
self.__root = root = [] # sentinel node
root[:] = [root, root, None]
self.__map = {}
self.__update(*args, **kwds)
def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
'od.__setitem__(i, y) <==> od[i]=y'
# Setting a new item creates a new link which goes at the end of the linked
# list, and the inherited dictionary is updated with the new key/value pair.
if key not in self:
root = self.__root
last = root[0]
last[1] = root[0] = self.__map[key] = [last, root, key]
dict_setitem(self, key, value)
def __delitem__(self, key, dict_delitem=dict.__delitem__):
'od.__delitem__(y) <==> del od[y]'
# Deleting an existing item uses self.__map to find the link which is
# then removed by updating the links in the predecessor and successor nodes.
dict_delitem(self, key)
link_prev, link_next, key = self.__map.pop(key)
link_prev[1] = link_next
link_next[0] = link_prev
def __iter__(self):
'od.__iter__() <==> iter(od)'
root = self.__root
curr = root[1]
while curr is not root:
yield curr[2]
curr = curr[1]
def __reversed__(self):
'od.__reversed__() <==> reversed(od)'
root = self.__root
curr = root[0]
while curr is not root:
yield curr[2]
curr = curr[0]
def clear(self):
'od.clear() -> None. Remove all items from od.'
try:
for node in self.__map.itervalues():
del node[:]
root = self.__root
root[:] = [root, root, None]
self.__map.clear()
except AttributeError:
pass
dict.clear(self)
def popitem(self, last=True):
'''od.popitem() -> (k, v), return and remove a (key, value) pair.
Pairs are returned in LIFO order if last is true or FIFO order if false.
'''
if not self:
raise KeyError('dictionary is empty')
root = self.__root
if last:
link = root[0]
link_prev = link[0]
link_prev[1] = root
root[0] = link_prev
else:
link = root[1]
link_next = link[1]
root[1] = link_next
link_next[0] = root
key = link[2]
del self.__map[key]
value = dict.pop(self, key)
return key, value
# -- the following methods do not depend on the internal structure --
def keys(self):
'od.keys() -> list of keys in od'
return list(self)
def values(self):
'od.values() -> list of values in od'
return [self[key] for key in self]
def items(self):
'od.items() -> list of (key, value) pairs in od'
return [(key, self[key]) for key in self]
def iterkeys(self):
'od.iterkeys() -> an iterator over the keys in od'
return iter(self)
def itervalues(self):
        'od.itervalues() -> an iterator over the values in od'
for k in self:
yield self[k]
def iteritems(self):
        'od.iteritems() -> an iterator over the (key, value) items in od'
for k in self:
yield (k, self[k])
def update(*args, **kwds):
'''od.update(E, **F) -> None. Update od from dict/iterable E and F.
If E is a dict instance, does: for k in E: od[k] = E[k]
If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
Or if E is an iterable of items, does: for k, v in E: od[k] = v
In either case, this is followed by: for k, v in F.items(): od[k] = v
'''
if len(args) > 2:
raise TypeError('update() takes at most 2 positional '
'arguments (%d given)' % (len(args),))
elif not args:
raise TypeError('update() takes at least 1 argument (0 given)')
self = args[0]
# Make progressively weaker assumptions about "other"
other = ()
if len(args) == 2:
other = args[1]
if isinstance(other, dict):
for key in other:
self[key] = other[key]
elif hasattr(other, 'keys'):
for key in other.keys():
self[key] = other[key]
else:
for key, value in other:
self[key] = value
for key, value in kwds.items():
self[key] = value
__update = update # let subclasses override update without breaking __init__
__marker = object()
def pop(self, key, default=__marker):
'''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
If key is not found, d is returned if given, otherwise KeyError is raised.
'''
if key in self:
result = self[key]
del self[key]
return result
if default is self.__marker:
raise KeyError(key)
return default
def setdefault(self, key, default=None):
'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
if key in self:
return self[key]
self[key] = default
return default
def __repr__(self, _repr_running={}):
'od.__repr__() <==> repr(od)'
call_key = id(self), _get_ident()
if call_key in _repr_running:
return '...'
_repr_running[call_key] = 1
try:
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, self.items())
finally:
del _repr_running[call_key]
def __reduce__(self):
'Return state information for pickling'
items = [[k, self[k]] for k in self]
inst_dict = vars(self).copy()
for k in vars(OrderedDict()):
inst_dict.pop(k, None)
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def copy(self):
'od.copy() -> a shallow copy of od'
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
'''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
and values equal to v (which defaults to None).
'''
d = cls()
for key in iterable:
d[key] = value
return d
def __eq__(self, other):
'''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
while comparison to a regular mapping is order-insensitive.
'''
if isinstance(other, OrderedDict):
return len(self)==len(other) and self.items() == other.items()
return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other
# -- the following methods are only used in Python 2.7 --
def viewkeys(self):
"od.viewkeys() -> a set-like object providing a view on od's keys"
return KeysView(self)
def viewvalues(self):
"od.viewvalues() -> an object providing a view on od's values"
return ValuesView(self)
def viewitems(self):
"od.viewitems() -> a set-like object providing a view on od's items"
return ItemsView(self)
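# A quick self-check sketch exercising the ordering guarantees documented
# above (illustrative only; not part of the backport itself):
if __name__ == '__main__':
    od = OrderedDict()
    od['b'] = 1
    od['a'] = 2
    od['c'] = 3
    assert list(od) == ['b', 'a', 'c']            # insertion order preserved
    od['b'] = 99                                  # updating does not reorder
    assert list(od) == ['b', 'a', 'c']
    assert od.popitem() == ('c', 3)               # LIFO by default
    assert od.popitem(last=False) == ('b', 99)    # FIFO with last=False
    assert OrderedDict([('x', 1)]) == {'x': 1}    # order-insensitive vs dict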
|
|
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#PY25 compatible for GAE.
#
# Copyright 2007 Google Inc. All Rights Reserved.
"""Contains routines for printing protocol messages in text format."""
__author__ = 'kenton@google.com (Kenton Varda)'
import io
import re
from google.protobuf.internal import type_checkers
from google.protobuf import descriptor
from google.protobuf import text_encoding
__all__ = ['MessageToString', 'PrintMessage', 'PrintField',
'PrintFieldValue', 'Merge']
_INTEGER_CHECKERS = (type_checkers.Uint32ValueChecker(),
type_checkers.Int32ValueChecker(),
type_checkers.Uint64ValueChecker(),
type_checkers.Int64ValueChecker())
_FLOAT_INFINITY = re.compile('-?inf(?:inity)?f?', re.IGNORECASE)
_FLOAT_NAN = re.compile('nanf?', re.IGNORECASE)
_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT,
descriptor.FieldDescriptor.CPPTYPE_DOUBLE])
class Error(Exception):
"""Top-level module error for text_format."""
class ParseError(Error):
"""Thrown in case of ASCII parsing error."""
def MessageToString(message, as_utf8=False, as_one_line=False,
pointy_brackets=False, use_index_order=False,
float_format=None):
"""Convert protobuf message to text format.
Floating point values can be formatted compactly with 15 digits of
precision (which is the most that IEEE 754 "double" can guarantee)
using float_format='.15g'.
Args:
message: The protocol buffers message.
as_utf8: Produce text output in UTF8 format.
as_one_line: Don't introduce newlines between fields.
pointy_brackets: If True, use angle brackets instead of curly braces for
nesting.
use_index_order: If True, print fields of a proto message using the order
defined in source code instead of the field number. By default, use the
field number order.
float_format: If set, use this to specify floating point number formatting
(per the "Format Specification Mini-Language"); otherwise, str() is used.
Returns:
A string of the text formatted protocol buffer message.
"""
out = io.StringIO()
PrintMessage(message, out, as_utf8=as_utf8, as_one_line=as_one_line,
pointy_brackets=pointy_brackets,
use_index_order=use_index_order,
float_format=float_format)
result = out.getvalue()
out.close()
if as_one_line:
return result.rstrip()
return result
def PrintMessage(message, out, indent=0, as_utf8=False, as_one_line=False,
pointy_brackets=False, use_index_order=False,
float_format=None):
fields = message.ListFields()
if use_index_order:
fields.sort(key=lambda x: x[0].index)
for field, value in fields:
if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
for element in value:
PrintField(field, element, out, indent, as_utf8, as_one_line,
pointy_brackets=pointy_brackets,
float_format=float_format)
else:
PrintField(field, value, out, indent, as_utf8, as_one_line,
pointy_brackets=pointy_brackets,
float_format=float_format)
def PrintField(field, value, out, indent=0, as_utf8=False, as_one_line=False,
pointy_brackets=False, float_format=None):
"""Print a single field name/value pair. For repeated fields, the value
should be a single element."""
out.write(' ' * indent)
if field.is_extension:
out.write('[')
if (field.containing_type.GetOptions().message_set_wire_format and
field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
field.message_type == field.extension_scope and
field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL):
out.write(field.message_type.full_name)
else:
out.write(field.full_name)
out.write(']')
elif field.type == descriptor.FieldDescriptor.TYPE_GROUP:
# For groups, use the capitalized name.
out.write(field.message_type.name)
else:
out.write(field.name)
if field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
# The colon is optional in this case, but our cross-language golden files
# don't include it.
out.write(': ')
PrintFieldValue(field, value, out, indent, as_utf8, as_one_line,
pointy_brackets=pointy_brackets,
float_format=float_format)
if as_one_line:
out.write(' ')
else:
out.write('\n')
def PrintFieldValue(field, value, out, indent=0, as_utf8=False,
as_one_line=False, pointy_brackets=False,
float_format=None):
"""Print a single field value (not including name). For repeated fields,
the value should be a single element."""
if pointy_brackets:
openb = '<'
closeb = '>'
else:
openb = '{'
closeb = '}'
if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
if as_one_line:
out.write(' %s ' % openb)
PrintMessage(value, out, indent, as_utf8, as_one_line,
pointy_brackets=pointy_brackets,
float_format=float_format)
out.write(closeb)
else:
out.write(' %s\n' % openb)
PrintMessage(value, out, indent + 2, as_utf8, as_one_line,
pointy_brackets=pointy_brackets,
float_format=float_format)
out.write(' ' * indent + closeb)
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
enum_value = field.enum_type.values_by_number.get(value, None)
if enum_value is not None:
out.write(enum_value.name)
else:
out.write(str(value))
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
out.write('\"')
if isinstance(value, str):
out_value = value.encode('utf-8')
else:
out_value = value
if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
# We need to escape non-UTF8 chars in TYPE_BYTES field.
out_as_utf8 = False
else:
out_as_utf8 = as_utf8
out.write(text_encoding.CEscape(out_value, out_as_utf8))
out.write('\"')
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
if value:
out.write('true')
else:
out.write('false')
elif field.cpp_type in _FLOAT_TYPES and float_format is not None:
out.write('{1:{0}}'.format(float_format, value))
else:
out.write(str(value))
def _ParseOrMerge(lines, message, allow_multiple_scalars):
"""Converts an ASCII representation of a protocol message into a message.
Args:
lines: Lines of a message's ASCII representation.
message: A protocol buffer message to merge into.
allow_multiple_scalars: Determines if repeated values for a non-repeated
field are permitted, e.g., the string "foo: 1 foo: 2" for a
required/optional field named "foo".
Raises:
ParseError: On ASCII parsing problems.
"""
tokenizer = _Tokenizer(lines)
while not tokenizer.AtEnd():
_MergeField(tokenizer, message, allow_multiple_scalars)
def Parse(text, message):
"""Parses an ASCII representation of a protocol message into a message.
Args:
text: Message ASCII representation.
message: A protocol buffer message to merge into.
Returns:
The same message passed as argument.
Raises:
ParseError: On ASCII parsing problems.
"""
if not isinstance(text, str): text = text.decode('utf-8')
return ParseLines(text.split('\n'), message)
def Merge(text, message):
"""Parses an ASCII representation of a protocol message into a message.
Like Parse(), but allows repeated values for a non-repeated field, and uses
the last one.
Args:
text: Message ASCII representation.
message: A protocol buffer message to merge into.
Returns:
The same message passed as argument.
Raises:
ParseError: On ASCII parsing problems.
"""
return MergeLines(text.split('\n'), message)
def ParseLines(lines, message):
"""Parses an ASCII representation of a protocol message into a message.
Args:
lines: An iterable of lines of a message's ASCII representation.
message: A protocol buffer message to merge into.
Returns:
The same message passed as argument.
Raises:
ParseError: On ASCII parsing problems.
"""
_ParseOrMerge(lines, message, False)
return message
def MergeLines(lines, message):
"""Parses an ASCII representation of a protocol message into a message.
Args:
lines: An iterable of lines of a message's ASCII representation.
message: A protocol buffer message to merge into.
Returns:
The same message passed as argument.
Raises:
ParseError: On ASCII parsing problems.
"""
_ParseOrMerge(lines, message, True)
return message
def _MergeField(tokenizer, message, allow_multiple_scalars):
"""Merges a single protocol message field into a message.
Args:
tokenizer: A tokenizer to parse the field name and values.
message: A protocol message to record the data.
allow_multiple_scalars: Determines if repeated values for a non-repeated
field are permitted, e.g., the string "foo: 1 foo: 2" for a
required/optional field named "foo".
Raises:
ParseError: In case of ASCII parsing problems.
"""
message_descriptor = message.DESCRIPTOR
if tokenizer.TryConsume('['):
name = [tokenizer.ConsumeIdentifier()]
while tokenizer.TryConsume('.'):
name.append(tokenizer.ConsumeIdentifier())
name = '.'.join(name)
if not message_descriptor.is_extendable:
raise tokenizer.ParseErrorPreviousToken(
'Message type "%s" does not have extensions.' %
message_descriptor.full_name)
# pylint: disable=protected-access
field = message.Extensions._FindExtensionByName(name)
# pylint: enable=protected-access
if not field:
raise tokenizer.ParseErrorPreviousToken(
'Extension "%s" not registered.' % name)
elif message_descriptor != field.containing_type:
raise tokenizer.ParseErrorPreviousToken(
'Extension "%s" does not extend message type "%s".' % (
name, message_descriptor.full_name))
tokenizer.Consume(']')
else:
name = tokenizer.ConsumeIdentifier()
field = message_descriptor.fields_by_name.get(name, None)
# Group names are expected to be capitalized as they appear in the
# .proto file, which actually matches their type names, not their field
# names.
if not field:
field = message_descriptor.fields_by_name.get(name.lower(), None)
if field and field.type != descriptor.FieldDescriptor.TYPE_GROUP:
field = None
if (field and field.type == descriptor.FieldDescriptor.TYPE_GROUP and
field.message_type.name != name):
field = None
if not field:
raise tokenizer.ParseErrorPreviousToken(
'Message type "%s" has no field named "%s".' % (
message_descriptor.full_name, name))
if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
tokenizer.TryConsume(':')
if tokenizer.TryConsume('<'):
end_token = '>'
else:
tokenizer.Consume('{')
end_token = '}'
if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
if field.is_extension:
sub_message = message.Extensions[field].add()
else:
sub_message = getattr(message, field.name).add()
else:
if field.is_extension:
sub_message = message.Extensions[field]
else:
sub_message = getattr(message, field.name)
sub_message.SetInParent()
while not tokenizer.TryConsume(end_token):
if tokenizer.AtEnd():
raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token))
_MergeField(tokenizer, sub_message, allow_multiple_scalars)
else:
_MergeScalarField(tokenizer, message, field, allow_multiple_scalars)
# For historical reasons, fields may optionally be separated by commas or
# semicolons.
if not tokenizer.TryConsume(','):
tokenizer.TryConsume(';')
def _MergeScalarField(tokenizer, message, field, allow_multiple_scalars):
"""Merges a single protocol message scalar field into a message.
Args:
tokenizer: A tokenizer to parse the field value.
message: A protocol message to record the data.
field: The descriptor of the field to be merged.
allow_multiple_scalars: Determines if repeated values for a non-repeated
field are permitted, e.g., the string "foo: 1 foo: 2" for a
required/optional field named "foo".
Raises:
ParseError: In case of ASCII parsing problems.
RuntimeError: On runtime errors.
"""
tokenizer.Consume(':')
value = None
if field.type in (descriptor.FieldDescriptor.TYPE_INT32,
descriptor.FieldDescriptor.TYPE_SINT32,
descriptor.FieldDescriptor.TYPE_SFIXED32):
value = tokenizer.ConsumeInt32()
elif field.type in (descriptor.FieldDescriptor.TYPE_INT64,
descriptor.FieldDescriptor.TYPE_SINT64,
descriptor.FieldDescriptor.TYPE_SFIXED64):
value = tokenizer.ConsumeInt64()
elif field.type in (descriptor.FieldDescriptor.TYPE_UINT32,
descriptor.FieldDescriptor.TYPE_FIXED32):
value = tokenizer.ConsumeUint32()
elif field.type in (descriptor.FieldDescriptor.TYPE_UINT64,
descriptor.FieldDescriptor.TYPE_FIXED64):
value = tokenizer.ConsumeUint64()
elif field.type in (descriptor.FieldDescriptor.TYPE_FLOAT,
descriptor.FieldDescriptor.TYPE_DOUBLE):
value = tokenizer.ConsumeFloat()
elif field.type == descriptor.FieldDescriptor.TYPE_BOOL:
value = tokenizer.ConsumeBool()
elif field.type == descriptor.FieldDescriptor.TYPE_STRING:
value = tokenizer.ConsumeString()
elif field.type == descriptor.FieldDescriptor.TYPE_BYTES:
value = tokenizer.ConsumeByteString()
elif field.type == descriptor.FieldDescriptor.TYPE_ENUM:
value = tokenizer.ConsumeEnum(field)
else:
raise RuntimeError('Unknown field type %d' % field.type)
if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
if field.is_extension:
message.Extensions[field].append(value)
else:
getattr(message, field.name).append(value)
else:
if field.is_extension:
if not allow_multiple_scalars and message.HasExtension(field):
raise tokenizer.ParseErrorPreviousToken(
'Message type "%s" should not have multiple "%s" extensions.' %
(message.DESCRIPTOR.full_name, field.full_name))
else:
message.Extensions[field] = value
else:
if not allow_multiple_scalars and message.HasField(field.name):
raise tokenizer.ParseErrorPreviousToken(
'Message type "%s" should not have multiple "%s" fields.' %
(message.DESCRIPTOR.full_name, field.name))
else:
setattr(message, field.name, value)
class _Tokenizer(object):
"""Protocol buffer ASCII representation tokenizer.
This class handles the lower level string parsing by splitting it into
meaningful tokens.
It was directly ported from the Java protocol buffer API.
"""
_WHITESPACE = re.compile('(\\s|(#.*$))+', re.MULTILINE)
_TOKEN = re.compile(
'[a-zA-Z_][0-9a-zA-Z_+-]*|' # an identifier
'[0-9+-][0-9a-zA-Z_.+-]*|' # a number
'\"([^\"\n\\\\]|\\\\.)*(\"|\\\\?$)|' # a double-quoted string
'\'([^\'\n\\\\]|\\\\.)*(\'|\\\\?$)') # a single-quoted string
_IDENTIFIER = re.compile(r'\w+')
def __init__(self, lines):
self._position = 0
self._line = -1
self._column = 0
self._token_start = None
self.token = ''
self._lines = iter(lines)
self._current_line = ''
self._previous_line = 0
self._previous_column = 0
self._more_lines = True
self._SkipWhitespace()
self.NextToken()
def AtEnd(self):
"""Checks the end of the text was reached.
Returns:
True iff the end was reached.
"""
return not self.token
def _PopLine(self):
while len(self._current_line) <= self._column:
try:
self._current_line = next(self._lines)
except StopIteration:
self._current_line = ''
self._more_lines = False
return
else:
self._line += 1
self._column = 0
def _SkipWhitespace(self):
while True:
self._PopLine()
match = self._WHITESPACE.match(self._current_line, self._column)
if not match:
break
length = len(match.group(0))
self._column += length
def TryConsume(self, token):
"""Tries to consume a given piece of text.
Args:
token: Text to consume.
Returns:
True iff the text was consumed.
"""
if self.token == token:
self.NextToken()
return True
return False
def Consume(self, token):
"""Consumes a piece of text.
Args:
token: Text to consume.
Raises:
ParseError: If the text couldn't be consumed.
"""
if not self.TryConsume(token):
raise self._ParseError('Expected "%s".' % token)
def ConsumeIdentifier(self):
"""Consumes protocol message field identifier.
Returns:
Identifier string.
Raises:
ParseError: If an identifier couldn't be consumed.
"""
result = self.token
if not self._IDENTIFIER.match(result):
raise self._ParseError('Expected identifier.')
self.NextToken()
return result
def ConsumeInt32(self):
"""Consumes a signed 32bit integer number.
Returns:
The integer parsed.
Raises:
ParseError: If a signed 32bit integer couldn't be consumed.
"""
try:
result = ParseInteger(self.token, is_signed=True, is_long=False)
except ValueError as e:
raise self._ParseError(str(e))
self.NextToken()
return result
def ConsumeUint32(self):
"""Consumes an unsigned 32bit integer number.
Returns:
The integer parsed.
Raises:
ParseError: If an unsigned 32bit integer couldn't be consumed.
"""
try:
result = ParseInteger(self.token, is_signed=False, is_long=False)
except ValueError as e:
raise self._ParseError(str(e))
self.NextToken()
return result
def ConsumeInt64(self):
"""Consumes a signed 64bit integer number.
Returns:
The integer parsed.
Raises:
ParseError: If a signed 64bit integer couldn't be consumed.
"""
try:
result = ParseInteger(self.token, is_signed=True, is_long=True)
except ValueError as e:
raise self._ParseError(str(e))
self.NextToken()
return result
def ConsumeUint64(self):
"""Consumes an unsigned 64bit integer number.
Returns:
The integer parsed.
Raises:
ParseError: If an unsigned 64bit integer couldn't be consumed.
"""
try:
result = ParseInteger(self.token, is_signed=False, is_long=True)
except ValueError as e:
raise self._ParseError(str(e))
self.NextToken()
return result
def ConsumeFloat(self):
"""Consumes an floating point number.
Returns:
The number parsed.
Raises:
ParseError: If a floating point number couldn't be consumed.
"""
try:
result = ParseFloat(self.token)
except ValueError as e:
raise self._ParseError(str(e))
self.NextToken()
return result
def ConsumeBool(self):
"""Consumes a boolean value.
Returns:
The bool parsed.
Raises:
ParseError: If a boolean value couldn't be consumed.
"""
try:
result = ParseBool(self.token)
except ValueError as e:
raise self._ParseError(str(e))
self.NextToken()
return result
def ConsumeString(self):
"""Consumes a string value.
Returns:
The string parsed.
Raises:
ParseError: If a string value couldn't be consumed.
"""
the_bytes = self.ConsumeByteString()
try:
return str(the_bytes, 'utf-8')
except UnicodeDecodeError as e:
raise self._StringParseError(e)
def ConsumeByteString(self):
"""Consumes a byte array value.
Returns:
The array parsed (as a string).
Raises:
ParseError: If a byte array value couldn't be consumed.
"""
the_list = [self._ConsumeSingleByteString()]
while self.token and self.token[0] in ('\'', '"'):
the_list.append(self._ConsumeSingleByteString())
return ''.encode('latin1').join(the_list) ##PY25
##!PY25 return b''.join(the_list)
def _ConsumeSingleByteString(self):
"""Consume one token of a string literal.
String literals (whether bytes or text) can come in multiple adjacent
tokens which are automatically concatenated, like in C or Python. This
method only consumes one token.
"""
text = self.token
if len(text) < 1 or text[0] not in ('\'', '"'):
raise self._ParseError('Expected string.')
if len(text) < 2 or text[-1] != text[0]:
raise self._ParseError('String missing ending quote.')
try:
result = text_encoding.CUnescape(text[1:-1])
except ValueError as e:
raise self._ParseError(str(e))
self.NextToken()
return result
def ConsumeEnum(self, field):
try:
result = ParseEnum(field, self.token)
except ValueError as e:
raise self._ParseError(str(e))
self.NextToken()
return result
def ParseErrorPreviousToken(self, message):
"""Creates and *returns* a ParseError for the previously read token.
Args:
message: A message to set for the exception.
Returns:
A ParseError instance.
"""
return ParseError('%d:%d : %s' % (
self._previous_line + 1, self._previous_column + 1, message))
def _ParseError(self, message):
"""Creates and *returns* a ParseError for the current token."""
return ParseError('%d:%d : %s' % (
self._line + 1, self._column + 1, message))
def _StringParseError(self, e):
return self._ParseError('Couldn\'t parse string: ' + str(e))
def NextToken(self):
"""Reads the next meaningful token."""
self._previous_line = self._line
self._previous_column = self._column
self._column += len(self.token)
self._SkipWhitespace()
if not self._more_lines:
self.token = ''
return
match = self._TOKEN.match(self._current_line, self._column)
if match:
token = match.group(0)
self.token = token
else:
self.token = self._current_line[self._column]
def ParseInteger(text, is_signed=False, is_long=False):
"""Parses an integer.
Args:
text: The text to parse.
is_signed: True if a signed integer must be parsed.
is_long: True if a long integer must be parsed.
Returns:
The integer value.
Raises:
    ValueError: If the text is not a valid integer.
"""
# Do the actual parsing. Exception handling is propagated to caller.
try:
    # Both 32-bit and 64-bit values parse via int(text, 0) here; the
    # is_long flag only selects the matching range checker below.
    result = int(text, 0)
except ValueError:
raise ValueError('Couldn\'t parse integer: %s' % text)
# Check if the integer is sane. Exceptions handled by callers.
checker = _INTEGER_CHECKERS[2 * int(is_long) + int(is_signed)]
checker.CheckValue(result)
return result
def ParseFloat(text):
"""Parse a floating point number.
Args:
text: Text to parse.
Returns:
The number parsed.
Raises:
ValueError: If a floating point number couldn't be parsed.
"""
try:
# Assume Python compatible syntax.
return float(text)
except ValueError:
# Check alternative spellings.
if _FLOAT_INFINITY.match(text):
if text[0] == '-':
return float('-inf')
else:
return float('inf')
elif _FLOAT_NAN.match(text):
return float('nan')
else:
# assume '1.0f' format
try:
return float(text.rstrip('f'))
except ValueError:
raise ValueError('Couldn\'t parse float: %s' % text)
def ParseBool(text):
"""Parse a boolean value.
Args:
text: Text to parse.
Returns:
    The boolean value parsed.
Raises:
ValueError: If text is not a valid boolean.
"""
if text in ('true', 't', '1'):
return True
elif text in ('false', 'f', '0'):
return False
else:
raise ValueError('Expected "true" or "false".')
def ParseEnum(field, value):
"""Parse an enum value.
The value can be specified by a number (the enum value), or by
a string literal (the enum name).
Args:
field: Enum field descriptor.
value: String value.
Returns:
Enum value number.
Raises:
ValueError: If the enum value could not be parsed.
"""
enum_descriptor = field.enum_type
try:
number = int(value, 0)
except ValueError:
# Identifier.
enum_value = enum_descriptor.values_by_name.get(value, None)
if enum_value is None:
raise ValueError(
'Enum type "%s" has no value named %s.' % (
enum_descriptor.full_name, value))
else:
# Numeric value.
enum_value = enum_descriptor.values_by_number.get(number, None)
if enum_value is None:
raise ValueError(
'Enum type "%s" has no value with number %d.' % (
enum_descriptor.full_name, number))
return enum_value.number
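# A small self-check sketch of the standalone parse helpers above
# (illustrative only; it exercises behavior documented in their docstrings):
if __name__ == '__main__':
  assert ParseInteger('0x10') == 16                # base inferred by int(text, 0)
  assert ParseInteger('-5', is_signed=True) == -5  # signed 32-bit checker applies
  assert ParseFloat('1.5f') == 1.5                 # trailing 'f' is tolerated
  assert ParseFloat('-Infinity') == float('-inf')
  assert ParseBool('true') and not ParseBool('0')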
|
|
from __future__ import absolute_import
import sys
import time
from django.conf import settings
from django.db import transaction, connection
from django.db.utils import ConnectionHandler, DEFAULT_DB_ALIAS, DatabaseError
from django.test import (TransactionTestCase, skipIfDBFeature,
skipUnlessDBFeature)
from django.utils import unittest
from django.utils import six
from .models import Person
# Some tests require threading, which might not be available. So create a
# skip-test decorator for those test functions.
try:
import threading
except ImportError:
threading = None
requires_threading = unittest.skipUnless(threading, 'requires threading')
class SelectForUpdateTests(TransactionTestCase):
def setUp(self):
transaction.enter_transaction_management(True)
transaction.managed(True)
self.person = Person.objects.create(name='Reinhardt')
# We have to commit here so that code in run_select_for_update can
# see this data.
transaction.commit()
# We need another database connection to test that one connection
# issuing a SELECT ... FOR UPDATE will block.
new_connections = ConnectionHandler(settings.DATABASES)
self.new_connection = new_connections[DEFAULT_DB_ALIAS]
self.new_connection.enter_transaction_management()
self.new_connection.managed(True)
# We need to set settings.DEBUG to True so we can capture
# the output SQL to examine.
self._old_debug = settings.DEBUG
settings.DEBUG = True
def tearDown(self):
try:
# We don't really care if this fails - some of the tests will set
# this in the course of their run.
transaction.managed(False)
transaction.leave_transaction_management()
self.new_connection.leave_transaction_management()
except transaction.TransactionManagementError:
pass
self.new_connection.close()
settings.DEBUG = self._old_debug
try:
self.end_blocking_transaction()
except (DatabaseError, AttributeError):
pass
def start_blocking_transaction(self):
# Start a blocking transaction. At some point,
# end_blocking_transaction() should be called.
self.cursor = self.new_connection.cursor()
sql = 'SELECT * FROM %(db_table)s %(for_update)s;' % {
'db_table': Person._meta.db_table,
'for_update': self.new_connection.ops.for_update_sql(),
}
self.cursor.execute(sql, ())
self.cursor.fetchone()
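        # With the default backends the statement above renders roughly as
        # (the table name depends on the test app label):
        #   SELECT * FROM select_for_update_person FOR UPDATE;
        # and the row locks are held until end_blocking_transaction().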
def end_blocking_transaction(self):
# Roll back the blocking transaction.
self.new_connection._rollback()
def has_for_update_sql(self, tested_connection, nowait=False):
# Examine the SQL that was executed to determine whether it
# contains the 'SELECT..FOR UPDATE' stanza.
for_update_sql = tested_connection.ops.for_update_sql(nowait)
sql = tested_connection.queries[-1]['sql']
if isinstance(sql, six.text_type): sql = sql.encode('utf-8')
return bool(sql.find(for_update_sql.encode('utf-8')) > -1)
def check_exc(self, exc):
self.assertTrue(isinstance(exc, DatabaseError))
@skipUnlessDBFeature('has_select_for_update')
def test_for_update_sql_generated(self):
"""
Test that the backend's FOR UPDATE variant appears in
generated SQL when select_for_update is invoked.
"""
list(Person.objects.all().select_for_update())
self.assertTrue(self.has_for_update_sql(connection))
@skipUnlessDBFeature('has_select_for_update_nowait')
def test_for_update_sql_generated_nowait(self):
"""
Test that the backend's FOR UPDATE NOWAIT variant appears in
generated SQL when select_for_update is invoked.
"""
list(Person.objects.all().select_for_update(nowait=True))
self.assertTrue(self.has_for_update_sql(connection, nowait=True))
# In Python 2.6 beta and some final releases, exceptions raised in __len__
# are swallowed (Python issue 1242657), so these cases return an empty
# list, rather than raising an exception. Not a lot we can do about that,
# unfortunately, due to the way Python handles list() calls internally.
# Python 2.6.1 is the "in the wild" version affected by this, so we skip
# the test for that version.
@requires_threading
@skipUnlessDBFeature('has_select_for_update_nowait')
@unittest.skipIf(sys.version_info[:3] == (2, 6, 1), "Python version is 2.6.1")
def test_nowait_raises_error_on_block(self):
"""
If nowait is specified, we expect an error to be raised rather
than blocking.
"""
self.start_blocking_transaction()
status = []
thread = threading.Thread(
target=self.run_select_for_update,
args=(status,),
kwargs={'nowait': True},
)
thread.start()
time.sleep(1)
thread.join()
self.end_blocking_transaction()
self.check_exc(status[-1])
# In Python 2.6 beta and some final releases, exceptions raised in __len__
# are swallowed (Python issue 1242657), so these cases return an empty
# list, rather than raising an exception. Not a lot we can do about that,
# unfortunately, due to the way Python handles list() calls internally.
# Python 2.6.1 is the "in the wild" version affected by this, so we skip
# the test for that version.
@skipIfDBFeature('has_select_for_update_nowait')
@skipUnlessDBFeature('has_select_for_update')
@unittest.skipIf(sys.version_info[:3] == (2, 6, 1), "Python version is 2.6.1")
def test_unsupported_nowait_raises_error(self):
"""
If a SELECT...FOR UPDATE NOWAIT is run on a database backend
that supports FOR UPDATE but not NOWAIT, then we should find
that a DatabaseError is raised.
"""
self.assertRaises(
DatabaseError,
list,
Person.objects.all().select_for_update(nowait=True)
)
def run_select_for_update(self, status, nowait=False):
"""
Utility method that runs a SELECT FOR UPDATE against all
Person instances. After the select_for_update, it attempts
to update the name of the only record, save, and commit.
This function expects to run in a separate thread.
"""
status.append('started')
try:
# We need to enter transaction management again, as this is done on
# per-thread basis
transaction.enter_transaction_management(True)
transaction.managed(True)
people = list(
Person.objects.all().select_for_update(nowait=nowait)
)
people[0].name = 'Fred'
people[0].save()
transaction.commit()
except DatabaseError as e:
status.append(e)
finally:
# This method is run in a separate thread. It uses its own
# database connection. Close it without waiting for the GC.
connection.close()
@requires_threading
@skipUnlessDBFeature('has_select_for_update')
@skipUnlessDBFeature('supports_transactions')
def test_block(self):
"""
Check that a thread running a select_for_update that
accesses rows being touched by a similar operation
on another connection blocks correctly.
"""
# First, let's start the transaction in our thread.
self.start_blocking_transaction()
# Now, try it again using the ORM's select_for_update
# facility. Do this in a separate thread.
status = []
thread = threading.Thread(
target=self.run_select_for_update, args=(status,)
)
# The thread should immediately block, but we'll sleep
# for a bit to make sure.
thread.start()
sanity_count = 0
while len(status) != 1 and sanity_count < 10:
sanity_count += 1
time.sleep(1)
if sanity_count >= 10:
raise ValueError('Thread did not run and block')
# Check the person hasn't been updated. Since this isn't
# using FOR UPDATE, it won't block.
p = Person.objects.get(pk=self.person.pk)
self.assertEqual('Reinhardt', p.name)
# When we end our blocking transaction, our thread should
# be able to continue.
self.end_blocking_transaction()
thread.join(5.0)
# Check the thread has finished. Assuming it has, we should
# find that it has updated the person's name.
self.assertFalse(thread.isAlive())
# We must commit the transaction to ensure that MySQL gets a fresh read,
# since by default it runs in REPEATABLE READ mode
transaction.commit()
p = Person.objects.get(pk=self.person.pk)
self.assertEqual('Fred', p.name)
@requires_threading
@skipUnlessDBFeature('has_select_for_update')
def test_raw_lock_not_available(self):
"""
Check that running a raw query which can't obtain a FOR UPDATE lock
raises the correct exception
"""
self.start_blocking_transaction()
def raw(status):
try:
list(
Person.objects.raw(
'SELECT * FROM %s %s' % (
Person._meta.db_table,
connection.ops.for_update_sql(nowait=True)
)
)
)
except DatabaseError as e:
status.append(e)
finally:
# This method is run in a separate thread. It uses its own
# database connection. Close it without waiting for the GC.
connection.close()
status = []
thread = threading.Thread(target=raw, kwargs={'status': status})
thread.start()
time.sleep(1)
thread.join()
self.end_blocking_transaction()
self.check_exc(status[-1])
@skipUnlessDBFeature('has_select_for_update')
def test_transaction_dirty_managed(self):
""" Check that a select_for_update sets the transaction to be
dirty when executed under txn management. Setting the txn dirty
means that it will be either committed or rolled back by Django,
which will release any locks held by the SELECT FOR UPDATE.
"""
people = list(Person.objects.select_for_update())
self.assertTrue(transaction.is_dirty())
@skipUnlessDBFeature('has_select_for_update')
def test_transaction_not_dirty_unmanaged(self):
""" If we're not under txn management, the txn will never be
marked as dirty.
"""
transaction.managed(False)
transaction.leave_transaction_management()
people = list(Person.objects.select_for_update())
self.assertFalse(transaction.is_dirty())
|
|
"""Provide methods to bootstrap a Home Assistant instance."""
import asyncio
import logging
import logging.handlers
import os
import sys
from time import time
from collections import OrderedDict
from typing import Any, Optional, Dict, Set
import voluptuous as vol
from homeassistant import core, config as conf_util, config_entries, loader
from homeassistant.const import EVENT_HOMEASSISTANT_CLOSE
from homeassistant.setup import async_setup_component
from homeassistant.util.logging import AsyncHandler
from homeassistant.util.package import async_get_user_site, is_virtual_env
from homeassistant.util.yaml import clear_secret_cache
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
_LOGGER = logging.getLogger(__name__)
ERROR_LOG_FILENAME = 'home-assistant.log'
# hass.data key for logging information.
DATA_LOGGING = 'logging'
CORE_INTEGRATIONS = ('homeassistant', 'persistent_notification')
LOGGING_INTEGRATIONS = {'logger', 'system_log'}
STAGE_1_INTEGRATIONS = {
# To record data
'recorder',
# To make sure we forward data to other instances
'mqtt_eventstream',
}
async def async_from_config_dict(config: Dict[str, Any],
hass: core.HomeAssistant,
config_dir: Optional[str] = None,
enable_log: bool = True,
verbose: bool = False,
skip_pip: bool = False,
log_rotate_days: Any = None,
log_file: Any = None,
log_no_color: bool = False) \
-> Optional[core.HomeAssistant]:
"""Try to configure Home Assistant from a configuration dictionary.
    Dynamically loads required components and their dependencies.
This method is a coroutine.
"""
start = time()
if enable_log:
async_enable_logging(hass, verbose, log_rotate_days, log_file,
log_no_color)
hass.config.skip_pip = skip_pip
if skip_pip:
_LOGGER.warning("Skipping pip installation of required modules. "
"This may cause issues")
core_config = config.get(core.DOMAIN, {})
api_password = config.get('http', {}).get('api_password')
trusted_networks = config.get('http', {}).get('trusted_networks')
try:
await conf_util.async_process_ha_core_config(
hass, core_config, api_password, trusted_networks)
except vol.Invalid as config_err:
conf_util.async_log_exception(
config_err, 'homeassistant', core_config, hass)
return None
except HomeAssistantError:
_LOGGER.error("Home Assistant core failed to initialize. "
"Further initialization aborted")
return None
# Make a copy because we are mutating it.
config = OrderedDict(config)
# Merge packages
await conf_util.merge_packages_config(
hass, config, core_config.get(conf_util.CONF_PACKAGES, {}))
hass.config_entries = config_entries.ConfigEntries(hass, config)
await hass.config_entries.async_initialize()
await _async_set_up_integrations(hass, config)
stop = time()
_LOGGER.info("Home Assistant initialized in %.2fs", stop-start)
# TEMP: warn users for invalid slugs
# Remove after 0.94 or 1.0
if cv.INVALID_SLUGS_FOUND or cv.INVALID_ENTITY_IDS_FOUND:
msg = []
if cv.INVALID_ENTITY_IDS_FOUND:
msg.append(
"Your configuration contains invalid entity ID references. "
"Please find and update the following. "
"This will become a breaking change."
)
msg.append('\n'.join('- {} -> {}'.format(*item)
for item
in cv.INVALID_ENTITY_IDS_FOUND.items()))
if cv.INVALID_SLUGS_FOUND:
msg.append(
"Your configuration contains invalid slugs. "
"Please find and update the following. "
"This will become a breaking change."
)
msg.append('\n'.join('- {} -> {}'.format(*item)
for item in cv.INVALID_SLUGS_FOUND.items()))
hass.components.persistent_notification.async_create(
'\n\n'.join(msg), "Config Warning", "config_warning"
)
# TEMP: warn users of invalid extra keys
# Remove after 0.92
if cv.INVALID_EXTRA_KEYS_FOUND:
msg = []
msg.append(
"Your configuration contains extra keys "
"that the platform does not support (but were silently "
"accepted before 0.88). Please find and remove the following."
"This will become a breaking change."
)
msg.append('\n'.join('- {}'.format(it)
for it in cv.INVALID_EXTRA_KEYS_FOUND))
hass.components.persistent_notification.async_create(
'\n\n'.join(msg), "Config Warning", "config_warning"
)
return hass
async def async_from_config_file(config_path: str,
hass: core.HomeAssistant,
verbose: bool = False,
skip_pip: bool = True,
log_rotate_days: Any = None,
log_file: Any = None,
log_no_color: bool = False)\
-> Optional[core.HomeAssistant]:
"""Read the configuration file and try to start all the functionality.
    Will add functionality to the 'hass' parameter.
This method is a coroutine.
"""
# Set config dir to directory holding config file
config_dir = os.path.abspath(os.path.dirname(config_path))
hass.config.config_dir = config_dir
if not is_virtual_env():
await async_mount_local_lib_path(config_dir)
async_enable_logging(hass, verbose, log_rotate_days, log_file,
log_no_color)
await hass.async_add_executor_job(
conf_util.process_ha_config_upgrade, hass)
try:
config_dict = await hass.async_add_executor_job(
conf_util.load_yaml_config_file, config_path)
except HomeAssistantError as err:
_LOGGER.error("Error loading %s: %s", config_path, err)
return None
finally:
clear_secret_cache()
return await async_from_config_dict(
config_dict, hass, enable_log=False, skip_pip=skip_pip)
@core.callback
def async_enable_logging(hass: core.HomeAssistant,
verbose: bool = False,
log_rotate_days: Optional[int] = None,
log_file: Optional[str] = None,
log_no_color: bool = False) -> None:
"""Set up the logging.
This method must be run in the event loop.
"""
fmt = ("%(asctime)s %(levelname)s (%(threadName)s) "
"[%(name)s] %(message)s")
datefmt = '%Y-%m-%d %H:%M:%S'
if not log_no_color:
try:
from colorlog import ColoredFormatter
# basicConfig must be called after importing colorlog in order to
            # ensure that the handlers it sets up wrap the correct streams.
logging.basicConfig(level=logging.INFO)
colorfmt = "%(log_color)s{}%(reset)s".format(fmt)
logging.getLogger().handlers[0].setFormatter(ColoredFormatter(
colorfmt,
datefmt=datefmt,
reset=True,
log_colors={
'DEBUG': 'cyan',
'INFO': 'green',
'WARNING': 'yellow',
'ERROR': 'red',
'CRITICAL': 'red',
}
))
except ImportError:
pass
    # If the above initialization failed for any reason, set up the default
    # formatting. If the above succeeded, this will result in a no-op.
logging.basicConfig(format=fmt, datefmt=datefmt, level=logging.INFO)
# Suppress overly verbose logs from libraries that aren't helpful
logging.getLogger('requests').setLevel(logging.WARNING)
logging.getLogger('urllib3').setLevel(logging.WARNING)
logging.getLogger('aiohttp.access').setLevel(logging.WARNING)
# Log errors to a file if we have write access to file or config dir
if log_file is None:
err_log_path = hass.config.path(ERROR_LOG_FILENAME)
else:
err_log_path = os.path.abspath(log_file)
err_path_exists = os.path.isfile(err_log_path)
err_dir = os.path.dirname(err_log_path)
# Check if we can write to the error log if it exists or that
# we can create files in the containing directory if not.
if (err_path_exists and os.access(err_log_path, os.W_OK)) or \
(not err_path_exists and os.access(err_dir, os.W_OK)):
if log_rotate_days:
err_handler = logging.handlers.TimedRotatingFileHandler(
err_log_path, when='midnight',
backupCount=log_rotate_days) # type: logging.FileHandler
else:
err_handler = logging.FileHandler(
err_log_path, mode='w', delay=True)
err_handler.setLevel(logging.INFO if verbose else logging.WARNING)
err_handler.setFormatter(logging.Formatter(fmt, datefmt=datefmt))
async_handler = AsyncHandler(hass.loop, err_handler)
async def async_stop_async_handler(_: Any) -> None:
"""Cleanup async handler."""
logging.getLogger('').removeHandler(async_handler) # type: ignore
await async_handler.async_close(blocking=True)
hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_CLOSE, async_stop_async_handler)
logger = logging.getLogger('')
logger.addHandler(async_handler) # type: ignore
logger.setLevel(logging.INFO)
# Save the log file location for access by other components.
hass.data[DATA_LOGGING] = err_log_path
else:
_LOGGER.error(
"Unable to set up error log %s (access denied)", err_log_path)
async def async_mount_local_lib_path(config_dir: str) -> str:
"""Add local library to Python Path.
This function is a coroutine.
"""
deps_dir = os.path.join(config_dir, 'deps')
lib_dir = await async_get_user_site(deps_dir)
if lib_dir not in sys.path:
sys.path.insert(0, lib_dir)
return deps_dir
@core.callback
def _get_domains(hass: core.HomeAssistant, config: Dict[str, Any]) -> Set[str]:
"""Get domains of components to set up."""
    # Skip the common [homeassistant] section; config keys may be of the
    # form 'domain extra_tag', so keep only the domain part.
domains = set(key.split(' ')[0] for key in config.keys()
if key != core.DOMAIN)
# Add config entry domains
domains.update(hass.config_entries.async_domains()) # type: ignore
# Make sure the Hass.io component is loaded
if 'HASSIO' in os.environ:
domains.add('hassio')
return domains
async def _async_set_up_integrations(
hass: core.HomeAssistant, config: Dict[str, Any]) -> None:
"""Set up all the integrations."""
domains = _get_domains(hass, config)
    # Resolve all dependencies of all components so we can find the logging
    # integrations and the integrations that need faster initialization.
resolved_domains_task = asyncio.gather(*[
loader.async_component_dependencies(hass, domain)
for domain in domains
], return_exceptions=True)
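    # return_exceptions=True above keeps one failed dependency resolution
    # from aborting the rest; failures surface during domain setup.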
# Set up core.
_LOGGER.debug("Setting up %s", CORE_INTEGRATIONS)
if not all(await asyncio.gather(*[
async_setup_component(hass, domain, config)
for domain in CORE_INTEGRATIONS
])):
_LOGGER.error("Home Assistant core failed to initialize. "
"Further initialization aborted")
return
_LOGGER.debug("Home Assistant core initialized")
# Finish resolving domains
for dep_domains in await resolved_domains_task:
        # Result is either a set or an exception. We ignore exceptions here;
        # they will be handled properly during setup of the domain.
if isinstance(dep_domains, set):
domains.update(dep_domains)
    # Set up components in stages: logging first, then stage 1, then the rest.
logging_domains = domains & LOGGING_INTEGRATIONS
stage_1_domains = domains & STAGE_1_INTEGRATIONS
stage_2_domains = domains - logging_domains - stage_1_domains
if logging_domains:
_LOGGER.debug("Setting up %s", logging_domains)
await asyncio.gather(*[
async_setup_component(hass, domain, config)
for domain in logging_domains
])
# Kick off loading the registries. They don't need to be awaited.
asyncio.gather(
hass.helpers.device_registry.async_get_registry(),
hass.helpers.entity_registry.async_get_registry(),
hass.helpers.area_registry.async_get_registry())
if stage_1_domains:
await asyncio.gather(*[
async_setup_component(hass, domain, config)
for domain in stage_1_domains
])
# Load all integrations
after_dependencies = {} # type: Dict[str, Set[str]]
for int_or_exc in await asyncio.gather(*[
loader.async_get_integration(hass, domain)
for domain in stage_2_domains
], return_exceptions=True):
# Exceptions are handled in async_setup_component.
if (isinstance(int_or_exc, loader.Integration) and
int_or_exc.after_dependencies):
after_dependencies[int_or_exc.domain] = set(
int_or_exc.after_dependencies
)
last_load = None
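    # Track the previous iteration's set so the loop can stop once no
    # further progress is possible.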
while stage_2_domains:
domains_to_load = set()
for domain in stage_2_domains:
after_deps = after_dependencies.get(domain)
# Load if integration has no after_dependencies or they are
# all loaded
            if (not after_deps or
                    not after_deps - hass.config.components):
domains_to_load.add(domain)
if not domains_to_load or domains_to_load == last_load:
break
_LOGGER.debug("Setting up %s", domains_to_load)
await asyncio.gather(*[
async_setup_component(hass, domain, config)
for domain in domains_to_load
])
last_load = domains_to_load
stage_2_domains -= domains_to_load
# These are stage 2 domains that never have their after_dependencies
# satisfied.
if stage_2_domains:
_LOGGER.debug("Final set up: %s", stage_2_domains)
await asyncio.gather(*[
async_setup_component(hass, domain, config)
for domain in stage_2_domains
])
# Wrap up startup
await hass.async_block_till_done()
|
|
from pyticketswitch.trolley import Trolley
from pyticketswitch.bundle import Bundle
from pyticketswitch.event import Event
from pyticketswitch.order import Order
class TestTrolley:
def test_from_api_data_with_trolley_data(self):
data = {
'discarded_orders': [
{'item_number': 3},
{'item_number': 6},
],
'trolley_contents': {
'bundle': [
{'bundle_source_code': 'foo'},
{'bundle_source_code': 'bar'},
],
"purchase_result": {'success': True},
},
'trolley_token': 'ABC123',
'trolley_order_count': 8,
}
trolley = Trolley.from_api_data(data)
assert trolley.token == 'ABC123'
assert len(trolley.bundles) == 2
assert trolley.bundles[0].source_code == 'foo'
assert trolley.bundles[1].source_code == 'bar'
assert len(trolley.discarded_orders) == 2
assert trolley.discarded_orders[0].item == 3
assert trolley.discarded_orders[1].item == 6
assert trolley.order_count == 8
assert trolley.purchase_result.success is True
def test_from_api_data_with_reservation_data(self):
data = {
'discarded_orders': [
{'item_number': 3},
{'item_number': 6},
],
'reserved_trolley': {
'bundle': [
{'bundle_source_code': 'foo'},
{'bundle_source_code': 'bar'},
],
'transaction_id': 'T1000-0000',
'transaction_uuid': 'DEF456',
},
}
trolley = Trolley.from_api_data(data)
assert trolley.transaction_uuid == 'DEF456'
assert trolley.transaction_id == 'T1000-0000'
assert len(trolley.bundles) == 2
assert trolley.bundles[0].source_code == 'foo'
assert trolley.bundles[1].source_code == 'bar'
assert len(trolley.discarded_orders) == 2
assert trolley.discarded_orders[0].item == 3
assert trolley.discarded_orders[1].item == 6
def test_from_api_data_with_empty_trolley(self):
data = {
"discarded_orders": [],
"input_contained_unavailable_order": True,
"trolley_token": "abc123",
"trolley_token_contents": {
"trolley_bundle_count": 0,
"trolley_order_count": 0
}
}
trolley = Trolley.from_api_data(data)
assert trolley.token == 'abc123'
assert trolley.input_contained_unavailable_order is True
def test_get_events(self):
event_one = Event(id_='abc123')
event_two = Event(id_='def456')
event_three = Event(id_='ghi789')
event_four = Event(id_='jlk012')
bundle_one = Bundle(
'tests',
orders=[
Order(item=1, event=event_one),
Order(item=2, event=event_two),
]
)
bundle_two = Bundle(
'tests_two',
orders=[
Order(item=3, event=event_three),
Order(item=4, event=event_four),
]
)
trolley = Trolley(
bundles=[bundle_one, bundle_two]
)
events = trolley.get_events()
assert events == [event_one, event_two, event_three, event_four]
def test_get_events_with_no_bundles(self):
trolley = Trolley(bundles=None)
events = trolley.get_events()
assert events == []
def test_get_event_ids(self):
event_one = Event(id_='abc123')
event_two = Event(id_='def456')
event_three = Event(id_='ghi789')
event_four = Event(id_='abc123')
bundle_one = Bundle(
'tests',
orders=[
Order(item=1, event=event_one),
Order(item=2, event=event_two),
]
)
bundle_two = Bundle(
'tests_two',
orders=[
Order(item=3, event=event_three),
Order(item=4, event=event_four),
]
)
trolley = Trolley(
bundles=[bundle_one, bundle_two]
)
events = trolley.get_event_ids()
assert events == {'abc123', 'def456', 'ghi789'}
def test_get_bundle(self):
# state
bundle_one = Bundle('tests')
bundle_two = Bundle('tests_two')
trolley = Trolley(bundles=[bundle_one, bundle_two])
# action
bundle = trolley.get_bundle('tests_two')
# results
assert bundle is bundle_two
def test_get_bundle_when_none(self):
# state
trolley = Trolley(bundles=[])
# action
bundle = trolley.get_bundle('tests_two')
# results
assert bundle is None
def test_get_bundle_when_no_match(self):
# state
bundle_one = Bundle('tests')
bundle_two = Bundle('tests_two')
trolley = Trolley(bundles=[bundle_one, bundle_two])
# action
bundle = trolley.get_bundle('tests_three')
# results
assert bundle is None
def test_get_item(self):
        # state
order_one = Order(1)
order_two = Order(2)
order_three = Order(3)
order_four = Order(4)
bundle_one = Bundle('tests', orders=[order_one, order_three])
bundle_two = Bundle('tests_two', orders=[order_two, order_four])
trolley = Trolley(bundles=[bundle_one, bundle_two])
        # action / results
        order = trolley.get_item(3)
        assert order is order_three
        order = trolley.get_item(2)
        assert order is order_two
def test_get_orders(self):
        # state
order_one = Order(1)
order_two = Order(2)
order_three = Order(3)
order_four = Order(4)
bundle_one = Bundle('tests', orders=[order_one, order_three])
bundle_two = Bundle('tests_two', orders=[order_two, order_four])
trolley = Trolley(bundles=[bundle_one, bundle_two])
        # action / results
orders = trolley.get_orders()
assert orders == [order_one, order_three, order_two, order_four]
|
|
"""
Support for HydroQuebec.
Get data from 'My Consumption Profile' page:
https://www.hydroquebec.com/portail/en/group/clientele/portrait-de-consommation
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.hydroquebec/
"""
import logging
from datetime import timedelta
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_USERNAME,
CONF_PASSWORD,
ENERGY_KILO_WATT_HOUR,
CONF_NAME,
CONF_MONITORED_VARIABLES,
TEMP_CELSIUS,
)
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
KILOWATT_HOUR = ENERGY_KILO_WATT_HOUR
PRICE = "CAD"
DAYS = "days"
CONF_CONTRACT = "contract"
DEFAULT_NAME = "HydroQuebec"
REQUESTS_TIMEOUT = 15
MIN_TIME_BETWEEN_UPDATES = timedelta(hours=1)
SCAN_INTERVAL = timedelta(hours=1)
SENSOR_TYPES = {
"balance": ["Balance", PRICE, "mdi:square-inc-cash"],
"period_total_bill": ["Period total bill", PRICE, "mdi:square-inc-cash"],
"period_length": ["Period length", DAYS, "mdi:calendar-today"],
"period_total_days": ["Period total days", DAYS, "mdi:calendar-today"],
"period_mean_daily_bill": ["Period mean daily bill", PRICE, "mdi:square-inc-cash"],
"period_mean_daily_consumption": [
"Period mean daily consumption",
KILOWATT_HOUR,
"mdi:flash",
],
"period_total_consumption": [
"Period total consumption",
KILOWATT_HOUR,
"mdi:flash",
],
"period_lower_price_consumption": [
"Period lower price consumption",
KILOWATT_HOUR,
"mdi:flash",
],
"period_higher_price_consumption": [
"Period higher price consumption",
KILOWATT_HOUR,
"mdi:flash",
],
"yesterday_total_consumption": [
"Yesterday total consumption",
KILOWATT_HOUR,
"mdi:flash",
],
"yesterday_lower_price_consumption": [
"Yesterday lower price consumption",
KILOWATT_HOUR,
"mdi:flash",
],
"yesterday_higher_price_consumption": [
"Yesterday higher price consumption",
KILOWATT_HOUR,
"mdi:flash",
],
"yesterday_average_temperature": [
"Yesterday average temperature",
TEMP_CELSIUS,
"mdi:thermometer",
],
"period_average_temperature": [
"Period average temperature",
TEMP_CELSIUS,
"mdi:thermometer",
],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_MONITORED_VARIABLES): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]
),
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_CONTRACT): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
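# An illustrative configuration.yaml entry for this platform, derived from
# the schema above; the values are placeholders, not real credentials:
#
#   sensor:
#     - platform: hydroquebec
#       username: YOUR_EMAIL
#       password: YOUR_PASSWORD
#       contract: "0123456789"
#       monitored_variables:
#         - balance
#         - yesterday_total_consumption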
HOST = "https://www.hydroquebec.com"
HOME_URL = f"{HOST}/portail/web/clientele/authentification"
PROFILE_URL = f"{HOST}/portail/fr/group/clientele/portrait-de-consommation"
MONTHLY_MAP = (
("period_total_bill", "montantFacturePeriode"),
("period_length", "nbJourLecturePeriode"),
("period_total_days", "nbJourPrevuPeriode"),
("period_mean_daily_bill", "moyenneDollarsJourPeriode"),
("period_mean_daily_consumption", "moyenneKwhJourPeriode"),
("period_total_consumption", "consoTotalPeriode"),
("period_lower_price_consumption", "consoRegPeriode"),
("period_higher_price_consumption", "consoHautPeriode"),
)
DAILY_MAP = (
("yesterday_total_consumption", "consoTotalQuot"),
("yesterday_lower_price_consumption", "consoRegQuot"),
("yesterday_higher_price_consumption", "consoHautQuot"),
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the HydroQuebec sensor."""
# Create a data fetcher to support all of the configured sensors. Then make
# the first call to init the data.
username = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
contract = config.get(CONF_CONTRACT)
httpsession = hass.helpers.aiohttp_client.async_get_clientsession()
hydroquebec_data = HydroquebecData(username, password, httpsession, contract)
contracts = await hydroquebec_data.get_contract_list()
if not contracts:
return
_LOGGER.info("Contract list: %s", ", ".join(contracts))
name = config.get(CONF_NAME)
sensors = []
for variable in config[CONF_MONITORED_VARIABLES]:
sensors.append(HydroQuebecSensor(hydroquebec_data, variable, name))
async_add_entities(sensors, True)
class HydroQuebecSensor(Entity):
"""Implementation of a HydroQuebec sensor."""
def __init__(self, hydroquebec_data, sensor_type, name):
"""Initialize the sensor."""
self.client_name = name
self.type = sensor_type
self._name = SENSOR_TYPES[sensor_type][0]
self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
self._icon = SENSOR_TYPES[sensor_type][2]
self.hydroquebec_data = hydroquebec_data
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return f"{self.client_name} {self._name}"
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return self._icon
async def async_update(self):
"""Get the latest data from Hydroquebec and update the state."""
await self.hydroquebec_data.async_update()
if self.hydroquebec_data.data.get(self.type) is not None:
self._state = round(self.hydroquebec_data.data[self.type], 2)
class HydroquebecData:
"""Get data from HydroQuebec."""
def __init__(self, username, password, httpsession, contract=None):
"""Initialize the data object."""
from pyhydroquebec import HydroQuebecClient
self.client = HydroQuebecClient(
username, password, REQUESTS_TIMEOUT, httpsession
)
self._contract = contract
self.data = {}
async def get_contract_list(self):
"""Return the contract list."""
# Fetch data
ret = await self._fetch_data()
if ret:
return self.client.get_contracts()
return []
@Throttle(MIN_TIME_BETWEEN_UPDATES)
async def _fetch_data(self):
"""Fetch latest data from HydroQuebec."""
from pyhydroquebec.client import PyHydroQuebecError
try:
await self.client.fetch_data()
except PyHydroQuebecError as exp:
_LOGGER.error("Error on receive last Hydroquebec data: %s", exp)
return False
return True
async def async_update(self):
"""Return the latest collected data from HydroQuebec."""
await self._fetch_data()
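        # get_data returns a mapping keyed by contract number; keep only ours.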
self.data = self.client.get_data(self._contract)[self._contract]
|
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from core import perf_benchmark
from benchmarks import silk_flags
from measurements import smoothness
import page_sets
import page_sets.key_silk_cases
from telemetry import benchmark
class SmoothnessTop25(perf_benchmark.PerfBenchmark):
"""Measures rendering statistics while scrolling down the top 25 web pages.
http://www.chromium.org/developers/design-documents/rendering-benchmarks
"""
test = smoothness.Smoothness
page_set = page_sets.Top25SmoothPageSet
@classmethod
def Name(cls):
return 'smoothness.top_25_smooth'
class SmoothnessToughFiltersCases(perf_benchmark.PerfBenchmark):
"""Measures frame rate and a variety of other statistics.
Uses a selection of pages making use of SVG and CSS Filter Effects.
"""
test = smoothness.Smoothness
page_set = page_sets.ToughFiltersCasesPageSet
@classmethod
def Name(cls):
return 'smoothness.tough_filters_cases'
class SmoothnessToughPathRenderingCases(perf_benchmark.PerfBenchmark):
"""Tests a selection of pages with SVG and 2D Canvas paths.
  Measures frame rate and a variety of other statistics.
  """
test = smoothness.Smoothness
page_set = page_sets.ToughPathRenderingCasesPageSet
@classmethod
def Name(cls):
return 'smoothness.tough_path_rendering_cases'
@benchmark.Disabled('android') # crbug.com/526901
class SmoothnessToughCanvasCases(perf_benchmark.PerfBenchmark):
"""Measures frame rate and a variety of other statistics.
Uses a selection of pages making use of the 2D Canvas API.
"""
test = smoothness.Smoothness
page_set = page_sets.ToughCanvasCasesPageSet
@classmethod
def Name(cls):
return 'smoothness.tough_canvas_cases'
@benchmark.Disabled('android') # crbug.com/373812
class SmoothnessToughWebGLCases(perf_benchmark.PerfBenchmark):
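  """Measures frame rate and a variety of other statistics on WebGL content.
  """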
test = smoothness.Smoothness
page_set = page_sets.ToughWebglCasesPageSet
@classmethod
def Name(cls):
return 'smoothness.tough_webgl_cases'
@benchmark.Enabled('android')
class SmoothnessMaps(perf_benchmark.PerfBenchmark):
page_set = page_sets.MapsPageSet
@classmethod
def Name(cls):
return 'smoothness.maps'
@benchmark.Disabled('android')
class SmoothnessKeyDesktopMoveCases(perf_benchmark.PerfBenchmark):
test = smoothness.Smoothness
page_set = page_sets.KeyDesktopMoveCasesPageSet
@classmethod
def Name(cls):
return 'smoothness.key_desktop_move_cases'
@benchmark.Enabled('android')
class SmoothnessKeyMobileSites(perf_benchmark.PerfBenchmark):
"""Measures rendering statistics while scrolling down the key mobile sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks
"""
test = smoothness.Smoothness
page_set = page_sets.KeyMobileSitesSmoothPageSet
@classmethod
def Name(cls):
return 'smoothness.key_mobile_sites_smooth'
class SmoothnessToughAnimationCases(perf_benchmark.PerfBenchmark):
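  """Measures rendering statistics while running the tough animation cases.
  """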
test = smoothness.SmoothnessWithRestart
page_set = page_sets.ToughAnimationCasesPageSet
@classmethod
def Name(cls):
return 'smoothness.tough_animation_cases'
@benchmark.Enabled('android')
class SmoothnessKeySilkCases(perf_benchmark.PerfBenchmark):
"""Measures rendering statistics for the key silk cases without GPU
rasterization.
"""
test = smoothness.Smoothness
page_set = page_sets.KeySilkCasesPageSet
@classmethod
def Name(cls):
return 'smoothness.key_silk_cases'
def CreateStorySet(self, options):
stories = super(SmoothnessKeySilkCases, self).CreateStorySet(options)
# Page26 (befamous) is too noisy to be useful; crbug.com/461127
to_remove = [story for story in stories
if isinstance(story, page_sets.key_silk_cases.Page26)]
for story in to_remove:
stories.RemoveStory(story)
return stories
@benchmark.Enabled('android')
class SmoothnessGpuRasterizationTop25(perf_benchmark.PerfBenchmark):
"""Measures rendering statistics for the top 25 with GPU rasterization.
"""
tag = 'gpu_rasterization'
test = smoothness.Smoothness
page_set = page_sets.Top25SmoothPageSet
def SetExtraBrowserOptions(self, options):
silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
@classmethod
def Name(cls):
return 'smoothness.gpu_rasterization.top_25_smooth'
@benchmark.Enabled('android')
class SmoothnessGpuRasterizationKeyMobileSites(perf_benchmark.PerfBenchmark):
"""Measures rendering statistics for the key mobile sites with GPU
rasterization.
"""
tag = 'gpu_rasterization'
test = smoothness.Smoothness
page_set = page_sets.KeyMobileSitesSmoothPageSet
def SetExtraBrowserOptions(self, options):
silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
@classmethod
def Name(cls):
return 'smoothness.gpu_rasterization.key_mobile_sites_smooth'
class SmoothnessGpuRasterizationToughPathRenderingCases(
perf_benchmark.PerfBenchmark):
"""Tests a selection of pages with SVG and 2D canvas paths with GPU
rasterization.
"""
tag = 'gpu_rasterization'
test = smoothness.Smoothness
page_set = page_sets.ToughPathRenderingCasesPageSet
def SetExtraBrowserOptions(self, options):
silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
@classmethod
def Name(cls):
return 'smoothness.gpu_rasterization.tough_path_rendering_cases'
class SmoothnessGpuRasterizationFiltersCases(perf_benchmark.PerfBenchmark):
"""Tests a selection of pages with SVG and CSS filter effects with GPU
rasterization.
"""
tag = 'gpu_rasterization'
test = smoothness.Smoothness
page_set = page_sets.ToughFiltersCasesPageSet
def SetExtraBrowserOptions(self, options):
silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
@classmethod
def Name(cls):
return 'smoothness.gpu_rasterization.tough_filters_cases'
@benchmark.Enabled('android')
class SmoothnessSyncScrollKeyMobileSites(perf_benchmark.PerfBenchmark):
"""Measures rendering statistics for the key mobile sites with synchronous
(main thread) scrolling.
"""
tag = 'sync_scroll'
test = smoothness.Smoothness
page_set = page_sets.KeyMobileSitesSmoothPageSet
def SetExtraBrowserOptions(self, options):
silk_flags.CustomizeBrowserOptionsForSyncScrolling(options)
@classmethod
def Name(cls):
return 'smoothness.sync_scroll.key_mobile_sites_smooth'
@benchmark.Enabled('android')
class SmoothnessSimpleMobilePages(perf_benchmark.PerfBenchmark):
"""Measures rendering statistics for simple mobile sites page set.
"""
test = smoothness.Smoothness
page_set = page_sets.SimpleMobileSitesPageSet
@classmethod
def Name(cls):
return 'smoothness.simple_mobile_sites'
@benchmark.Enabled('android')
class SmoothnessFlingSimpleMobilePages(perf_benchmark.PerfBenchmark):
"""Measures rendering statistics for flinging a simple mobile sites page set.
"""
test = smoothness.Smoothness
page_set = page_sets.SimpleMobileSitesFlingPageSet
def SetExtraBrowserOptions(self, options):
# As the fling parameters cannot be analytically determined to not
# overscroll, disable overscrolling explicitly. Overscroll behavior is
# orthogonal to fling performance, and its activation is only more noise.
options.AppendExtraBrowserArgs('--disable-overscroll-edge-effect')
@classmethod
def Name(cls):
return 'smoothness.fling.simple_mobile_sites'
@benchmark.Enabled('android', 'chromeos')
class SmoothnessToughPinchZoomCases(perf_benchmark.PerfBenchmark):
"""Measures rendering statistics for pinch-zooming into the tough pinch zoom
cases.
"""
test = smoothness.Smoothness
page_set = page_sets.ToughPinchZoomCasesPageSet
@classmethod
def Name(cls):
return 'smoothness.tough_pinch_zoom_cases'
@benchmark.Enabled('android', 'chromeos')
class SmoothnessToughScrollingWhileZoomedInCases(perf_benchmark.PerfBenchmark):
"""Measures rendering statistics for pinch-zooming then diagonal scrolling"""
test = smoothness.Smoothness
page_set = page_sets.ToughScrollingWhileZoomedInCasesPageSet
@classmethod
def Name(cls):
return 'smoothness.tough_scrolling_while_zoomed_in_cases'
@benchmark.Enabled('android')
class SmoothnessPolymer(perf_benchmark.PerfBenchmark):
"""Measures rendering statistics for Polymer cases.
"""
test = smoothness.Smoothness
page_set = page_sets.PolymerPageSet
@classmethod
def Name(cls):
return 'smoothness.polymer'
@benchmark.Enabled('android')
class SmoothnessGpuRasterizationPolymer(perf_benchmark.PerfBenchmark):
"""Measures rendering statistics for the Polymer cases with GPU rasterization.
"""
tag = 'gpu_rasterization'
test = smoothness.Smoothness
page_set = page_sets.PolymerPageSet
def SetExtraBrowserOptions(self, options):
silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
@classmethod
def Name(cls):
return 'smoothness.gpu_rasterization.polymer'
class SmoothnessToughScrollingCases(perf_benchmark.PerfBenchmark):
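  """Measures rendering statistics while scrolling the tough scrolling cases.
  """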
test = smoothness.Smoothness
page_set = page_sets.ToughScrollingCasesPageSet
@classmethod
def Name(cls):
return 'smoothness.tough_scrolling_cases'
class SmoothnessToughImageDecodeCases(perf_benchmark.PerfBenchmark):
test = smoothness.Smoothness
page_set = page_sets.ToughImageDecodeCasesPageSet
@classmethod
def Name(cls):
return 'smoothness.tough_image_decode_cases'
@benchmark.Disabled('android') # http://crbug.com/513699
class SmoothnessImageDecodingCases(perf_benchmark.PerfBenchmark):
"""Measures decoding statistics for jpeg images.
"""
test = smoothness.Smoothness
page_set = page_sets.ImageDecodingCasesPageSet
def SetExtraBrowserOptions(self, options):
silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
options.AppendExtraBrowserArgs('--disable-accelerated-jpeg-decoding')
@classmethod
def Name(cls):
return 'smoothness.image_decoding_cases'
@benchmark.Disabled('android') # http://crbug.com/513699
class SmoothnessGpuImageDecodingCases(perf_benchmark.PerfBenchmark):
"""Measures decoding statistics for jpeg images with GPU rasterization.
"""
tag = 'gpu_rasterization_and_decoding'
test = smoothness.Smoothness
page_set = page_sets.ImageDecodingCasesPageSet
def SetExtraBrowserOptions(self, options):
silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
# TODO(sugoi): Remove the following line once M41 goes stable
options.AppendExtraBrowserArgs('--enable-accelerated-jpeg-decoding')
@classmethod
def Name(cls):
return 'smoothness.gpu_rasterization_and_decoding.image_decoding_cases'
@benchmark.Enabled('android')
class SmoothnessPathologicalMobileSites(perf_benchmark.PerfBenchmark):
"""Measures task execution statistics while scrolling pathological sites.
"""
test = smoothness.Smoothness
page_set = page_sets.PathologicalMobileSitesPageSet
@classmethod
def Name(cls):
return 'smoothness.pathological_mobile_sites'
class SmoothnessToughAnimatedImageCases(perf_benchmark.PerfBenchmark):
test = smoothness.Smoothness
page_set = page_sets.ToughAnimatedImageCasesPageSet
@classmethod
def Name(cls):
return 'smoothness.tough_animated_image_cases'
@benchmark.Disabled('reference') # http://crbug.com/499489
class SmoothnessToughTextureUploadCases(perf_benchmark.PerfBenchmark):
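  """Measures rendering statistics for the tough texture upload cases.
  """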
test = smoothness.Smoothness
page_set = page_sets.ToughTextureUploadCasesPageSet
@classmethod
def Name(cls):
return 'smoothness.tough_texture_upload_cases'
@benchmark.Disabled('reference') # http://crbug.com/496684
class SmoothnessToughAdCases(perf_benchmark.PerfBenchmark):
"""Measures rendering statistics while displaying advertisements."""
test = smoothness.Smoothness
page_set = page_sets.ToughAdCasesPageSet
@classmethod
def Name(cls):
return 'smoothness.tough_ad_cases'
# http://crbug.com/496684 (reference)
# http://crbug.com/522619 (mac/win)
@benchmark.Disabled('reference', 'win', 'mac')
class SmoothnessScrollingToughAdCases(perf_benchmark.PerfBenchmark):
"""Measures rendering statistics while scrolling advertisements."""
test = smoothness.Smoothness
page_set = page_sets.ScrollingToughAdCasesPageSet
@classmethod
def Name(cls):
return 'smoothness.scrolling_tough_ad_cases'
# http://crbug.com/496684 (reference)
# http://crbug.com/522619 (mac/win)
@benchmark.Disabled('reference', 'win', 'mac')
class SmoothnessBidirectionallyScrollingToughAdCases(
perf_benchmark.PerfBenchmark):
"""Measures rendering statistics while scrolling advertisements."""
test = smoothness.Smoothness
page_set = page_sets.BidirectionallyScrollingToughAdCasesPageSet
def SetExtraBrowserOptions(self, options):
# Don't accidentally reload the page while scrolling.
options.AppendExtraBrowserArgs('--disable-pull-to-refresh-effect')
@classmethod
def Name(cls):
return 'smoothness.bidirectionally_scrolling_tough_ad_cases'
@benchmark.Disabled('reference') # http://crbug.com/496684
class SmoothnessToughWebGLAdCases(perf_benchmark.PerfBenchmark):
"""Measures rendering statistics while scrolling advertisements."""
test = smoothness.Smoothness
page_set = page_sets.ToughWebglAdCasesPageSet
@classmethod
def Name(cls):
return 'smoothness.tough_webgl_ad_cases'
|
|
import time
from electrum.i18n import _
from electrum.plugin import hook
from electrum.wallet import Standard_Wallet
from electrum.gui.qt.util import *
from .coldcard import ColdcardPlugin
from ..hw_wallet.qt import QtHandlerBase, QtPluginBase
class Plugin(ColdcardPlugin, QtPluginBase):
icon_unpaired = ":icons/coldcard_unpaired.png"
icon_paired = ":icons/coldcard.png"
def create_handler(self, window):
return Coldcard_Handler(window)
@hook
def receive_menu(self, menu, addrs, wallet):
if type(wallet) is not Standard_Wallet:
return
keystore = wallet.get_keystore()
if type(keystore) == self.keystore_class and len(addrs) == 1:
def show_address():
keystore.thread.add(partial(self.show_address, wallet, addrs[0]))
menu.addAction(_("Show on Coldcard"), show_address)
@hook
def transaction_dialog(self, dia):
# see gui/qt/transaction_dialog.py
keystore = dia.wallet.get_keystore()
if type(keystore) != self.keystore_class:
# not a Coldcard wallet, hide feature
return
# - add a new button, near "export"
btn = QPushButton(_("Save PSBT"))
btn.clicked.connect(lambda unused: self.export_psbt(dia))
if dia.tx.is_complete():
# but disable it for signed transactions (nothing to do if already signed)
btn.setDisabled(True)
dia.sharing_buttons.append(btn)
def export_psbt(self, dia):
# Called from hook in transaction dialog
tx = dia.tx
if tx.is_complete():
# if they sign while dialog is open, it can transition from unsigned to signed,
# which we don't support here, so do nothing
return
# can only expect Coldcard wallets to work with these files (right now)
keystore = dia.wallet.get_keystore()
assert type(keystore) == self.keystore_class
# convert to PSBT
raw_psbt = keystore.build_psbt(tx, wallet=dia.wallet)
name = (dia.wallet.basename() + time.strftime('-%y%m%d-%H%M.psbt')).replace(' ', '-')
fileName = dia.main_window.getSaveFileName(_("Select where to save the PSBT file"),
name, "*.psbt")
if fileName:
with open(fileName, "wb+") as f:
f.write(raw_psbt)
dia.show_message(_("Transaction exported successfully"))
dia.saved = True
def show_settings_dialog(self, window, keystore):
# When they click on the icon for CC we come here.
device_id = self.choose_device(window, keystore)
if device_id:
CKCCSettingsDialog(window, self, keystore, device_id).exec_()
class Coldcard_Handler(QtHandlerBase):
setup_signal = pyqtSignal()
#auth_signal = pyqtSignal(object)
def __init__(self, win):
super(Coldcard_Handler, self).__init__(win, 'Coldcard')
self.setup_signal.connect(self.setup_dialog)
#self.auth_signal.connect(self.auth_dialog)
def message_dialog(self, msg):
self.clear_dialog()
self.dialog = dialog = WindowModalDialog(self.top_level_window(), _("Coldcard Status"))
        label = QLabel(msg)
        vbox = QVBoxLayout(dialog)
        vbox.addWidget(label)
dialog.show()
def get_setup(self):
self.done.clear()
self.setup_signal.emit()
self.done.wait()
return
def setup_dialog(self):
self.show_error(_('Please initialize your Coldcard while disconnected.'))
return
class CKCCSettingsDialog(WindowModalDialog):
'''This dialog doesn't require a device be paired with a wallet.
We want users to be able to wipe a device even if they've forgotten
their PIN.'''
def __init__(self, window, plugin, keystore, device_id):
title = _("{} Settings").format(plugin.device)
super(CKCCSettingsDialog, self).__init__(window, title)
self.setMaximumWidth(540)
devmgr = plugin.device_manager()
config = devmgr.config
handler = keystore.handler
self.thread = thread = keystore.thread
def connect_and_doit():
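            # Look the client up by id on demand; the device may have been
            # unplugged since the dialog was opened.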
client = devmgr.client_by_id(device_id)
if not client:
raise RuntimeError("Device not connected")
return client
body = QWidget()
body_layout = QVBoxLayout(body)
grid = QGridLayout()
grid.setColumnStretch(2, 1)
# see <http://doc.qt.io/archives/qt-4.8/richtext-html-subset.html>
title = QLabel('''<center>
<span style="font-size: x-large">Coldcard Wallet</span>
<br><span style="font-size: medium">from Coinkite Inc.</span>
<br><a href="https://coldcardwallet.com">coldcardwallet.com</a>''')
title.setTextInteractionFlags(Qt.LinksAccessibleByMouse)
        grid.addWidget(title, 0, 0, 1, 2, Qt.AlignHCenter)
y = 3
rows = [
('fw_version', _("Firmware Version")),
('fw_built', _("Build Date")),
('bl_version', _("Bootloader")),
('xfp', _("Master Fingerprint")),
('serial', _("USB Serial")),
]
        for member_name, label in rows:
widget = QLabel('<tt>000000000000')
widget.setTextInteractionFlags(Qt.TextSelectableByMouse | Qt.TextSelectableByKeyboard)
grid.addWidget(QLabel(label), y, 0, 1,1, Qt.AlignRight)
grid.addWidget(widget, y, 1, 1, 1, Qt.AlignLeft)
setattr(self, member_name, widget)
y += 1
body_layout.addLayout(grid)
upg_btn = QPushButton('Upgrade')
#upg_btn.setDefault(False)
def _start_upgrade():
thread.add(connect_and_doit, on_success=self.start_upgrade)
upg_btn.clicked.connect(_start_upgrade)
y += 3
grid.addWidget(upg_btn, y, 0)
grid.addWidget(CloseButton(self), y, 1)
dialog_vbox = QVBoxLayout(self)
dialog_vbox.addWidget(body)
# Fetch values and show them
thread.add(connect_and_doit, on_success=self.show_values)
def show_values(self, client):
dev = client.dev
self.xfp.setText('<tt>0x%08x' % dev.master_fingerprint)
self.serial.setText('<tt>%s' % dev.serial)
# ask device for versions: allow extras for future
fw_date, fw_rel, bl_rel, *rfu = client.get_version()
self.fw_version.setText('<tt>%s' % fw_rel)
self.fw_built.setText('<tt>%s' % fw_date)
self.bl_version.setText('<tt>%s' % bl_rel)
def start_upgrade(self, client):
# ask for a filename (must have already downloaded it)
mw = get_parent_main_window(self)
dev = client.dev
fileName = mw.getOpenFileName("Select upgraded firmware file", "*.dfu")
if not fileName:
return
from ckcc.utils import dfu_parse
from ckcc.sigheader import FW_HEADER_SIZE, FW_HEADER_OFFSET, FW_HEADER_MAGIC
from ckcc.protocol import CCProtocolPacker
from hashlib import sha256
import struct
try:
with open(fileName, 'rb') as fd:
# unwrap firmware from the DFU
offset, size, *ignored = dfu_parse(fd)
fd.seek(offset)
firmware = fd.read(size)
hpos = FW_HEADER_OFFSET
hdr = bytes(firmware[hpos:hpos + FW_HEADER_SIZE]) # needed later too
magic = struct.unpack_from("<I", hdr)[0]
if magic != FW_HEADER_MAGIC:
raise ValueError("Bad magic")
except Exception as exc:
mw.show_error("Does not appear to be a Coldcard firmware file.\n\n%s" % exc)
return
# TODO:
        # - detect if they are trying to downgrade; that isn't going to work
# - warn them about the reboot?
# - length checks
        # - add a local progress bar
mw.show_message("Ready to Upgrade.\n\nBe patient. Unit will reboot itself when complete.")
def doit():
dlen, _ = dev.upload_file(firmware, verify=True)
assert dlen == len(firmware)
# append the firmware header a second time
result = dev.send_recv(CCProtocolPacker.upload(size, size+FW_HEADER_SIZE, hdr))
            # make it reboot into the bootloader, which might install it
dev.send_recv(CCProtocolPacker.reboot())
self.thread.add(doit)
self.close()
|
|
import json
import logging
import logging.handlers
import ethereum.slogging as slogging
class TestHandler(logging.handlers.BufferingHandler):
def __init__(self):
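        # Bypass BufferingHandler.__init__ (which requires a capacity
        # argument); capacity and buffer are set manually below.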
logging.Handler.__init__(self)
self.capacity = 10000
self.buffer = []
@property
def logged(self):
# returns just the message part (no formatting)
if len(self.buffer):
return self.buffer.pop().getMessage()
return None
def does_log(self, logcall):
assert self.logged is None
logcall('abc')
sl = self.logged
return bool(sl and 'abc' in sl)
def get_test_handler():
"handler.bufffer = [] has the logged lines"
th = TestHandler()
logging.getLogger().handlers = [th]
return th
def setup_logging(config_string='', log_json=False):
    # sets up default logging
slogging.configure(config_string=config_string, log_json=log_json)
th = get_test_handler()
return th
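# Example (illustrative logger name): setup_logging(':info,eth.vm:trace')
# sets the root level to INFO and the 'eth.vm' logger to TRACE.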
########## TESTS ###############
def test_testhandler():
th = get_test_handler()
    assert th.logged is None
th = setup_logging()
assert th.logged is None
log = slogging.get_logger('a')
log.warn('abc')
assert 'abc' in th.logged
assert th.logged is None
# same with does_log
assert th.does_log(log.warn)
assert not th.does_log(log.debug)
def test_baseconfig():
# test default loglevel INFO
th = setup_logging()
log = slogging.get_logger()
assert th.does_log(log.error)
assert th.does_log(log.critical)
    assert th.does_log(log.warn)
assert th.does_log(log.info)
assert not th.does_log(log.debug)
assert not th.does_log(log.trace)
config_string = ':inFO,a:trace,a.b:debug'
th = setup_logging(config_string=config_string)
def test_is_active2():
setup_logging(':info')
tester = slogging.get_logger('tester')
assert tester.is_active(level_name='info')
assert not tester.is_active(level_name='trace')
def test_lvl_trace():
config_string = ':trace'
th = setup_logging(config_string=config_string)
log = slogging.get_logger()
assert th.does_log(log.debug)
assert th.does_log(log.trace)
def test_incremental():
config_string = ':trace'
th = setup_logging(config_string=config_string)
log = slogging.get_logger()
# incremental context
log = log.bind(first='one')
log.error('nice', a=1, b=2)
assert 'first' in th.logged
log = log.bind(second='two')
log.error('nice', a=1, b=2)
l = th.logged
assert 'first' in l and 'two' in l
def test_jsonconfig():
th = setup_logging(log_json=True)
log = slogging.get_logger('prefix')
log.warn('abc', a=1)
assert json.loads(th.logged) == dict(event='prefix.abc', a=1)
def test_kvprinter():
# we can not test formatting
config_string = ':inFO,a:trace,a.b:debug'
th = setup_logging(config_string=config_string)
# log level info
log = slogging.get_logger('foo')
log.info('baz', arg=2)
l = th.logged
assert 'baz' in l
def test_namespaces():
config_string = ':inFO,a:trace,a.b:debug'
th = setup_logging(config_string=config_string)
# log level info
log = slogging.get_logger()
log_a = slogging.get_logger('a')
log_a_b = slogging.get_logger('a.b')
assert th.does_log(log.info)
assert not th.does_log(log.debug)
assert th.does_log(log_a.trace)
assert th.does_log(log_a_b.debug)
assert not th.does_log(log_a_b.trace)
def test_tracebacks():
th = setup_logging()
log = slogging.get_logger()
def div(a, b):
try:
r = a / b
            log.error("here's the stack", stack_info=True)
except Exception as e:
            log.error('an Exception trace should precede this msg', exc_info=True)
div(1, 0)
assert 'an Exception' in th.logged
div(1, 1)
assert 'the stack' in th.logged
def test_listeners():
th = setup_logging()
log = slogging.get_logger()
called = []
def log_cb(event_dict):
called.append(event_dict)
# activate listener
slogging.log_listeners.listeners.append(log_cb)
log.error('test listener', abc='thislistener')
assert 'thislistener' in th.logged
r = called.pop()
assert r == dict(event='test listener', abc='thislistener')
log.trace('trace is usually filtered', abc='thislistener')
assert th.logged is None
assert 'abc' in called.pop()
# deactivate listener
slogging.log_listeners.listeners.remove(log_cb)
log.error('test listener', abc='nolistener')
assert 'nolistener' in th.logged
assert not called
def test_logger_names():
th = setup_logging()
names = set(['a', 'b', 'c'])
for n in names:
slogging.get_logger(n)
assert names.issubset(set(slogging.get_logger_names()))
def test_is_active():
th = setup_logging()
log = slogging.get_logger()
assert not log.is_active('trace')
assert not log.is_active('debug')
assert log.is_active('info')
assert log.is_active('warn')
    # activate with a listener
slogging.log_listeners.listeners.append(lambda x: x)
assert log.is_active('trace')
slogging.log_listeners.listeners.pop()
assert not log.is_active('trace')
def test_lazy_log():
"""
    test lazy evaluation of json log data
e.g.
class LogState
class LogMemory
"""
called_json = []
called_print = []
class Expensive(object):
def __structlog__(self):
called_json.append(1)
return 'expensive data preparation'
def __repr__(self):
called_print.append(1)
return 'expensive data preparation'
th = setup_logging(log_json=True)
log = slogging.get_logger()
log.trace('no', data=Expensive())
assert not called_print
assert not called_json
log.info('yes', data=Expensive())
assert called_json.pop()
assert not called_print
th = setup_logging()
log = slogging.get_logger()
log.trace('no', data=Expensive())
assert not called_print
assert not called_json
log.info('yes', data=Expensive())
assert not called_json
assert called_print.pop()
def test_get_configuration():
config_string = ':INFO,a:TRACE,a.b:DEBUG'
log_json = False
slogging.configure(config_string=config_string, log_json=log_json)
config = slogging.get_configuration()
assert config['log_json'] == log_json
assert set(config['config_string'].split(',')) == set(config_string.split(','))
log_json = True
slogging.configure(config_string=config_string, log_json=log_json)
config = slogging.get_configuration()
assert config['log_json'] == log_json
assert set(config['config_string'].split(',')) == set(config_string.split(','))
    # set config differently
slogging.configure(config_string=':TRACE', log_json=False)
config2 = slogging.get_configuration()
# test whether we get original config
slogging.configure(**config)
config = slogging.get_configuration()
assert config['log_json'] == log_json
assert set(config['config_string'].split(',')) == set(config_string.split(','))
def test_recorder():
th = setup_logging()
log = slogging.get_logger()
# test info
recorder = slogging.LogRecorder()
assert len(slogging.log_listeners.listeners) == 1
log.info('a', v=1)
assert th.logged
r = recorder.pop_records()
assert r[0] == dict(event='a', v=1)
assert len(slogging.log_listeners.listeners) == 0
# test trace
recorder = slogging.LogRecorder()
assert len(slogging.log_listeners.listeners) == 1
log.trace('a', v=1)
assert not th.logged
r = recorder.pop_records()
assert r[0] == dict(event='a', v=1)
assert len(slogging.log_listeners.listeners) == 0
# examples
def test_howto_use_in_tests():
# select what you want to see.
    # e.g. TRACE from eth.vm except for vm.pre_state; DEBUG otherwise
config_string = ':DEBUG,eth.vm:TRACE,vm.pre_state:INFO'
slogging.configure(config_string=config_string)
log = slogging.get_logger('tests.logging')
log.info('test starts')
def test_how_to_use_as_vm_logger():
"""
    don't log anything unless there was an error
"""
config_string = ':DEBUG,eth.vm:INFO'
slogging.configure(config_string=config_string)
log = slogging.get_logger('eth.vm')
# record all logs
def run_vm(raise_error=False):
log.trace('op', pc=1)
log.trace('op', pc=2)
if raise_error:
raise Exception
recorder = slogging.LogRecorder()
try:
run_vm(raise_error=True)
    except Exception:
log = slogging.get_logger('eth.vm')
for x in recorder.pop_records():
log.info(x.pop('event'), **x)
def test_cleanup():
config_string = ':debug'
slogging.configure(config_string=config_string)
if __name__ == '__main__':
slogging.configure(':debug')
tester = slogging.get_logger('tester')
assert tester.is_active(level_name='info')
slogging.set_level('tester', 'trace')
assert tester.is_active(level_name='trace')
tester.info('done')
|
|
import os
import sys
from unittest import mock
import pytest
from _pytest.main import ExitCode
from _pytest.mark import EMPTY_PARAMETERSET_OPTION
from _pytest.mark import MarkGenerator as Mark
from _pytest.nodes import Collector
from _pytest.nodes import Node
class TestMark:
@pytest.mark.parametrize("attr", ["mark", "param"])
@pytest.mark.parametrize("modulename", ["py.test", "pytest"])
def test_pytest_exists_in_namespace_all(self, attr, modulename):
module = sys.modules[modulename]
assert attr in module.__all__
def test_pytest_mark_notcallable(self):
mark = Mark()
with pytest.raises(TypeError):
mark()
def test_mark_with_param(self):
def some_function(abc):
pass
class SomeClass:
pass
assert pytest.mark.foo(some_function) is some_function
assert pytest.mark.foo.with_args(some_function) is not some_function
assert pytest.mark.foo(SomeClass) is SomeClass
assert pytest.mark.foo.with_args(SomeClass) is not SomeClass
def test_pytest_mark_name_starts_with_underscore(self):
mark = Mark()
with pytest.raises(AttributeError):
mark._some_name
def test_marked_class_run_twice(testdir, request):
"""Test fails file is run twice that contains marked class.
See issue#683.
"""
py_file = testdir.makepyfile(
"""
import pytest
@pytest.mark.parametrize('abc', [1, 2, 3])
class Test1(object):
def test_1(self, abc):
assert abc in [1, 2, 3]
"""
)
file_name = os.path.basename(py_file.strpath)
rec = testdir.inline_run(file_name, file_name)
rec.assertoutcome(passed=6)
def test_ini_markers(testdir):
testdir.makeini(
"""
[pytest]
markers =
a1: this is a webtest marker
a2: this is a smoke marker
"""
)
testdir.makepyfile(
"""
def test_markers(pytestconfig):
markers = pytestconfig.getini("markers")
print(markers)
assert len(markers) >= 2
assert markers[0].startswith("a1:")
assert markers[1].startswith("a2:")
"""
)
rec = testdir.inline_run()
rec.assertoutcome(passed=1)
def test_markers_option(testdir):
testdir.makeini(
"""
[pytest]
markers =
a1: this is a webtest marker
a1some: another marker
nodescription
"""
)
result = testdir.runpytest("--markers")
result.stdout.fnmatch_lines(
["*a1*this is a webtest*", "*a1some*another marker", "*nodescription*"]
)
def test_ini_markers_whitespace(testdir):
testdir.makeini(
"""
[pytest]
markers =
a1 : this is a whitespace marker
"""
)
testdir.makepyfile(
"""
import pytest
@pytest.mark.a1
def test_markers():
assert True
"""
)
rec = testdir.inline_run("--strict-markers", "-m", "a1")
rec.assertoutcome(passed=1)
def test_marker_without_description(testdir):
testdir.makefile(
".cfg",
setup="""
[tool:pytest]
markers=slow
""",
)
testdir.makeconftest(
"""
import pytest
pytest.mark.xfail('FAIL')
"""
)
ftdir = testdir.mkdir("ft1_dummy")
testdir.tmpdir.join("conftest.py").move(ftdir.join("conftest.py"))
rec = testdir.runpytest("--strict-markers")
rec.assert_outcomes()
def test_markers_option_with_plugin_in_current_dir(testdir):
testdir.makeconftest('pytest_plugins = "flip_flop"')
testdir.makepyfile(
flip_flop="""\
def pytest_configure(config):
config.addinivalue_line("markers", "flip:flop")
def pytest_generate_tests(metafunc):
try:
mark = metafunc.function.flipper
except AttributeError:
return
metafunc.parametrize("x", (10, 20))"""
)
testdir.makepyfile(
"""\
import pytest
@pytest.mark.flipper
def test_example(x):
assert x"""
)
result = testdir.runpytest("--markers")
result.stdout.fnmatch_lines(["*flip*flop*"])
def test_mark_on_pseudo_function(testdir):
testdir.makepyfile(
"""
import pytest
@pytest.mark.r(lambda x: 0/0)
def test_hello():
pass
"""
)
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
@pytest.mark.parametrize("option_name", ["--strict-markers", "--strict"])
def test_strict_prohibits_unregistered_markers(testdir, option_name):
testdir.makepyfile(
"""
import pytest
@pytest.mark.unregisteredmark
def test_hello():
pass
"""
)
result = testdir.runpytest(option_name)
assert result.ret != 0
result.stdout.fnmatch_lines(
["'unregisteredmark' not found in `markers` configuration option"]
)
@pytest.mark.parametrize(
"spec",
[
("xyz", ("test_one",)),
("xyz and xyz2", ()),
("xyz2", ("test_two",)),
("xyz or xyz2", ("test_one", "test_two")),
],
)
def test_mark_option(spec, testdir):
testdir.makepyfile(
"""
import pytest
@pytest.mark.xyz
def test_one():
pass
@pytest.mark.xyz2
def test_two():
pass
"""
)
opt, passed_result = spec
rec = testdir.inline_run("-m", opt)
passed, skipped, fail = rec.listoutcomes()
passed = [x.nodeid.split("::")[-1] for x in passed]
assert len(passed) == len(passed_result)
assert list(passed) == list(passed_result)
@pytest.mark.parametrize(
"spec", [("interface", ("test_interface",)), ("not interface", ("test_nointer",))]
)
def test_mark_option_custom(spec, testdir):
testdir.makeconftest(
"""
import pytest
def pytest_collection_modifyitems(items):
for item in items:
if "interface" in item.nodeid:
item.add_marker(pytest.mark.interface)
"""
)
testdir.makepyfile(
"""
def test_interface():
pass
def test_nointer():
pass
"""
)
opt, passed_result = spec
rec = testdir.inline_run("-m", opt)
passed, skipped, fail = rec.listoutcomes()
passed = [x.nodeid.split("::")[-1] for x in passed]
assert len(passed) == len(passed_result)
assert list(passed) == list(passed_result)
@pytest.mark.parametrize(
"spec",
[
("interface", ("test_interface",)),
("not interface", ("test_nointer", "test_pass")),
("pass", ("test_pass",)),
("not pass", ("test_interface", "test_nointer")),
],
)
def test_keyword_option_custom(spec, testdir):
testdir.makepyfile(
"""
def test_interface():
pass
def test_nointer():
pass
def test_pass():
pass
"""
)
opt, passed_result = spec
rec = testdir.inline_run("-k", opt)
passed, skipped, fail = rec.listoutcomes()
passed = [x.nodeid.split("::")[-1] for x in passed]
assert len(passed) == len(passed_result)
assert list(passed) == list(passed_result)
def test_keyword_option_considers_mark(testdir):
testdir.copy_example("marks/marks_considered_keywords")
rec = testdir.inline_run("-k", "foo")
passed = rec.listoutcomes()[0]
assert len(passed) == 1
@pytest.mark.parametrize(
"spec",
[
("None", ("test_func[None]",)),
("1.3", ("test_func[1.3]",)),
("2-3", ("test_func[2-3]",)),
],
)
def test_keyword_option_parametrize(spec, testdir):
testdir.makepyfile(
"""
import pytest
@pytest.mark.parametrize("arg", [None, 1.3, "2-3"])
def test_func(arg):
pass
"""
)
opt, passed_result = spec
rec = testdir.inline_run("-k", opt)
passed, skipped, fail = rec.listoutcomes()
passed = [x.nodeid.split("::")[-1] for x in passed]
assert len(passed) == len(passed_result)
assert list(passed) == list(passed_result)
@pytest.mark.parametrize(
"spec",
[
(
"foo or import",
"ERROR: Python keyword 'import' not accepted in expressions passed to '-k'",
),
("foo or", "ERROR: Wrong expression passed to '-k': foo or"),
],
)
def test_keyword_option_wrong_arguments(spec, testdir, capsys):
testdir.makepyfile(
"""
def test_func(arg):
pass
"""
)
opt, expected_result = spec
testdir.inline_run("-k", opt)
out = capsys.readouterr().err
assert expected_result in out
def test_parametrized_collected_from_command_line(testdir):
"""Parametrized test not collected if test named specified
in command line issue#649.
"""
py_file = testdir.makepyfile(
"""
import pytest
@pytest.mark.parametrize("arg", [None, 1.3, "2-3"])
def test_func(arg):
pass
"""
)
file_name = os.path.basename(py_file.strpath)
rec = testdir.inline_run(file_name + "::" + "test_func")
rec.assertoutcome(passed=3)
def test_parametrized_collect_with_wrong_args(testdir):
"""Test collect parametrized func with wrong number of args."""
py_file = testdir.makepyfile(
"""
import pytest
@pytest.mark.parametrize('foo, bar', [(1, 2, 3)])
def test_func(foo, bar):
pass
"""
)
result = testdir.runpytest(py_file)
result.stdout.fnmatch_lines(
[
'test_parametrized_collect_with_wrong_args.py::test_func: in "parametrize" the number of names (2):',
" ['foo', 'bar']",
"must be equal to the number of values (3):",
" (1, 2, 3)",
]
)
def test_parametrized_with_kwargs(testdir):
"""Test collect parametrized func with wrong number of args."""
py_file = testdir.makepyfile(
"""
import pytest
@pytest.fixture(params=[1,2])
def a(request):
return request.param
@pytest.mark.parametrize(argnames='b', argvalues=[1, 2])
def test_func(a, b):
pass
"""
)
result = testdir.runpytest(py_file)
assert result.ret == 0
def test_parametrize_iterator(testdir):
"""parametrize should work with generators (#5354)."""
py_file = testdir.makepyfile(
"""\
import pytest
def gen():
yield 1
yield 2
yield 3
@pytest.mark.parametrize('a', gen())
def test(a):
assert a >= 1
"""
)
result = testdir.runpytest(py_file)
assert result.ret == 0
# should not skip any tests
result.stdout.fnmatch_lines(["*3 passed*"])
class TestFunctional:
def test_merging_markers_deep(self, testdir):
# issue 199 - propagate markers into nested classes
p = testdir.makepyfile(
"""
import pytest
class TestA(object):
pytestmark = pytest.mark.a
def test_b(self):
assert True
class TestC(object):
# this one didn't get marked
def test_d(self):
assert True
"""
)
items, rec = testdir.inline_genitems(p)
for item in items:
print(item, item.keywords)
assert [x for x in item.iter_markers() if x.name == "a"]
def test_mark_decorator_subclass_does_not_propagate_to_base(self, testdir):
p = testdir.makepyfile(
"""
import pytest
@pytest.mark.a
class Base(object): pass
@pytest.mark.b
class Test1(Base):
def test_foo(self): pass
class Test2(Base):
def test_bar(self): pass
"""
)
items, rec = testdir.inline_genitems(p)
self.assert_markers(items, test_foo=("a", "b"), test_bar=("a",))
    def test_mark_should_not_pass_to_sibling_class(self, testdir):
"""#568"""
p = testdir.makepyfile(
"""
import pytest
class TestBase(object):
def test_foo(self):
pass
@pytest.mark.b
class TestSub(TestBase):
pass
class TestOtherSub(TestBase):
pass
"""
)
items, rec = testdir.inline_genitems(p)
base_item, sub_item, sub_item_other = items
print(items, [x.nodeid for x in items])
        # the new API segregates markers per class
assert not list(base_item.iter_markers(name="b"))
assert not list(sub_item_other.iter_markers(name="b"))
assert list(sub_item.iter_markers(name="b"))
def test_mark_decorator_baseclasses_merged(self, testdir):
p = testdir.makepyfile(
"""
import pytest
@pytest.mark.a
class Base(object): pass
@pytest.mark.b
class Base2(Base): pass
@pytest.mark.c
class Test1(Base2):
def test_foo(self): pass
class Test2(Base2):
@pytest.mark.d
def test_bar(self): pass
"""
)
items, rec = testdir.inline_genitems(p)
self.assert_markers(items, test_foo=("a", "b", "c"), test_bar=("a", "b", "d"))
def test_mark_closest(self, testdir):
p = testdir.makepyfile(
"""
import pytest
@pytest.mark.c(location="class")
class Test:
@pytest.mark.c(location="function")
def test_has_own():
pass
def test_has_inherited():
pass
"""
)
items, rec = testdir.inline_genitems(p)
has_own, has_inherited = items
assert has_own.get_closest_marker("c").kwargs == {"location": "function"}
assert has_inherited.get_closest_marker("c").kwargs == {"location": "class"}
assert has_own.get_closest_marker("missing") is None
def test_mark_with_wrong_marker(self, testdir):
reprec = testdir.inline_runsource(
"""
import pytest
class pytestmark(object):
pass
def test_func():
pass
"""
)
values = reprec.getfailedcollections()
assert len(values) == 1
assert "TypeError" in str(values[0].longrepr)
def test_mark_dynamically_in_funcarg(self, testdir):
testdir.makeconftest(
"""
import pytest
@pytest.fixture
def arg(request):
request.applymarker(pytest.mark.hello)
def pytest_terminal_summary(terminalreporter):
values = terminalreporter.stats['passed']
terminalreporter._tw.line("keyword: %s" % values[0].keywords)
"""
)
testdir.makepyfile(
"""
def test_func(arg):
pass
"""
)
result = testdir.runpytest()
result.stdout.fnmatch_lines(["keyword: *hello*"])
def test_no_marker_match_on_unmarked_names(self, testdir):
p = testdir.makepyfile(
"""
import pytest
@pytest.mark.shouldmatch
def test_marked():
assert 1
def test_unmarked():
assert 1
"""
)
reprec = testdir.inline_run("-m", "test_unmarked", p)
passed, skipped, failed = reprec.listoutcomes()
assert len(passed) + len(skipped) + len(failed) == 0
dlist = reprec.getcalls("pytest_deselected")
deselected_tests = dlist[0].items
assert len(deselected_tests) == 2
def test_invalid_m_option(self, testdir):
testdir.makepyfile(
"""
def test_a():
pass
"""
)
result = testdir.runpytest("-m bogus/")
result.stdout.fnmatch_lines(
["INTERNALERROR> Marker expression must be valid Python!"]
)
def test_keywords_at_node_level(self, testdir):
testdir.makepyfile(
"""
import pytest
@pytest.fixture(scope="session", autouse=True)
def some(request):
request.keywords["hello"] = 42
assert "world" not in request.keywords
@pytest.fixture(scope="function", autouse=True)
def funcsetup(request):
assert "world" in request.keywords
assert "hello" in request.keywords
@pytest.mark.world
def test_function():
pass
"""
)
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
def test_keyword_added_for_session(self, testdir):
testdir.makeconftest(
"""
import pytest
def pytest_collection_modifyitems(session):
session.add_marker("mark1")
session.add_marker(pytest.mark.mark2)
session.add_marker(pytest.mark.mark3)
pytest.raises(ValueError, lambda:
session.add_marker(10))
"""
)
testdir.makepyfile(
"""
def test_some(request):
assert "mark1" in request.keywords
assert "mark2" in request.keywords
assert "mark3" in request.keywords
assert 10 not in request.keywords
marker = request.node.get_closest_marker("mark1")
assert marker.name == "mark1"
assert marker.args == ()
assert marker.kwargs == {}
"""
)
reprec = testdir.inline_run("-m", "mark1")
reprec.assertoutcome(passed=1)
def assert_markers(self, items, **expected):
"""assert that given items have expected marker names applied to them.
expected should be a dict of (item name -> seq of expected marker names)
.. note:: this could be moved to ``testdir`` if proven to be useful
to other modules.
"""
items = {x.name: x for x in items}
for name, expected_markers in expected.items():
markers = {m.name for m in items[name].iter_markers()}
assert markers == set(expected_markers)
@pytest.mark.filterwarnings("ignore")
def test_mark_from_parameters(self, testdir):
"""#1540"""
testdir.makepyfile(
"""
import pytest
pytestmark = pytest.mark.skipif(True, reason='skip all')
# skipifs inside fixture params
params = [pytest.mark.skipif(False, reason='dont skip')('parameter')]
@pytest.fixture(params=params)
def parameter(request):
return request.param
def test_1(parameter):
assert True
"""
)
reprec = testdir.inline_run()
reprec.assertoutcome(skipped=1)
class TestKeywordSelection:
def test_select_simple(self, testdir):
file_test = testdir.makepyfile(
"""
def test_one():
assert 0
class TestClass(object):
def test_method_one(self):
assert 42 == 43
"""
)
def check(keyword, name):
reprec = testdir.inline_run("-s", "-k", keyword, file_test)
passed, skipped, failed = reprec.listoutcomes()
assert len(failed) == 1
assert failed[0].nodeid.split("::")[-1] == name
assert len(reprec.getcalls("pytest_deselected")) == 1
for keyword in ["test_one", "est_on"]:
check(keyword, "test_one")
check("TestClass and test", "test_method_one")
@pytest.mark.parametrize(
"keyword",
[
"xxx",
"xxx and test_2",
"TestClass",
"xxx and not test_1",
"TestClass and test_2",
"xxx and TestClass and test_2",
],
)
def test_select_extra_keywords(self, testdir, keyword):
p = testdir.makepyfile(
test_select="""
def test_1():
pass
class TestClass(object):
def test_2(self):
pass
"""
)
testdir.makepyfile(
conftest="""
import pytest
@pytest.hookimpl(hookwrapper=True)
def pytest_pycollect_makeitem(name):
outcome = yield
if name == "TestClass":
item = outcome.get_result()
item.extra_keyword_matches.add("xxx")
"""
)
reprec = testdir.inline_run(p.dirpath(), "-s", "-k", keyword)
print("keyword", repr(keyword))
passed, skipped, failed = reprec.listoutcomes()
assert len(passed) == 1
assert passed[0].nodeid.endswith("test_2")
dlist = reprec.getcalls("pytest_deselected")
assert len(dlist) == 1
assert dlist[0].items[0].name == "test_1"
def test_select_starton(self, testdir):
threepass = testdir.makepyfile(
test_threepass="""
def test_one(): assert 1
def test_two(): assert 1
def test_three(): assert 1
"""
)
reprec = testdir.inline_run("-k", "test_two:", threepass)
passed, skipped, failed = reprec.listoutcomes()
assert len(passed) == 2
assert not failed
dlist = reprec.getcalls("pytest_deselected")
assert len(dlist) == 1
item = dlist[0].items[0]
assert item.name == "test_one"
def test_keyword_extra(self, testdir):
p = testdir.makepyfile(
"""
def test_one():
assert 0
test_one.mykeyword = True
"""
)
reprec = testdir.inline_run("-k", "mykeyword", p)
passed, skipped, failed = reprec.countoutcomes()
assert failed == 1
@pytest.mark.xfail
def test_keyword_extra_dash(self, testdir):
p = testdir.makepyfile(
"""
def test_one():
assert 0
test_one.mykeyword = True
"""
)
# with argparse the argument to an option cannot
# start with '-'
reprec = testdir.inline_run("-k", "-mykeyword", p)
passed, skipped, failed = reprec.countoutcomes()
assert passed + skipped + failed == 0
def test_no_magic_values(self, testdir):
"""Make sure the tests do not match on magic values,
no double underscored values, like '__dict__',
and no instance values, like '()'.
"""
p = testdir.makepyfile(
"""
def test_one(): assert 1
"""
)
def assert_test_is_not_selected(keyword):
reprec = testdir.inline_run("-k", keyword, p)
passed, skipped, failed = reprec.countoutcomes()
dlist = reprec.getcalls("pytest_deselected")
assert passed + skipped + failed == 0
deselected_tests = dlist[0].items
assert len(deselected_tests) == 1
assert_test_is_not_selected("__")
assert_test_is_not_selected("()")
class TestMarkDecorator:
@pytest.mark.parametrize(
"lhs, rhs, expected",
[
(pytest.mark.foo(), pytest.mark.foo(), True),
(pytest.mark.foo(), pytest.mark.bar(), False),
(pytest.mark.foo(), "bar", False),
("foo", pytest.mark.bar(), False),
],
)
def test__eq__(self, lhs, rhs, expected):
assert (lhs == rhs) == expected
@pytest.mark.parametrize("mark", [None, "", "skip", "xfail"])
def test_parameterset_for_parametrize_marks(testdir, mark):
if mark is not None:
testdir.makeini(
"""
[pytest]
{}={}
""".format(
EMPTY_PARAMETERSET_OPTION, mark
)
)
config = testdir.parseconfig()
from _pytest.mark import pytest_configure, get_empty_parameterset_mark
pytest_configure(config)
result_mark = get_empty_parameterset_mark(config, ["a"], all)
if mark in (None, ""):
# normalize to the requested name
mark = "skip"
assert result_mark.name == mark
assert result_mark.kwargs["reason"].startswith("got empty parameter set ")
if mark == "xfail":
assert result_mark.kwargs.get("run") is False
def test_parameterset_for_fail_at_collect(testdir):
testdir.makeini(
"""
[pytest]
{}=fail_at_collect
""".format(
EMPTY_PARAMETERSET_OPTION
)
)
config = testdir.parseconfig()
from _pytest.mark import pytest_configure, get_empty_parameterset_mark
pytest_configure(config)
with pytest.raises(
Collector.CollectError,
match=r"Empty parameter set in 'pytest_configure' at line \d\d+",
):
get_empty_parameterset_mark(config, ["a"], pytest_configure)
p1 = testdir.makepyfile(
"""
import pytest
@pytest.mark.parametrize("empty", [])
def test():
pass
"""
)
result = testdir.runpytest(str(p1))
result.stdout.fnmatch_lines(
[
"collected 0 items / 1 errors",
"* ERROR collecting test_parameterset_for_fail_at_collect.py *",
"Empty parameter set in 'test' at line 3",
"*= 1 error in *",
]
)
assert result.ret == ExitCode.INTERRUPTED
def test_parameterset_for_parametrize_bad_markname(testdir):
with pytest.raises(pytest.UsageError):
test_parameterset_for_parametrize_marks(testdir, "bad")
def test_mark_expressions_no_smear(testdir):
testdir.makepyfile(
"""
import pytest
class BaseTests(object):
def test_something(self):
pass
@pytest.mark.FOO
class TestFooClass(BaseTests):
pass
@pytest.mark.BAR
class TestBarClass(BaseTests):
pass
"""
)
reprec = testdir.inline_run("-m", "FOO")
passed, skipped, failed = reprec.countoutcomes()
dlist = reprec.getcalls("pytest_deselected")
assert passed == 1
assert skipped == failed == 0
deselected_tests = dlist[0].items
assert len(deselected_tests) == 1
# todo: fixed
# keywords smear - expected behaviour
# reprec_keywords = testdir.inline_run("-k", "FOO")
# passed_k, skipped_k, failed_k = reprec_keywords.countoutcomes()
# assert passed_k == 2
# assert skipped_k == failed_k == 0
def test_addmarker_order():
node = Node("Test", config=mock.Mock(), session=mock.Mock(), nodeid="Test")
node.add_marker("foo")
node.add_marker("bar")
node.add_marker("baz", append=False)
extracted = [x.name for x in node.iter_markers()]
assert extracted == ["baz", "foo", "bar"]
@pytest.mark.filterwarnings("ignore")
def test_markers_from_parametrize(testdir):
"""#3605"""
testdir.makepyfile(
"""
import pytest
first_custom_mark = pytest.mark.custom_marker
custom_mark = pytest.mark.custom_mark
@pytest.fixture(autouse=True)
def trigger(request):
custom_mark = list(request.node.iter_markers('custom_mark'))
print("Custom mark %s" % custom_mark)
@custom_mark("custom mark non parametrized")
def test_custom_mark_non_parametrized():
print("Hey from test")
@pytest.mark.parametrize(
"obj_type",
[
first_custom_mark("first custom mark")("template"),
pytest.param( # Think this should be recommended way?
"disk",
marks=custom_mark('custom mark1')
),
custom_mark("custom mark2")("vm"), # Tried also this
]
)
def test_custom_mark_parametrized(obj_type):
print("obj_type is:", obj_type)
"""
)
result = testdir.runpytest()
result.assert_outcomes(passed=4)
def test_pytest_param_id_requires_string():
with pytest.raises(TypeError) as excinfo:
pytest.param(id=True)
msg, = excinfo.value.args
assert msg == "Expected id to be a string, got <class 'bool'>: True"
@pytest.mark.parametrize("s", (None, "hello world"))
def test_pytest_param_id_allows_none_or_string(s):
assert pytest.param(id=s)
|
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"""Make sure associations in the old model are in the new model."""
for criterion in orm.JudgingCriterion.objects.all():
for phase in criterion.phases.all():
orm.PhaseCriterion.objects.get_or_create(criterion=criterion,
phase=phase)
def backwards(self, orm):
"""Make sure all the objects in the old model reflect the new model.
The weighting factors aren't preserved here, but that's what you get if
you migrate backwards.
"""
for pc in orm.PhaseCriterion.objects.all():
pc.criterion.phases.add(pc.phase)
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'challenges.category': {
'Meta': {'object_name': 'Category'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '60'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '60', 'db_index': 'True'})
},
'challenges.challenge': {
'Meta': {'object_name': 'Challenge'},
'allow_voting': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'description': ('django.db.models.fields.TextField', [], {}),
'end_date': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'moderate': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['projects.Project']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '60', 'db_index': 'True'}),
'start_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.utcnow'}),
'summary': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '60'})
},
'challenges.exclusionflag': {
'Meta': {'object_name': 'ExclusionFlag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'submission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['challenges.Submission']"})
},
'challenges.externallink': {
'Meta': {'object_name': 'ExternalLink'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'submission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['challenges.Submission']", 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '255'})
},
'challenges.judgeassignment': {
'Meta': {'unique_together': "(('submission', 'judge'),)", 'object_name': 'JudgeAssignment'},
'created_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.utcnow'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'judge': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['users.Profile']"}),
'submission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['challenges.Submission']"})
},
'challenges.judgement': {
'Meta': {'unique_together': "(('submission', 'judge'),)", 'object_name': 'Judgement'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'judge': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['users.Profile']"}),
'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'submission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['challenges.Submission']"})
},
'challenges.judginganswer': {
'Meta': {'unique_together': "(('judgement', 'criterion'),)", 'object_name': 'JudgingAnswer'},
'criterion': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['challenges.JudgingCriterion']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'judgement': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['challenges.Judgement']"}),
'rating': ('django.db.models.fields.IntegerField', [], {})
},
'challenges.judgingcriterion': {
'Meta': {'ordering': "('id',)", 'object_name': 'JudgingCriterion'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_value': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
'min_value': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'phases': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'judgement_criteria'", 'blank': 'True', 'to': "orm['challenges.Phase']"}),
'question': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '250'})
},
'challenges.phase': {
'Meta': {'ordering': "('order',)", 'unique_together': "(('challenge', 'name'),)", 'object_name': 'Phase'},
'challenge': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'phases'", 'to': "orm['challenges.Challenge']"}),
'end_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 8, 23, 10, 40, 9, 664627)'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order': ('django.db.models.fields.IntegerField', [], {}),
'start_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.utcnow'})
},
'challenges.phasecriterion': {
'Meta': {'unique_together': "(('phase', 'criterion'),)", 'object_name': 'PhaseCriterion'},
'criterion': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['challenges.JudgingCriterion']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'phase': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['challenges.Phase']"}),
'weight': ('django.db.models.fields.DecimalField', [], {'default': '10', 'max_digits': '4', 'decimal_places': '2'})
},
'challenges.submission': {
'Meta': {'ordering': "['-id']", 'object_name': 'Submission'},
'brief_description': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['challenges.Category']"}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['users.Profile']"}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.utcnow'}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_winner': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'phase': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['challenges.Phase']"}),
'sketh_note': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '60'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'projects.project': {
'Meta': {'object_name': 'Project'},
'allow_participation': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'allow_sub_projects': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'featured_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'followers': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'projects_following'", 'symmetrical': 'False', 'to': "orm['users.Profile']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'long_description': ('django.db.models.fields.TextField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'parent_project_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'}),
'sub_project_label': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'team_members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['users.Profile']", 'symmetrical': 'False'}),
'topics': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['topics.Topic']", 'symmetrical': 'False'})
},
'taggit.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'})
},
'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_items'", 'to': "orm['taggit.Tag']"})
},
'topics.topic': {
'Meta': {'object_name': 'Topic'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'draft': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'long_description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'})
},
'users.profile': {
'Meta': {'object_name': 'Profile'},
'avatar': ('django.db.models.fields.files.ImageField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'bio': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'featured_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '255', 'blank': 'True'})
}
}
complete_apps = ['challenges']
|
|
""" This module is generated by transpiling Haxe into Python and cleaning
the resulting code by hand, e.g. removing unused Haxe classes. To try it
yourself, clone https://github.com/hsluv/hsluv and run:
haxe -cp haxe/src hsluv.Hsluv -python hsluv.py
"""
from functools import wraps as _wraps, partial as _partial # unexport, see #17
import math as _math # unexport, see #17
__version__ = '5.0.2'
_m = [[3.240969941904521, -1.537383177570093, -0.498610760293],
[-0.96924363628087, 1.87596750150772, 0.041555057407175],
[0.055630079696993, -0.20397695888897, 1.056971514242878]]
_min_v = [[0.41239079926595, 0.35758433938387, 0.18048078840183],
[0.21263900587151, 0.71516867876775, 0.072192315360733],
[0.019330818715591, 0.11919477979462, 0.95053215224966]]
_ref_y = 1.0
_ref_u = 0.19783000664283
_ref_v = 0.46831999493879
_kappa = 903.2962962
_epsilon = 0.0088564516
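# Note: _kappa and _epsilon are the CIE standard constants 24389/27 and
# 216/24389, written out here as truncated decimals.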
def _normalize_output(conversion):
# as in snapshot rev 4, the tolerance should be 1e-11
    normalize = _partial(round, ndigits=11)
@_wraps(conversion)
def normalized(*args, **kwargs):
color = conversion(*args, **kwargs)
return tuple(normalize(c) for c in color)
return normalized
def _distance_line_from_origin(line):
v = line['slope'] ** 2 + 1
return abs(line['intercept']) / _math.sqrt(v)
def _length_of_ray_until_intersect(theta, line):
return line['intercept']\
/ (_math.sin(theta) - line['slope'] * _math.cos(theta))
def _get_bounds(l):
    result = []
    sub1 = ((l + 16) ** 3) / 1560896
    sub2 = sub1 if sub1 > _epsilon else l / _kappa
    for c in range(3):
        m1, m2, m3 = _m[c]
        for t in range(2):
            top1 = (284517 * m1 - 94839 * m3) * sub2
            top2 = (838422 * m3 + 769860 * m2 + 731718 * m1)\
                * l * sub2 - (769860 * t) * l
            bottom = (632260 * m3 - 126452 * m2) * sub2 + 126452 * t
            result.append({'slope': top1 / bottom,
                           'intercept': top2 / bottom})
    return result
def _max_safe_chroma_for_l(l):
return min(_distance_line_from_origin(bound)
for bound in _get_bounds(l))
def _max_chroma_for_lh(l, h):
hrad = _math.radians(h)
lengths = [_length_of_ray_until_intersect(hrad, bound) for bound in _get_bounds(l)]
return min(length for length in lengths if length >= 0)
def _dot_product(a, b):
return sum(i * j for i, j in zip(a, b))
def _from_linear(c):
    if c <= 0.0031308:
        return 12.92 * c
    # exponent 1 / 2.4 is the inverse of the sRGB gamma used in _to_linear
    return 1.055 * _math.pow(c, 1 / 2.4) - 0.055
def _to_linear(c):
if c > 0.04045:
return _math.pow((c + 0.055) / 1.055, 2.4)
return c / 12.92
def _y_to_l(y):
if y <= _epsilon:
return y / _ref_y * _kappa
return 116 * _math.pow(y / _ref_y, 1 / 3) - 16
def _l_to_y(l):
if l <= 8:
return _ref_y * l / _kappa
return _ref_y * (((l + 16) / 116) ** 3)
def xyz_to_rgb(_hx_tuple):
return (
_from_linear(_dot_product(_m[0], _hx_tuple)),
_from_linear(_dot_product(_m[1], _hx_tuple)),
_from_linear(_dot_product(_m[2], _hx_tuple)))
def rgb_to_xyz(_hx_tuple):
rgbl = (_to_linear(_hx_tuple[0]),
_to_linear(_hx_tuple[1]),
_to_linear(_hx_tuple[2]))
return (_dot_product(_min_v[0], rgbl),
_dot_product(_min_v[1], rgbl),
_dot_product(_min_v[2], rgbl))
def xyz_to_luv(_hx_tuple):
x = float(_hx_tuple[0])
y = float(_hx_tuple[1])
z = float(_hx_tuple[2])
l = _y_to_l(y)
if l == 0:
return (0, 0, 0)
divider = x + 15 * y + 3 * z
if divider == 0:
u = v = float("nan")
return (l, u, v)
var_u = 4 * x / divider
var_v = 9 * y / divider
u = 13 * l * (var_u - _ref_u)
v = 13 * l * (var_v - _ref_v)
return (l, u, v)
def luv_to_xyz(_hx_tuple):
l = float(_hx_tuple[0])
u = float(_hx_tuple[1])
v = float(_hx_tuple[2])
if l == 0:
return (0, 0, 0)
var_u = u / (13 * l) + _ref_u
var_v = v / (13 * l) + _ref_v
y = _l_to_y(l)
x = y * 9 * var_u / (4 * var_v)
z = y * (12 - 3 * var_u - 20 * var_v) / (4 * var_v)
return (x, y, z)
def luv_to_lch(_hx_tuple):
l = float(_hx_tuple[0])
u = float(_hx_tuple[1])
v = float(_hx_tuple[2])
c = _math.hypot(u, v)
if c < 1e-08:
h = 0
else:
hrad = _math.atan2(v, u)
h = _math.degrees(hrad)
if h < 0:
h += 360
return (l, c, h)
def lch_to_luv(_hx_tuple):
l = float(_hx_tuple[0])
c = float(_hx_tuple[1])
h = float(_hx_tuple[2])
hrad = _math.radians(h)
u = _math.cos(hrad) * c
v = _math.sin(hrad) * c
return (l, u, v)
def hsluv_to_lch(_hx_tuple):
h = float(_hx_tuple[0])
s = float(_hx_tuple[1])
l = float(_hx_tuple[2])
if l > 100-1e-7:
return (100, 0, h)
if l < 1e-08:
return (0, 0, h)
_hx_max = _max_chroma_for_lh(l, h)
c = _hx_max / 100 * s
return (l, c, h)
def lch_to_hsluv(_hx_tuple):
l = float(_hx_tuple[0])
c = float(_hx_tuple[1])
h = float(_hx_tuple[2])
if l > 100-1e-7:
return (h, 0, 100)
if l < 1e-08:
return (h, 0, 0)
_hx_max = _max_chroma_for_lh(l, h)
s = c / _hx_max * 100
return (h, s, l)
def hpluv_to_lch(_hx_tuple):
h = float(_hx_tuple[0])
s = float(_hx_tuple[1])
l = float(_hx_tuple[2])
if l > 100-1e-7:
return (100, 0, h)
if l < 1e-08:
return (0, 0, h)
_hx_max = _max_safe_chroma_for_l(l)
c = _hx_max / 100 * s
return (l, c, h)
def lch_to_hpluv(_hx_tuple):
l = float(_hx_tuple[0])
c = float(_hx_tuple[1])
h = float(_hx_tuple[2])
if l > 100-1e-7:
return (h, 0, 100)
if l < 1e-08:
return (h, 0, 0)
_hx_max = _max_safe_chroma_for_l(l)
s = c / _hx_max * 100
return (h, s, l)
def rgb_to_hex(_hx_tuple):
return '#{:02x}{:02x}{:02x}'.format(
int(_math.floor(_hx_tuple[0] * 255 + 0.5)),
int(_math.floor(_hx_tuple[1] * 255 + 0.5)),
int(_math.floor(_hx_tuple[2] * 255 + 0.5)))
def hex_to_rgb(_hex):
# skip leading '#'
r = int(_hex[1:3], base=16) / 255.0
g = int(_hex[3:5], base=16) / 255.0
b = int(_hex[5:7], base=16) / 255.0
return (r, g, b)
def lch_to_rgb(_hx_tuple):
return xyz_to_rgb(luv_to_xyz(lch_to_luv(_hx_tuple)))
def rgb_to_lch(_hx_tuple):
return luv_to_lch(xyz_to_luv(rgb_to_xyz(_hx_tuple)))
def _hsluv_to_rgb(_hx_tuple):
return lch_to_rgb(hsluv_to_lch(_hx_tuple))
hsluv_to_rgb = _normalize_output(_hsluv_to_rgb)
def rgb_to_hsluv(_hx_tuple):
return lch_to_hsluv(rgb_to_lch(_hx_tuple))
def _hpluv_to_rgb(_hx_tuple):
return lch_to_rgb(hpluv_to_lch(_hx_tuple))
hpluv_to_rgb = _normalize_output(_hpluv_to_rgb)
def rgb_to_hpluv(_hx_tuple):
return lch_to_hpluv(rgb_to_lch(_hx_tuple))
def hsluv_to_hex(_hx_tuple):
return rgb_to_hex(hsluv_to_rgb(_hx_tuple))
def hpluv_to_hex(_hx_tuple):
return rgb_to_hex(hpluv_to_rgb(_hx_tuple))
def hex_to_hsluv(s):
return rgb_to_hsluv(hex_to_rgb(s))
def hex_to_hpluv(s):
return rgb_to_hpluv(hex_to_rgb(s))
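# Illustrative usage sketch (not part of the transpiled module): round-trip a
# colour through the public helpers defined above. The input values are
# arbitrary examples.
if __name__ == '__main__':
    hex_color = hsluv_to_hex((260.0, 75.0, 40.0))  # (H, S, L) in HSLuv space
    h, s, l = hex_to_hsluv(hex_color)
    print(hex_color, (round(h, 2), round(s, 2), round(l, 2)))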
|
|
#
# Copyright (C) 2011-2018 Red Hat, Inc. (https://github.com/Commonjava/indy)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import sys
import re
import yaml
import time
import math
from dateutil import tz
UTC=tz.tzutc()
LOCAL=tz.tzlocal()
START='start'
REFS='refs'
END='end'
FROM='from'
START_RE='start_regexp'
END_RE = 'end_regexp'
START_TIME='start_time'
END_TIME='end_time'
START_LINE='start_line'
END_LINE='end_line'
ELAPSED='elapsed'
CONCURRENCY='concurrent_requests'
CONCURRENCY_FROM = 'concurrent_requests_from'
TIMESTAMP_FORMAT='%Y-%m-%d %H:%M:%S.%f'
ELAPSED_FORMAT='%H:%M:%S'
def timerConfigSample():
    sample = []
    sample.append({
        START: r'.+ o\.c\.i\.b\.j\.ResourceManagementFilter - START request: (\S+ \S+) .+',
        REFS: r'\1',
        END: r'.+ o\.c\.i\.b\.j\.ResourceManagementFilter - END request: %(refs)s .+',
    })
    return sample
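# For reference, hypothetical log lines that the sample config above would
# pair up (timestamps, thread names, and request paths are invented):
#   2018-01-01 12:00:00.000 INFO [http-1] o.c.i.b.j.ResourceManagementFilter - START request: GET /api/content (from 10.0.0.1)
#   2018-01-01 12:00:01.250 INFO [http-1] o.c.i.b.j.ResourceManagementFilter - END request: GET /api/content (took 1.25s)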
def getDatestampFields(line):
return ' '.join(line.split(' ')[0:2])
def parseTime(line):
tstamp = getDatestampFields(line)
return time.mktime(time.strptime(tstamp, TIMESTAMP_FORMAT))
def formatElapsed(seconds):
return time.strftime(ELAPSED_FORMAT, time.gmtime(seconds))
def formatTime(seconds):
return time.strftime(TIMESTAMP_FORMAT[:-3], time.gmtime(seconds))
def findTimings(timer_config, logdir, filename_prefix):
    expression = r"%s(\.\d+)\.log" % filename_prefix
basefile = "%s.log" % filename_prefix
end_matchers = []
end_matchers_from = {}
for timing in timer_config:
timing[START_RE] = re.compile(timing[START])
if timing.get(FROM) is not None:
timing[FROM] = re.compile(timing[FROM])
done=[]
lines=0
errors = 0
avgTime=None
sumSqr=0
avgCount=0
minTime=None
maxTime=None
processed = []
firstStart = None
lastEnd = None
maxConcurrent=0
fnames = sorted([fname for fname in os.listdir(logdir) if re.match(expression, fname)], reverse=True)
if os.path.exists(os.path.join(logdir, basefile)):
fnames.append(basefile)
for fname in fnames:
processed.append(fname)
print "Scanning %s (%d current end-matchers in progress. Output contains: %d entries)" % (fname, len(end_matchers), len(done))
with open(os.path.join(logdir, fname)) as f:
for line in f:
line = line.rstrip()
lines = lines+1
found = False
for timing in timer_config:
exp = timing[START_RE]
m = exp.match(line)
if m is not None:
raw_refs = exp.sub(timing[REFS], line)
end_refs = re.escape(raw_refs)
end_exp = timing[END] % {REFS: end_refs}
end_re = re.compile(end_exp)
matcher = {}
matcher[END_RE] = end_re
matcher[REFS] = raw_refs
fromIP = 'unknown'
if timing.get(FROM) is not None:
fromMatch = timing[FROM].search(line)
if fromMatch is not None:
fromIP = fromMatch.group(1)
startTime = parseTime(line)
if firstStart is None:
firstStart = getDatestampFields(line)
matcher[START_TIME] = startTime
matcher[FROM] = fromIP
end_matchers.append(matcher)
matchers = end_matchers_from.get(fromIP) or []
matchers.append(matcher)
end_matchers_from[fromIP] = matchers
concurrent = len(end_matchers)
concurrent_from = len(matchers)
maxConcurrent = concurrent if concurrent > maxConcurrent else maxConcurrent
print "Found new START (%d/%d)." % (concurrent, (concurrent + len(done)))
# print "END will be:\n'%s'\n\n" % end_exp
found = True
break
if found is False:
remove_entry=None
for idx,entry in enumerate(end_matchers):
if entry[END_RE].match(line) is not None:
endTime = parseTime(line)
lastEnd = getDatestampFields(line)
elapsedSeconds=endTime - entry[START_TIME]
if elapsedSeconds < 0:
print "ERROR: Elapsed time is %d (negative) for: %s" % (elapsedSeconds, line)
else:
if avgTime is None:
avgTime = elapsedSeconds
minTime = elapsedSeconds
maxTime = elapsedSeconds
else:
avgTime = (((avgTime * avgCount) + elapsedSeconds) / (avgCount+1))
minTime = elapsedSeconds if minTime > elapsedSeconds else minTime
maxTime = elapsedSeconds if maxTime < elapsedSeconds else maxTime
sumSqr = sumSqr + (elapsedSeconds ** 2 )
avgCount = avgCount+1
entry[START_TIME] = formatTime(entry[START_TIME])
entry[END_TIME] = formatTime(endTime)
entry[ELAPSED] = formatElapsed(elapsedSeconds)
entry[CONCURRENCY] = len(end_matchers)
matchers_from = end_matchers_from[entry[FROM]]
entry[CONCURRENCY_FROM] = len(matchers_from)
entry.pop(START_RE, None)
entry.pop(END_RE, None)
print "Found END (%d/%d)." % (len(end_matchers), (len(end_matchers) + len(done)))
done.append(entry)
remove_entry = entry
break
                if remove_entry is not None:
                    end_matchers.remove(remove_entry)
                    matchers_from = end_matchers_from[remove_entry[FROM]]
                    matchers_from.remove(remove_entry)
    # Guard against an empty run: with no completed entries the averages are
    # undefined and the std-dev expression below would divide by zero.
    if avgCount > 0:
        times = {
            'avg': formatElapsed(avgTime),
            'max': formatElapsed(maxTime),
            'min': formatElapsed(minTime),
            'std_dev': formatElapsed(math.sqrt((sumSqr / avgCount) - (avgTime ** 2)))
        }
    else:
        times = None
    output = {
        '_summary': {
            'processed_logs': processed,
            'span': {
                'first_start': firstStart,
                'last_end': lastEnd
            },
            'counts': {
                'entries_ended': avgCount,
                'lines_processed': lines,
                'unmatched_starts': len(end_matchers),
                'max_concurrency': maxConcurrent
            },
            'times': times
        },
        'entries': done
    }
return output
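# Illustrative driver (not in the original script; the log directory and
# filename prefix below are assumptions): load a timer config from a YAML
# file given on the command line, or fall back to timerConfigSample(), then
# dump the timing report.
if __name__ == '__main__':
    if len(sys.argv) > 1 and os.path.exists(sys.argv[1]):
        with open(sys.argv[1]) as f:
            timer_config = yaml.safe_load(f)
    else:
        timer_config = timerConfigSample()
    report = findTimings(timer_config, logdir='.', filename_prefix='indy')
    print(yaml.safe_dump(report, default_flow_style=False))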
|
|
"""
File: psoc_4m_base_class.py
This file contains the base class that all Modbus 5LP modules use.
Variables which can be read via Modbus type 3 (read holding registers) messages:
#define MOD_UNIT_ID 0
#define MOD_UNIT_TEMP 1
#define MOD_RTU_WATCH_DOG_FLAG 2
#define MOD_CONTROLLER_WATCH_DOG_FLAG 3
#define MOD_COMMISSIONING_FLAG 4
#define MOD_POWER_UP_EVENT 5
#define MOD_MINUTE_ROLLOVER 6
#define MOD_DISCRETE_IO_CHANGE 7
#define MOD_YEAR 8
#define MOD_MONTH 9
#define MOD_DAY 10
#define MOD_HOUR 11
#define MOD_MINUTE 12
#define MOD_SECOND 13
Registers are changed through Modbus type 16 (write multiple registers) messages.
The only registers which can be changed are the following:
System level changes:
change_time
change_modbus_address
clear_watch_dog_flags
clear_power_up_event
clear_minute_rollover
Python Functions are provided for all common accesses
Register MOD_RESET_REASON has the following bit field definitions
CY_SYS_RESET_WDT - WDT caused a reset
CY_SYS_RESET_PROTFAULT - A protection violation occurred that requires a reset
CY_SYS_RESET_SW - Cortex-M0 requested a system reset.
/* CySysGetResetReason() */
#define CY_SYS_RESET_WDT_SHIFT (0u)
#define CY_SYS_RESET_PROTFAULT_SHIFT (3u)
#define CY_SYS_RESET_SW_SHIFT (4u)
#define CY_SYS_RESET_WDT ((uint32)1u << CY_SYS_RESET_WDT_SHIFT )
#define CY_SYS_RESET_PROTFAULT ((uint32)1u << CY_SYS_RESET_PROTFAULT_SHIFT)
#define CY_SYS_RESET_SW ((uint32)1u << CY_SYS_RESET_SW_SHIFT )
"""
import datetime
import time
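# Illustrative helper (not part of the original driver): decode the
# MOD_RESET_REASON bit field documented in the module docstring above.
# The shift values mirror the CY_SYS_RESET_* defines quoted there.
CY_SYS_RESET_WDT = 1 << 0        # WDT caused a reset
CY_SYS_RESET_PROTFAULT = 1 << 3  # protection violation occurred
CY_SYS_RESET_SW = 1 << 4         # Cortex-M0 requested a system reset
def decode_reset_reason(reason):
    """Return the list of reset causes encoded in a MOD_RESET_REASON value."""
    causes = []
    if reason & CY_SYS_RESET_WDT:
        causes.append("watchdog")
    if reason & CY_SYS_RESET_PROTFAULT:
        causes.append("protection_fault")
    if reason & CY_SYS_RESET_SW:
        causes.append("software_request")
    return causes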
class PSOC_BASE_4M():
def __init__(self,ip,port, modbus_address,instrument, system_id):
self.ip = ip
self.port = port
self.instrument = instrument
self.modbus_address = modbus_address
self.system_id = system_id
self.commission_address = 0xc0
        # write address definitions
self.change_time_addr = 20
self.change_modbus_addr = 21
self.clear_watch_dog_flags_addr = 22
self.clear_power_up_event_addr = 23
self.clear_discrete_io_change_addr = 24
self.clear_minute_rollover_addr = 25
self.clear_controller_watch_dog_flags_addr = 26
#System Variables
self.system_var_start = 0
        self.system_var_list = [
            "MOD_UNIT_ID",                    #0
            "MOD_RTU_WATCH_DOG_FLAG",         #1
            "MOD_CONTROLLER_WATCH_DOG_FLAG",  #2
            "MOD_COMMISSIONING_FLAG",         #3
            "MOD_POWER_UP_EVENT",             #4
            "MOD_RESET_REASON",               #5
            "MOD_MINUTE_ROLLOVER"             #6
        ]
def set_ip( self, ip ):
self.ip = ip
def set_port( self, port ):
self.port = port
def set_modbus_address( self, address ):
self.modbus_address = address
def set_instrument( self, instrument ):
self.instrument = instrument
def set_system_id( self, system_id ):
self.system_id = system_id
#
#
# Read Variables
#
#
def read_system_variables( self ):
return_value = {}
self.instrument.set_ip( self.ip,self.port )
data = self.instrument.read_registers( self.modbus_address, self.system_var_start ,len(self.system_var_list) , 3 ,False)
for i in range(0,len(self.system_var_list)):
return_value[self.system_var_list[i]] = data[i]
return return_value
    def read_time( self ):
        self.instrument.set_ip( self.ip,self.port )
        data = self.instrument.read_longs( self.modbus_address, 8, 2, 3, False)
        return data[0]
#
#
# Write routines
#
#
def update_current_time(self):
self.instrument.set_ip( self.ip,self.port )
now = int(time.time())
self.instrument.write_longs( self.modbus_address, self.change_time_addr, [int(now),0],functioncode=16, signed=False)
def commission_modbus_address( self, new_address , new_commissioning_address = None ):
        if new_commissioning_address is None:
            new_commissioning_address = self.commission_address
        print(new_commissioning_address)
self.instrument.set_ip( self.ip,self.port )
self.instrument.write_registers( new_commissioning_address, self.change_modbus_addr , [new_address, new_commissioning_address ] )
def clear_watch_dog_flag(self):
self.instrument.set_ip( self.ip,self.port )
self.instrument.write_registers( self.modbus_address, self.clear_watch_dog_flags_addr, [0] )
def set_controller_watch_dog_flag(self ):
self.instrument.set_ip( self.ip,self.port )
self.instrument.write_registers( self.modbus_address, self.clear_controller_watch_dog_flags_addr, [1] )
def clear_power_on_reset(self):
self.instrument.set_ip( self.ip,self.port )
self.instrument.write_registers(self.modbus_address, self.clear_power_up_event_addr, [0] )
def clear_minute_rollover(self):
self.instrument.set_ip( self.ip,self.port )
self.instrument.write_registers(self.modbus_address, self.clear_minute_rollover_addr, [0] )
    def verify_unit(self ):
        self.instrument.set_ip( self.ip,self.port )
        data = self.instrument.read_registers( self.modbus_address,self.system_var_list.index( "MOD_UNIT_ID"),1)
        return data[0] == self.system_id
def process_event_queue( self, fifo_index ):
        # returns a list of [event, event_data] pairs
        # event ids are system dependent, but 0 is POWER_UP
self.instrument.set_ip( self.ip,self.port )
return self.instrument.read_fifo( self.modbus_address,fifo_index)
    def update_flash( self ):
        # NOTE: update_flash_addr is not defined in this base class; a
        # subclass is expected to set it before update_flash() is called.
        self.instrument.set_ip( self.ip,self.port )
        self.instrument.write_registers( self.modbus_address, self.update_flash_addr, [0])
if __name__ == "__main__":
import new_instrument
import time
instrument = new_instrument.Modbus_Instrument()
psoc_4m = PSOC_BASE_4M( "192.168.1.82",5005,40, instrument, 0x201 )
for i in range(0,100):
        print(i, psoc_4m.read_system_variables())
    print(psoc_4m.read_system_variables())
    print(psoc_4m.read_time())
    print(psoc_4m.update_current_time())
    print(psoc_4m.read_time())
    print(psoc_4m.clear_watch_dog_flag())
    print(psoc_4m.read_system_variables())
    print(psoc_4m.set_controller_watch_dog_flag())
    print(psoc_4m.read_system_variables())
    print(psoc_4m.clear_power_on_reset())
    print(psoc_4m.clear_minute_rollover())
    print(psoc_4m.verify_unit())
    print(psoc_4m.read_system_variables())
    print(psoc_4m.clear_watch_dog_flag())
    print(psoc_4m.read_system_variables())
    print(psoc_4m.process_event_queue(0))
#psoc_4m.commission_modbus_address()
|
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Relation'
db.create_table('portal_relation', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('subject', self.gf('django.db.models.fields.related.ForeignKey')(related_name='+', to=orm['portal.Resource'])),
('object', self.gf('django.db.models.fields.related.ForeignKey')(related_name='+', to=orm['portal.Resource'])),
('type', self.gf('django.db.models.fields.CharField')(max_length=10)),
))
db.send_create_signal('portal', ['Relation'])
def backwards(self, orm):
# Deleting model 'Relation'
db.delete_table('portal_relation')
models = {
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'portal.authority': {
'Meta': {'object_name': 'Authority', '_ormbases': ['portal.Resource']},
'dates_of_existence': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'functions': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'general_context': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'history': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'institution_responsible_identifier': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'internal_structures': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'legal_status': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'lod': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'mandates': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'places': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'resource_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['portal.Resource']", 'unique': 'True', 'primary_key': 'True'}),
'revision_history': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': 'None', 'unique_with': '()', 'db_index': 'True'}),
'sources': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'type_of_entity': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'portal.collection': {
'Meta': {'object_name': 'Collection', '_ormbases': ['portal.Resource']},
'access_conditions': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'accruals': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'acquisition': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'alternate_title': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'appraisal': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'archival_history': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'arrangement': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['portal.Authority']", 'null': 'True', 'blank': 'True'}),
'edition': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'extent_and_medium': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'finding_aids': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'institution_responsible_identifier': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'location_of_copies': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'location_of_originals': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'lod': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'physical_characteristics': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'related_units_of_description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'repository': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['portal.Repository']"}),
'reproduction_conditions': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'resource_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['portal.Resource']", 'unique': 'True', 'primary_key': 'True'}),
'revision_history': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'rules': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'scope_and_content': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': 'None', 'unique_with': '()', 'db_index': 'True'}),
'sources': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'portal.contact': {
'Meta': {'object_name': 'Contact'},
'city': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'contact_person': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'contact_type': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'country_code': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'created_on': ('django.db.models.fields.DateTimeField', [], {}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'note': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'postal_code': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'primary': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'region': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'repository': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['portal.Repository']"}),
'street_address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'telephone': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'updated_on': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'portal.fuzzydate': {
'Meta': {'object_name': 'FuzzyDate'},
'circa': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'collection': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'date_set'", 'to': "orm['portal.Collection']"}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'end_time': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'precision': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'start_date': ('django.db.models.fields.DateField', [], {}),
'start_time': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'})
},
'portal.othername': {
'Meta': {'object_name': 'OtherName'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['portal.Resource']"})
},
'portal.place': {
'Meta': {'object_name': 'Place'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['portal.Resource']"})
},
'portal.property': {
'Meta': {'object_name': 'Property'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['portal.Resource']"}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'portal.relation': {
'Meta': {'object_name': 'Relation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['portal.Resource']"}),
'subject': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['portal.Resource']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '10'})
},
'portal.repository': {
'Meta': {'object_name': 'Repository', '_ormbases': ['portal.Resource']},
'access_conditions': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'buildings': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'collecting_policies': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'dates_of_existence': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disabled_access': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'finding_aids': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'functions': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'general_context': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'geocultural_context': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'history': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'holdings': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'internal_structures': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'legal_status': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'lod': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'logo': ('portal.thumbs.ImageWithThumbsField', [], {'name': "'logo'", 'sizes': '((100, 100), (300, 300))', 'max_length': '100', 'blank': 'True', 'null': 'True'}),
'maintenance_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'mandates': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'opening_times': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'places': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'reproduction_services': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'research_services': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'resource_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['portal.Resource']", 'unique': 'True', 'primary_key': 'True'}),
'rules': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': 'None', 'unique_with': '()', 'db_index': 'True'}),
'sources': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'type_of_entity': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'portal.resource': {
'Meta': {'object_name': 'Resource'},
'created_on': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_on': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'portal.resourceimage': {
'Meta': {'object_name': 'ResourceImage'},
'caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('portal.thumbs.ImageWithThumbsField', [], {'max_length': '100', 'name': "'image'", 'sizes': '((100, 100), (300, 300))'}),
'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['portal.Resource']"})
},
'taggit.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'})
},
'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_items'", 'to': "orm['taggit.Tag']"})
}
}
complete_apps = ['portal']
|
|
"""Support for Netatmo Smart thermostats."""
from datetime import timedelta
import logging
from typing import Optional, List
import pyatmo
import requests
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.climate import ClimateDevice, PLATFORM_SCHEMA
from homeassistant.components.climate.const import (
HVAC_MODE_AUTO,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_BOOST,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_PRESET_MODE,
DEFAULT_MIN_TEMP,
)
from homeassistant.const import (
TEMP_CELSIUS,
ATTR_TEMPERATURE,
CONF_NAME,
PRECISION_HALVES,
STATE_OFF,
ATTR_BATTERY_LEVEL,
)
from homeassistant.util import Throttle
from .const import DATA_NETATMO_AUTH
_LOGGER = logging.getLogger(__name__)
PRESET_FROST_GUARD = "Frost Guard"
PRESET_SCHEDULE = "Schedule"
PRESET_MANUAL = "Manual"
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_PRESET_MODE
SUPPORT_HVAC = [HVAC_MODE_HEAT, HVAC_MODE_AUTO, HVAC_MODE_OFF]
SUPPORT_PRESET = [PRESET_AWAY, PRESET_BOOST, PRESET_FROST_GUARD, PRESET_SCHEDULE]
STATE_NETATMO_SCHEDULE = "schedule"
STATE_NETATMO_HG = "hg"
STATE_NETATMO_MAX = "max"
STATE_NETATMO_AWAY = PRESET_AWAY
STATE_NETATMO_OFF = STATE_OFF
STATE_NETATMO_MANUAL = "manual"
PRESET_MAP_NETATMO = {
PRESET_FROST_GUARD: STATE_NETATMO_HG,
PRESET_BOOST: STATE_NETATMO_MAX,
PRESET_SCHEDULE: STATE_NETATMO_SCHEDULE,
PRESET_AWAY: STATE_NETATMO_AWAY,
STATE_NETATMO_OFF: STATE_NETATMO_OFF,
}
NETATMO_MAP_PRESET = {
STATE_NETATMO_HG: PRESET_FROST_GUARD,
STATE_NETATMO_MAX: PRESET_BOOST,
STATE_NETATMO_SCHEDULE: PRESET_SCHEDULE,
STATE_NETATMO_AWAY: PRESET_AWAY,
STATE_NETATMO_OFF: STATE_NETATMO_OFF,
STATE_NETATMO_MANUAL: STATE_NETATMO_MANUAL,
}
HVAC_MAP_NETATMO = {
PRESET_SCHEDULE: HVAC_MODE_AUTO,
STATE_NETATMO_HG: HVAC_MODE_AUTO,
PRESET_FROST_GUARD: HVAC_MODE_AUTO,
PRESET_BOOST: HVAC_MODE_HEAT,
STATE_NETATMO_OFF: HVAC_MODE_OFF,
STATE_NETATMO_MANUAL: HVAC_MODE_AUTO,
PRESET_MANUAL: HVAC_MODE_AUTO,
STATE_NETATMO_AWAY: HVAC_MODE_AUTO,
}
CURRENT_HVAC_MAP_NETATMO = {True: CURRENT_HVAC_HEAT, False: CURRENT_HVAC_IDLE}
CONF_HOMES = "homes"
CONF_ROOMS = "rooms"
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=300)
HOME_CONFIG_SCHEMA = vol.Schema(
{
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_ROOMS, default=[]): vol.All(cv.ensure_list, [cv.string]),
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Optional(CONF_HOMES): vol.All(cv.ensure_list, [HOME_CONFIG_SCHEMA])}
)
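# Illustrative configuration.yaml excerpt accepted by the schema above
# (home and room names are hypothetical):
#
#   climate:
#     - platform: netatmo
#       homes:
#         - name: My Home
#           rooms:
#             - Living Room
#             - Bedroom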
DEFAULT_MAX_TEMP = 30
NA_THERM = "NATherm1"
NA_VALVE = "NRV"
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the NetAtmo Thermostat."""
homes_conf = config.get(CONF_HOMES)
auth = hass.data[DATA_NETATMO_AUTH]
home_data = HomeData(auth)
try:
home_data.setup()
except pyatmo.NoDevice:
return
home_ids = []
rooms = {}
if homes_conf is not None:
for home_conf in homes_conf:
home = home_conf[CONF_NAME]
home_id = home_data.homedata.gethomeId(home)
if home_conf[CONF_ROOMS] != []:
rooms[home_id] = home_conf[CONF_ROOMS]
home_ids.append(home_id)
else:
home_ids = home_data.get_home_ids()
devices = []
for home_id in home_ids:
_LOGGER.debug("Setting up %s ...", home_id)
try:
room_data = ThermostatData(auth, home_id)
except pyatmo.NoDevice:
continue
for room_id in room_data.get_room_ids():
room_name = room_data.homedata.rooms[home_id][room_id]["name"]
_LOGGER.debug("Setting up %s (%s) ...", room_name, room_id)
if home_id in rooms and room_name not in rooms[home_id]:
_LOGGER.debug("Excluding %s ...", room_name)
continue
_LOGGER.debug("Adding devices for room %s (%s) ...", room_name, room_id)
devices.append(NetatmoThermostat(room_data, room_id))
add_entities(devices, True)
class NetatmoThermostat(ClimateDevice):
"""Representation a Netatmo thermostat."""
def __init__(self, data, room_id):
"""Initialize the sensor."""
self._data = data
self._state = None
self._room_id = room_id
self._room_name = self._data.homedata.rooms[self._data.home_id][room_id]["name"]
self._name = f"netatmo_{self._room_name}"
self._current_temperature = None
self._target_temperature = None
self._preset = None
self._away = None
self._operation_list = [HVAC_MODE_AUTO, HVAC_MODE_HEAT]
self._support_flags = SUPPORT_FLAGS
self._hvac_mode = None
self._battery_level = None
self.update_without_throttle = False
self._module_type = self._data.room_status.get(room_id, {}).get("module_type")
if self._module_type == NA_THERM:
self._operation_list.append(HVAC_MODE_OFF)
@property
def supported_features(self):
"""Return the list of supported features."""
return self._support_flags
@property
def name(self):
"""Return the name of the thermostat."""
return self._name
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_CELSIUS
@property
def current_temperature(self):
"""Return the current temperature."""
return self._current_temperature
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._target_temperature
@property
def target_temperature_step(self) -> Optional[float]:
"""Return the supported step of target temperature."""
return PRECISION_HALVES
@property
def hvac_mode(self):
"""Return hvac operation ie. heat, cool mode."""
return self._hvac_mode
@property
def hvac_modes(self):
"""Return the list of available hvac operation modes."""
return self._operation_list
@property
def hvac_action(self) -> Optional[str]:
"""Return the current running hvac operation if supported."""
if self._module_type == NA_THERM:
return CURRENT_HVAC_MAP_NETATMO[self._data.boilerstatus]
# Maybe it is a valve
if self._room_id in self._data.room_status:
if (
self._data.room_status[self._room_id].get("heating_power_request", 0)
> 0
):
return CURRENT_HVAC_HEAT
return CURRENT_HVAC_IDLE
def set_hvac_mode(self, hvac_mode: str) -> None:
"""Set new target hvac mode."""
mode = None
if hvac_mode == HVAC_MODE_OFF:
mode = STATE_NETATMO_OFF
elif hvac_mode == HVAC_MODE_AUTO:
mode = PRESET_SCHEDULE
elif hvac_mode == HVAC_MODE_HEAT:
mode = PRESET_BOOST
self.set_preset_mode(mode)
def set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
if self.target_temperature == 0:
self._data.homestatus.setroomThermpoint(
self._data.home_id,
self._room_id,
STATE_NETATMO_MANUAL,
DEFAULT_MIN_TEMP,
)
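        # Valves (NRV) have no native boost mode, so PRESET_BOOST is emulated
        # below with a manual setpoint at DEFAULT_MAX_TEMP.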
if (
preset_mode in [PRESET_BOOST, STATE_NETATMO_MAX]
and self._module_type == NA_VALVE
):
self._data.homestatus.setroomThermpoint(
self._data.home_id,
self._room_id,
STATE_NETATMO_MANUAL,
DEFAULT_MAX_TEMP,
)
elif preset_mode in [PRESET_BOOST, STATE_NETATMO_MAX, STATE_NETATMO_OFF]:
self._data.homestatus.setroomThermpoint(
self._data.home_id, self._room_id, PRESET_MAP_NETATMO[preset_mode]
)
elif preset_mode in [PRESET_SCHEDULE, PRESET_FROST_GUARD, PRESET_AWAY]:
self._data.homestatus.setThermmode(
self._data.home_id, PRESET_MAP_NETATMO[preset_mode]
)
else:
_LOGGER.error("Preset mode '%s' not available", preset_mode)
self.update_without_throttle = True
self.schedule_update_ha_state()
@property
def preset_mode(self) -> Optional[str]:
"""Return the current preset mode, e.g., home, away, temp."""
return self._preset
@property
def preset_modes(self) -> Optional[List[str]]:
"""Return a list of available preset modes."""
return SUPPORT_PRESET
def set_temperature(self, **kwargs):
"""Set new target temperature for 2 hours."""
temp = kwargs.get(ATTR_TEMPERATURE)
if temp is None:
return
self._data.homestatus.setroomThermpoint(
self._data.home_id, self._room_id, STATE_NETATMO_MANUAL, temp
)
self.update_without_throttle = True
self.schedule_update_ha_state()
@property
def device_state_attributes(self):
"""Return the state attributes of the thermostat."""
attr = {}
if self._battery_level is not None:
attr[ATTR_BATTERY_LEVEL] = self._battery_level
return attr
def update(self):
"""Get the latest data from NetAtmo API and updates the states."""
try:
if self.update_without_throttle:
self._data.update(no_throttle=True)
self.update_without_throttle = False
else:
self._data.update()
except AttributeError:
_LOGGER.error("NetatmoThermostat::update() got exception")
return
try:
if self._module_type is None:
self._module_type = self._data.room_status[self._room_id]["module_type"]
self._current_temperature = self._data.room_status[self._room_id][
"current_temperature"
]
self._target_temperature = self._data.room_status[self._room_id][
"target_temperature"
]
self._preset = NETATMO_MAP_PRESET[
self._data.room_status[self._room_id]["setpoint_mode"]
]
self._hvac_mode = HVAC_MAP_NETATMO[self._preset]
self._battery_level = self._data.room_status[self._room_id].get(
"battery_level"
)
except KeyError as err:
_LOGGER.error(
"The thermostat in room %s seems to be out of reach. (%s)",
self._room_id,
err,
)
self._away = self._hvac_mode == HVAC_MAP_NETATMO[STATE_NETATMO_AWAY]
class HomeData:
"""Representation Netatmo homes."""
def __init__(self, auth, home=None):
"""Initialize the HomeData object."""
self.auth = auth
self.homedata = None
self.home_ids = []
self.home_names = []
self.room_names = []
self.schedules = []
self.home = home
self.home_id = None
def get_home_ids(self):
"""Get all the home ids returned by NetAtmo API."""
if self.homedata is None:
return []
for home_id in self.homedata.homes:
if (
"therm_schedules" in self.homedata.homes[home_id]
and "modules" in self.homedata.homes[home_id]
):
self.home_ids.append(self.homedata.homes[home_id]["id"])
return self.home_ids
def setup(self):
"""Retrieve HomeData by NetAtmo API."""
try:
self.homedata = pyatmo.HomeData(self.auth)
self.home_id = self.homedata.gethomeId(self.home)
except TypeError:
_LOGGER.error("Error when getting home data")
except AttributeError:
_LOGGER.error("No default_home in HomeData")
except pyatmo.NoDevice:
_LOGGER.debug("No thermostat devices available")
except pyatmo.InvalidHome:
_LOGGER.debug("Invalid home %s", self.home)
class ThermostatData:
"""Get the latest data from Netatmo."""
def __init__(self, auth, home_id=None):
"""Initialize the data object."""
self.auth = auth
self.homedata = None
self.homestatus = None
self.room_ids = []
self.room_status = {}
self.schedules = []
self.home_id = home_id
self.home_name = None
self.away_temperature = None
self.hg_temperature = None
self.boilerstatus = None
self.setpoint_duration = None
def get_room_ids(self):
"""Return all module available on the API as a list."""
if not self.setup():
return []
for room in self.homestatus.rooms:
self.room_ids.append(room)
return self.room_ids
def setup(self):
"""Retrieve HomeData and HomeStatus by NetAtmo API."""
try:
self.homedata = pyatmo.HomeData(self.auth)
self.homestatus = pyatmo.HomeStatus(self.auth, home_id=self.home_id)
self.home_name = self.homedata.getHomeName(self.home_id)
self.update()
except TypeError:
_LOGGER.error("ThermostatData::setup() got error")
return False
return True
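    # Throttle limits polling to one API call per MIN_TIME_BETWEEN_UPDATES;
    # callers can bypass it with update(no_throttle=True).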
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Call the NetAtmo API to update the data."""
try:
self.homestatus = pyatmo.HomeStatus(self.auth, home_id=self.home_id)
except TypeError:
_LOGGER.error("Error when getting homestatus")
return
except requests.exceptions.Timeout:
_LOGGER.warning("Timed out when connecting to Netatmo server")
return
_LOGGER.debug("Following is the debugging output for homestatus:")
_LOGGER.debug(self.homestatus.rawData)
for room in self.homestatus.rooms:
try:
roomstatus = {}
homestatus_room = self.homestatus.rooms[room]
homedata_room = self.homedata.rooms[self.home_id][room]
roomstatus["roomID"] = homestatus_room["id"]
if homestatus_room["reachable"]:
roomstatus["roomname"] = homedata_room["name"]
roomstatus["target_temperature"] = homestatus_room[
"therm_setpoint_temperature"
]
roomstatus["setpoint_mode"] = homestatus_room["therm_setpoint_mode"]
roomstatus["current_temperature"] = homestatus_room[
"therm_measured_temperature"
]
roomstatus["module_type"] = self.homestatus.thermostatType(
home_id=self.home_id, rid=room, home=self.home_name
)
roomstatus["module_id"] = None
roomstatus["heating_status"] = None
roomstatus["heating_power_request"] = None
batterylevel = None
for module_id in homedata_room["module_ids"]:
if (
self.homedata.modules[self.home_id][module_id]["type"]
== NA_THERM
or roomstatus["module_id"] is None
):
roomstatus["module_id"] = module_id
if roomstatus["module_type"] == NA_THERM:
self.boilerstatus = self.homestatus.boilerStatus(
rid=roomstatus["module_id"]
)
roomstatus["heating_status"] = self.boilerstatus
batterylevel = self.homestatus.thermostats[
roomstatus["module_id"]
].get("battery_level")
elif roomstatus["module_type"] == NA_VALVE:
roomstatus["heating_power_request"] = homestatus_room[
"heating_power_request"
]
roomstatus["heating_status"] = (
roomstatus["heating_power_request"] > 0
)
if self.boilerstatus is not None:
roomstatus["heating_status"] = (
self.boilerstatus and roomstatus["heating_status"]
)
batterylevel = self.homestatus.valves[
roomstatus["module_id"]
].get("battery_level")
if batterylevel:
batterypct = interpolate(
batterylevel, roomstatus["module_type"]
)
if roomstatus.get("battery_level") is None:
roomstatus["battery_level"] = batterypct
elif batterypct < roomstatus["battery_level"]:
roomstatus["battery_level"] = batterypct
self.room_status[room] = roomstatus
except KeyError as err:
_LOGGER.error("Update of room %s failed. Error: %s", room, err)
self.away_temperature = self.homestatus.getAwaytemp(home_id=self.home_id)
self.hg_temperature = self.homestatus.getHgtemp(home_id=self.home_id)
self.setpoint_duration = self.homedata.setpoint_duration[self.home_id]
def interpolate(batterylevel, module_type):
"""Interpolate battery level depending on device type."""
na_battery_levels = {
NA_THERM: {
"full": 4100,
"high": 3600,
"medium": 3300,
"low": 3000,
"empty": 2800,
},
NA_VALVE: {
"full": 3200,
"high": 2700,
"medium": 2400,
"low": 2200,
"empty": 2200,
},
}
levels = sorted(na_battery_levels[module_type].values())
steps = [20, 50, 80, 100]
na_battery_level = na_battery_levels[module_type]
if batterylevel >= na_battery_level["full"]:
return 100
if batterylevel >= na_battery_level["high"]:
i = 3
elif batterylevel >= na_battery_level["medium"]:
i = 2
elif batterylevel >= na_battery_level["low"]:
i = 1
else:
return 0
pct = steps[i - 1] + (
(steps[i] - steps[i - 1])
* (batterylevel - levels[i])
/ (levels[i + 1] - levels[i])
)
return int(pct)
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import logging
import operator
import re
from oslo_utils import units
import six
import datetime
from django.template.defaultfilters import filesizeformat # noqa
from django.utils.text import normalize_newlines # noqa
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy
from django.views.decorators.debug import sensitive_variables # noqa
from horizon import exceptions
from horizon import messages
from horizon import forms
from horizon.utils import functions
from horizon.utils import memoized
from horizon.utils import validators
from horizon import workflows
from openstack_dashboard import api
from openstack_dashboard.api import base
from openstack_dashboard.api import cinder
from openstack_dashboard.api import nova
from openstack_dashboard.usage import quotas
from openstack_dashboard.dashboards.admin.images \
import utils as image_utils
from openstack_dashboard.dashboards.admin.vgpu \
import utils as instance_utils
LOG = logging.getLogger(__name__)
class SelectProjectUserAction(workflows.Action):
project_id = forms.ThemableChoiceField(label=_("Project"))
user_id = forms.ThemableChoiceField(label=_("User"))
def __init__(self, request, *args, **kwargs):
super(SelectProjectUserAction, self).__init__(request, *args, **kwargs)
# Set our project choices
projects = [(tenant.id, tenant.name)
for tenant in request.user.authorized_tenants]
self.fields['project_id'].choices = projects
# Set our user options
users = [(request.user.id, request.user.username)]
self.fields['user_id'].choices = users
class Meta(object):
name = _("Project & User")
# Unusable permission so this is always hidden. However, we
# keep this step in the workflow for validation/verification purposes.
permissions = ("!",)
class SelectProjectUser(workflows.Step):
action_class = SelectProjectUserAction
contributes = ("project_id", "user_id")
class TerminalControlAction(workflows.Action):
TIME_ZONE_CHOICES = (
("utc", _("UTC")),
("localtime", _("Localtime")))
time_zone = forms.ChoiceField(label=_("Instance Time Zone"),
required=False,
initial="localtime",
choices=TIME_ZONE_CHOICES,
help_text=_("Choose Instance Time Zone"))
screen = forms.ChoiceField(label=_("Instance Screen"),
initial=1,
required=False,
choices=[(1, _('1')),
(2, _('2')),
(4, _('4'))])
    image_format = forms.ChoiceField(label=_("Disk Mode"),
                                     initial="",
                                     required=False,
                                     choices=[("", _("Thin Provision")),
                                              ("raw", _("RAW"))],
                                     help_text=_("The default disk mode of the "
                                                 "virtual machine is thin "
                                                 "provision; select RAW to "
                                                 "fully pre-allocate the disk "
                                                 "instead."))
    disk_type = forms.ChoiceField(label=_("Disk Interface"),
                                  initial="virtio",
                                  required=False,
                                  choices=[("ide", _('IDE')),
                                           ("virtio", _('Virtio'))])
    usb_control = forms.BooleanField(label=_("USB"),
                                     required=False,
                                     initial=True)
    clipboard_control = forms.BooleanField(label=_("Allow Clipboard Access"),
                                           required=False,
                                           initial=True)
    quatity_control = forms.ChoiceField(label=_("Quality"),
                                        initial="low",
                                        required=False,
                                        choices=[("low", _('Low')),
                                                 ("high", _('High'))])
    jostle = forms.ChoiceField(label=_("Desktop Mode"),
                               required=False,
                               initial="shared",
                               help_text=_("Choose a desktop mode"),
                               choices=[("single", _('Dedicated (no preemption)')),
                                        ("shared", _('Shared (preemptible)'))])
    multi_user = forms.ChoiceField(label=_('Multi-user connection'),
                                   required=False,
                                   initial="off",
                                   choices=[("on", _("Yes")),
                                            ("off", _("No"))])
    broadcast = forms.BooleanField(label=_("Allow Screen Broadcast"),
                                   required=False,
                                   initial=False)
    shutdown_restore = forms.ChoiceField(label=_('Shutdown Restore'),
                                         required=False,
                                         initial="no",
                                         choices=[("yes", _("Yes")),
                                                  ("no", _("No"))])
    during = forms.ChoiceField(label=_("Persistent Mode"),
                               initial="immobilization",
                               required=False,
                               help_text=_("Choose a persistence mode"),
                               choices=[("immobilization", _('Permanent')),
                                        ("variable", _('Non-persistent'))])
    per = forms.ChoiceField(label=_("Period"),
                            required=False)
    month_id = forms.CharField(label=_("Month"),
                               required=False,
                               max_length=255)
    day_id = forms.ChoiceField(label=_("Day"),
                               required=False)
    week_id = forms.ChoiceField(label=_("Week"),
                                required=False)
    class Meta(object):
        name = _("Terminal Control")
        help_text_template = ("admin/vgpu/"
                              "_launch_instance_terminal_help.html")
def __init__(self, request, context, *args, **kwargs):
self.request = request
self.context = context
super(TerminalControlAction, self).__init__(
request, context, *args, **kwargs)
        per_choices = [('None', _("Select period")),
                       ("day_id", _('Day')),
                       ("week_id", _('Week')),
                       ("month_id", _('Month'))]
day_id_choices = [('1:00', _("1:00")),
("2:00", _('2:00')),
("3:00", _('3:00')),
("4:00", _('4:00')),
("5:00", _('5:00'))]
week_id_choices = [('monday', _("Monday")),
("tuesday", _('Tuesday')),
("wednesday", _('Wednesday')),
("thursday", _('Thursday')),
("friday", _('Friday')),
("saturday", _('Saturday')),
("sunday", _('Sunday'))]
self.fields['per'].choices = per_choices
self.fields['day_id'].choices = day_id_choices
self.fields['week_id'].choices = week_id_choices
def clean(self):
cleaned_data = super(TerminalControlAction, self).clean()
month_id = cleaned_data.get('month_id', 1)
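        # month_id appears to hold the day of the month (1-31) on which a
        # monthly restore runs; the pattern below enforces that range.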
        p = re.compile(r'^([1-9]|[12]\d|3[01])$', re.S)
        if month_id:
            if not p.match(month_id):
                error_message = _('The restore day must be a valid day of '
                                  'the month (1-31); please reset it')
                raise forms.ValidationError(error_message)
        return cleaned_data
class TerminalControl(workflows.Step):
action_class = TerminalControlAction
contributes = ("time_zone","screen","image_format", "disk_type",
"usb_control","clipboard_control", "quatity_control",
"shutdown_restore","multi_user", "broadcast",
"jostle", "during","spice_secure","per","dev_time",
"day_id","week_id","month_id",)
    def contribute(self, data, context):
        if data:
            param = dict()
            param['time_zone'] = data.get("time_zone", None)
            param['screen'] = data.get("screen", None)
            param['image_format'] = data.get("image_format", None)
            param['disk_type'] = data.get("disk_type", None)
            param['usb_control'] = data.get("usb_control", None)
            param['clipboard_control'] = data.get("clipboard_control", None)
            if data.get("quatity_control") == "low":
                param['quatity_control'] = True
            else:
                param['quatity_control'] = False
            param['shutdown_restore'] = data.get("shutdown_restore", None)
            param['multi_user'] = data.get("multi_user", None)
            allow_screen_broadcast = data.get("broadcast", "")
            if param['multi_user'] != "on":
                allow_screen_broadcast = False
            param['allow_screen_broadcast'] = allow_screen_broadcast
            control = data.get("jostle", "")
            if control == "single":
                param['jostle'] = True
            else:
                param['jostle'] = False
            param['during'] = data.get("during", None)
            param['spice_secure'] = False
            param['per'] = data.get("per", "")
            param['dev_time'] = None
            if param['shutdown_restore'] == "yes":
                param['during'] = "immobilization"
            if param['during'] == "immobilization":
                param['per'] = None
            per = param['per']
            if per is None or per == "day_id":
                param['dev_time'] = None
            elif per == "week_id":
                param['dev_time'] = data.get("week_id", "")
            elif per == "month_id":
                param['dev_time'] = data.get("month_id", "")
            if param['dev_time'] == "":
                param['dev_time'] = 1
            context['terminal'] = [param]
        return context
class SetInstanceDetailsAction(workflows.Action):
availability_zone = forms.ThemableChoiceField(label=_("Availability Zone"),
required=False)
name = forms.CharField(label=_("Instance Name"),
required=False,
max_length=255)
pool = forms.ChoiceField(label=_("Pool"),
required=False,
help_text=_("Choose a pool for the instance to launch."))
flavor = forms.ThemableChoiceField(label=_("Flavor"),
help_text=_("Size of image to launch."))
count = forms.IntegerField(label=_("Number of Instances"),
min_value=1,
initial=1)
source_type = forms.ThemableChoiceField(
label=_("Instance Boot Source"),
help_text=_("Choose Your Boot Source "
"Type."))
instance_snapshot_id = forms.ThemableChoiceField(
label=_("Instance Snapshot"),
required=False)
volume_id = forms.ThemableChoiceField(label=_("Volume"), required=False)
volume_snapshot_id = forms.ThemableChoiceField(label=_("Volume Snapshot"),
required=False)
image_id = forms.ChoiceField(
label=_("Image Name"),
required=False,
widget=forms.ThemableSelectWidget(
data_attrs=('volume_size',),
transform=lambda x: ("%s (%s)" % (x.name,
filesizeformat(x.bytes)))))
image_or_snapshot = forms.ChoiceField(
label=_("Image Snapshot"),
required=False,
widget=forms.SelectWidget(
data_attrs=('volume_size',),
transform=lambda x: ("%s (%s)" % (x.name,
filesizeformat(x.bytes)))))
    templet_id = forms.ChoiceField(label=_("Templet Name"),
required=False,
widget=forms.SelectWidget(
data_attrs=('volume_size',),
transform=lambda x: ("%s (%s)" % (x.name,
filesizeformat(x.bytes)))))
custom_id = forms.ChoiceField(label=_("Custom"), required=False)
volume_size = forms.IntegerField(label=_("Device size (GB)"),
initial=1,
min_value=0,
required=False,
help_text=_("Volume size in gigabytes "
"(integer value)."))
device_name = forms.CharField(label=_("Device Name"),
required=False,
initial="vda",
help_text=_("Volume mount point (e.g. 'vda' "
"mounts at '/dev/vda'). Leave "
"this field blank to let the "
"system choose a device name "
"for you."))
vol_delete_on_instance_delete = forms.BooleanField(
label=_("Delete Volume on Instance Delete"),
initial=False,
required=False,
help_text=_("Delete volume when the instance is deleted"))
class Meta(object):
name = _("Details")
help_text_template = ("admin/vgpu/"
"_launch_details_help.html")
def __init__(self, request, context, *args, **kwargs):
self._init_images_cache()
self.request = request
self.context = context
super(SetInstanceDetailsAction, self).__init__(
request, context, *args, **kwargs)
# Hide the device field if the hypervisor doesn't support it.
if not nova.can_set_mount_point():
self.fields['device_name'].widget = forms.widgets.HiddenInput()
source_type_choices = [
('', _("Select source")),
("image_id", _("Boot from image")),
("templet_id",_("Boot from templet")),
#("custom_id", _("Boot from custom")),
("instance_snapshot_id", _("Boot from snapshot")),
]
if cinder.is_volume_service_enabled(request):
source_type_choices.append(("volume_id", _("Boot from volume")))
try:
if api.nova.extension_supported("BlockDeviceMappingV2Boot",
request):
source_type_choices.append(
("volume_image_id",
_("Boot from image (creates a new volume)")))
except Exception:
exceptions.handle(request, _('Unable to retrieve extensions '
'information.'))
source_type_choices.append(
("volume_snapshot_id",
_("Boot from volume snapshot (creates a new volume)")))
self.fields['source_type'].choices = source_type_choices
@memoized.memoized_method
def _get_flavor(self, flavor_id):
try:
# We want to retrieve details for a given flavor,
# however flavor_list uses a memoized decorator
# so it is used instead of flavor_get to reduce the number
# of API calls.
flavors = instance_utils.flavor_list(self.request)
flavor = [x for x in flavors if x.id == flavor_id][0]
except IndexError:
flavor = None
return flavor
@memoized.memoized_method
def _get_image(self, image_id):
try:
# We want to retrieve details for a given image,
# however get_available_images uses a cache of image list,
# so it is used instead of image_get to reduce the number
# of API calls.
images = image_utils.get_available_images(
self.request,
self.context.get('project_id'),
self._images_cache)
image = [x for x in images if x.id == image_id][0]
except IndexError:
image = None
return image
    def _check_licence(self, cleaned_data):
        try:
            licence = api.nova.get_licence(self.request)
            decoded_string = api.authcode.AuthCode.code_init(licence)
            probation = getattr(licence, 'probation', False)
            if not probation:
                start_time = datetime.datetime.strptime(
                    decoded_string['time'], '%Y-%m-%dT%H:%M:%S.%f')
                rtf = datetime.datetime.strptime(
                    licence.starttime, '%Y-%m-%dT%H:%M:%S.%f')
                end_time = datetime.timedelta(days=decoded_string['during'])
                run_time = datetime.datetime(rtf.year, rtf.month, rtf.day)
                ef = start_time + end_time
                expiretime = datetime.datetime(ef.year, ef.month, ef.day)
                num = decoded_string['num']
                used = licence.used
                if used >= num or run_time >= expiretime or licence.disabled is True:
                    msg = _("The license expired on %(expiretime)s or the "
                            "maximum number of virtual machines (%(num)s) "
                            "has been reached.")
                    params = {'expiretime': expiretime, 'num': num}
                    raise forms.ValidationError(msg % params)
                cleaned_data.update({'num': num})
        except forms.ValidationError:
            # Keep the detailed validation message instead of masking it below.
            raise
        except Exception:
            raise forms.ValidationError(_("License Expired"))
def _check_quotas(self, cleaned_data):
pool_id = cleaned_data.get('pool',None)
count = cleaned_data.get('count', 1)
# Prevent launching more instances than the quota allows
usages = quotas.tenant_quota_usages(self.request, pool_id)
#LOG.info("usages ============================%s" % usages)
available_count = usages['instances']['available']
if available_count < count:
error_message = ungettext_lazy(
'The requested instance cannot be launched as you only '
'have %(avail)i of your quota available. ',
'The requested %(req)i instances cannot be launched as you '
'only have %(avail)i of your quota available.',
count)
params = {'req': count,
'avail': available_count}
raise forms.ValidationError(error_message % params)
source_type = cleaned_data.get('source_type')
if source_type in ('volume_image_id', 'volume_snapshot_id'):
available_volume = usages['volumes']['available']
if available_volume < count:
msg = (_('The requested instance cannot be launched. '
'Requested volume exceeds quota: Available: '
'%(avail)s, Requested: %(req)s.')
% {'avail': available_volume, 'req': count})
raise forms.ValidationError(msg)
flavor_id = cleaned_data.get('flavor')
flavor = self._get_flavor(flavor_id)
count_error = []
# Validate cores and ram.
available_cores = usages['cores']['available']
if flavor and available_cores < count * flavor.vcpus:
count_error.append(_("Cores(Available: %(avail)s, "
"Requested: %(req)s)")
% {'avail': available_cores,
'req': count * flavor.vcpus})
available_ram = usages['ram']['available']
if flavor and available_ram < count * flavor.ram:
count_error.append(_("RAM(Available: %(avail)s, "
"Requested: %(req)s)")
% {'avail': available_ram,
'req': count * flavor.ram})
if count_error:
value_str = ", ".join(count_error)
msg = (_('The requested instance cannot be launched. '
'The following requested resource(s) exceed '
'quota(s): %s.') % value_str)
if count == 1:
self._errors['flavor'] = self.error_class([msg])
else:
self._errors['count'] = self.error_class([msg])
def _check_flavor_for_image(self, cleaned_data):
# Prevents trying to launch an image needing more resources.
image_id = cleaned_data.get('image_id')
image = self._get_image(image_id)
flavor_id = cleaned_data.get('flavor')
flavor = self._get_flavor(flavor_id)
if not image or not flavor:
return
props_mapping = (("min_ram", "ram"), ("min_disk", "disk"))
for iprop, fprop in props_mapping:
if getattr(image, iprop) > 0 and \
getattr(image, iprop) > getattr(flavor, fprop):
msg = (_("The flavor '%(flavor)s' is too small "
"for requested image.\n"
"Minimum requirements: "
"%(min_ram)s MB of RAM and "
"%(min_disk)s GB of Root Disk.") %
{'flavor': flavor.name,
'min_ram': image.min_ram,
'min_disk': image.min_disk})
self._errors['image_id'] = self.error_class([msg])
break # Not necessary to continue the tests.
def _check_volume_for_image(self, cleaned_data):
image_id = cleaned_data.get('image_id')
image = self._get_image(image_id)
volume_size = cleaned_data.get('volume_size')
if not image or not volume_size:
return
volume_size = int(volume_size)
img_gigs = functions.bytes_to_gigabytes(image.size)
smallest_size = max(img_gigs, image.min_disk)
if volume_size < smallest_size:
msg = (_("The Volume size is too small for the"
" '%(image_name)s' image and has to be"
" greater than or equal to "
"'%(smallest_size)d' GB.") %
{'image_name': image.name,
'smallest_size': smallest_size})
self._errors['volume_size'] = self.error_class([msg])
def _check_source_image(self, cleaned_data):
if not cleaned_data.get('image_id'):
msg = _("You must select an image.")
self._errors['image_id'] = self.error_class([msg])
else:
self._check_flavor_for_image(cleaned_data)
    def _check_source_templet(self, cleaned_data):
        if not cleaned_data.get('templet_id'):
            msg = _("You must select a templet.")
            self._errors['templet_id'] = self.error_class([msg])
        elif not cleaned_data.get('image_or_snapshot'):
            msg = _("You must select an image or snapshot.")
            self._errors['image_or_snapshot'] = self.error_class([msg])
        else:
            templet_id = cleaned_data.get('templet_id', None)
            try:
                image_templet = api.glance.image_get(self.request, templet_id)
                templet = api.glance.templet_download(self.request, image_templet, True)
                users = api.keystone.user_list(self.request)
                count = 0
                for info in templet:
                    if info['count']:
                        count += int(info['count'], 10)
                    else:
                        count += 1
                if count >= cleaned_data.get('num', 0):
                    msg = _("The requested number of instances exceeds the "
                            "number authorized by the license.")
                    self._errors['templet_id'] = self.error_class([msg])
                cleaned_data.update({'templet': templet, 'users': users})
            except Exception:
                msg = _("The content format of the batch-creation Excel "
                        "file (%s) is not correct.") % templet_id
                self._errors['templet_id'] = self.error_class([msg])
        #LOG.info("_check_source_templet ===============%s" % cleaned_data)
def _check_source_volume_image(self, cleaned_data):
        volume_size = self.data.get('volume_size', None)
        if not volume_size:
            msg = _("You must set the volume size.")
            self._errors['volume_size'] = self.error_class([msg])
        elif float(volume_size) <= 0:
            msg = _("Volume size must be greater than 0.")
            self._errors['volume_size'] = self.error_class([msg])
if not cleaned_data.get('image_id'):
msg = _("You must select an image.")
self._errors['image_id'] = self.error_class([msg])
return
else:
self._check_flavor_for_image(cleaned_data)
self._check_volume_for_image(cleaned_data)
def _check_source_instance_snapshot(self, cleaned_data):
# using the array form of get blows up with KeyError
# if instance_snapshot_id is nil
if not cleaned_data.get('instance_snapshot_id'):
msg = _("You must select a snapshot.")
self._errors['instance_snapshot_id'] = self.error_class([msg])
def _check_source_volume(self, cleaned_data):
if not cleaned_data.get('volume_id'):
msg = _("You must select a volume.")
self._errors['volume_id'] = self.error_class([msg])
# Prevent launching multiple instances with the same volume.
# TODO(gabriel): is it safe to launch multiple instances with
# a snapshot since it should be cloned to new volumes?
count = cleaned_data.get('count', 1)
if count > 1:
msg = _('Launching multiple instances is only supported for '
'images and instance snapshots.')
raise forms.ValidationError(msg)
def _check_source_volume_snapshot(self, cleaned_data):
if not cleaned_data.get('volume_snapshot_id'):
msg = _("You must select a snapshot.")
self._errors['volume_snapshot_id'] = self.error_class([msg])
def _check_source(self, cleaned_data):
# Validate our instance source.
source_type = self.data.get('source_type', None)
source_check_methods = {
'templet_id': self._check_source_templet,
'image_id': self._check_source_image,
'volume_image_id': self._check_source_volume_image,
'instance_snapshot_id': self._check_source_instance_snapshot,
'volume_id': self._check_source_volume,
'volume_snapshot_id': self._check_source_volume_snapshot
}
check_method = source_check_methods.get(source_type)
if check_method:
check_method(cleaned_data)
def clean(self):
cleaned_data = super(SetInstanceDetailsAction, self).clean()
self._check_licence(cleaned_data)
LOG.info("clean =========================%s" % cleaned_data)
self._check_quotas(cleaned_data)
self._check_source(cleaned_data)
return cleaned_data
def populate_templet_id_choices(self, request, context):
choices = []
images = image_utils.get_filter_properties(request)
#images = image_utils.get_available_images(request,
# context.get('project_id'),
# self._images_cache)
for image in images:
image.bytes = image.size
image.volume_size = max(
image.min_disk, functions.bytes_to_gigabytes(image.bytes))
if image.properties.get('image_type'):
if image.properties.get('image_type') != 'snapshot':
choices.append((image.id, image))
if context.get('image_id') == image.id and \
'volume_size' not in context:
context['volume_size'] = image.volume_size
if choices:
choices.sort(key=lambda c: c[1].name)
else:
choices.insert(0, ("", _("No templet available")))
return choices
def populate_image_or_snapshot_choices(self, request, context):
choices = []
images = image_utils.get_available_images(request,
context.get('project_id'),
self._images_cache)
for image in images:
image.bytes = image.size
image.volume_size = max(
image.min_disk, functions.bytes_to_gigabytes(image.bytes))
if image.properties.get('image_type'):
if image.properties.get('image_type') == 'snapshot':
choices.append((image.id, image))
if not image.properties.get('image_type'):
choices.append((image.id, image))
if context.get('image_id') == image.id and \
'volume_size' not in context:
context['volume_size'] = image.volume_size
if choices:
choices.sort(key=lambda c: c[1].name)
choices.insert(0, ("", _("Select Image or Snapshot")))
else:
choices.insert(0, ("", _("No images available")))
return choices
def populate_pool_choices(self, request, context):
tenants, has_more = api.keystone.tenant_list(self.request)
choices = [(choice.id, _('Default Pool') if choice.name == 'admin' else choice.name)
for choice in tenants if choice.name != 'services']
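        # The admin tenant's label is the lazily translated "Default Pool"
        # proxy rather than a plain string; use that to find it and move it
        # to the top of the list.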
        admin_pools = [default for default in choices
                       if not isinstance(default[1], six.string_types)]
        if admin_pools:
            admin_pool = admin_pools[0]
            choices.remove(admin_pool)
            choices.insert(0, admin_pool)
#LOG.info('choices================%s' % choices)
return choices
def populate_flavor_choices(self, request, context):
return instance_utils.flavor_field_data(request, False)
def populate_availability_zone_choices(self, request, context):
try:
zones = api.nova.availability_zone_list(request)
except Exception:
zones = []
exceptions.handle(request,
_('Unable to retrieve availability zones.'))
zone_list = [(zone.zoneName, zone.zoneName)
for zone in zones if zone.zoneState['available']]
zone_list.sort()
if not zone_list:
zone_list.insert(0, ("", _("No availability zones found")))
elif len(zone_list) > 1:
zone_list.insert(0, ("", _("Any Availability Zone")))
return zone_list
def get_help_text(self, extra_context=None):
extra = {} if extra_context is None else dict(extra_context)
try:
extra['usages'] = api.nova.tenant_absolute_limits(self.request,
reserved=True)
extra['usages_json'] = json.dumps(extra['usages'])
flavors = json.dumps([f._info for f in
instance_utils.flavor_list(self.request)])
extra['flavors'] = flavors
images = image_utils.get_available_images(
self.request, self.initial['project_id'], self._images_cache)
if images is not None:
attrs = [{'id': i.id,
'min_disk': getattr(i, 'min_disk', 0),
'min_ram': getattr(i, 'min_ram', 0),
'size': functions.bytes_to_gigabytes(i.size)}
for i in images]
extra['images'] = json.dumps(attrs)
except Exception:
exceptions.handle(self.request,
_("Unable to retrieve quota information."))
return super(SetInstanceDetailsAction, self).get_help_text(extra)
def _init_images_cache(self):
if not hasattr(self, '_images_cache'):
self._images_cache = {}
def _get_volume_display_name(self, volume):
if hasattr(volume, "volume_id"):
vol_type = "snap"
visible_label = _("Snapshot")
else:
vol_type = "vol"
visible_label = _("Volume")
return (("%s:%s" % (volume.id, vol_type)),
(_("%(name)s - %(size)s GB (%(label)s)") %
{'name': volume.name,
'size': volume.size,
'label': visible_label}))
def populate_image_id_choices(self, request, context):
choices = []
images = image_utils.get_available_images(request,
context.get('project_id'),
self._images_cache)
for image in images:
image.bytes = getattr(image, 'virtual_size', None) or image.size
image.volume_size = max(
image.min_disk, functions.bytes_to_gigabytes(image.bytes))
choices.append((image.id, image))
if context.get('image_id') == image.id and \
'volume_size' not in context:
context['volume_size'] = image.volume_size
if choices:
choices.sort(key=lambda c: c[1].name or '')
choices.insert(0, ("", _("Select Image")))
else:
choices.insert(0, ("", _("No images available")))
return choices
def populate_instance_snapshot_id_choices(self, request, context):
images = image_utils.get_available_images(request,
context.get('project_id'),
self._images_cache)
choices = [(image.id, image.name)
for image in images
if image.properties.get("image_type", '') == "snapshot"]
if choices:
choices.sort(key=operator.itemgetter(1))
choices.insert(0, ("", _("Select Instance Snapshot")))
else:
choices.insert(0, ("", _("No snapshots available")))
return choices
def populate_volume_id_choices(self, request, context):
volumes = []
try:
if cinder.is_volume_service_enabled(request):
available = api.cinder.VOLUME_STATE_AVAILABLE
volumes = [self._get_volume_display_name(v)
for v in cinder.volume_list(self.request,
search_opts=dict(status=available, bootable=True))]
except Exception:
exceptions.handle(self.request,
_('Unable to retrieve list of volumes.'))
if volumes:
volumes.insert(0, ("", _("Select Volume")))
else:
volumes.insert(0, ("", _("No volumes available")))
return volumes
def populate_volume_snapshot_id_choices(self, request, context):
snapshots = []
try:
if cinder.is_volume_service_enabled(request):
available = api.cinder.VOLUME_STATE_AVAILABLE
snapshots = [self._get_volume_display_name(s)
for s in cinder.volume_snapshot_list(
self.request, search_opts=dict(status=available))]
except Exception:
exceptions.handle(self.request,
_('Unable to retrieve list of volume '
'snapshots.'))
if snapshots:
snapshots.insert(0, ("", _("Select Volume Snapshot")))
else:
snapshots.insert(0, ("", _("No volume snapshots available")))
return snapshots
class SetInstanceDetails(workflows.Step):
action_class = SetInstanceDetailsAction
depends_on = ("project_id", "user_id")
contributes = ("source_type", "source_id",
"availability_zone", "name", "count", "flavor",
"device_name", "pool", # Can be None for an image.
"vol_delete_on_instance_delete")
def prepare_action_context(self, request, context):
if 'source_type' in context and 'source_id' in context:
context[context['source_type']] = context['source_id']
return context
def contribute(self, data, context):
context = super(SetInstanceDetails, self).contribute(data, context)
# Allow setting the source dynamically.
if ("source_type" in context and "source_id" in context
and context["source_type"] not in context):
context[context["source_type"]] = context["source_id"]
# Translate form input to context for source values.
if "source_type" in data:
if data["source_type"] in ["image_id", "templet_id", "custom_id","volume_image_id"]:
if data['image_id']:
context["source_id"] = data.get("image_id", None)
else:
context['source_id'] = data.get("image_or_snapshot", None)
if 'templet_id' in context:
context['templet_id'] = data.get("templet_id", None)
context['templet'] = data.get("templet", None)
context['users'] = data.get("users", None)
else:
context["source_id"] = data.get(data["source_type"], None)
if "volume_size" in data:
context["volume_size"] = data["volume_size"]
LOG.info("data ============================%s" % data)
return context
KEYPAIR_IMPORT_URL = "horizon:admin:access_and_security:keypairs:import"
class SetAccessControlsAction(workflows.Action):
keypair = forms.ThemableDynamicChoiceField(
label=_("Key Pair"),
help_text=_("Key pair to use for "
"authentication."),
add_item_link=KEYPAIR_IMPORT_URL)
admin_pass = forms.RegexField(
label=_("Admin Password"),
required=False,
widget=forms.PasswordInput(render_value=False),
regex=validators.password_validator(),
error_messages={'invalid': validators.password_validator_msg()})
confirm_admin_pass = forms.CharField(
label=_("Confirm Admin Password"),
required=False,
widget=forms.PasswordInput(render_value=False))
groups = forms.MultipleChoiceField(
label=_("Security Groups"),
required=False,
initial=["default"],
widget=forms.ThemableCheckboxSelectMultiple(),
help_text=_("Launch instance in these "
"security groups."))
class Meta(object):
name = _("Access & Security")
help_text = _("Control access to your instance via key pairs, "
"security groups, and other mechanisms.")
def __init__(self, request, *args, **kwargs):
super(SetAccessControlsAction, self).__init__(request, *args, **kwargs)
if not api.nova.can_set_server_password():
del self.fields['admin_pass']
del self.fields['confirm_admin_pass']
self.fields['keypair'].required = api.nova.requires_keypair()
def populate_keypair_choices(self, request, context):
keypairs = instance_utils.keypair_field_data(request, True)
if len(keypairs) == 2:
self.fields['keypair'].initial = keypairs[1][0]
return keypairs
def populate_groups_choices(self, request, context):
try:
groups = api.network.security_group_list(request)
if base.is_service_enabled(request, 'network'):
security_group_list = [(sg.id, sg.name) for sg in groups]
else:
# Nova-Network requires the groups to be listed by name
security_group_list = [(sg.name, sg.name) for sg in groups]
except Exception:
exceptions.handle(request,
_('Unable to retrieve list of security groups'))
security_group_list = []
return security_group_list
def clean(self):
'''Check to make sure password fields match.'''
cleaned_data = super(SetAccessControlsAction, self).clean()
if 'admin_pass' in cleaned_data:
if cleaned_data['admin_pass'] != cleaned_data.get(
'confirm_admin_pass', None):
raise forms.ValidationError(_('Passwords do not match.'))
return cleaned_data
class SetAccessControls(workflows.Step):
action_class = SetAccessControlsAction
depends_on = ("project_id", "user_id")
contributes = ("keypair_id", "security_group_ids",
"admin_pass", "confirm_admin_pass")
def contribute(self, data, context):
if data:
post = self.workflow.request.POST
context['security_group_ids'] = post.getlist("groups")
context['keypair_id'] = data.get("keypair", "")
context['admin_pass'] = data.get("admin_pass", "")
context['confirm_admin_pass'] = data.get("confirm_admin_pass", "")
return context
class CustomizeAction(workflows.Action):
class Meta(object):
name = _("Post-Creation")
help_text_template = ("admin/vgpu/"
"_launch_customize_help.html")
source_choices = [('', _('Select Script Source')),
('raw', _('Direct Input')),
('file', _('File'))]
attributes = {'class': 'switchable', 'data-slug': 'scriptsource'}
script_source = forms.ChoiceField(
label=_('Customization Script Source'),
choices=source_choices,
widget=forms.ThemableSelectWidget(attrs=attributes),
required=False)
script_help = _("A script or set of commands to be executed after the "
"instance has been built (max 16kb).")
script_upload = forms.FileField(
label=_('Script File'),
help_text=script_help,
widget=forms.FileInput(attrs={
'class': 'switched',
'data-switch-on': 'scriptsource',
'data-scriptsource-file': _('Script File')}),
required=False)
script_data = forms.CharField(
label=_('Script Data'),
help_text=script_help,
widget=forms.widgets.Textarea(attrs={
'class': 'switched',
'data-switch-on': 'scriptsource',
'data-scriptsource-raw': _('Script Data')}),
required=False)
def __init__(self, *args):
super(CustomizeAction, self).__init__(*args)
def clean(self):
cleaned = super(CustomizeAction, self).clean()
files = self.request.FILES
script = self.clean_uploaded_files('script', files)
if script is not None:
cleaned['script_data'] = script
return cleaned
def clean_uploaded_files(self, prefix, files):
upload_str = prefix + "_upload"
has_upload = upload_str in files
if has_upload:
upload_file = files[upload_str]
log_script_name = upload_file.name
LOG.info('got upload %s' % log_script_name)
if upload_file._size > 16 * units.Ki: # 16kb
msg = _('File exceeds maximum size (16kb)')
raise forms.ValidationError(msg)
else:
script = upload_file.read()
if script != "":
try:
normalize_newlines(script)
except Exception as e:
msg = _('There was a problem parsing the'
' %(prefix)s: %(error)s')
msg = msg % {'prefix': prefix,
'error': six.text_type(e)}
raise forms.ValidationError(msg)
return script
else:
return None
class PostCreationStep(workflows.Step):
action_class = CustomizeAction
contributes = ("script_data",)
class SetNetworkAction(workflows.Action):
network = forms.MultipleChoiceField(
label=_("Networks"),
widget=forms.ThemableCheckboxSelectMultiple(),
error_messages={
'required': _(
"At least one network must"
" be specified.")},
help_text=_("Launch instance with"
" these networks"))
if api.neutron.is_port_profiles_supported():
widget = None
else:
widget = forms.HiddenInput()
profile = forms.ChoiceField(label=_("Policy Profiles"),
required=False,
widget=widget,
help_text=_("Launch instance with "
"this policy profile"))
def __init__(self, request, *args, **kwargs):
super(SetNetworkAction, self).__init__(request, *args, **kwargs)
network_list = self.fields["network"].choices
if len(network_list) == 1:
self.fields['network'].initial = [network_list[0][0]]
if api.neutron.is_port_profiles_supported():
self.fields['profile'].choices = (
self.get_policy_profile_choices(request))
class Meta(object):
name = _("Networking")
permissions = ('openstack.services.network',)
help_text = _("Select networks for your instance.")
def populate_network_choices(self, request, context):
return instance_utils.network_field_data(request)
def get_policy_profile_choices(self, request):
profile_choices = [('', _("Select a profile"))]
for profile in self._get_profiles(request, 'policy'):
profile_choices.append((profile.id, profile.name))
return profile_choices
def _get_profiles(self, request, type_p):
profiles = []
try:
profiles = api.neutron.profile_list(request, type_p)
except Exception:
msg = _('Network Profiles could not be retrieved.')
exceptions.handle(request, msg)
return profiles
class SetNetwork(workflows.Step):
action_class = SetNetworkAction
# Disabling the template drag/drop only in the case port profiles
# are used till the issue with the drag/drop affecting the
# profile_id detection is fixed.
if api.neutron.is_port_profiles_supported():
contributes = ("network_id", "profile_id",)
else:
template_name = "admin/vgpu/_update_networks.html"
contributes = ("network_id",)
def contribute(self, data, context):
if data:
networks = self.workflow.request.POST.getlist("network")
# If no networks are explicitly specified, network list
# contains an empty string, so remove it.
networks = [n for n in networks if n != '']
if networks:
context['network_id'] = networks
if api.neutron.is_port_profiles_supported():
context['profile_id'] = data.get('profile', None)
return context
class SetNetworkPortsAction(workflows.Action):
ports = forms.MultipleChoiceField(label=_("Ports"),
widget=forms.CheckboxSelectMultiple(),
required=False,
help_text=_("Launch instance with"
" these ports"))
class Meta(object):
name = _("Network Ports")
permissions = ('openstack.services.network',)
help_text_template = ("admin/vgpu/"
"_launch_network_ports_help.html")
def populate_ports_choices(self, request, context):
ports = instance_utils.port_field_data(request)
if not ports:
self.fields['ports'].label = _("No ports available")
self.fields['ports'].help_text = _("No ports available")
return ports
class SetNetworkPorts(workflows.Step):
action_class = SetNetworkPortsAction
contributes = ("ports",)
def contribute(self, data, context):
if data:
ports = self.workflow.request.POST.getlist("ports")
if ports:
context['ports'] = ports
return context
class SetAdvancedAction(workflows.Action):
disk_config = forms.ThemableChoiceField(
label=_("Disk Partition"), required=False,
help_text=_("Automatic: The entire disk is a single partition and "
"automatically resizes. Manual: Results in faster build "
"times but requires manual partitioning."))
config_drive = forms.BooleanField(
label=_("Configuration Drive"),
required=False, help_text=_("Configure OpenStack to write metadata to "
"a special configuration drive that "
"attaches to the instance when it boots."))
server_group = forms.ThemableChoiceField(
label=_("Server Group"), required=False,
help_text=_("Server group to associate with this instance."))
def __init__(self, request, context, *args, **kwargs):
super(SetAdvancedAction, self).__init__(request, context,
*args, **kwargs)
try:
if not api.nova.extension_supported("DiskConfig", request):
del self.fields['disk_config']
else:
# Set our disk_config choices
config_choices = [("AUTO", _("Automatic")),
("MANUAL", _("Manual"))]
self.fields['disk_config'].choices = config_choices
# Only show the Config Drive option for the Launch Instance
# workflow (not Resize Instance) and only if the extension
# is supported.
if context.get('workflow_slug') != 'launch_instance' or (
not api.nova.extension_supported("ConfigDrive", request)):
del self.fields['config_drive']
if not api.nova.extension_supported("ServerGroups", request):
del self.fields['server_group']
else:
server_group_choices = instance_utils.server_group_field_data(
request)
self.fields['server_group'].choices = server_group_choices
except Exception:
exceptions.handle(request, _('Unable to retrieve extensions '
'information.'))
class Meta(object):
name = _("Advanced Options")
help_text_template = ("admin/vgpu/"
"_launch_advanced_help.html")
class SetAdvanced(workflows.Step):
action_class = SetAdvancedAction
contributes = ("disk_config", "config_drive", "server_group",)
def prepare_action_context(self, request, context):
context = super(SetAdvanced, self).prepare_action_context(request,
context)
# Add the workflow slug to the context so that we can tell which
# workflow is being used when creating the action. This step is
# used by both the Launch Instance and Resize Instance workflows.
context['workflow_slug'] = self.workflow.slug
return context
class LaunchInstance(workflows.Workflow):
slug = "launch_instance"
name = _("Launch VGPUInstance")
finalize_button_name = _("Launch")
success_message = _('Request for launching %(count)s named "%(name)s" '
'has been submitted.')
failure_message = _('Unable to launch %(count)s named "%(name)s".')
success_url = "horizon:admin:vgpu:index"
multipart = True
default_steps = (SelectProjectUser,
SetInstanceDetails,
SetAccessControls,
SetNetwork,
#SetNetworkPorts,
TerminalControl,
PostCreationStep,
SetAdvanced)
def format_status_message(self, message):
name = self.context.get('name', 'unknown instance')
count = self.context.get('count', 1)
templet_id = self.context.get('source_type')
if int(count) > 1:
return message % {"count": _("%s instances") % count,
"name": name}
elif templet_id == 'templet_id':
return message % {"count": _("instance"),
"name":'Batch create'}
else:
return message % {"count": _("instance"), "name": name}
def batch_handle(self, request, context, image_id, templet, users,
custom_script, block_device_mapping=None,
block_device_mapping_v2=None, nics=None,
availability_zone=None, scheduler_hints=None):
terminal = context.get('terminal', list())
if terminal:
param = terminal[0]
else:
param = dict()
try:
for info in templet:
if info['count']:
instance_count=int(info['count'], 10)
else:
instance_count=1
_user = info.get('user')
if not _user:
param.update({'vm_user':request.user.id})
else:
username = [u.id for u in users if u.name ==_user]
if username:
param.update({'vm_user':username.pop()})
else:
try:
domain = api.keystone.get_default_domain(self.request)
new_user=api.keystone.user_create(request, name=info['user'],
email=None, password=info['user'],
project=context['pool'],
enabled=True, domain=domain.id)
param.update({'vm_user':new_user.id})
except Exception:
messages.error(request, _('Unable to update the user.'))
context['terminal']=[param]
context.update({'name':info['name']})
api.nova.server_create(request,
context['name'],
image_id,
context['pool'],
context['flavor'],
context['keypair_id'],
normalize_newlines(custom_script),
context['security_group_ids'],
terminal = context['terminal'],
block_device_mapping=block_device_mapping,
block_device_mapping_v2=block_device_mapping_v2,
nics=nics,
availability_zone=availability_zone,
instance_count=instance_count,
admin_pass=context['admin_pass'],
disk_config=context.get('disk_config'),
config_drive=context.get('config_drive'),
scheduler_hints=scheduler_hints)
return True
except Exception:
exceptions.handle(request)
return False
@sensitive_variables('context')
def handle(self, request, context):
custom_script = context.get('script_data', '')
users = context.get('users', None)
templet = context.get('templet', None)
dev_mapping_1 = None
dev_mapping_2 = None
image_id = ''
# Determine volume mapping options
source_type = context.get('source_type', None)
if source_type in ['image_id', 'instance_snapshot_id', 'templet_id']:
image_id = context['source_id']
if context.get('templet_id', '') == 'No_templet' or 'custom_id' in context:
            if int(context['count']) > 1:
                user_info = []
                for i in range(int(context['count'])):
user_number = context['input_user']
userjoin = '_'.join([user_number, str(i+1)])
try:
if user_number:
domain = api.keystone.get_default_domain(self.request)
new_user=api.keystone.user_create(request,
name=userjoin,
email=None,
password=userjoin,
project=context['pool'],
enabled=True,
domain=domain.id)
except Exception:
username = [u.name for u in users if u.name == userjoin][0]
user_id = [u.id for u in users if u.name == userjoin][0]
user_info.append(user_id)
messages.warning(request, _('Users already exist.'))
else:
username = [u.name for u in users if u.name == context['input_user']]
if username:
user_id = [u.id for u in users if u.name == context['input_user']][0]
api.keystone.user_update_tenant(self.request, user_id, context['pool'])
templet_info=user_id
else:
try:
if context.get('input_user', None):
domain = api.keystone.get_default_domain(self.request)
new_user=api.keystone.user_create(request,
name=context['input_user'],
email=None,
password=context['input_user'],
project=context['pool'],
enabled=True,
domain=domain.id)
api.keystone.user_update_tenant(self.request, new_user.id, context['pool'])
templet_info=new_user.id
except Exception:
messages.error(request, _('Unable to update the user.'))
templet_info=None
elif source_type in ['volume_id', 'volume_snapshot_id']:
try:
if api.nova.extension_supported("BlockDeviceMappingV2Boot",
request):
# Volume source id is extracted from the source
volume_source_id = context['source_id'].split(':')[0]
device_name = context.get('device_name', '') \
.strip() or None
dev_source_type_mapping = {
'volume_id': 'volume',
'volume_snapshot_id': 'snapshot'
}
dev_mapping_2 = [
{'device_name': device_name,
'source_type': dev_source_type_mapping[source_type],
'destination_type': 'volume',
'delete_on_termination':
bool(context['vol_delete_on_instance_delete']),
'uuid': volume_source_id,
'boot_index': '0',
'volume_size': context['volume_size']
}
]
else:
dev_mapping_1 = {
context['device_name']: '%s::%s' %
(context['source_id'],
bool(context['vol_delete_on_instance_delete']))
}
except Exception:
msg = _('Unable to retrieve extensions information')
exceptions.handle(request, msg)
elif source_type == 'volume_image_id':
device_name = context.get('device_name', '').strip() or None
dev_mapping_2 = [
{'device_name': device_name, # None auto-selects device
'source_type': 'image',
'destination_type': 'volume',
'delete_on_termination':
bool(context['vol_delete_on_instance_delete']),
'uuid': context['source_id'],
'boot_index': '0',
'volume_size': context['volume_size']
}
]
netids = context.get('network_id', None)
if netids:
nics = [{"net-id": netid, "v4-fixed-ip": ""}
for netid in netids]
else:
nics = None
avail_zone = context.get('availability_zone', None)
scheduler_hints = {}
server_group = context.get('server_group', None)
if server_group:
scheduler_hints['group'] = server_group
port_profiles_supported = api.neutron.is_port_profiles_supported()
if port_profiles_supported:
nics = self.set_network_port_profiles(request,
context['network_id'],
context['profile_id'])
ports = context.get('ports')
if ports:
if nics is None:
nics = []
nics.extend([{'port-id': port} for port in ports])
if 'templet_id' in context and context.get('templet_id') != 'No_templet':
if templet and users:
result= self.batch_handle(request, context, image_id, templet, users,
custom_script, block_device_mapping=dev_mapping_1,
block_device_mapping_v2=dev_mapping_2, nics=nics,
availability_zone=avail_zone,
scheduler_hints=scheduler_hints)
return result
try:
api.nova.server_create(request,
context['name'],
image_id,
context['pool'],
context['flavor'],
context['keypair_id'],
normalize_newlines(custom_script),
context['security_group_ids'],
terminal = context['terminal'],
block_device_mapping=dev_mapping_1,
block_device_mapping_v2=dev_mapping_2,
nics=nics,
availability_zone=avail_zone,
instance_count=int(context['count']),
admin_pass=context['admin_pass'],
disk_config=context.get('disk_config'),
config_drive=context.get('config_drive'),
scheduler_hints=scheduler_hints)
return True
except Exception:
if port_profiles_supported:
ports_failing_deletes = _cleanup_ports_on_failed_vm_launch(
request, nics)
if ports_failing_deletes:
ports_str = ', '.join(ports_failing_deletes)
msg = (_('Port cleanup failed for these port-ids (%s).')
% ports_str)
exceptions.handle(request, msg)
exceptions.handle(request)
return False
def set_network_port_profiles(self, request, net_ids, profile_id):
# Create port with Network ID and Port Profile
# for the use with the plugin supporting port profiles.
nics = []
for net_id in net_ids:
try:
port = api.neutron.port_create(
request,
net_id,
policy_profile_id=profile_id,
)
except Exception as e:
                msg = (_('Unable to create port for profile '
                         '"%(profile_id)s": %(reason)s') %
                       {'profile_id': profile_id,
                        'reason': e})
for nic in nics:
try:
port_id = nic['port-id']
api.neutron.port_delete(request, port_id)
except Exception:
msg = (msg +
_(' Also failed to delete port %s') % port_id)
redirect = self.success_url
exceptions.handle(request, msg, redirect=redirect)
if port:
nics.append({"port-id": port.id})
LOG.debug("Created Port %(portid)s with "
"network %(netid)s "
"policy profile %(profile_id)s",
{'portid': port.id,
'netid': net_id,
'profile_id': profile_id})
return nics
def _cleanup_ports_on_failed_vm_launch(request, nics):
ports_failing_deletes = []
LOG.debug('Cleaning up stale VM ports.')
for nic in nics:
try:
LOG.debug('Deleting port with id: %s' % nic['port-id'])
api.neutron.port_delete(request, nic['port-id'])
except Exception:
ports_failing_deletes.append(nic['port-id'])
return ports_failing_deletes
"""Mode Grueneisen parameter band structure calculation."""
# Copyright (C) 2012 Atsushi Togo
# All rights reserved.
#
# This file is part of phonopy.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of the phonopy project nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import gzip
import sys
import numpy as np
import yaml
from phonopy.gruneisen.core import GruneisenBase
from phonopy.units import VaspToTHz
class GruneisenBandStructure(GruneisenBase):
"""Class to calculate mode Grueneisen parameter along band structure paths."""
def __init__(
self,
paths,
dynmat,
dynmat_plus,
dynmat_minus,
delta_strain=None,
path_connections=None,
labels=None,
factor=VaspToTHz,
):
"""Init method."""
super().__init__(
dynmat,
dynmat_plus,
dynmat_minus,
delta_strain=delta_strain,
is_band_connection=True,
)
self._cell = dynmat.primitive
rec_lattice = np.linalg.inv(self._cell.cell)
distance_shift = 0.0
self._paths = []
for qpoints_ in paths:
qpoints = np.array(qpoints_)
distances = np.zeros(len(qpoints))
delta_qpoints = qpoints[1:] - qpoints[:-1]
delta_distances = np.sqrt(
(np.dot(delta_qpoints, rec_lattice) ** 2).sum(axis=1)
)
for i, dd in enumerate(delta_distances):
distances[i + 1] = distances[i] + dd
self.set_qpoints(qpoints)
eigenvalues = self._eigenvalues
frequencies = np.sqrt(abs(eigenvalues)) * np.sign(eigenvalues) * factor
distances_with_shift = distances + distance_shift
self._paths.append(
[
qpoints,
distances,
self._gruneisen,
eigenvalues,
self._eigenvectors,
frequencies,
distances_with_shift,
]
)
distance_shift = distances_with_shift[-1]
self._labels = None
self._path_connections = None
if path_connections is None:
self._path_connections = [
True,
] * len(self._paths)
self._path_connections[-1] = False
else:
self._path_connections = path_connections
if (
labels is not None
and len(labels) == (2 - np.array(self._path_connections)).sum()
):
self._labels = labels
def get_qpoints(self):
"""Return q-points."""
return [path[0] for path in self._paths]
def get_distances(self):
"""Return distances."""
return [path[6] for path in self._paths]
def get_gruneisen(self):
"""Return mode Gruneisen parameters."""
return [path[2] for path in self._paths]
def get_eigenvalues(self):
"""Return eigenvalues."""
return [path[3] for path in self._paths]
def get_eigenvectors(self):
"""Return eigenvectors."""
return [path[4] for path in self._paths]
def get_frequencies(self):
"""Return frequencies."""
return [path[5] for path in self._paths]
def write_yaml(self, comment=None, filename=None, compression=None):
"""Write results to file in yaml."""
if filename is not None:
_filename = filename
if compression is None:
if filename is None:
_filename = "gruneisen.yaml"
with open(_filename, "w") as w:
self._write_yaml(w, comment)
elif compression == "gzip":
if filename is None:
_filename = "gruneisen.yaml.gz"
with gzip.open(_filename, "wb") as w:
self._write_yaml(w, comment, is_binary=True)
elif compression == "lzma":
try:
import lzma
except ImportError:
raise ImportError(
"Writing a lzma compressed file is not supported "
"by this python version."
)
if filename is None:
_filename = "gruneisen.yaml.xz"
with lzma.open(_filename, "w") as w:
self._write_yaml(w, comment, is_binary=True)
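# The default filename depends on `compression` (mapping of the branches
# above):
#   write_yaml()                     -> gruneisen.yaml
#   write_yaml(compression="gzip")   -> gruneisen.yaml.gz
#   write_yaml(compression="lzma")   -> gruneisen.yaml.xz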
def _write_yaml(self, w, comment, is_binary=False):
natom = self._cell.get_number_of_atoms()
rec_lattice = np.linalg.inv(self._cell.cell) # column vecs
nq_paths = []
for qpoints in self._paths:
nq_paths.append(len(qpoints))
text = []
if comment is not None:
text.append(yaml.dump(comment, default_flow_style=False).rstrip())
text.append("nqpoint: %-7d" % np.sum(nq_paths))
text.append("npath: %-7d" % len(self._paths))
text.append("segment_nqpoint:")
text += ["- %d" % nq for nq in nq_paths]
if self._labels:
text.append("labels:")
if getattr(self, "_is_legacy_plot", False):  # attribute is never set in this class
for i in range(len(self._paths)):
text.append(
"- [ '%s', '%s' ]" % (self._labels[i], self._labels[i + 1])
)
else:
i = 0
for c in self._path_connections:
text.append(
"- [ '%s', '%s' ]" % (self._labels[i], self._labels[i + 1])
)
if c:
i += 1
else:
i += 2
text.append("reciprocal_lattice:")
for vec, axis in zip(rec_lattice.T, ("a*", "b*", "c*")):
text.append("- [ %12.8f, %12.8f, %12.8f ] # %2s" % (tuple(vec) + (axis,)))
text.append("natom: %-7d" % (natom))
text.append(str(self._cell))
text.append("")
text.append("path:")
text.append("")
for band_structure in self._paths:
(
qpoints,
distances,
gamma,
eigenvalues,
_,
frequencies,
distances_with_shift,
) = band_structure
text.append("- nqpoint: %d" % len(qpoints))
text.append(" phonon:")
for q, d, gs, freqs in zip(qpoints, distances, gamma, frequencies):
text.append(" - q-position: [ %10.7f, %10.7f, %10.7f ]" % tuple(q))
text.append(" distance: %10.7f" % d)
text.append(" band:")
for i, (g, freq) in enumerate(zip(gs, freqs)):
text.append(" - # %d" % (i + 1))
text.append(" gruneisen: %15.10f" % g)
text.append(" frequency: %15.10f" % freq)
text.append("")
self._write_lines(w, text, is_binary)
def _write_lines(self, w, lines, is_binary):
text = "\n".join(lines)
if is_binary:
if sys.version_info < (3, 0):
w.write(bytes(text))
else:
w.write(bytes(text, "utf8"))
else:
w.write(text)
def plot(self, axarr, epsilon=None, color_scheme=None):
"""Return pyplot of band structure calculation results."""
for band_structure in self._paths:
self._plot(axarr, band_structure, epsilon, color_scheme)
def _plot(self, axarr, band_structure, epsilon, color_scheme):
(
qpoints,
distances,
gamma,
eigenvalues,
_,
frequencies,
distances_with_shift,
) = band_structure
n = len(gamma.T) - 1
ax1, ax2 = axarr
for i, (curve, freqs) in enumerate(zip(gamma.T.copy(), frequencies.T)):
if epsilon is not None:
if np.linalg.norm(qpoints[0]) < epsilon:
cutoff_index = 0
for j, q in enumerate(qpoints):
if not np.linalg.norm(q) < epsilon:
cutoff_index = j
break
for j in range(cutoff_index):
if abs(freqs[j]) < abs(max(freqs)) / 10:
curve[j] = curve[cutoff_index]
if np.linalg.norm(qpoints[-1]) < epsilon:
cutoff_index = len(qpoints) - 1
for j in reversed(range(len(qpoints))):
q = qpoints[j]
if not np.linalg.norm(q) < epsilon:
cutoff_index = j
break
for j in reversed(range(len(qpoints))):
if j == cutoff_index:
break
if abs(freqs[j]) < abs(max(freqs)) / 10:
curve[j] = curve[cutoff_index]
self._plot_a_band(ax1, curve, distances_with_shift, i, n, color_scheme)
ax1.set_xlim(0, distances_with_shift[-1])
for i, freqs in enumerate(frequencies.T):
self._plot_a_band(ax2, freqs, distances_with_shift, i, n, color_scheme)
ax2.set_xlim(0, distances_with_shift[-1])
def _plot_a_band(self, ax, curve, distances_with_shift, i, n, color_scheme):
color = None
if color_scheme == "RB":
color = (1.0 / n * i, 0, 1.0 / n * (n - i))
elif color_scheme == "RG":
color = (1.0 / n * i, 1.0 / n * (n - i), 0)
elif color_scheme == "RGB":
color = (
max(2.0 / n * (i - n / 2.0), 0),
min(2.0 / n * i, 2.0 / n * (n - i)),
max(2.0 / n * (n / 2.0 - i), 0),
)
if color:
ax.plot(distances_with_shift, curve, color=color)
else:
ax.plot(distances_with_shift, curve)
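# A minimal usage sketch (hypothetical inputs: `dynmat`, `dynmat_plus` and
# `dynmat_minus` are phonopy dynamical matrices for the equilibrium and the
# +/- strained cells; `paths` is a list of q-point segments):
#
#   import matplotlib.pyplot as plt
#   paths = [[[0.0, 0.0, 0.0], [0.5, 0.0, 0.0]]]
#   gbs = GruneisenBandStructure(paths, dynmat, dynmat_plus, dynmat_minus)
#   fig, axarr = plt.subplots(1, 2)  # plot() expects (gruneisen_ax, freq_ax)
#   gbs.plot(axarr, color_scheme="RGB")
#   gbs.write_yaml()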
|
|
# -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
from __future__ import absolute_import
from unittest import TestCase
from unittest.mock import patch, MagicMock
import sqlalchemy as sa
from sqlalchemy.sql import select
from sqlalchemy.orm import Session
from sqlalchemy.ext.declarative import declarative_base
from crate.client.sqlalchemy.types import Craty, ObjectArray
from crate.client.cursor import Cursor
fake_cursor = MagicMock(name='fake_cursor')
FakeCursor = MagicMock(name='FakeCursor', spec=Cursor)
FakeCursor.return_value = fake_cursor
class SqlAlchemyDictTypeTest(TestCase):
def setUp(self):
self.engine = sa.create_engine('crate://')
metadata = sa.MetaData()
self.mytable = sa.Table('mytable', metadata,
sa.Column('name', sa.String),
sa.Column('data', Craty))
def assertSQL(self, expected_str, actual_expr):
self.assertEqual(expected_str, str(actual_expr).replace('\n', ''))
def test_select_with_dict_column(self):
mytable = self.mytable
self.assertSQL(
"SELECT mytable.data['x'] AS anon_1 FROM mytable",
select([mytable.c.data['x']], bind=self.engine)
)
def test_select_with_dict_column_where_clause(self):
mytable = self.mytable
s = select([mytable.c.data], bind=self.engine).\
where(mytable.c.data['x'] == 1)
self.assertSQL(
"SELECT mytable.data FROM mytable WHERE mytable.data['x'] = ?",
s
)
def test_select_with_dict_column_nested_where(self):
mytable = self.mytable
s = select([mytable.c.name], bind=self.engine)
s = s.where(mytable.c.data['x']['y'] == 1)
self.assertSQL(
"SELECT mytable.name FROM mytable " +
"WHERE mytable.data['x']['y'] = ?",
s
)
def test_select_with_dict_column_where_clause_gt(self):
mytable = self.mytable
s = select([mytable.c.data], bind=self.engine).\
where(mytable.c.data['x'] > 1)
self.assertSQL(
"SELECT mytable.data FROM mytable WHERE mytable.data['x'] > ?",
s
)
def test_select_with_dict_column_where_clause_other_col(self):
mytable = self.mytable
s = select([mytable.c.name], bind=self.engine)
s = s.where(mytable.c.data['x'] == mytable.c.name)
self.assertSQL(
"SELECT mytable.name FROM mytable " +
"WHERE mytable.data['x'] = mytable.name",
s
)
def test_update_with_dict_column(self):
mytable = self.mytable
stmt = mytable.update(bind=self.engine).\
where(mytable.c.name == 'Arthur Dent').\
values({
"data['x']": "Trillian"
})
self.assertSQL(
"UPDATE mytable SET data['x'] = ? WHERE mytable.name = ?",
stmt
)
def set_up_character_and_cursor(self, return_value=None):
return_value = return_value or [('Trillian', {})]
fake_cursor.fetchall.return_value = return_value
fake_cursor.description = (
('characters_name', None, None, None, None, None, None),
('characters_data', None, None, None, None, None, None)
)
fake_cursor.rowcount = 1
Base = declarative_base(bind=self.engine)
class Character(Base):
__tablename__ = 'characters'
name = sa.Column(sa.String, primary_key=True)
age = sa.Column(sa.Integer)
data = sa.Column(Craty)
data_list = sa.Column(ObjectArray)
session = Session()
return session, Character
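# Note: the MagicMock cursor configured above lets Session.commit() observe a
# single 'characters' row without talking to a real CrateDB server; the
# declarative Character model is rebuilt per test on the 'crate://' engine.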
def test_assign_null_to_object_array(self):
session, Character = self.set_up_character_and_cursor()
char_1 = Character(name='Trillian', data_list=None)
self.assertTrue(char_1.data_list is None)
char_2 = Character(name='Trillian', data_list=1)
self.assertTrue(char_2.data_list == [1])
char_3 = Character(name='Trillian', data_list=[None])
self.assertTrue(char_3.data_list == [None])
@patch('crate.client.connection.Cursor', FakeCursor)
def test_assign_to_craty_type_after_commit(self):
session, Character = self.set_up_character_and_cursor(
return_value=[('Trillian', None, None)]
)
char = Character(name='Trillian')
session.add(char)
session.commit()
char.data = {'x': 1}
self.assertTrue(char in session.dirty)
session.commit()
fake_cursor.execute.assert_called_with(
"UPDATE characters SET data = ? WHERE characters.name = ?",
({'x': 1}, 'Trillian',)
)
@patch('crate.client.connection.Cursor', FakeCursor)
def test_change_tracking(self):
session, Character = self.set_up_character_and_cursor()
char = Character(name='Trillian')
session.add(char)
session.commit()
try:
char.data['x'] = 1
except Exception:
print(fake_cursor.fetchall.called)
print(fake_cursor.mock_calls)
raise
self.assertTrue(char in session.dirty)
try:
session.commit()
except Exception:
print(fake_cursor.mock_calls)
raise
self.assertFalse(char in session.dirty)
@patch('crate.client.connection.Cursor', FakeCursor)
def test_partial_dict_update(self):
session, Character = self.set_up_character_and_cursor()
char = Character(name='Trillian')
session.add(char)
session.commit()
char.data['x'] = 1
char.data['y'] = 2
session.commit()
# Dict ordering is not guaranteed here, so whether x or y ends up first in
# the generated UPDATE statement isn't deterministic.
try:
fake_cursor.execute.assert_called_with(
("UPDATE characters SET data['y'] = ?, data['x'] = ? "
"WHERE characters.name = ?"),
(2, 1, 'Trillian')
)
except AssertionError:
fake_cursor.execute.assert_called_with(
("UPDATE characters SET data['x'] = ?, data['y'] = ? "
"WHERE characters.name = ?"),
(1, 2, 'Trillian')
)
@patch('crate.client.connection.Cursor', FakeCursor)
def test_partial_dict_update_only_one_key_changed(self):
"""
If only one attribute of Crate is changed
the update should only update that attribute
not all attributes of Crate.
"""
session, Character = self.set_up_character_and_cursor(
return_value=[('Trillian', dict(x=1, y=2))]
)
char = Character(name='Trillian')
char.data = dict(x=1, y=2)
session.add(char)
session.commit()
char.data['y'] = 3
session.commit()
fake_cursor.execute.assert_called_with(
("UPDATE characters SET data['y'] = ? "
"WHERE characters.name = ?"),
(3, 'Trillian')
)
@patch('crate.client.connection.Cursor', FakeCursor)
def test_partial_dict_update_with_regular_column(self):
session, Character = self.set_up_character_and_cursor()
char = Character(name='Trillian')
session.add(char)
session.commit()
char.data['x'] = 1
char.age = 20
session.commit()
fake_cursor.execute.assert_called_with(
("UPDATE characters SET age = ?, data['x'] = ? "
"WHERE characters.name = ?"),
(20, 1, 'Trillian')
)
@patch('crate.client.connection.Cursor', FakeCursor)
def test_partial_dict_update_with_delitem(self):
session, Character = self.set_up_character_and_cursor(
return_value=[('Trillian', {'x': 1})]
)
char = Character(name='Trillian')
char.data = {'x': 1}
session.add(char)
session.commit()
del char.data['x']
self.assertTrue(char in session.dirty)
session.commit()
fake_cursor.execute.assert_called_with(
("UPDATE characters SET data['x'] = ? "
"WHERE characters.name = ?"),
(None, 'Trillian')
)
@patch('crate.client.connection.Cursor', FakeCursor)
def test_partial_dict_update_with_delitem_setitem(self):
""" test that the change tracking doesn't get messed up
delitem -> setitem
"""
session, Character = self.set_up_character_and_cursor(
return_value=[('Trillian', {'x': 1})]
)
session = Session()
char = Character(name='Trillian')
char.data = {'x': 1}
session.add(char)
session.commit()
del char.data['x']
char.data['x'] = 4
self.assertTrue(char in session.dirty)
session.commit()
fake_cursor.execute.assert_called_with(
("UPDATE characters SET data['x'] = ? "
"WHERE characters.name = ?"),
(4, 'Trillian')
)
@patch('crate.client.connection.Cursor', FakeCursor)
def test_partial_dict_update_with_setitem_delitem(self):
""" test that the change tracking doesn't get messed up
setitem -> delitem
"""
session, Character = self.set_up_character_and_cursor(
return_value=[('Trillian', {'x': 1})]
)
char = Character(name='Trillian')
char.data = {'x': 1}
session.add(char)
session.commit()
char.data['x'] = 4
del char.data['x']
self.assertTrue(char in session.dirty)
session.commit()
fake_cursor.execute.assert_called_with(
("UPDATE characters SET data['x'] = ? "
"WHERE characters.name = ?"),
(None, 'Trillian')
)
@patch('crate.client.connection.Cursor', FakeCursor)
def test_partial_dict_update_with_setitem_delitem_setitem(self):
""" test that the change tracking doesn't get messed up
setitem -> delitem -> setitem
"""
session, Character = self.set_up_character_and_cursor(
return_value=[('Trillian', {'x': 1})]
)
char = Character(name='Trillian')
char.data = {'x': 1}
session.add(char)
session.commit()
char.data['x'] = 4
del char.data['x']
char.data['x'] = 3
self.assertTrue(char in session.dirty)
session.commit()
fake_cursor.execute.assert_called_with(
("UPDATE characters SET data['x'] = ? "
"WHERE characters.name = ?"),
(3, 'Trillian')
)
def set_up_character_and_cursor_data_list(self, return_value=None):
return_value = return_value or [('Trillian', {})]
fake_cursor.fetchall.return_value = return_value
fake_cursor.description = (
('characters_name', None, None, None, None, None, None),
('characters_data_list', None, None, None, None, None, None)
)
fake_cursor.rowcount = 1
Base = declarative_base(bind=self.engine)
class Character(Base):
__tablename__ = 'characters'
name = sa.Column(sa.String, primary_key=True)
data_list = sa.Column(ObjectArray)
session = Session()
return session, Character
def _setup_object_array_char(self):
session, Character = self.set_up_character_and_cursor_data_list(
return_value=[('Trillian', [{'1': 1}, {'2': 2}])]
)
char = Character(name='Trillian', data_list=[{'1': 1}, {'2': 2}])
session.add(char)
session.commit()
return session, char
@patch('crate.client.connection.Cursor', FakeCursor)
def test_object_array_setitem_change_tracking(self):
session, char = self._setup_object_array_char()
char.data_list[1] = {'3': 3}
self.assertTrue(char in session.dirty)
session.commit()
fake_cursor.execute.assert_called_with(
("UPDATE characters SET data_list = ? "
"WHERE characters.name = ?"),
([{'1': 1}, {'3': 3}], 'Trillian')
)
def _setup_nested_object_char(self):
session, Character = self.set_up_character_and_cursor(
return_value=[('Trillian', {'nested': {'x': 1, 'y': {'z': 2}}})]
)
char = Character(name='Trillian')
char.data = {'nested': {'x': 1, 'y': {'z': 2}}}
session.add(char)
session.commit()
return session, char
@patch('crate.client.connection.Cursor', FakeCursor)
def test_nested_object_change_tracking(self):
session, char = self._setup_nested_object_char()
char.data["nested"]["x"] = 3
self.assertTrue(char in session.dirty)
session.commit()
fake_cursor.execute.assert_called_with(
("UPDATE characters SET data['nested'] = ? "
"WHERE characters.name = ?"),
({'y': {'z': 2}, 'x': 3}, 'Trillian')
)
@patch('crate.client.connection.Cursor', FakeCursor)
def test_deep_nested_object_change_tracking(self):
session, char = self._setup_nested_object_char()
# change deep nested object
char.data["nested"]["y"]["z"] = 5
self.assertTrue(char in session.dirty)
session.commit()
fake_cursor.execute.assert_called_with(
("UPDATE characters SET data['nested'] = ? "
"WHERE characters.name = ?"),
({'y': {'z': 5}, 'x': 1}, 'Trillian')
)
@patch('crate.client.connection.Cursor', FakeCursor)
def test_delete_nested_object_tracking(self):
session, char = self._setup_nested_object_char()
# delete nested object
del char.data["nested"]["y"]["z"]
self.assertTrue(char in session.dirty)
session.commit()
fake_cursor.execute.assert_called_with(
("UPDATE characters SET data['nested'] = ? "
"WHERE characters.name = ?"),
({'y': {}, 'x': 1}, 'Trillian')
)
@patch('crate.client.connection.Cursor', FakeCursor)
def test_object_array_append_change_tracking(self):
session, char = self._setup_object_array_char()
char.data_list.append({'3': 3})
self.assertTrue(char in session.dirty)
@patch('crate.client.connection.Cursor', FakeCursor)
def test_object_array_insert_change_tracking(self):
session, char = self._setup_object_array_char()
char.data_list.insert(0, {'3': 3})
self.assertTrue(char in session.dirty)
@patch('crate.client.connection.Cursor', FakeCursor)
def test_object_array_slice_change_tracking(self):
session, char = self._setup_object_array_char()
char.data_list[:] = [{'3': 3}]
self.assertTrue(char in session.dirty)
@patch('crate.client.connection.Cursor', FakeCursor)
def test_object_array_extend_change_tracking(self):
session, char = self._setup_object_array_char()
char.data_list.extend([{'3': 3}])
self.assertTrue(char in session.dirty)
@patch('crate.client.connection.Cursor', FakeCursor)
def test_object_array_pop_change_tracking(self):
session, char = self._setup_object_array_char()
char.data_list.pop()
self.assertTrue(char in session.dirty)
@patch('crate.client.connection.Cursor', FakeCursor)
def test_object_array_remove_change_tracking(self):
session, char = self._setup_object_array_char()
item = char.data_list[0]
char.data_list.remove(item)
self.assertTrue(char in session.dirty)
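# A hedged summary of the behaviour exercised above: mutating a single key of
# a Craty column emits a partial update such as
#   UPDATE characters SET data['x'] = ? WHERE characters.name = ?
# whereas any mutation of an ObjectArray column (setitem, append, insert,
# slice assignment, extend, pop, remove) marks the row dirty and rewrites the
# whole array, e.g.
#   UPDATE characters SET data_list = ? WHERE characters.name = ?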
|
|
"""
Unit tests for reverse URL lookups.
"""
from __future__ import unicode_literals
import unittest
from django.contrib.auth.models import User
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
from django.core.urlresolvers import (reverse, reverse_lazy, resolve, get_callable,
get_resolver, NoReverseMatch, Resolver404, ResolverMatch, RegexURLResolver,
RegexURLPattern)
from django.http import HttpRequest, HttpResponseRedirect, HttpResponsePermanentRedirect
from django.shortcuts import redirect
from django.test import TestCase, override_settings
from django.utils import six
from admin_scripts.tests import AdminScriptTestCase
from . import urlconf_outer, middleware, views
from .views import empty_view
resolve_test_data = (
# These entries are in the format: (path, url_name, app_name, namespace, view_func, args, kwargs)
# Simple case
('/normal/42/37/', 'normal-view', None, '', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
('/view_class/42/37/', 'view-class', None, '', views.view_class_instance, tuple(), {'arg1': '42', 'arg2': '37'}),
('/included/normal/42/37/', 'inc-normal-view', None, '', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
('/included/view_class/42/37/', 'inc-view-class', None, '', views.view_class_instance, tuple(), {'arg1': '42', 'arg2': '37'}),
# Unnamed args are dropped if you have *any* kwargs in a pattern
('/mixed_args/42/37/', 'mixed-args', None, '', views.empty_view, tuple(), {'arg2': '37'}),
('/included/mixed_args/42/37/', 'inc-mixed-args', None, '', views.empty_view, tuple(), {'arg2': '37'}),
# Unnamed views will be resolved to the function/class name
('/unnamed/normal/42/37/', 'urlpatterns_reverse.views.empty_view', None, '', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
('/unnamed/view_class/42/37/', 'urlpatterns_reverse.views.ViewClass', None, '', views.view_class_instance, tuple(), {'arg1': '42', 'arg2': '37'}),
# If you have no kwargs, you get an args list.
('/no_kwargs/42/37/', 'no-kwargs', None, '', views.empty_view, ('42', '37'), {}),
('/included/no_kwargs/42/37/', 'inc-no-kwargs', None, '', views.empty_view, ('42', '37'), {}),
# Namespaces
('/test1/inner/42/37/', 'urlobject-view', 'testapp', 'test-ns1', 'empty_view', tuple(), {'arg1': '42', 'arg2': '37'}),
('/included/test3/inner/42/37/', 'urlobject-view', 'testapp', 'test-ns3', 'empty_view', tuple(), {'arg1': '42', 'arg2': '37'}),
('/ns-included1/normal/42/37/', 'inc-normal-view', None, 'inc-ns1', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
('/included/test3/inner/42/37/', 'urlobject-view', 'testapp', 'test-ns3', 'empty_view', tuple(), {'arg1': '42', 'arg2': '37'}),
('/default/inner/42/37/', 'urlobject-view', 'testapp', 'testapp', 'empty_view', tuple(), {'arg1': '42', 'arg2': '37'}),
('/other2/inner/42/37/', 'urlobject-view', 'nodefault', 'other-ns2', 'empty_view', tuple(), {'arg1': '42', 'arg2': '37'}),
('/other1/inner/42/37/', 'urlobject-view', 'nodefault', 'other-ns1', 'empty_view', tuple(), {'arg1': '42', 'arg2': '37'}),
# Nested namespaces
('/ns-included1/test3/inner/42/37/', 'urlobject-view', 'testapp', 'inc-ns1:test-ns3', 'empty_view', tuple(), {'arg1': '42', 'arg2': '37'}),
('/ns-included1/ns-included4/ns-included2/test3/inner/42/37/', 'urlobject-view', 'testapp', 'inc-ns1:inc-ns4:inc-ns2:test-ns3', 'empty_view', tuple(), {'arg1': '42', 'arg2': '37'}),
# Namespaces capturing variables
('/inc70/', 'inner-nothing', None, 'inc-ns5', views.empty_view, tuple(), {'outer': '70'}),
('/inc78/extra/foobar/', 'inner-extra', None, 'inc-ns5', views.empty_view, tuple(), {'outer': '78', 'extra': 'foobar'}),
)
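# For example, the first entry above asserts that resolve('/normal/42/37/')
# returns a ResolverMatch with url_name='normal-view', args=() and
# kwargs={'arg1': '42', 'arg2': '37'}.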
test_data = (
('places', '/places/3/', [3], {}),
('places', '/places/3/', ['3'], {}),
('places', NoReverseMatch, ['a'], {}),
('places', NoReverseMatch, [], {}),
('places?', '/place/', [], {}),
('places+', '/places/', [], {}),
('places*', '/place/', [], {}),
('places2?', '/', [], {}),
('places2+', '/places/', [], {}),
('places2*', '/', [], {}),
('places3', '/places/4/', [4], {}),
('places3', '/places/harlem/', ['harlem'], {}),
('places3', NoReverseMatch, ['harlem64'], {}),
('places4', '/places/3/', [], {'id': 3}),
('people', NoReverseMatch, [], {}),
('people', '/people/adrian/', ['adrian'], {}),
('people', '/people/adrian/', [], {'name': 'adrian'}),
('people', NoReverseMatch, ['name with spaces'], {}),
('people', NoReverseMatch, [], {'name': 'name with spaces'}),
('people2', '/people/name/', [], {}),
('people2a', '/people/name/fred/', ['fred'], {}),
('people_backref', '/people/nate-nate/', ['nate'], {}),
('people_backref', '/people/nate-nate/', [], {'name': 'nate'}),
('optional', '/optional/fred/', [], {'name': 'fred'}),
('optional', '/optional/fred/', ['fred'], {}),
('hardcoded', '/hardcoded/', [], {}),
('hardcoded2', '/hardcoded/doc.pdf', [], {}),
('people3', '/people/il/adrian/', [], {'state': 'il', 'name': 'adrian'}),
('people3', NoReverseMatch, [], {'state': 'il'}),
('people3', NoReverseMatch, [], {'name': 'adrian'}),
('people4', NoReverseMatch, [], {'state': 'il', 'name': 'adrian'}),
('people6', '/people/il/test/adrian/', ['il/test', 'adrian'], {}),
('people6', '/people//adrian/', ['adrian'], {}),
('range', '/character_set/a/', [], {}),
('range2', '/character_set/x/', [], {}),
('price', '/price/$10/', ['10'], {}),
('price2', '/price/$10/', ['10'], {}),
('price3', '/price/$10/', ['10'], {}),
('product', '/product/chocolate+($2.00)/', [], {'price': '2.00', 'product': 'chocolate'}),
('headlines', '/headlines/2007.5.21/', [], dict(year=2007, month=5, day=21)),
('windows', r'/windows_path/C:%5CDocuments%20and%20Settings%5Cspam/', [], dict(drive_name='C', path=r'Documents and Settings\spam')),
('special', r'/special_chars/%2B%5C%24%2A/', [r'+\$*'], {}),
('special', r'/special_chars/some%20resource/', [r'some resource'], {}),
('special', r'/special_chars/10%25%20complete/', [r'10% complete'], {}),
('special', r'/special_chars/some%20resource/', [], {'chars': r'some resource'}),
('special', r'/special_chars/10%25%20complete/', [], {'chars': r'10% complete'}),
('special', NoReverseMatch, [''], {}),
('mixed', '/john/0/', [], {'name': 'john'}),
('repeats', '/repeats/a/', [], {}),
('repeats2', '/repeats/aa/', [], {}),
('repeats3', '/repeats/aa/', [], {}),
('insensitive', '/CaseInsensitive/fred', ['fred'], {}),
('test', '/test/1', [], {}),
('test2', '/test/2', [], {}),
('inner-nothing', '/outer/42/', [], {'outer': '42'}),
('inner-nothing', '/outer/42/', ['42'], {}),
('inner-nothing', NoReverseMatch, ['foo'], {}),
('inner-extra', '/outer/42/extra/inner/', [], {'extra': 'inner', 'outer': '42'}),
('inner-extra', '/outer/42/extra/inner/', ['42', 'inner'], {}),
('inner-extra', NoReverseMatch, ['fred', 'inner'], {}),
('inner-no-kwargs', '/outer-no-kwargs/42/inner-no-kwargs/1/', ['42', '1'], {}),
('disjunction', NoReverseMatch, ['foo'], {}),
('inner-disjunction', NoReverseMatch, ['10', '11'], {}),
('extra-places', '/e-places/10/', ['10'], {}),
('extra-people', '/e-people/fred/', ['fred'], {}),
('extra-people', '/e-people/fred/', [], {'name': 'fred'}),
('part', '/part/one/', [], {'value': 'one'}),
('part', '/prefix/xx/part/one/', [], {'value': 'one', 'prefix': 'xx'}),
('part2', '/part2/one/', [], {'value': 'one'}),
('part2', '/part2/', [], {}),
('part2', '/prefix/xx/part2/one/', [], {'value': 'one', 'prefix': 'xx'}),
('part2', '/prefix/xx/part2/', [], {'prefix': 'xx'}),
# Regression for #9038
# These views are resolved by method name. Each method is deployed twice -
# once with an explicit argument, and once using the default value on
# the method. This is potentially ambiguous, as you have to pick the
# correct view for the arguments provided.
('kwargs_view', '/arg_view/', [], {}),
('kwargs_view', '/arg_view/10/', [], {'arg1': 10}),
('urlpatterns_reverse.views.absolute_kwargs_view', '/absolute_arg_view/', [], {}),
('urlpatterns_reverse.views.absolute_kwargs_view', '/absolute_arg_view/10/', [], {'arg1': 10}),
('non_path_include', '/includes/non_path_include/', [], {}),
# Tests for #13154
('defaults', '/defaults_view1/3/', [], {'arg1': 3, 'arg2': 1}),
('defaults', '/defaults_view2/3/', [], {'arg1': 3, 'arg2': 2}),
('defaults', NoReverseMatch, [], {'arg1': 3, 'arg2': 3}),
('defaults', NoReverseMatch, [], {'arg2': 1}),
)
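# For example, the first two entries above assert that
# reverse('places', args=[3]) and reverse('places', args=['3']) both yield
# '/places/3/', while reverse('places', args=['a']) raises NoReverseMatch.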
@override_settings(ROOT_URLCONF='urlpatterns_reverse.no_urls')
class NoURLPatternsTests(TestCase):
def test_no_urls_exception(self):
"""
RegexURLResolver should raise an exception when no urlpatterns exist.
"""
resolver = RegexURLResolver(r'^$', settings.ROOT_URLCONF)
self.assertRaisesMessage(ImproperlyConfigured,
"The included urlconf 'urlpatterns_reverse.no_urls' does not "
"appear to have any patterns in it. If you see valid patterns in "
"the file then the issue is probably caused by a circular import.",
getattr, resolver, 'url_patterns')
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls')
class URLPatternReverse(TestCase):
def test_urlpattern_reverse(self):
for name, expected, args, kwargs in test_data:
try:
got = reverse(name, args=args, kwargs=kwargs)
except NoReverseMatch:
self.assertEqual(expected, NoReverseMatch)
else:
self.assertEqual(got, expected)
def test_reverse_none(self):
# Reversing None should raise an error, not return the last un-named view.
self.assertRaises(NoReverseMatch, reverse, None)
def test_prefix_braces(self):
self.assertEqual('/%7B%7Binvalid%7D%7D/includes/non_path_include/',
reverse('non_path_include', prefix='/{{invalid}}/'))
def test_prefix_parenthesis(self):
self.assertEqual('/bogus%29/includes/non_path_include/',
reverse('non_path_include', prefix='/bogus)/'))
def test_prefix_format_char(self):
self.assertEqual('/bump%2520map/includes/non_path_include/',
reverse('non_path_include', prefix='/bump%20map/'))
def test_non_urlsafe_prefix_with_args(self):
# Regression for #20022
self.assertEqual('/%7Eme/places/1/',
reverse('places', args=[1], prefix='/~me/'))
def test_patterns_reported(self):
# Regression for #17076
try:
# this url exists, but requires an argument
reverse("people", args=[])
except NoReverseMatch as e:
pattern_description = r"1 pattern(s) tried: ['people/(?P<name>\\w+)/$']"
self.assertIn(pattern_description, str(e))
else:
# we can't use .assertRaises, since we want to inspect the
# exception
self.fail("Expected a NoReverseMatch, but none occurred.")
class ResolverTests(unittest.TestCase):
def test_resolver_repr(self):
"""
Test repr of RegexURLResolver, especially when urlconf_name is a list
(#17892).
"""
# Pick a resolver from a namespaced urlconf
resolver = get_resolver('urlpatterns_reverse.namespace_urls')
sub_resolver = resolver.namespace_dict['test-ns1'][1]
self.assertIn('<RegexURLPattern list>', repr(sub_resolver))
def test_reverse_lazy_object_coercion_by_resolve(self):
"""
Verifies lazy object returned by reverse_lazy is coerced to
text by resolve(). Previous to #21043, this would raise a TypeError.
"""
urls = 'urlpatterns_reverse.named_urls'
proxy_url = reverse_lazy('named-url1', urlconf=urls)
resolver = get_resolver(urls)
try:
resolver.resolve(proxy_url)
except TypeError:
self.fail('Failed to coerce lazy object to text')
def test_non_regex(self):
"""
Verifies that we raise a Resolver404 if what we are resolving doesn't
meet the basic requirements of a path to match - i.e., at the very
least, it matches the root pattern '^/'. We must never return None
from resolve, or we will get a TypeError further down the line.
Regression for #10834.
"""
self.assertRaises(Resolver404, resolve, '')
self.assertRaises(Resolver404, resolve, 'a')
self.assertRaises(Resolver404, resolve, '\\')
self.assertRaises(Resolver404, resolve, '.')
def test_404_tried_urls_have_names(self):
"""
Verifies that the list of URLs that come back from a Resolver404
exception contains a list in the right format for printing out in
the DEBUG 404 page with both the patterns and URL names, if available.
"""
urls = 'urlpatterns_reverse.named_urls'
# this list matches the expected URL types and names returned when
# you try to resolve a non-existent URL in the first level of included
# URLs in named_urls.py (e.g., '/included/non-existent-url')
url_types_names = [
[{'type': RegexURLPattern, 'name': 'named-url1'}],
[{'type': RegexURLPattern, 'name': 'named-url2'}],
[{'type': RegexURLPattern, 'name': None}],
[{'type': RegexURLResolver}, {'type': RegexURLPattern, 'name': 'named-url3'}],
[{'type': RegexURLResolver}, {'type': RegexURLPattern, 'name': 'named-url4'}],
[{'type': RegexURLResolver}, {'type': RegexURLPattern, 'name': None}],
[{'type': RegexURLResolver}, {'type': RegexURLResolver}],
]
try:
resolve('/included/non-existent-url', urlconf=urls)
self.fail('resolve did not raise a 404')
except Resolver404 as e:
# make sure we at least matched the root ('/') url resolver:
self.assertIn('tried', e.args[0])
tried = e.args[0]['tried']
self.assertEqual(len(e.args[0]['tried']), len(url_types_names), 'Wrong number of tried URLs returned. Expected %s, got %s.' % (len(url_types_names), len(e.args[0]['tried'])))
for tried, expected in zip(e.args[0]['tried'], url_types_names):
for t, e in zip(tried, expected):
self.assertIsInstance(t, e['type'], '%s is not an instance of %s' % (t, e['type']))
if 'name' in e:
if not e['name']:
self.assertTrue(t.name is None, 'Expected no URL name but found %s.' % t.name)
else:
self.assertEqual(t.name, e['name'], 'Wrong URL name. Expected "%s", got "%s".' % (e['name'], t.name))
@override_settings(ROOT_URLCONF='urlpatterns_reverse.reverse_lazy_urls')
class ReverseLazyTest(TestCase):
def test_redirect_with_lazy_reverse(self):
response = self.client.get('/redirect/')
self.assertRedirects(response, "/redirected_to/", status_code=301)
def test_user_permission_with_lazy_reverse(self):
User.objects.create_user('alfred', 'alfred@example.com', password='testpw')
response = self.client.get('/login_required_view/')
self.assertRedirects(response, "/login/?next=/login_required_view/", status_code=302)
self.client.login(username='alfred', password='testpw')
response = self.client.get('/login_required_view/')
self.assertEqual(response.status_code, 200)
class ReverseLazySettingsTest(AdminScriptTestCase):
"""
Test that reverse_lazy can be used in settings without causing a circular
import error.
"""
def setUp(self):
self.write_settings('settings.py', extra="""
from django.core.urlresolvers import reverse_lazy
LOGIN_URL = reverse_lazy('login')""")
def tearDown(self):
self.remove_settings('settings.py')
def test_lazy_in_settings(self):
out, err = self.run_manage(['sqlall', 'auth'])
self.assertNoOutput(err)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls')
class ReverseShortcutTests(TestCase):
def test_redirect_to_object(self):
# We don't really need a model; just something with a get_absolute_url
class FakeObj(object):
def get_absolute_url(self):
return "/hi-there/"
res = redirect(FakeObj())
self.assertIsInstance(res, HttpResponseRedirect)
self.assertEqual(res.url, '/hi-there/')
res = redirect(FakeObj(), permanent=True)
self.assertIsInstance(res, HttpResponsePermanentRedirect)
self.assertEqual(res.url, '/hi-there/')
def test_redirect_to_view_name(self):
res = redirect('hardcoded2')
self.assertEqual(res.url, '/hardcoded/doc.pdf')
res = redirect('places', 1)
self.assertEqual(res.url, '/places/1/')
res = redirect('headlines', year='2008', month='02', day='17')
self.assertEqual(res.url, '/headlines/2008.02.17/')
self.assertRaises(NoReverseMatch, redirect, 'not-a-view')
def test_redirect_to_url(self):
res = redirect('/foo/')
self.assertEqual(res.url, '/foo/')
res = redirect('http://example.com/')
self.assertEqual(res.url, 'http://example.com/')
def test_redirect_view_object(self):
from .views import absolute_kwargs_view
res = redirect(absolute_kwargs_view)
self.assertEqual(res.url, '/absolute_arg_view/')
self.assertRaises(NoReverseMatch, redirect, absolute_kwargs_view, wrong_argument=None)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.namespace_urls')
class NamespaceTests(TestCase):
def test_ambiguous_object(self):
"Names deployed via dynamic URL objects that require namespaces can't be resolved"
self.assertRaises(NoReverseMatch, reverse, 'urlobject-view')
self.assertRaises(NoReverseMatch, reverse, 'urlobject-view', args=[37, 42])
self.assertRaises(NoReverseMatch, reverse, 'urlobject-view', kwargs={'arg1': 42, 'arg2': 37})
def test_ambiguous_urlpattern(self):
"Names deployed via dynamic URL objects that require namespaces can't be resolved"
self.assertRaises(NoReverseMatch, reverse, 'inner-nothing')
self.assertRaises(NoReverseMatch, reverse, 'inner-nothing', args=[37, 42])
self.assertRaises(NoReverseMatch, reverse, 'inner-nothing', kwargs={'arg1': 42, 'arg2': 37})
def test_non_existent_namespace(self):
"Non-existent namespaces raise errors"
self.assertRaises(NoReverseMatch, reverse, 'blahblah:urlobject-view')
self.assertRaises(NoReverseMatch, reverse, 'test-ns1:blahblah:urlobject-view')
def test_normal_name(self):
"Normal lookups work as expected"
self.assertEqual('/normal/', reverse('normal-view'))
self.assertEqual('/normal/37/42/', reverse('normal-view', args=[37, 42]))
self.assertEqual('/normal/42/37/', reverse('normal-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/+%5C$*/', reverse('special-view'))
def test_simple_included_name(self):
"Normal lookups work on names included from other patterns"
self.assertEqual('/included/normal/', reverse('inc-normal-view'))
self.assertEqual('/included/normal/37/42/', reverse('inc-normal-view', args=[37, 42]))
self.assertEqual('/included/normal/42/37/', reverse('inc-normal-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/included/+%5C$*/', reverse('inc-special-view'))
def test_namespace_object(self):
"Dynamic URL objects can be found using a namespace"
self.assertEqual('/test1/inner/', reverse('test-ns1:urlobject-view'))
self.assertEqual('/test1/inner/37/42/', reverse('test-ns1:urlobject-view', args=[37, 42]))
self.assertEqual('/test1/inner/42/37/', reverse('test-ns1:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/test1/inner/+%5C$*/', reverse('test-ns1:urlobject-special-view'))
def test_embedded_namespace_object(self):
"Namespaces can be installed anywhere in the URL pattern tree"
self.assertEqual('/included/test3/inner/', reverse('test-ns3:urlobject-view'))
self.assertEqual('/included/test3/inner/37/42/', reverse('test-ns3:urlobject-view', args=[37, 42]))
self.assertEqual('/included/test3/inner/42/37/', reverse('test-ns3:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/included/test3/inner/+%5C$*/', reverse('test-ns3:urlobject-special-view'))
def test_namespace_pattern(self):
"Namespaces can be applied to include()'d urlpatterns"
self.assertEqual('/ns-included1/normal/', reverse('inc-ns1:inc-normal-view'))
self.assertEqual('/ns-included1/normal/37/42/', reverse('inc-ns1:inc-normal-view', args=[37, 42]))
self.assertEqual('/ns-included1/normal/42/37/', reverse('inc-ns1:inc-normal-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/ns-included1/+%5C$*/', reverse('inc-ns1:inc-special-view'))
def test_namespace_pattern_with_variable_prefix(self):
"When using a include with namespaces when there is a regex variable in front of it"
self.assertEqual('/ns-outer/42/normal/', reverse('inc-outer:inc-normal-view', kwargs={'outer': 42}))
self.assertEqual('/ns-outer/42/normal/', reverse('inc-outer:inc-normal-view', args=[42]))
self.assertEqual('/ns-outer/42/normal/37/4/', reverse('inc-outer:inc-normal-view', kwargs={'outer': 42, 'arg1': 37, 'arg2': 4}))
self.assertEqual('/ns-outer/42/normal/37/4/', reverse('inc-outer:inc-normal-view', args=[42, 37, 4]))
self.assertEqual('/ns-outer/42/+%5C$*/', reverse('inc-outer:inc-special-view', kwargs={'outer': 42}))
self.assertEqual('/ns-outer/42/+%5C$*/', reverse('inc-outer:inc-special-view', args=[42]))
def test_multiple_namespace_pattern(self):
"Namespaces can be embedded"
self.assertEqual('/ns-included1/test3/inner/', reverse('inc-ns1:test-ns3:urlobject-view'))
self.assertEqual('/ns-included1/test3/inner/37/42/', reverse('inc-ns1:test-ns3:urlobject-view', args=[37, 42]))
self.assertEqual('/ns-included1/test3/inner/42/37/', reverse('inc-ns1:test-ns3:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/ns-included1/test3/inner/+%5C$*/', reverse('inc-ns1:test-ns3:urlobject-special-view'))
def test_nested_namespace_pattern(self):
"Namespaces can be nested"
self.assertEqual('/ns-included1/ns-included4/ns-included1/test3/inner/', reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view'))
self.assertEqual('/ns-included1/ns-included4/ns-included1/test3/inner/37/42/', reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view', args=[37, 42]))
self.assertEqual('/ns-included1/ns-included4/ns-included1/test3/inner/42/37/', reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/ns-included1/ns-included4/ns-included1/test3/inner/+%5C$*/', reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-special-view'))
def test_app_lookup_object(self):
"A default application namespace can be used for lookup"
self.assertEqual('/default/inner/', reverse('testapp:urlobject-view'))
self.assertEqual('/default/inner/37/42/', reverse('testapp:urlobject-view', args=[37, 42]))
self.assertEqual('/default/inner/42/37/', reverse('testapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/default/inner/+%5C$*/', reverse('testapp:urlobject-special-view'))
def test_app_lookup_object_with_default(self):
"A default application namespace is sensitive to the 'current' app can be used for lookup"
self.assertEqual('/included/test3/inner/', reverse('testapp:urlobject-view', current_app='test-ns3'))
self.assertEqual('/included/test3/inner/37/42/', reverse('testapp:urlobject-view', args=[37, 42], current_app='test-ns3'))
self.assertEqual('/included/test3/inner/42/37/', reverse('testapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}, current_app='test-ns3'))
self.assertEqual('/included/test3/inner/+%5C$*/', reverse('testapp:urlobject-special-view', current_app='test-ns3'))
def test_app_lookup_object_without_default(self):
"An application namespace without a default is sensitive to the 'current' app can be used for lookup"
self.assertEqual('/other2/inner/', reverse('nodefault:urlobject-view'))
self.assertEqual('/other2/inner/37/42/', reverse('nodefault:urlobject-view', args=[37, 42]))
self.assertEqual('/other2/inner/42/37/', reverse('nodefault:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/other2/inner/+%5C$*/', reverse('nodefault:urlobject-special-view'))
self.assertEqual('/other1/inner/', reverse('nodefault:urlobject-view', current_app='other-ns1'))
self.assertEqual('/other1/inner/37/42/', reverse('nodefault:urlobject-view', args=[37, 42], current_app='other-ns1'))
self.assertEqual('/other1/inner/42/37/', reverse('nodefault:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}, current_app='other-ns1'))
self.assertEqual('/other1/inner/+%5C$*/', reverse('nodefault:urlobject-special-view', current_app='other-ns1'))
def test_special_chars_namespace(self):
self.assertEqual('/+%5C$*/included/normal/', reverse('special:inc-normal-view'))
self.assertEqual('/+%5C$*/included/normal/37/42/', reverse('special:inc-normal-view', args=[37, 42]))
self.assertEqual('/+%5C$*/included/normal/42/37/', reverse('special:inc-normal-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/+%5C$*/included/+%5C$*/', reverse('special:inc-special-view'))
def test_namespaces_with_variables(self):
"Namespace prefixes can capture variables: see #15900"
self.assertEqual('/inc70/', reverse('inc-ns5:inner-nothing', kwargs={'outer': '70'}))
self.assertEqual('/inc78/extra/foobar/', reverse('inc-ns5:inner-extra', kwargs={'outer': '78', 'extra': 'foobar'}))
self.assertEqual('/inc70/', reverse('inc-ns5:inner-nothing', args=['70']))
self.assertEqual('/inc78/extra/foobar/', reverse('inc-ns5:inner-extra', args=['78', 'foobar']))
@override_settings(ROOT_URLCONF=urlconf_outer.__name__)
class RequestURLconfTests(TestCase):
def test_urlconf(self):
response = self.client.get('/test/me/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'outer:/test/me/,'
b'inner:/inner_urlconf/second_test/')
response = self.client.get('/inner_urlconf/second_test/')
self.assertEqual(response.status_code, 200)
response = self.client.get('/second_test/')
self.assertEqual(response.status_code, 404)
@override_settings(
MIDDLEWARE_CLASSES=(
'%s.ChangeURLconfMiddleware' % middleware.__name__,
)
)
def test_urlconf_overridden(self):
response = self.client.get('/test/me/')
self.assertEqual(response.status_code, 404)
response = self.client.get('/inner_urlconf/second_test/')
self.assertEqual(response.status_code, 404)
response = self.client.get('/second_test/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'outer:,inner:/second_test/')
@override_settings(
MIDDLEWARE_CLASSES=(
'%s.NullChangeURLconfMiddleware' % middleware.__name__,
)
)
def test_urlconf_overridden_with_null(self):
self.assertRaises(ImproperlyConfigured, self.client.get, '/test/me/')
@override_settings(
MIDDLEWARE_CLASSES=(
'%s.ChangeURLconfMiddleware' % middleware.__name__,
'%s.ReverseInnerInResponseMiddleware' % middleware.__name__,
)
)
def test_reverse_inner_in_response_middleware(self):
"""
Test reversing a URL from the *overridden* URLconf from inside
a response middleware.
"""
response = self.client.get('/second_test/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'/second_test/')
@override_settings(
MIDDLEWARE_CLASSES=(
'%s.ChangeURLconfMiddleware' % middleware.__name__,
'%s.ReverseOuterInResponseMiddleware' % middleware.__name__,
)
)
def test_reverse_outer_in_response_middleware(self):
"""
Test reversing a URL from the *default* URLconf from inside
a response middleware.
"""
message = "Reverse for 'outer' with arguments '()' and keyword arguments '{}' not found."
with self.assertRaisesMessage(NoReverseMatch, message):
self.client.get('/second_test/')
@override_settings(
MIDDLEWARE_CLASSES=(
'%s.ChangeURLconfMiddleware' % middleware.__name__,
'%s.ReverseInnerInStreaming' % middleware.__name__,
)
)
def test_reverse_inner_in_streaming(self):
"""
Test reversing a URL from the *overridden* URLconf from inside
a streaming response.
"""
response = self.client.get('/second_test/')
self.assertEqual(response.status_code, 200)
self.assertEqual(b''.join(response), b'/second_test/')
@override_settings(
MIDDLEWARE_CLASSES=(
'%s.ChangeURLconfMiddleware' % middleware.__name__,
'%s.ReverseOuterInStreaming' % middleware.__name__,
)
)
def test_reverse_outer_in_streaming(self):
"""
Test reversing a URL from the *default* URLconf from inside
a streaming response.
"""
message = "Reverse for 'outer' with arguments '()' and keyword arguments '{}' not found."
with self.assertRaisesMessage(NoReverseMatch, message):
self.client.get('/second_test/')
b''.join(self.client.get('/second_test/'))
class ErrorHandlerResolutionTests(TestCase):
"""Tests for handler400, handler404 and handler500"""
def setUp(self):
from django.core.urlresolvers import RegexURLResolver
urlconf = 'urlpatterns_reverse.urls_error_handlers'
urlconf_callables = 'urlpatterns_reverse.urls_error_handlers_callables'
self.resolver = RegexURLResolver(r'^$', urlconf)
self.callable_resolver = RegexURLResolver(r'^$', urlconf_callables)
def test_named_handlers(self):
handler = (empty_view, {})
self.assertEqual(self.resolver.resolve400(), handler)
self.assertEqual(self.resolver.resolve404(), handler)
self.assertEqual(self.resolver.resolve500(), handler)
def test_callable_handlers(self):
handler = (empty_view, {})
self.assertEqual(self.callable_resolver.resolve400(), handler)
self.assertEqual(self.callable_resolver.resolve404(), handler)
self.assertEqual(self.callable_resolver.resolve500(), handler)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls_without_full_import')
class DefaultErrorHandlerTests(TestCase):
def test_default_handler(self):
"If the urls.py doesn't specify handlers, the defaults are used"
try:
response = self.client.get('/test/')
self.assertEqual(response.status_code, 404)
except AttributeError:
self.fail("Shouldn't get an AttributeError due to undefined 404 handler")
try:
self.assertRaises(ValueError, self.client.get, '/bad_view/')
except AttributeError:
self.fail("Shouldn't get an AttributeError due to undefined 500 handler")
@override_settings(ROOT_URLCONF=None)
class NoRootUrlConfTests(TestCase):
"""Tests for handler404 and handler500 if urlconf is None"""
def test_no_handler_exception(self):
self.assertRaises(ImproperlyConfigured, self.client.get, '/test/me/')
@override_settings(ROOT_URLCONF='urlpatterns_reverse.namespace_urls')
class ResolverMatchTests(TestCase):
def test_urlpattern_resolve(self):
for path, name, app_name, namespace, func, args, kwargs in resolve_test_data:
# Test legacy support for extracting "function, args, kwargs"
match_func, match_args, match_kwargs = resolve(path)
self.assertEqual(match_func, func)
self.assertEqual(match_args, args)
self.assertEqual(match_kwargs, kwargs)
# Test ResolverMatch capabilities.
match = resolve(path)
self.assertEqual(match.__class__, ResolverMatch)
self.assertEqual(match.url_name, name)
self.assertEqual(match.args, args)
self.assertEqual(match.kwargs, kwargs)
self.assertEqual(match.app_name, app_name)
self.assertEqual(match.namespace, namespace)
self.assertEqual(match.func, func)
# ... and for legacy purposes:
self.assertEqual(match[0], func)
self.assertEqual(match[1], args)
self.assertEqual(match[2], kwargs)
def test_resolver_match_on_request(self):
response = self.client.get('/resolver_match/')
resolver_match = response.resolver_match
self.assertEqual(resolver_match.url_name, 'test-resolver-match')
def test_resolver_match_on_request_before_resolution(self):
request = HttpRequest()
self.assertIsNone(request.resolver_match)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.erroneous_urls')
class ErroneousViewTests(TestCase):
def test_erroneous_resolve(self):
self.assertRaises(ImportError, self.client.get, '/erroneous_inner/')
self.assertRaises(ImportError, self.client.get, '/erroneous_outer/')
self.assertRaises(ViewDoesNotExist, self.client.get, '/missing_inner/')
self.assertRaises(ViewDoesNotExist, self.client.get, '/missing_outer/')
self.assertRaises(ViewDoesNotExist, self.client.get, '/uncallable/')
def test_erroneous_reverse(self):
"""
Ensure that a useful exception is raised when a regex is invalid in the
URLConf.
Refs #6170.
"""
# The regex error will be hit before NoReverseMatch can be raised
self.assertRaises(ImproperlyConfigured, reverse, 'whatever blah blah')
class ViewLoadingTests(TestCase):
def test_view_loading(self):
self.assertEqual(get_callable('urlpatterns_reverse.views.empty_view'),
empty_view)
# passing a callable should return the callable
self.assertEqual(get_callable(empty_view), empty_view)
def test_exceptions(self):
# A missing view (identified by an AttributeError) should raise
# ViewDoesNotExist, ...
six.assertRaisesRegex(self, ViewDoesNotExist,
".*View does not exist in.*",
get_callable,
'urlpatterns_reverse.views.i_should_not_exist')
# ... but if the AttributeError is caused by something else don't
# swallow it.
self.assertRaises(AttributeError, get_callable,
'urlpatterns_reverse.views_broken.i_am_broken')
|
|
from __future__ import absolute_import
from __future__ import unicode_literals
from builtins import str
import xml.dom.minidom
from isisdata.templatetags.app_filters import *
from .mods_helper import *
def initial_response(citation_id):
doc = xml.dom.minidom.Document()
# <formats id="http://unapi.info/news/archives/9">
formats = doc.createElement('formats')
formats.setAttribute('id', citation_id)
doc.appendChild(formats)
# <format name="mods" type="application/xml" />
fmt = doc.createElement('format')
fmt.setAttribute('name', 'mods')
fmt.setAttribute('type', 'application/xml')
formats.appendChild(fmt)
return doc.toprettyxml(indent=" ", encoding="utf-8")
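# For a hypothetical citation_id 'CBB001' this returns (pretty-printed):
#   <?xml version="1.0" encoding="utf-8"?>
#   <formats id="CBB001">
#       <format name="mods" type="application/xml"/>
#   </formats>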
def generate_mods_xml(citation):
# create basis for xml
doc = xml.dom.minidom.Document()
mods = doc.createElement('mods')
doc.appendChild(mods)
mods.setAttribute('xmlns', 'http://www.loc.gov/mods/v3')
mods.setAttribute('version', '3.5')
#doc.createElementNS('http://www.w3.org/2001/XMLSchema-instance', 'xsi')
#mods.setAttribute('xsi:schemaLocation', 'http://www.loc.gov/mods/v3 http://www.loc.gov/standards/mods/v3/mods-3-5.xsd')
# add titles
titleInfo = doc.createElement('titleInfo')
mods.appendChild(titleInfo)
title = doc.createElement('title')
titleInfo.appendChild(title)
title_text = doc.createTextNode(bleach_safe(get_title(citation)))
title.appendChild(title_text)
# add authors
authors = citation.acrelation_set.filter(type_controlled__in=['AU']).order_by('data_display_order')
for author in authors:
name = doc.createElement('name')
name.setAttribute('type', 'personal')
mods.appendChild(name)
name_part = doc.createElement('namePart')
name.appendChild(name_part)
name_part_text = doc.createTextNode(author.authority.name)
name_part.appendChild(name_part_text)
# role
role = doc.createElement('role')
name.appendChild(role)
role_term = doc.createElement('roleTerm')
role_term.setAttribute('type', 'text')
role.appendChild(role_term)
role_term_text = doc.createTextNode('creator')
role_term.appendChild(role_term_text)
# add editors
editors = citation.acrelation_set.filter(type_controlled__in=['ED'])
for edt in editors:
name = doc.createElement('name')
name.setAttribute('type', 'personal')
mods.appendChild(name)
name_part = doc.createElement('namePart')
name.appendChild(name_part)
name_part_text = doc.createTextNode(edt.authority.name)
name_part.appendChild(name_part_text)
# role
role = doc.createElement('role')
name.appendChild(role)
role_term = doc.createElement('roleTerm')
role_term.setAttribute('type', 'text')
role.appendChild(role_term)
role_term_text = doc.createTextNode('editor')
role_term.appendChild(role_term_text)
# add contributors
contributors = citation.acrelation_set.filter(type_controlled__in=['CO','AD'])
for contr in contributors:
name = doc.createElement('name')
name.setAttribute('type', 'personal')
mods.appendChild(name)
name_part = doc.createElement('namePart')
name.appendChild(name_part)
name_part_text = doc.createTextNode(contr.authority.name)
name_part.appendChild(name_part_text)
# role
role = doc.createElement('role')
name.appendChild(role)
role_term = doc.createElement('roleTerm')
role_term.setAttribute('type', 'text')
role.appendChild(role_term)
role_term_text = doc.createTextNode('contributor')
role_term.appendChild(role_term_text)
# publication date
origin_info = doc.createElement('originInfo')
origin_info.setAttribute('eventType', 'publication')
mods.appendChild(origin_info)
date_issued = doc.createElement('dateIssued')
date_issued.appendChild(doc.createTextNode(get_pub_year(citation)))
origin_info.appendChild(date_issued)
# add abstract
abstract = doc.createElement('abstract')
mods.appendChild(abstract)
abstract.appendChild(doc.createTextNode(citation.human_readable_abstract))
# type of resource
genre = doc.createElement('genre')
genre.setAttribute('authority', 'local')
cit_type = get_type(citation.type_controlled)  # citation.get_type_controlled_display().lower()
genre.appendChild(doc.createTextNode(cit_type))
mods.appendChild(genre)
part = doc.createElement('part')
# volume
volume = get_volume(citation)
if volume:
detail = doc.createElement('detail')
detail.setAttribute('type', 'volume')
part.appendChild(detail)
number = doc.createElement('number')
number.appendChild(doc.createTextNode(volume))
detail.appendChild(number)
issue = get_issue(citation)
if issue:
detail = doc.createElement('detail')
detail.setAttribute('type', 'issue')
part.appendChild(detail)
number = doc.createElement('number')
number.appendChild(doc.createTextNode(issue))
detail.appendChild(number)
publishers = get_publisher(citation)
for pub in publishers:
publisher = doc.createElement('publisher')
publisher.appendChild(doc.createTextNode(pub.authority.name))
origin_info.appendChild(publisher)
# Periodical
periodicals = citation.acrelation_set.filter(type_controlled__in=['PE'])
for periodical in periodicals:
# create <relatedItem><titleInfo><title>
related_item = doc.createElement('relatedItem')
related_item.setAttribute('type', 'host')
mods.appendChild(related_item)
rel_title_info = doc.createElement('titleInfo')
related_item.appendChild(rel_title_info)
rel_title = doc.createElement('title')
rel_title.appendChild(doc.createTextNode(periodical.authority.name))
rel_title_info.appendChild(rel_title)
# add volume, etc. info
related_item.appendChild(part)
# series
series = citation.acrelation_set.filter(type_controlled__in=['BS'])
for serie in series:
# create <relatedItem><titleInfo><title>
series_related_item = doc.createElement('relatedItem')
series_related_item.setAttribute('type', 'host')
mods.appendChild(series_related_item)
series_rel_title_info = doc.createElement('titleInfo')
series_related_item.appendChild(series_rel_title_info)
series_rel_title = doc.createElement('title')
series_rel_title.appendChild(doc.createTextNode(serie.authority.name))
series_rel_title_info.appendChild(series_rel_title)
# add volume, etc. info
series_related_item.appendChild(part)
# included in
included_in = CCRelation.objects.filter(object_id=citation.id, type_controlled='IC', object__public=True)
for included in included_in:
# create <relatedItem><titleInfo><title>
included_in_rel_item = doc.createElement('relatedItem')
included_in_rel_item.setAttribute('type', 'host')
mods.appendChild(included_in_rel_item)
included_in_rel_item_title_info = doc.createElement('titleInfo')
included_in_rel_item.appendChild(included_in_rel_item_title_info)
included_in_rel_item_title = doc.createElement('title')
included_in_rel_item_title.appendChild(doc.createTextNode(bleach_safe(get_title(included.subject))))
included_in_rel_item_title_info.appendChild(included_in_rel_item_title)
start_page = citation.part_details.page_begin
end_page = citation.part_details.page_end
if not end_page:
end_page = start_page
if start_page or end_page:
extent = doc.createElement('extent')
extent.setAttribute('unit', 'page')
part.appendChild(extent)
if start_page:
start = doc.createElement('start')
start.appendChild(doc.createTextNode(str(start_page)))
extent.appendChild(start)
if end_page:
end = doc.createElement('end')
end.appendChild(doc.createTextNode(str(end_page)))
extent.appendChild(end)
for linked_data in citation.linkeddata_entries.all():
if linked_data.type_controlled.name in ['DOI', 'ISBN'] :
identifier = doc.createElement('identifier')
if linked_data.type_controlled.name == 'DOI':
identifier.setAttribute('type', 'doi')
else:
identifier.setAttribute('type', 'isbn')
identifier.appendChild(doc.createTextNode(linked_data.universal_resource_name))
mods.appendChild(identifier)
return doc.toprettyxml(indent=" ", encoding="utf-8")
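# Illustrative sketch (not actual output; values are hypothetical) of the MODS
# skeleton assembled above:
#
#   <mods xmlns="http://www.loc.gov/mods/v3" version="3.5">
#     <titleInfo><title>Some Title</title></titleInfo>
#     <name type="personal">
#       <namePart>Author Name</namePart>
#       <role><roleTerm type="text">creator</roleTerm></role>
#     </name>
#     <originInfo eventType="publication"><dateIssued>1999</dateIssued></originInfo>
#     <abstract>...</abstract>
#     <genre authority="local">article</genre>
#     <relatedItem type="host">
#       <titleInfo><title>Journal Name</title></titleInfo>
#       <part>
#         <detail type="volume"><number>12</number></detail>
#         <extent unit="page"><start>1</start><end>10</end></extent>
#       </part>
#     </relatedItem>
#     <identifier type="doi">10.1000/xyz</identifier>
#   </mods>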
|
|
"""
Keras implementation of CapsNet in Hinton's paper Dynamic Routing Between Capsules.
The current version may only work with the TensorFlow backend; it would be straightforward to rewrite it in plain TF code.
Adapting it to other backends should be easy, but I have not tested this.
Usage:
python capsulenet.py
python capsulenet.py --epochs 50
python capsulenet.py --epochs 50 --routings 3
... ...
Result:
    Validation accuracy > 99.5% after 20 epochs; converges to 99.66% after 50 epochs.
    About 110 seconds per epoch on a single GTX 1070 GPU.
Author: Xifeng Guo, E-mail: `guoxifeng1990@163.com`, Github: `https://github.com/XifengGuo/CapsNet-Keras`
"""
import numpy as np
from keras import layers, models, optimizers
from keras import backend as K
from keras import callbacks  # used by train(); imported here so the module also works when imported
from keras.utils import to_categorical
from keras.preprocessing.image import ImageDataGenerator  # used by train_generator() inside train()
import matplotlib.pyplot as plt
from utils import combine_images
from PIL import Image
from capsulelayers import CapsuleLayer, PrimaryCap, Length, Mask
K.set_image_data_format('channels_last')
def CapsNet(input_shape, n_class, routings):
"""
A Capsule Network on MNIST.
:param input_shape: data shape, 3d, [width, height, channels]
:param n_class: number of classes
:param routings: number of routing iterations
    :return: Two Keras Models: the first is used for training, the second for evaluation.
        `eval_model` can also be used for training.
"""
x = layers.Input(shape=input_shape)
# Layer 1: Just a conventional Conv2D layer
conv1 = layers.Conv2D(filters=256, kernel_size=9, strides=1, padding='valid', activation='relu', name='conv1')(x)
# Layer 2: Conv2D layer with `squash` activation, then reshape to [None, num_capsule, dim_capsule]
primarycaps = PrimaryCap(conv1, dim_capsule=8, n_channels=32, kernel_size=9, strides=2, padding='valid')
# Layer 3: Capsule layer. Routing algorithm works here.
digitcaps = CapsuleLayer(num_capsule=n_class, dim_capsule=16, routings=routings,
name='digitcaps')(primarycaps)
    # Layer 4: An auxiliary layer that replaces each capsule with its length, to match the true label's shape.
    # If using tensorflow directly, this would not be necessary. :)
out_caps = Length(name='capsnet')(digitcaps)
# Decoder network.
y = layers.Input(shape=(n_class,))
masked_by_y = Mask()([digitcaps, y]) # The true label is used to mask the output of capsule layer. For training
masked = Mask()(digitcaps) # Mask using the capsule with maximal length. For prediction
# Shared Decoder model in training and prediction
decoder = models.Sequential(name='decoder')
decoder.add(layers.Dense(512, activation='relu', input_dim=16*n_class))
decoder.add(layers.Dense(1024, activation='relu'))
decoder.add(layers.Dense(np.prod(input_shape), activation='sigmoid'))
decoder.add(layers.Reshape(target_shape=input_shape, name='out_recon'))
# Models for training and evaluation (prediction)
train_model = models.Model([x, y], [out_caps, decoder(masked_by_y)])
eval_model = models.Model(x, [out_caps, decoder(masked)])
# manipulate model
noise = layers.Input(shape=(n_class, 16))
noised_digitcaps = layers.Add()([digitcaps, noise])
masked_noised_y = Mask()([noised_digitcaps, y])
manipulate_model = models.Model([x, y, noise], decoder(masked_noised_y))
return train_model, eval_model, manipulate_model
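# Hedged usage sketch (mirrors the __main__ block below; MNIST-shaped input assumed):
#   train_model, eval_model, manipulate_model = CapsNet(input_shape=(28, 28, 1),
#                                                       n_class=10, routings=3)
#   train_model.summary()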
def margin_loss(y_true, y_pred):
"""
    Margin loss for Eq.(4). When y_true[i, :] contains more than one `1`, this loss should still work, though that case has not been tested.
:param y_true: [None, n_classes]
:param y_pred: [None, num_capsule]
:return: a scalar loss value.
"""
L = y_true * K.square(K.maximum(0., 0.9 - y_pred)) + \
0.5 * (1 - y_true) * K.square(K.maximum(0., y_pred - 0.1))
return K.mean(K.sum(L, 1))
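# Worked example (illustrative): with m+ = 0.9, m- = 0.1 and lambda = 0.5,
# a present class predicted at 0.95 contributes max(0, 0.9 - 0.95)^2 = 0,
# while an absent class predicted at 0.2 contributes 0.5 * max(0, 0.2 - 0.1)^2 = 0.005;
# the per-sample loss sums over classes and the batch loss is the mean over samples.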
def train(model, data, args):
"""
Training a CapsuleNet
:param model: the CapsuleNet model
:param data: a tuple containing training and testing data, like `((x_train, y_train), (x_test, y_test))`
:param args: arguments
:return: The trained model
"""
# unpacking the data
(x_train, y_train), (x_test, y_test) = data
# callbacks
log = callbacks.CSVLogger(args.save_dir + '/log.csv')
tb = callbacks.TensorBoard(log_dir=args.save_dir + '/tensorboard-logs',
batch_size=args.batch_size, histogram_freq=int(args.debug))
checkpoint = callbacks.ModelCheckpoint(args.save_dir + '/weights-{epoch:02d}.h5', monitor='val_capsnet_acc',
save_best_only=True, save_weights_only=True, verbose=1)
lr_decay = callbacks.LearningRateScheduler(schedule=lambda epoch: args.lr * (args.lr_decay ** epoch))
# compile the model
model.compile(optimizer=optimizers.Adam(lr=args.lr),
loss=[margin_loss, 'mse'],
loss_weights=[1., args.lam_recon],
metrics={'capsnet': 'accuracy'})
"""
# Training without data augmentation:
model.fit([x_train, y_train], [y_train, x_train], batch_size=args.batch_size, epochs=args.epochs,
validation_data=[[x_test, y_test], [y_test, x_test]], callbacks=[log, tb, checkpoint, lr_decay])
"""
# Begin: Training with data augmentation ---------------------------------------------------------------------#
    def train_generator(x, y, batch_size, shift_fraction=0.):
        train_datagen = ImageDataGenerator(width_shift_range=shift_fraction,
                                           height_shift_range=shift_fraction)  # shift up to 2 pixels for MNIST
        generator = train_datagen.flow(x, y, batch_size=batch_size)
        while True:
            x_batch, y_batch = next(generator)
            yield ([x_batch, y_batch], [y_batch, x_batch])
    # Training with data augmentation. With shift_fraction=0., no augmentation is applied.
model.fit_generator(generator=train_generator(x_train, y_train, args.batch_size, args.shift_fraction),
steps_per_epoch=int(y_train.shape[0] / args.batch_size),
epochs=args.epochs,
validation_data=[[x_test, y_test], [y_test, x_test]],
callbacks=[log, tb, checkpoint, lr_decay])
# End: Training with data augmentation -----------------------------------------------------------------------#
model.save_weights(args.save_dir + '/trained_model.h5')
print('Trained model saved to \'%s/trained_model.h5\'' % args.save_dir)
from utils import plot_log
plot_log(args.save_dir + '/log.csv', show=True)
return model
def test(model, data, args):
x_test, y_test = data
y_pred, x_recon = model.predict(x_test, batch_size=100)
print('-'*30 + 'Begin: test' + '-'*30)
print('Test acc:', np.sum(np.argmax(y_pred, 1) == np.argmax(y_test, 1))/y_test.shape[0])
img = combine_images(np.concatenate([x_test[:50],x_recon[:50]]))
image = img * 255
Image.fromarray(image.astype(np.uint8)).save(args.save_dir + "/real_and_recon.png")
print()
print('Reconstructed images are saved to %s/real_and_recon.png' % args.save_dir)
print('-' * 30 + 'End: test' + '-' * 30)
plt.imshow(plt.imread(args.save_dir + "/real_and_recon.png"))
plt.show()
def manipulate_latent(model, data, args):
print('-'*30 + 'Begin: manipulate' + '-'*30)
x_test, y_test = data
index = np.argmax(y_test, 1) == args.digit
number = np.random.randint(low=0, high=sum(index) - 1)
x, y = x_test[index][number], y_test[index][number]
x, y = np.expand_dims(x, 0), np.expand_dims(y, 0)
noise = np.zeros([1, 10, 16])
x_recons = []
for dim in range(16):
for r in [-0.25, -0.2, -0.15, -0.1, -0.05, 0, 0.05, 0.1, 0.15, 0.2, 0.25]:
tmp = np.copy(noise)
tmp[:,:,dim] = r
x_recon = model.predict([x, y, tmp])
x_recons.append(x_recon)
x_recons = np.concatenate(x_recons)
img = combine_images(x_recons, height=16)
image = img*255
Image.fromarray(image.astype(np.uint8)).save(args.save_dir + '/manipulate-%d.png' % args.digit)
print('manipulated result saved to %s/manipulate-%d.png' % (args.save_dir, args.digit))
print('-' * 30 + 'End: manipulate' + '-' * 30)
def load_mnist():
# the data, shuffled and split between train and test sets
from keras.datasets import mnist
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(-1, 28, 28, 1).astype('float32') / 255.
x_test = x_test.reshape(-1, 28, 28, 1).astype('float32') / 255.
y_train = to_categorical(y_train.astype('float32'))
y_test = to_categorical(y_test.astype('float32'))
return (x_train, y_train), (x_test, y_test)
if __name__ == "__main__":
import os
import argparse
from keras.preprocessing.image import ImageDataGenerator
from keras import callbacks
# setting the hyper parameters
parser = argparse.ArgumentParser(description="Capsule Network on MNIST.")
parser.add_argument('--epochs', default=50, type=int)
parser.add_argument('--batch_size', default=100, type=int)
parser.add_argument('--lr', default=0.001, type=float,
help="Initial learning rate")
parser.add_argument('--lr_decay', default=0.9, type=float,
help="The value multiplied by lr at each epoch. Set a larger value for larger epochs")
parser.add_argument('--lam_recon', default=0.392, type=float,
help="The coefficient for the loss of decoder")
parser.add_argument('-r', '--routings', default=3, type=int,
help="Number of iterations used in routing algorithm. should > 0")
parser.add_argument('--shift_fraction', default=0.1, type=float,
help="Fraction of pixels to shift at most in each direction.")
parser.add_argument('--debug', action='store_true',
help="Save weights by TensorBoard")
parser.add_argument('--save_dir', default='./result')
parser.add_argument('-t', '--testing', action='store_true',
help="Test the trained model on testing dataset")
parser.add_argument('--digit', default=5, type=int,
help="Digit to manipulate")
parser.add_argument('-w', '--weights', default=None,
help="The path of the saved weights. Should be specified when testing")
args = parser.parse_args()
print(args)
if not os.path.exists(args.save_dir):
os.makedirs(args.save_dir)
# load data
(x_train, y_train), (x_test, y_test) = load_mnist()
# define model
model, eval_model, manipulate_model = CapsNet(input_shape=x_train.shape[1:],
n_class=len(np.unique(np.argmax(y_train, 1))),
routings=args.routings)
model.summary()
# train or test
if args.weights is not None: # init the model weights with provided one
model.load_weights(args.weights)
if not args.testing:
train(model=model, data=((x_train, y_train), (x_test, y_test)), args=args)
else: # as long as weights are given, will run testing
if args.weights is None:
            print('No weights are provided. Will test using randomly initialized weights.')
manipulate_latent(manipulate_model, (x_test, y_test), args)
test(model=eval_model, data=(x_test, y_test), args=args)
|
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import sys
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth.hashers import make_password
from django.contrib.sites.models import Site
from django.core.urlresolvers import clear_url_caches
from django.db import IntegrityError
from django.test import RequestFactory, TestCase
from django.utils.translation import override
from cms import api
from cms.apphook_pool import apphook_pool
from cms.appresolver import clear_app_resolvers
from cms.exceptions import AppAlreadyRegistered
from cms.models import Title
from cms.test_utils.testcases import BaseCMSTestCase
from cms.utils import get_cms_setting
from cms.utils.i18n import get_language_list, force_language
from djangocms_helper.utils import create_user
from ..models import Work
from allink_core.allink_categories.models import AllinkCategory
APP_MODULE = 'work.cms_apps'
DEFAULT_WORK_NAMESPACE = 'work'
class CleanUpMixin(object):
def tearDown(self):
self.reset_all()
super(CleanUpMixin, self).tearDown()
def reset_all(self):
"""
        Reset everything that could leak from a previous test into the current/next test.
:return: None
"""
self.delete_app_module()
self.reload_urls()
self.apphook_clear()
def delete_app_module(self):
"""
Remove APP_MODULE from sys.modules. Taken from cms.
:return: None
"""
if APP_MODULE in sys.modules:
del sys.modules[APP_MODULE]
def apphook_clear(self):
"""
Clean up apphook_pool and sys.modules. Taken from cms with slight
adjustments and fixes.
:return: None
"""
try:
apphooks = apphook_pool.get_apphooks()
except AppAlreadyRegistered:
            # There is an issue with app discovery, or I'm using it wrong;
            # setting `discovered` to True solves it. That may be due to the
            # import from aldryn_events.cms_apps, which registers EventListAppHook.
apphook_pool.discovered = True
apphooks = apphook_pool.get_apphooks()
for name, label in list(apphooks):
if apphook_pool.apps[name].__class__.__module__ in sys.modules:
del sys.modules[apphook_pool.apps[name].__class__.__module__]
apphook_pool.clear()
def reload_urls(self):
"""
        Clean up URL-related state (caches, app resolvers, modules).
Taken from cms.
:return: None
"""
clear_app_resolvers()
clear_url_caches()
url_modules = [
'cms.urls',
settings.ROOT_URLCONF
]
for module in url_modules:
if module in sys.modules:
del sys.modules[module]
class DefaultApphookMixin(object):
"""
    Creates the default apphook page for work. Relies on the
    BaseWorkTest.setUp method and its utilities.
"""
def setUp(self):
super(DefaultApphookMixin, self).setUp()
self.app_hook_page = self.create_apphook_page(multilang=True)
class DefaultSetupMixin(object):
su_username = 'user'
su_password = 'pass'
data = {
'work1': {
'en': {'title': 'work1', 'slug': 'work1',
'lead': 'text1'},
'de': {'title': 'work2', 'slug': 'work2',
'lead': 'text2'},
},
'work2': {
        # This should *not* have an EN translation
'de': {'title': 'work2-de', 'slug': 'work2-de',
'lead': 'text2-de'},
},
        # this category is only supposed to be used in the work model
'category1': {'name': 'Test Category 1', 'slug': 'test-category-1',
'model_names': ['work'],
},
        # this category is supposed to be used in both the work and people models
'category2': {'name': 'Test Category 2', 'slug': 'test-category-2',
'model_names': ['work', 'people'],
},
        # this category is not supposed to be used in the work model
'category3': {'name': 'Test Category 3', 'slug': 'test-category-3',
'model_names': ['work', 'people'],
},
}
def setUp(self):
self.template = get_cms_setting('TEMPLATES')[0][0]
self.language = settings.LANGUAGES[0][0]
self.page = api.create_page(
'page one en', self.template, self.language, published=True)
api.create_title('de', 'page de', self.page)
self.page.publish('de')
self.placeholder = self.page.placeholders.all()[0]
self.superuser = self.create_superuser()
super(DefaultSetupMixin, self).setUp()
def create_superuser(self):
return User.objects.create_superuser(
self.su_username, 'email@example.com', self.su_password)
def create_user(self, user_name, user_password, is_staff=False,
is_superuser=False):
return User.objects.create(
username=user_name,
first_name='{0} first_name'.format(user_name),
last_name='{0} last_name'.format(user_name),
password=make_password(user_password),
is_staff=is_staff,
is_superuser=is_superuser
)
def create_apphook_page(self, multilang=False):
with force_language('en'):
page = api.create_page(
title='Work en', template=self.template, language='en',
published=True,
parent=self.page,
apphook='WorkApphook',
apphook_namespace=DEFAULT_WORK_NAMESPACE,
)
page.publish('en')
if multilang:
api.create_title('de', 'Work de', page)
page.publish('de')
return page.reload()
class BaseWorkTest(DefaultSetupMixin,
CleanUpMixin,
BaseCMSTestCase,
TestCase):
@staticmethod
def reload(obj, language=None):
"""Simple convenience method for re-fetching an object from the ORM,
optionally "as" a specified language."""
try:
new_obj = obj.__class__.objects.language(language).get(id=obj.id)
        except Exception:
new_obj = obj.__class__.objects.get(id=obj.id)
return new_obj
def assertEqualItems(self, a, b):
try:
# In Python3, this method has been renamed (poorly)
return self.assertCountEqual(a, b)
        except Exception:
# In 2.6, assertItemsEqual() doesn't sort first
return self.assertItemsEqual(sorted(a), sorted(b))
def mktranslation(self, obj, lang, **kwargs):
"""Simple method of adding a translation to an existing object."""
try:
obj.set_current_language(lang)
        except Exception:
try:
obj.translate(lang)
except IntegrityError:
pass
for k, v in kwargs.items():
setattr(obj, k, v)
obj.save()
def setUp(self):
super(BaseWorkTest, self).setUp()
with override('en'):
self.work1 = Work(**self.data['work1']['en'])
self.work1.title = 'work1'
self.work1.slug = 'work1-slug'
self.work1.save()
# Create categories
self.rootcategory1 = AllinkCategory.add_root(name='root 1')
self.rootcategory1.model_names = self.data['category1']['model_names']
self.rootcategory1.save()
self.category1 = self.rootcategory1.add_child(name=self.data['category1']['name'])
self.category1.save()
self.rootcategory2 = AllinkCategory.add_root(name='root 2')
self.rootcategory2.model_names = self.data['category2']['model_names']
self.rootcategory2.save()
self.category2 = self.rootcategory2.add_child(name=self.data['category2']['name'])
self.category2.save()
self.category3 = self.rootcategory2.add_child(name=self.data['category3']['name'])
self.category3.save()
# Add a DE translation for work1
self.mktranslation(self.work1, 'de', **self.data['work1']['de'])
# Make work2
with override('de'):
self.work2 = Work(**self.data['work2']['de'])
self.work2.save()
def set_default_work_objects_current_language(self, language):
"""
Make sure parler active language is set to language.
:param language: language_code
:return: None
"""
self.work1.set_current_language(language)
self.work2.set_current_language(language)
class CMSRequestBasedTest(CleanUpMixin, TestCase):
"""Sets-up User(s) and CMS Pages for testing."""
languages = get_language_list()
@classmethod
def setUpClass(cls):
cls.request_factory = RequestFactory()
cls.user = create_user('normal', 'normal@admin.com', 'normal')
cls.site1 = Site.objects.get(pk=1)
@classmethod
def tearDownClass(cls):
User.objects.all().delete()
def get_or_create_page(self, base_title=None, languages=None):
"""Creates a page with a given title, or, if it already exists, just
retrieves and returns it."""
from cms.api import create_page, create_title
if not base_title:
# No title? Create one.
base_title = self.rand_str(prefix="page", length=8)
if not languages:
            # If no langs supplied, use them all
languages = self.languages
# If there is already a page with this title, just return it.
try:
page_title = Title.objects.get(title=base_title)
return page_title.page.get_draft_object()
        except Exception:
pass
# No? Okay, create one.
page = create_page(base_title, 'fullwidth.html', language=languages[0])
# If there are multiple languages, create the translations
if len(languages) > 1:
for lang in languages[1:]:
title_lang = "{0}-{1}".format(base_title, lang)
create_title(language=lang, title=title_lang, page=page)
page.publish(lang)
return page.get_draft_object()
def get_page_request(
self, page, user, path=None, edit=False, lang_code='en'):
from cms.middleware.toolbar import ToolbarMiddleware
path = path or page and page.get_absolute_url()
if edit:
path += '?edit'
request = RequestFactory().get(path)
request.session = {}
request.user = user
request.LANGUAGE_CODE = lang_code
if edit:
request.GET = {'edit': None}
else:
request.GET = {'edit_off': None}
request.current_page = page
mid = ToolbarMiddleware()
mid.process_request(request)
return request
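# Hedged usage sketch for the helpers above (names as defined in this module;
# the test class and title are hypothetical):
#
#   class MyPageTest(CMSRequestBasedTest):
#       def test_toolbar_request(self):
#           page = self.get_or_create_page(base_title='sample')
#           request = self.get_page_request(page, self.user, edit=True)
#           self.assertTrue(request.toolbar)  # set by ToolbarMiddleware.process_request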
|
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: skip-file
import mxnet as mx
from mxnet.test_utils import *
from mxnet.base import MXNetError
import numpy as np
import os
import gzip
import pickle as pickle
import time
try:
import h5py
except ImportError:
h5py = None
import sys
from common import assertRaises
import unittest
def test_MNISTIter():
# prepare data
get_mnist_ubyte()
batch_size = 100
train_dataiter = mx.io.MNISTIter(
image="data/train-images-idx3-ubyte",
label="data/train-labels-idx1-ubyte",
data_shape=(784,),
batch_size=batch_size, shuffle=1, flat=1, silent=0, seed=10)
# test_loop
    nbatch = 60000 // batch_size
batch_count = 0
for batch in train_dataiter:
batch_count += 1
assert(nbatch == batch_count)
# test_reset
train_dataiter.reset()
train_dataiter.iter_next()
label_0 = train_dataiter.getlabel().asnumpy().flatten()
train_dataiter.iter_next()
train_dataiter.iter_next()
train_dataiter.iter_next()
train_dataiter.iter_next()
train_dataiter.reset()
train_dataiter.iter_next()
label_1 = train_dataiter.getlabel().asnumpy().flatten()
assert(sum(label_0 - label_1) == 0)
def test_Cifar10Rec():
get_cifar10()
dataiter = mx.io.ImageRecordIter(
path_imgrec="data/cifar/train.rec",
mean_img="data/cifar/cifar10_mean.bin",
rand_crop=False,
        rand_mirror=False,
shuffle=False,
data_shape=(3, 28, 28),
batch_size=100,
preprocess_threads=4,
prefetch_buffer=1)
labelcount = [0 for i in range(10)]
batchcount = 0
for batch in dataiter:
npdata = batch.data[0].asnumpy().flatten().sum()
sys.stdout.flush()
batchcount += 1
nplabel = batch.label[0].asnumpy()
for i in range(nplabel.shape[0]):
labelcount[int(nplabel[i])] += 1
for i in range(10):
assert(labelcount[i] == 5000)
def test_NDArrayIter():
data = np.ones([1000, 2, 2])
label = np.ones([1000, 1])
for i in range(1000):
data[i] = i / 100
label[i] = i / 100
dataiter = mx.io.NDArrayIter(
data, label, 128, True, last_batch_handle='pad')
batchidx = 0
for batch in dataiter:
batchidx += 1
assert(batchidx == 8)
dataiter = mx.io.NDArrayIter(
data, label, 128, False, last_batch_handle='pad')
batchidx = 0
labelcount = [0 for i in range(10)]
for batch in dataiter:
label = batch.label[0].asnumpy().flatten()
assert((batch.data[0].asnumpy()[:, 0, 0] == label).all())
for i in range(label.shape[0]):
labelcount[int(label[i])] += 1
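    # With 1000 samples and batch_size=128, 'pad' fills the 8th batch by
    # wrapping around to the first 24 samples, whose labels are 0, so label 0
    # is counted 100 + 24 = 124 times and every other label 100 times.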
for i in range(10):
if i == 0:
assert(labelcount[i] == 124)
else:
assert(labelcount[i] == 100)
def test_NDArrayIter_h5py():
if not h5py:
return
data = np.ones([1000, 2, 2])
label = np.ones([1000, 1])
for i in range(1000):
data[i] = i / 100
label[i] = i / 100
try:
os.remove("ndarraytest.h5")
except OSError:
pass
with h5py.File("ndarraytest.h5") as f:
f.create_dataset("data", data=data)
f.create_dataset("label", data=label)
dataiter = mx.io.NDArrayIter(
f["data"], f["label"], 128, True, last_batch_handle='pad')
batchidx = 0
for batch in dataiter:
batchidx += 1
assert(batchidx == 8)
dataiter = mx.io.NDArrayIter(
f["data"], f["label"], 128, False, last_batch_handle='pad')
labelcount = [0 for i in range(10)]
for batch in dataiter:
label = batch.label[0].asnumpy().flatten()
assert((batch.data[0].asnumpy()[:, 0, 0] == label).all())
for i in range(label.shape[0]):
labelcount[int(label[i])] += 1
try:
os.remove("ndarraytest.h5")
except OSError:
pass
for i in range(10):
if i == 0:
assert(labelcount[i] == 124)
else:
assert(labelcount[i] == 100)
def test_NDArrayIter_csr():
# creating toy data
num_rows = rnd.randint(5, 15)
num_cols = rnd.randint(1, 20)
batch_size = rnd.randint(1, num_rows)
shape = (num_rows, num_cols)
csr, _ = rand_sparse_ndarray(shape, 'csr')
dns = csr.asnumpy()
# CSRNDArray or scipy.sparse.csr_matrix with last_batch_handle not equal to 'discard' will throw NotImplementedError
assertRaises(NotImplementedError, mx.io.NDArrayIter,
{'data': csr}, dns, batch_size)
try:
import scipy.sparse as spsp
train_data = spsp.csr_matrix(dns)
assertRaises(NotImplementedError, mx.io.NDArrayIter,
{'data': train_data}, dns, batch_size)
except ImportError:
pass
# CSRNDArray with shuffle
csr_iter = iter(mx.io.NDArrayIter({'csr_data': csr, 'dns_data': dns}, dns, batch_size,
shuffle=True, last_batch_handle='discard'))
num_batch = 0
for batch in csr_iter:
num_batch += 1
assert(num_batch == num_rows // batch_size)
# make iterators
csr_iter = iter(mx.io.NDArrayIter(
csr, csr, batch_size, last_batch_handle='discard'))
begin = 0
for batch in csr_iter:
expected = np.zeros((batch_size, num_cols))
end = begin + batch_size
expected[:num_rows - begin] = dns[begin:end]
if end > num_rows:
expected[num_rows - begin:] = dns[0:end - num_rows]
assert_almost_equal(batch.data[0].asnumpy(), expected)
begin += batch_size
def test_LibSVMIter():
def check_libSVMIter_synthetic():
cwd = os.getcwd()
data_path = os.path.join(cwd, 'data.t')
label_path = os.path.join(cwd, 'label.t')
with open(data_path, 'w') as fout:
fout.write('1.0 0:0.5 2:1.2\n')
fout.write('-2.0\n')
fout.write('-3.0 0:0.6 1:2.4 2:1.2\n')
fout.write('4 2:-1.2\n')
with open(label_path, 'w') as fout:
fout.write('1.0\n')
fout.write('-2.0 0:0.125\n')
fout.write('-3.0 2:1.2\n')
fout.write('4 1:1.0 2:-1.2\n')
data_dir = os.path.join(cwd, 'data')
data_train = mx.io.LibSVMIter(data_libsvm=data_path, label_libsvm=label_path,
data_shape=(3, ), label_shape=(3, ), batch_size=3)
first = mx.nd.array([[0.5, 0., 1.2], [0., 0., 0.], [0.6, 2.4, 1.2]])
second = mx.nd.array([[0., 0., -1.2], [0.5, 0., 1.2], [0., 0., 0.]])
i = 0
for batch in iter(data_train):
expected = first.asnumpy() if i == 0 else second.asnumpy()
data = data_train.getdata()
data.check_format(True)
assert_almost_equal(data.asnumpy(), expected)
i += 1
def check_libSVMIter_news_data():
news_metadata = {
'name': 'news20.t',
'origin_name': 'news20.t.bz2',
'url': "https://apache-mxnet.s3-accelerate.dualstack.amazonaws.com/gluon/dataset/news20.t.bz2",
'feature_dim': 62060 + 1,
'num_classes': 20,
'num_examples': 3993,
}
batch_size = 33
num_examples = news_metadata['num_examples']
data_dir = os.path.join(os.getcwd(), 'data')
get_bz2_data(data_dir, news_metadata['name'], news_metadata['url'],
news_metadata['origin_name'])
path = os.path.join(data_dir, news_metadata['name'])
data_train = mx.io.LibSVMIter(data_libsvm=path, data_shape=(news_metadata['feature_dim'],),
batch_size=batch_size)
for epoch in range(2):
num_batches = 0
for batch in data_train:
# check the range of labels
data = batch.data[0]
label = batch.label[0]
data.check_format(True)
assert(np.sum(label.asnumpy() > 20) == 0)
assert(np.sum(label.asnumpy() <= 0) == 0)
num_batches += 1
expected_num_batches = num_examples / batch_size
assert(num_batches == int(expected_num_batches)), num_batches
data_train.reset()
def check_libSVMIter_exception():
cwd = os.getcwd()
data_path = os.path.join(cwd, 'data.t')
label_path = os.path.join(cwd, 'label.t')
with open(data_path, 'w') as fout:
fout.write('1.0 0:0.5 2:1.2\n')
fout.write('-2.0\n')
            # The line below has a negative index and should raise an exception
fout.write('-3.0 -1:0.6 1:2.4 2:1.2\n')
fout.write('4 2:-1.2\n')
with open(label_path, 'w') as fout:
fout.write('1.0\n')
fout.write('-2.0 0:0.125\n')
fout.write('-3.0 2:1.2\n')
fout.write('4 1:1.0 2:-1.2\n')
data_dir = os.path.join(cwd, 'data')
data_train = mx.io.LibSVMIter(data_libsvm=data_path, label_libsvm=label_path,
data_shape=(3, ), label_shape=(3, ), batch_size=3)
for batch in iter(data_train):
data_train.get_data().asnumpy()
check_libSVMIter_synthetic()
check_libSVMIter_news_data()
assertRaises(MXNetError, check_libSVMIter_exception)
def test_DataBatch():
from nose.tools import ok_
from mxnet.io import DataBatch
import re
batch = DataBatch(data=[mx.nd.ones((2, 3))])
    ok_(re.match(
        r'DataBatch: data shapes: \[\(2L?, 3L?\)\] label shapes: None', str(batch)))
batch = DataBatch(data=[mx.nd.ones((2, 3)), mx.nd.ones(
(7, 8))], label=[mx.nd.ones((4, 5))])
    ok_(re.match(
        r'DataBatch: data shapes: \[\(2L?, 3L?\), \(7L?, 8L?\)\] label shapes: \[\(4L?, 5L?\)\]', str(batch)))
def test_CSVIter():
def check_CSVIter_synthetic(dtype='float32'):
cwd = os.getcwd()
data_path = os.path.join(cwd, 'data.t')
label_path = os.path.join(cwd, 'label.t')
entry_str = '1'
        if dtype == 'int32':
            entry_str = '200000001'
        if dtype == 'int64':
            entry_str = '2147483648'
with open(data_path, 'w') as fout:
for i in range(1000):
fout.write(','.join([entry_str for _ in range(8*8)]) + '\n')
with open(label_path, 'w') as fout:
for i in range(1000):
fout.write('0\n')
data_train = mx.io.CSVIter(data_csv=data_path, data_shape=(8, 8),
label_csv=label_path, batch_size=100, dtype=dtype)
expected = mx.nd.ones((100, 8, 8), dtype=dtype) * int(entry_str)
for batch in iter(data_train):
data_batch = data_train.getdata()
assert_almost_equal(data_batch.asnumpy(), expected.asnumpy())
assert data_batch.asnumpy().dtype == expected.asnumpy().dtype
for dtype in ['int32', 'int64', 'float32']:
check_CSVIter_synthetic(dtype=dtype)
@unittest.skip("Flaky test: https://github.com/apache/incubator-mxnet/issues/11359")
def test_ImageRecordIter_seed_augmentation():
get_cifar10()
seed_aug = 3
# check whether to get constant images after fixing seed_aug
dataiter = mx.io.ImageRecordIter(
path_imgrec="data/cifar/train.rec",
mean_img="data/cifar/cifar10_mean.bin",
shuffle=False,
data_shape=(3, 28, 28),
batch_size=3,
rand_crop=True,
rand_mirror=True,
max_random_scale=1.3,
max_random_illumination=3,
max_rotate_angle=10,
random_l=50,
random_s=40,
random_h=10,
max_shear_ratio=2,
seed_aug=seed_aug)
batch = dataiter.next()
data = batch.data[0].asnumpy().astype(np.uint8)
dataiter = mx.io.ImageRecordIter(
path_imgrec="data/cifar/train.rec",
mean_img="data/cifar/cifar10_mean.bin",
shuffle=False,
data_shape=(3, 28, 28),
batch_size=3,
rand_crop=True,
rand_mirror=True,
max_random_scale=1.3,
max_random_illumination=3,
max_rotate_angle=10,
random_l=50,
random_s=40,
random_h=10,
max_shear_ratio=2,
seed_aug=seed_aug)
batch = dataiter.next()
data2 = batch.data[0].asnumpy().astype(np.uint8)
assert(np.array_equal(data,data2))
# check whether to get different images after change seed_aug
dataiter = mx.io.ImageRecordIter(
path_imgrec="data/cifar/train.rec",
mean_img="data/cifar/cifar10_mean.bin",
shuffle=False,
data_shape=(3, 28, 28),
batch_size=3,
rand_crop=True,
rand_mirror=True,
max_random_scale=1.3,
max_random_illumination=3,
max_rotate_angle=10,
random_l=50,
random_s=40,
random_h=10,
max_shear_ratio=2,
seed_aug=seed_aug+1)
batch = dataiter.next()
data2 = batch.data[0].asnumpy().astype(np.uint8)
assert(not np.array_equal(data,data2))
# check whether seed_aug changes the iterator behavior
dataiter = mx.io.ImageRecordIter(
path_imgrec="data/cifar/train.rec",
mean_img="data/cifar/cifar10_mean.bin",
shuffle=False,
data_shape=(3, 28, 28),
batch_size=3,
seed_aug=seed_aug)
batch = dataiter.next()
data = batch.data[0].asnumpy().astype(np.uint8)
dataiter = mx.io.ImageRecordIter(
path_imgrec="data/cifar/train.rec",
mean_img="data/cifar/cifar10_mean.bin",
shuffle=False,
data_shape=(3, 28, 28),
batch_size=3,
seed_aug=seed_aug)
batch = dataiter.next()
data2 = batch.data[0].asnumpy().astype(np.uint8)
assert(np.array_equal(data,data2))
if __name__ == "__main__":
test_NDArrayIter()
if h5py:
test_NDArrayIter_h5py()
test_MNISTIter()
test_Cifar10Rec()
test_LibSVMIter()
test_NDArrayIter_csr()
test_CSVIter()
test_ImageRecordIter_seed_augmentation()
|
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .fetchers import NUMetadatasFetcher
from .fetchers import NUGlobalMetadatasFetcher
from .fetchers import NUVNFInterfaceDescriptorsFetcher
from bambou import NURESTObject
class NUVNFDescriptor(NURESTObject):
""" Represents a VNFDescriptor in the VSD
Notes:
The behavioral and deployment information of a VNF is defined in the VNF descriptor template. The template is based on the libvirt domain XML and is on-boarded in a VNF catalog. The resource requirements for CPU, memory and storage are defined in this screen and the rest of the template is inherited from the VNF Metadata object.
"""
__rest_name__ = "vnfdescriptor"
__resource_name__ = "vnfdescriptors"
## Constants
CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
CONST_TYPE_FIREWALL = "FIREWALL"
CONST_TYPE_WAN_OPT = "WAN_OPT"
CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
def __init__(self, **kwargs):
""" Initializes a VNFDescriptor instance
Notes:
                You can specify all parameters while calling this method.
A special argument named `data` will enable you to load the
object from a Python dictionary
Examples:
>>> vnfdescriptor = NUVNFDescriptor(id=u'xxxx-xxx-xxx-xxx', name=u'VNFDescriptor')
>>> vnfdescriptor = NUVNFDescriptor(data=my_dict)
"""
super(NUVNFDescriptor, self).__init__()
# Read/Write Attributes
self._cpu_count = None
self._name = None
self._memory_mb = None
self._vendor = None
self._description = None
self._metadata_id = None
self._visible = None
self._entity_scope = None
self._associated_vnf_threshold_policy_id = None
self._storage_gb = None
self._external_id = None
self._type = None
self.expose_attribute(local_name="cpu_count", remote_name="CPUCount", attribute_type=int, is_required=False, is_unique=False)
self.expose_attribute(local_name="name", remote_name="name", attribute_type=str, is_required=True, is_unique=False)
self.expose_attribute(local_name="memory_mb", remote_name="memoryMB", attribute_type=int, is_required=True, is_unique=False)
self.expose_attribute(local_name="vendor", remote_name="vendor", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="description", remote_name="description", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="metadata_id", remote_name="metadataID", attribute_type=str, is_required=True, is_unique=False)
self.expose_attribute(local_name="visible", remote_name="visible", attribute_type=bool, is_required=False, is_unique=False)
self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
self.expose_attribute(local_name="associated_vnf_threshold_policy_id", remote_name="associatedVNFThresholdPolicyID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="storage_gb", remote_name="storageGB", attribute_type=int, is_required=True, is_unique=False)
self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
self.expose_attribute(local_name="type", remote_name="type", attribute_type=str, is_required=False, is_unique=False, choices=[u'FIREWALL', u'WAN_OPT'])
# Fetchers
self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.vnf_interface_descriptors = NUVNFInterfaceDescriptorsFetcher.fetcher_with_object(parent_object=self, relationship="child")
self._compute_args(**kwargs)
# Properties
@property
def cpu_count(self):
""" Get cpu_count value.
Notes:
                Number of CPUs to be allocated to the VNF instance when deployed
This attribute is named `CPUCount` in VSD API.
"""
return self._cpu_count
@cpu_count.setter
def cpu_count(self, value):
""" Set cpu_count value.
Notes:
                Number of CPUs to be allocated to the VNF instance when deployed
This attribute is named `CPUCount` in VSD API.
"""
self._cpu_count = value
@property
def name(self):
""" Get name value.
Notes:
Name of the VNF Descriptor
"""
return self._name
@name.setter
def name(self, value):
""" Set name value.
Notes:
Name of the VNF Descriptor
"""
self._name = value
@property
def memory_mb(self):
""" Get memory_mb value.
Notes:
                Memory (in MB) to be allocated for the VNF instance when deployed
This attribute is named `memoryMB` in VSD API.
"""
return self._memory_mb
@memory_mb.setter
def memory_mb(self, value):
""" Set memory_mb value.
Notes:
                Memory (in MB) to be allocated for the VNF instance when deployed
This attribute is named `memoryMB` in VSD API.
"""
self._memory_mb = value
@property
def vendor(self):
""" Get vendor value.
Notes:
The vendor generating this VNF Descriptor
"""
return self._vendor
@vendor.setter
def vendor(self, value):
""" Set vendor value.
Notes:
The vendor generating this VNF Descriptor
"""
self._vendor = value
@property
def description(self):
""" Get description value.
Notes:
A description of the VNF Descriptor
"""
return self._description
@description.setter
def description(self, value):
""" Set description value.
Notes:
A description of the VNF Descriptor
"""
self._description = value
@property
def metadata_id(self):
""" Get metadata_id value.
Notes:
Id of referenced Metadata Object
This attribute is named `metadataID` in VSD API.
"""
return self._metadata_id
@metadata_id.setter
def metadata_id(self, value):
""" Set metadata_id value.
Notes:
Id of referenced Metadata Object
This attribute is named `metadataID` in VSD API.
"""
self._metadata_id = value
@property
def visible(self):
""" Get visible value.
Notes:
                Controls whether the descriptor is visible in the catalog for creating new VNFs
"""
return self._visible
@visible.setter
def visible(self, value):
""" Set visible value.
Notes:
                Controls whether the descriptor is visible in the catalog for creating new VNFs
"""
self._visible = value
@property
def entity_scope(self):
""" Get entity_scope value.
Notes:
                Specifies whether the scope of the entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
return self._entity_scope
@entity_scope.setter
def entity_scope(self, value):
""" Set entity_scope value.
Notes:
                Specifies whether the scope of the entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
self._entity_scope = value
@property
def associated_vnf_threshold_policy_id(self):
""" Get associated_vnf_threshold_policy_id value.
Notes:
The Id of referenced VNF threshold policy
This attribute is named `associatedVNFThresholdPolicyID` in VSD API.
"""
return self._associated_vnf_threshold_policy_id
@associated_vnf_threshold_policy_id.setter
def associated_vnf_threshold_policy_id(self, value):
""" Set associated_vnf_threshold_policy_id value.
Notes:
The Id of referenced VNF threshold policy
This attribute is named `associatedVNFThresholdPolicyID` in VSD API.
"""
self._associated_vnf_threshold_policy_id = value
@property
def storage_gb(self):
""" Get storage_gb value.
Notes:
                Disk storage (in GB) to be allocated to the VNF instance when deployed
This attribute is named `storageGB` in VSD API.
"""
return self._storage_gb
@storage_gb.setter
def storage_gb(self, value):
""" Set storage_gb value.
Notes:
                Disk storage (in GB) to be allocated to the VNF instance when deployed
This attribute is named `storageGB` in VSD API.
"""
self._storage_gb = value
@property
def external_id(self):
""" Get external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
return self._external_id
@external_id.setter
def external_id(self, value):
""" Set external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
self._external_id = value
@property
def type(self):
""" Get type value.
Notes:
Type of virtual network function
"""
return self._type
@type.setter
def type(self, value):
""" Set type value.
Notes:
Type of virtual network function
"""
self._type = value
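# Hedged usage sketch (attribute names as exposed above; values are hypothetical):
#
#   vnfd = NUVNFDescriptor(name=u'my-firewall',
#                          type=NUVNFDescriptor.CONST_TYPE_FIREWALL,
#                          memory_mb=2048, storage_gb=20,
#                          metadata_id=u'xxxx-xxx-xxx-xxx')
#   vnfd.cpu_count = 2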
|
|
######################################################################
#
# Copyright (C) 2013
# Associated Universities, Inc. Washington DC, USA,
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Library General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Library General Public
# License for more details.
#
# You should have received a copy of the GNU Library General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 675 Massachusetts Ave, Cambridge, MA 02139, USA.
#
# Correspondence concerning VLA Pipelines should be addressed as follows:
# Please register and submit helpdesk tickets via: https://help.nrao.edu
# Postal address:
# National Radio Astronomy Observatory
# VLA Pipeline Support Office
# PO Box O
# Socorro, NM, USA
#
######################################################################
# MAKE FINAL GAIN CALIBRATION TABLES
logprint ("Starting EVLA_pipe_finalcals.py", logfileout='logs/finalcals.log')
time_list=runtiming('finalcals', 'start')
QA2_finalcals='Pass'
# Find reference antenna again in case there has been more flagging
refantspw=''
refantfield=calibrator_field_select_string
findrefant=RefAntHeuristics(vis=ms_active,field=refantfield,geometry=True,flagging=True)
RefAntOutput=findrefant.calculate()
refAnt=str(RefAntOutput[0])+','+str(RefAntOutput[1])+','+str(RefAntOutput[2])+','+str(RefAntOutput[3])
logprint ("The pipeline will use antenna(s) "+refAnt+" as the reference", logfileout='logs/finalcals.log')
# Initial phase solutions on delay calibrator
syscommand='rm -rf finaldelayinitialgain.g'
os.system(syscommand)
if (cal3C84_d == True):
default('gaincal')
vis=ms_active
caltable='finaldelayinitialgain.g'
field=delay_field_select_string
spw=tst_delay_spw
intent=''
selectdata=True
uvrange=uvrange3C84
scan=delay_scan_select_string
solint='int'
combine='scan'
preavg=-1.0
refant=refAnt
minblperant=minBL_for_cal
minsnr=3.0
solnorm=False
gaintype='G'
smodel=[]
calmode='p'
append=False
gaintable=priorcals
gainfield=['']
interp=['']
spwmap=[]
gaincurve=False
opacity=[]
parang=False
async=False
gaincal()
else:
default('gaincal')
vis=ms_active
caltable='finaldelayinitialgain.g'
field=delay_field_select_string
spw=tst_delay_spw
intent=''
selectdata=True
uvrange=''
scan=delay_scan_select_string
solint='int'
combine='scan'
preavg=-1.0
refant=refAnt
minblperant=minBL_for_cal
minsnr=3.0
solnorm=False
gaintype='G'
smodel=[]
calmode='p'
append=False
gaintable=priorcals
gainfield=['']
interp=['']
spwmap=[]
gaincurve=False
opacity=[]
parang=False
async=False
gaincal()
syscommand='rm -rf finaldelay.k'
os.system(syscommand)
GainTables=copy.copy(priorcals)
GainTables.append('finaldelayinitialgain.g')
if (cal3C84_d == True):
default('gaincal')
vis=ms_active
caltable='finaldelay.k'
field=''
spw=''
intent=''
selectdata=True
uvrange=uvrange3C84
scan=delay_scan_select_string
solint='inf'
combine='scan'
preavg=-1.0
refant=refAnt
minblperant=minBL_for_cal
minsnr=3.0
solnorm=False
gaintype='K'
smodel=[]
calmode='p'
append=False
gaintable=GainTables
gainfield=['']
interp=['']
spwmap=[]
gaincurve=False
opacity=[]
parang=False
async=False
gaincal()
else:
default('gaincal')
vis=ms_active
caltable='finaldelay.k'
field=''
spw=''
intent=''
selectdata=True
uvrange=''
scan=delay_scan_select_string
solint='inf'
combine='scan'
preavg=-1.0
refant=refAnt
minblperant=minBL_for_cal
minsnr=3.0
solnorm=False
gaintype='K'
smodel=[]
calmode='p'
append=False
gaintable=GainTables
gainfield=['']
interp=['']
spwmap=[]
gaincurve=False
opacity=[]
parang=False
async=False
gaincal()
logprint ("Delay calibration complete", logfileout='logs/finalcals.log')
syscommand='rm -rf finalBPinitialgain.g'
os.system(syscommand)
GainTables=copy.copy(priorcals)
GainTables.append('finaldelay.k')
if (cal3C84_bp == True):
default('gaincal')
vis=ms_active
caltable='finalBPinitialgain.g'
field=''
spw=tst_bpass_spw
selectdata=True
uvrange=uvrange3C84
scan=bandpass_scan_select_string
solint=gain_solint1
combine='scan'
preavg=-1.0
refant=refAnt
minblperant=minBL_for_cal
minsnr=3.0
solnorm=False
gaintype='G'
smodel=[]
calmode='p'
append=False
gaintable=GainTables
gainfield=['']
interp=['']
spwmap=[]
gaincurve=False
opacity=[]
parang=False
async=False
gaincal()
else:
default('gaincal')
vis=ms_active
caltable='finalBPinitialgain.g'
field=''
spw=tst_bpass_spw
selectdata=True
uvrange=''
scan=bandpass_scan_select_string
solint=gain_solint1
combine='scan'
preavg=-1.0
refant=refAnt
minblperant=minBL_for_cal
minsnr=3.0
solnorm=False
gaintype='G'
smodel=[]
calmode='p'
append=False
gaintable=GainTables
gainfield=['']
interp=['']
spwmap=[]
gaincurve=False
opacity=[]
parang=False
async=False
gaincal()
logprint ("Initial BP gain calibration complete", logfileout='logs/finalcals.log')
syscommand='rm -rf finalBPcal.b'
os.system(syscommand)
BPGainTables=copy.copy(priorcals)
BPGainTables.append('finaldelay.k')
BPGainTables.append('finalBPinitialgain.g')
if (cal3C84_bp == True):
default('bandpass')
vis=ms_active
caltable='finalBPcal.b'
field=bandpass_field_select_string
spw=''
selectdata=True
uvrange=uvrange3C84
scan=bandpass_scan_select_string
solint='inf'
combine='scan'
refant=refAnt
minblperant=minBL_for_cal
minsnr=5.0
solnorm=False
bandtype='B'
fillgaps=0
append=False
gaintable=BPGainTables
gainfield=['']
interp=['']
spwmap=[]
gaincurve=False
opacity=[]
parang=False
async=False
bandpass()
else:
default('bandpass')
vis=ms_active
caltable='finalBPcal.b'
field=bandpass_field_select_string
spw=''
selectdata=True
uvrange=''
scan=bandpass_scan_select_string
solint='inf'
combine='scan'
refant=refAnt
minblperant=minBL_for_cal
minsnr=5.0
solnorm=False
bandtype='B'
fillgaps=0
append=False
gaintable=BPGainTables
gainfield=['']
interp=['']
spwmap=[]
gaincurve=False
opacity=[]
parang=False
async=False
bandpass()
logprint ("Bandpass calibration complete", logfileout='logs/finalcals.log')
AllCalTables=copy.copy(priorcals)
AllCalTables.append('finaldelay.k')
AllCalTables.append('finalBPcal.b')
#Derive an average phase solution for the bandpass calibrator to apply
#to all data to make QA plots easier to interpret.
default('gaincal')
vis=ms_active
caltable='averagephasegain.g'
field=bandpass_field_select_string
spw=''
selectdata=True
uvrange=''
scan=bandpass_scan_select_string
solint='inf'
combine='scan'
preavg=-1.0
refant=refAnt
minblperant=minBL_for_cal
minsnr=1.0
solnorm=False
gaintype='G'
smodel=[]
calmode='p'
append=False
gaintable=AllCalTables
gainfield=['']
interp=['']
spwmap=[]
gaincurve=False
opacity=[]
parang=False
async=False
gaincal()
#In case any antenna is flagged by this process, unflag all solutions
#in this gain table (if an antenna does not exist or has bad solutions
#from other steps, it will be flagged by those gain tables).
default('flagdata')
vis='averagephasegain.g'
mode='unflag'
action='apply'
flagbackup=False
savepars=False
async=False
flagdata()
AllCalTables=copy.copy(priorcals)
AllCalTables.append('finaldelay.k')
AllCalTables.append('finalBPcal.b')
AllCalTables.append('averagephasegain.g')
default('applycal')
vis=ms_active
field=''
spw=''
selectdata=True
scan=calibrator_scan_select_string
gaintable=AllCalTables
interp=['']
spwmap=[]
gaincurve=False
opacity=[]
parang=False
calwt=False
flagbackup=False
async=False
applycal()
syscommand='rm -rf calibrators.ms'
os.system(syscommand)
default('split')
vis=ms_active
outputvis='calibrators.ms'
datacolumn='corrected'
field=''
spw=''
width=int(max(channels))
antenna=''
timebin='0s'
timerange=''
scan=calibrator_scan_select_string
intent=''
array=''
uvrange=''
correlation=''
observation=''
keepflags=False
async=False
split()
tb.open('calibrators.ms')
positions = []
for ii in range(0,len(field_positions[0][0])):
positions.append([field_positions[0][0][ii], field_positions[1][0][ii]])
standard_source_names = [ '3C48', '3C138', '3C147', '3C286' ]
standard_source_fields = find_standards(positions)
ii=0
for fields in standard_source_fields:
for myfield in fields:
spws = field_spws[myfield]
for myspw in spws:
reference_frequency = center_frequencies[myspw]
EVLA_band = find_EVLA_band(reference_frequency)
logprint ("Center freq for spw "+str(myspw)+" = "+str(reference_frequency)+", observing band = "+EVLA_band, logfileout='logs/calprep.log')
model_image = standard_source_names[ii]+'_'+EVLA_band+'.im'
logprint ("Setting model for field "+str(myfield)+" spw "+str(myspw)+" using "+model_image, logfileout='logs/calprep.log')
default('setjy')
vis='calibrators.ms'
field=str(myfield)
spw=str(myspw)
selectdata=False
modimage=model_image
listmodimages=False
scalebychan=True
fluxdensity=-1
standard='Perley-Butler 2010'
usescratch=False
async=False
setjy()
ii=ii+1
tb.close()
fitfunc = lambda p, x: p[0] + p[1] * x
errfunc = lambda p, x, y, err: (y - fitfunc(p, x)) / err
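# The fit below is a straight line in log-log space: assuming a power law
# S(nu) = S0 * (nu/nu0)**alpha, log10(S) = a + b*log10(nu) with b the spectral
# index; flux errors are propagated as dlog10(S) = (dS/S)/ln(10) ~ (dS/S)/2.303,
# which is how lerrs is computed further down.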
try:
ff = open(fluxscale_output, 'r')
except IOError as err:
logprint (fluxscale_output+" doesn't exist, error: "+err.filename, logfileout='logs/finalcals.log')
# looking for lines like:
#2012-03-09 21:30:23 INFO fluxscale:::: Flux density for J1717-3342 in SpW=3 is: 1.94158 +/- 0.0123058 (SNR = 157.777, N= 34)
# sometimes they look like:
#2012-03-09 21:30:23 INFO fluxscale:::: Flux density for J1717-3342 in SpW=0 is: INSUFFICIENT DATA
# so watch for that.
sources = []
flux_densities = []
spws = []
for line in ff:
if 'Flux density for' in line:
fields = line[:-1].split()
if (fields[11] != 'INSUFFICIENT'):
sources.append(fields[7])
flux_densities.append([float(fields[11]), float(fields[13])])
spws.append(int(fields[9].split('=')[1]))
ii = 0
unique_sources = list(np.unique(sources))
results = []
for source in unique_sources:
indices = []
for ii in range(len(sources)):
if (sources[ii] == source):
indices.append(ii)
bands = []
for ii in range(len(indices)):
bands.append(find_EVLA_band(center_frequencies[spws[indices[ii]]]))
unique_bands = list(np.unique(bands))
for band in unique_bands:
lfreqs = []
lfds = []
lerrs = []
uspws = []
for ii in range(len(indices)):
if find_EVLA_band(center_frequencies[spws[indices[ii]]]) == band:
lfreqs.append(log10(center_frequencies[spws[indices[ii]]]))
lfds.append(log10(flux_densities[indices[ii]][0]))
lerrs.append((flux_densities[indices[ii]][1])/(flux_densities[indices[ii]][0])/2.303)
uspws.append(spws[indices[ii]])
if len(lfds) < 2:
pfinal = [lfds[0], 0.0]
covar = [0.0,0.0]
else:
alfds = scp.array(lfds)
alerrs = scp.array(lerrs)
alfreqs = scp.array(lfreqs)
pinit = [0.0, 0.0]
fit_out = scpo.leastsq(errfunc, pinit, args=(alfreqs, alfds, alerrs), full_output=1)
pfinal = fit_out[0]
covar = fit_out[1]
aa = pfinal[0]
bb = pfinal[1]
reffreq = 10.0**lfreqs[0]/1.0e9
fluxdensity = 10.0**(aa + bb*lfreqs[0])
spix = bb
results.append([ source, uspws, fluxdensity, spix, reffreq ])
logprint(source + ' ' + band + ' fitted spectral index = ' + str(spix), logfileout='logs/finalcals.log')
logprint("Frequency, data, and fitted data:", logfileout='logs/finalcals.log')
for ii in range(len(lfreqs)):
SS = fluxdensity * (10.0**lfreqs[ii]/reffreq/1.0e9)**spix
logprint(' '+str(10.0**lfreqs[ii]/1.0e9)+' '+ str(10.0**lfds[ii])+' '+str(SS), logfileout='logs/finalcals.log')
logprint ("Setting power-law fit in the model column", logfileout='logs/finalcals.log')
for result in results:
for spw_i in result[1]:
logprint('Running setjy on spw '+str(spw_i), logfileout='logs/finalcals.log')
default('setjy')
vis='calibrators.ms'
field = str(result[0])
#spw = ','.join(["%s" % ii for ii in result[1]])
spw = str(spw_i)
selectdata=False
modimage=''
listmodimages=False
scalebychan=True
fluxdensity = [ result[2], 0, 0, 0 ]
spix = result[3]
reffreq = str(result[4])+'GHz'
standard='Perley-Butler 2010'
usescratch=False
async=False
setjy()
# Derive gain tables. Note that gain curves, opacity corrections and
# antenna position corrections have already been applied during the
# applycal and split steps above.
# Need to add check for 3C84 in here, when heuristics have been sorted out
default('gaincal')
vis='calibrators.ms'
caltable='phaseshortgaincal.g'
field=''
spw=''
intent=''
selectdata=False
solint=new_gain_solint1
combine='scan'
preavg=-1.0
refant=refAnt
minblperant=minBL_for_cal
minsnr=3.0
solnorm=False
gaintype='G'
smodel=[]
calmode='p'
append=False
#gaintable=filter(None, [priorcals,'finaldelay.k','finalBPcal.b'])
gaintable=['']
gainfield=['']
interp=['']
spwmap=[]
gaincurve=False
opacity=[]
parang=False
async=False
gaincal()
default('gaincal')
vis='calibrators.ms'
caltable='finalampgaincal.g'
field=''
spw=''
intent=''
selectdata=False
solint=gain_solint2
combine='scan'
preavg=-1.0
refant=refAnt
minblperant=minBL_for_cal
minsnr=5.0
solnorm=False
gaintype='G'
smodel=[]
calmode='ap'
append=False
#gaintable=filter(None, [priorcals,'finaldelay.k','finalBPcal.b','phaseshortgaincal.g'])
gaintable=['phaseshortgaincal.g']
gainfield=['']
interp=['']
spwmap=[]
gaincurve=False
opacity=[]
parang=False
async=False
gaincal()
default('gaincal')
vis='calibrators.ms'
caltable='finalphasegaincal.g'
field=''
spw=''
intent=''
selectdata=False
solint=gain_solint2
combine='scan'
preavg=-1.0
refant=refAnt
minblperant=minBL_for_cal
minsnr=3.0
solnorm=False
gaintype='G'
smodel=[]
calmode='p'
append=False
#gaintable=filter(None, [priorcals,'finaldelay.k','finalBPcal.b','finalampgaincal.g'])
gaintable=['finalampgaincal.g']
gainfield=['']
interp=['']
spwmap=[]
gaincurve=False
opacity=[]
parang=False
async=False
gaincal()
logprint ("Final calibration tables created", logfileout='logs/finalcals.log')
logprint ("Plotting final calibration tables", logfileout='logs/finalcals.log')
# do some plotting
nplots=int(numAntenna/3)
if ((numAntenna%3)>0):
nplots = nplots + 1
tb.open('finaldelay.k')
fpar = tb.getcol('FPARAM')
tb.close()
delays = np.abs(fpar)
maxdelay = np.max(delays)
for ii in range(nplots):
filename='finaldelay'+str(ii)+'.png'
syscommand='rm -rf '+filename
os.system(syscommand)
antPlot=str(ii*3)+'~'+str(ii*3+2)
default('plotcal')
caltable='finaldelay.k'
xaxis='freq'
yaxis='delay'
poln=''
field=''
antenna=antPlot
spw=''
timerange=''
subplot=311
overplot=False
clearpanel='Auto'
iteration='antenna'
plotrange=[]
showflags=False
plotsymbol='o'
plotcolor='blue'
markersize=5.0
fontsize=10.0
showgui=False
figfile=filename
async=False
plotcal()
for ii in range(nplots):
filename='finalBPinitialgainphase'+str(ii)+'.png'
syscommand='rm -rf '+filename
os.system(syscommand)
antPlot=str(ii*3)+'~'+str(ii*3+2)
default('plotcal')
caltable='finalBPinitialgain.g'
xaxis='time'
yaxis='phase'
poln=''
field=''
antenna=antPlot
spw=''
timerange=''
subplot=311
overplot=False
clearpanel='Auto'
iteration='antenna'
plotrange=[0,0,-180,180]
showflags=False
plotsymbol='o-'
plotcolor='blue'
markersize=5.0
fontsize=10.0
showgui=False
figfile=filename
async=False
plotcal()
tb.open('finalBPcal.b')
dataVarCol = tb.getvarcol('CPARAM')
flagVarCol = tb.getvarcol('FLAG')
tb.close()
rowlist = dataVarCol.keys()
nrows = len(rowlist)
maxmaxamp = 0.0
maxmaxphase = 0.0
for rrow in rowlist:
dataArr = dataVarCol[rrow]
flagArr = flagVarCol[rrow]
amps=np.abs(dataArr)
phases=np.arctan2(np.imag(dataArr),np.real(dataArr))
good=np.logical_not(flagArr)
tmparr=amps[good]
if (len(tmparr)>0):
maxamp=np.max(amps[good])
if (maxamp>maxmaxamp):
maxmaxamp=maxamp
tmparr=np.abs(phases[good])
if (len(tmparr)>0):
maxphase=np.max(np.abs(phases[good]))*180./pi
if (maxphase>maxmaxphase):
maxmaxphase=maxphase
ampplotmax=maxmaxamp
phaseplotmax=maxmaxphase
for ii in range(nplots):
filename='finalBPcal_amp'+str(ii)+'.png'
syscommand='rm -rf '+filename
os.system(syscommand)
antPlot=str(ii*3)+'~'+str(ii*3+2)
default('plotcal')
caltable='finalBPcal.b'
xaxis='freq'
yaxis='amp'
poln=''
field=''
antenna=antPlot
spw=''
timerange=''
subplot=311
overplot=False
clearpanel='Auto'
iteration='antenna'
plotrange=[0,0,0,ampplotmax]
showflags=False
plotsymbol='o'
plotcolor='blue'
markersize=5.0
fontsize=10.0
showgui=False
figfile=filename
async=False
plotcal()
for ii in range(nplots):
filename='finalBPcal_phase'+str(ii)+'.png'
syscommand='rm -rf '+filename
os.system(syscommand)
antPlot=str(ii*3)+'~'+str(ii*3+2)
default('plotcal')
caltable='finalBPcal.b'
xaxis='freq'
yaxis='phase'
poln=''
field=''
antenna=antPlot
spw=''
timerange=''
subplot=311
overplot=False
clearpanel='Auto'
iteration='antenna'
plotrange=[0,0,-phaseplotmax,phaseplotmax]
showflags=False
plotsymbol='o'
plotcolor='blue'
markersize=5.0
fontsize=10.0
showgui=False
figfile=filename
async=False
plotcal()
for ii in range(nplots):
filename='phaseshortgaincal'+str(ii)+'.png'
syscommand='rm -rf '+filename
os.system(syscommand)
antPlot=str(ii*3)+'~'+str(ii*3+2)
default('plotcal')
caltable='phaseshortgaincal.g'
xaxis='time'
yaxis='phase'
poln=''
field=''
antenna=antPlot
spw=''
timerange=''
subplot=311
overplot=False
clearpanel='Auto'
iteration='antenna'
plotrange=[0,0,-180,180]
showflags=False
plotsymbol='o-'
plotcolor='blue'
markersize=5.0
fontsize=10.0
showgui=False
figfile=filename
async=False
plotcal()
tb.open('finalampgaincal.g')
cpar=tb.getcol('CPARAM')
flgs=tb.getcol('FLAG')
tb.close()
amps=np.abs(cpar)
good=np.logical_not(flgs)
maxamp=np.max(amps[good])
plotmax=max(2.0,maxamp)
for ii in range(nplots):
filename='finalamptimecal'+str(ii)+'.png'
syscommand='rm -rf '+filename
os.system(syscommand)
antPlot=str(ii*3)+'~'+str(ii*3+2)
default('plotcal')
caltable='finalampgaincal.g'
xaxis='time'
yaxis='amp'
poln=''
field=''
antenna=antPlot
spw=''
timerange=''
subplot=311
overplot=False
clearpanel='Auto'
iteration='antenna'
plotrange=[0,0,0,plotmax]
showflags=False
plotsymbol='o-'
plotcolor='blue'
markersize=5.0
fontsize=10.0
showgui=False
figfile=filename
async=False
plotcal()
for ii in range(nplots):
filename='finalampfreqcal'+str(ii)+'.png'
syscommand='rm -rf '+filename
os.system(syscommand)
antPlot=str(ii*3)+'~'+str(ii*3+2)
default('plotcal')
caltable='finalampgaincal.g'
xaxis='freq'
yaxis='amp'
poln=''
field=''
antenna=antPlot
spw=''
timerange=''
subplot=311
overplot=False
clearpanel='Auto'
iteration='antenna'
plotrange=[0,0,0,plotmax]
showflags=False
plotsymbol='o'
plotcolor='blue'
markersize=5.0
fontsize=10.0
showgui=False
figfile=filename
async=False
plotcal()
for ii in range(nplots):
filename='finalphasegaincal'+str(ii)+'.png'
syscommand='rm -rf '+filename
os.system(syscommand)
antPlot=str(ii*3)+'~'+str(ii*3+2)
default('plotcal')
caltable='finalphasegaincal.g'
xaxis='time'
yaxis='phase'
poln=''
field=''
antenna=antPlot
spw=''
timerange=''
subplot=311
overplot=False
clearpanel='Auto'
iteration='antenna'
plotrange=[0,0,-180,180]
showflags=False
plotsymbol='o-'
plotcolor='blue'
markersize=5.0
fontsize=10.0
showgui=False
figfile=filename
async=False
plotcal()
# Calculate fractions of flagged solutions for final QA2
flaggedDelaySolns=getCalFlaggedSoln('finaldelay.k')
flaggedBPSolns=getCalFlaggedSoln('finalBPcal.b')
flaggedAmpSolns=getCalFlaggedSoln('finalampgaincal.g')
flaggedPhaseSolns=getCalFlaggedSoln('finalphasegaincal.g')
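# QA2 heuristics: each table scores 'Fail' if it holds no solutions at all,
# 'Partial' if the median per-antenna flagged fraction exceeds the threshold
# (critfrac for delays, 0.2 for bandpass, 0.1 for the gain tables), and
# 'Pass' otherwise.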
if (flaggedDelaySolns['all']['total'] > 0):
if (flaggedDelaySolns['antmedian']['fraction'] > critfrac):
QA2_delay='Partial'
else:
QA2_delay='Pass'
else:
QA2_delay='Fail'
if (flaggedBPSolns['all']['total'] > 0):
if (flaggedBPSolns['antmedian']['fraction'] > 0.2):
QA2_BP='Partial'
else:
QA2_BP='Pass'
else:
QA2_BP='Fail'
if (flaggedAmpSolns['all']['total'] > 0):
if (flaggedAmpSolns['antmedian']['fraction'] > 0.1):
QA2_amp='Partial'
else:
QA2_amp='Pass'
else:
QA2_amp='Fail'
if (flaggedPhaseSolns['all']['total'] > 0):
if (flaggedPhaseSolns['antmedian']['fraction'] > 0.1):
QA2_phase='Partial'
else:
QA2_phase='Pass'
else:
QA2_phase='Fail'
if (QA2_delay=='Fail' or QA2_BP=='Fail' or QA2_amp=='Fail' or QA2_phase=='Fail'):
    QA2_finalcals='Fail'
elif (QA2_delay=='Partial' or QA2_BP=='Partial' or QA2_amp=='Partial' or QA2_phase=='Partial'):
    QA2_finalcals='Partial'
else:
    QA2_finalcals='Pass'
logprint ("QA2 score: "+QA2_finalcals, logfileout='logs/finalcals.log')
logprint ("Finished EVLA_pipe_finalcals.py", logfileout='logs/finalcals.log')
time_list=runtiming('finalcals', 'end')
pipeline_save()
|
|
"""
Wrapper methods for easy access to common operations, making them both less
complex and less verbose for one-off or simple use cases.
"""
from rabbitpy import amqp_queue
from rabbitpy import connection
from rabbitpy import exchange
from rabbitpy import message
class SimpleChannel(object):
"""The rabbitpy.simple.Channel class creates a context manager
implementation for use on a single channel where the connection is
automatically created and managed for you.
Example:
.. code:: python
import rabbitpy
with rabbitpy.SimpleChannel('amqp://localhost/%2f') as channel:
queue = rabbitpy.Queue(channel, 'my-queue')
:param str uri: The AMQP URI to connect with. For URI options, see the
:class:`~rabbitpy.connection.Connection` class documentation.
"""
def __init__(self, uri):
self.connection = None
self.channel = None
self.uri = uri
def __enter__(self):
self.connection = connection.Connection(self.uri)
self.channel = self.connection.channel()
return self.channel
def __exit__(self, exc_type, exc_val, exc_tb):
if not self.channel.closed:
self.channel.close()
if not self.connection.closed:
self.connection.close()
if exc_type and exc_val:
raise
def consume(uri=None, queue_name=None, no_ack=False, prefetch=None,
priority=None):
"""Consume messages from the queue as a generator:
.. code:: python
for message in rabbitpy.consume('amqp://localhost/%2F', 'my_queue'):
message.ack()
:param str uri: AMQP connection URI
:param str queue_name: The name of the queue to consume from
:param bool no_ack: Do not require acknowledgements
:param int prefetch: Set a prefetch count for the channel
:param int priority: Set the consumer priority
:rtype: :py:class:`Iterator`
:raises: :py:class:`ValueError`
"""
_validate_name(queue_name, 'queue')
with SimpleChannel(uri) as channel:
queue = amqp_queue.Queue(channel, queue_name)
for msg in queue.consume(no_ack, prefetch, priority):
yield msg
def get(uri=None, queue_name=None):
"""Get a message from RabbitMQ, auto-acknowledging with RabbitMQ if one
is returned.
Invoke directly as ``rabbitpy.get()``
:param str uri: AMQP URI to connect to
:param str queue_name: The queue name to get the message from
:rtype: :py:class:`rabbitpy.message.Message` or None
:raises: :py:class:`ValueError`
"""
_validate_name(queue_name, 'queue')
with SimpleChannel(uri) as channel:
queue = amqp_queue.Queue(channel, queue_name)
return queue.get(False)
def publish(uri=None, exchange_name=None, routing_key=None,
body=None, properties=None, confirm=False):
"""Publish a message to RabbitMQ. This should only be used for one-off
publishing, as you will suffer a performance penalty if you use it
repeatedly instead creating a connection and channel and publishing on that
:param str uri: AMQP URI to connect to
:param str exchange_name: The exchange to publish to
:param str routing_key: The routing_key to publish with
:param body: The message body
:type body: str or unicode or bytes or dict or list
:param dict properties: Dict representation of Basic.Properties
:param bool confirm: Confirm this delivery with Publisher Confirms
:rtype: bool or None
"""
if exchange_name is None:
exchange_name = ''
with SimpleChannel(uri) as channel:
msg = message.Message(channel, body or '', properties or dict())
if confirm:
channel.enable_publisher_confirms()
return msg.publish(exchange_name, routing_key or '',
mandatory=True)
else:
msg.publish(exchange_name, routing_key or '')
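# Hedged usage sketch (URI, exchange and routing key are illustrative):
#
#     if publish('amqp://localhost/%2f', 'events', 'user.signup',
#                body='hello', confirm=True):
#         print('delivery confirmed')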
def create_queue(uri=None, queue_name='', durable=True, auto_delete=False,
max_length=None, message_ttl=None, expires=None,
dead_letter_exchange=None, dead_letter_routing_key=None,
arguments=None):
"""Create a queue with RabbitMQ. This should only be used for one-off
operations. If a queue name is omitted, the name will be automatically
generated by RabbitMQ.
:param str uri: AMQP URI to connect to
:param str queue_name: The queue name to create
:param durable: Indicates if the queue should survive a RabbitMQ restart
:type durable: bool
:param bool auto_delete: Automatically delete when all consumers disconnect
:param int max_length: Maximum queue length
:param int message_ttl: Time-to-live of a message in milliseconds
:param expires: Milliseconds until a queue is removed after becoming idle
:type expires: int
:param dead_letter_exchange: Dead letter exchange for rejected messages
:type dead_letter_exchange: str
:param dead_letter_routing_key: Routing key for dead lettered messages
:type dead_letter_routing_key: str
:param dict arguments: Custom arguments for the queue
:raises: :py:class:`ValueError`
:raises: :py:class:`rabbitpy.RemoteClosedException`
"""
_validate_name(queue_name, 'queue')
with SimpleChannel(uri) as channel:
amqp_queue.Queue(channel, queue_name,
durable=durable,
auto_delete=auto_delete,
max_length=max_length,
message_ttl=message_ttl,
expires=expires,
dead_letter_exchange=dead_letter_exchange,
dead_letter_routing_key=dead_letter_routing_key,
arguments=arguments).declare()
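# Hedged usage sketch (values are illustrative): declare a durable queue whose
# messages expire after 60s and are dead-lettered to the 'dlx' exchange:
#
#     create_queue('amqp://localhost/%2f', 'jobs',
#                  message_ttl=60000, dead_letter_exchange='dlx')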
def delete_queue(uri=None, queue_name=None):
"""Delete a queue from RabbitMQ. This should only be used for one-off
operations.
:param str uri: AMQP URI to connect to
:param str queue_name: The queue name to delete
:rtype: bool
:raises: :py:class:`ValueError`
:raises: :py:class:`rabbitpy.RemoteClosedException`
"""
_validate_name(queue_name, 'queue')
with SimpleChannel(uri) as channel:
amqp_queue.Queue(channel, queue_name).delete()
def create_direct_exchange(uri=None, exchange_name=None, durable=True):
"""Create a direct exchange with RabbitMQ. This should only be used for
one-off operations.
:param str uri: AMQP URI to connect to
:param str exchange_name: The exchange name to create
:param bool durable: Exchange should survive server restarts
:raises: :py:class:`ValueError`
:raises: :py:class:`rabbitpy.RemoteClosedException`
"""
_create_exchange(uri, exchange_name, exchange.DirectExchange, durable)
def create_fanout_exchange(uri=None, exchange_name=None, durable=True):
"""Create a fanout exchange with RabbitMQ. This should only be used for
one-off operations.
:param str uri: AMQP URI to connect to
:param str exchange_name: The exchange name to create
:param bool durable: Exchange should survive server restarts
:raises: :py:class:`ValueError`
:raises: :py:class:`rabbitpy.RemoteClosedException`
"""
_create_exchange(uri, exchange_name, exchange.FanoutExchange, durable)
def create_headers_exchange(uri=None, exchange_name=None, durable=True):
"""Create a headers exchange with RabbitMQ. This should only be used for
one-off operations.
:param str uri: AMQP URI to connect to
:param str exchange_name: The exchange name to create
:param bool durable: Exchange should survive server restarts
:raises: :py:class:`ValueError`
:raises: :py:class:`rabbitpy.RemoteClosedException`
"""
_create_exchange(uri, exchange_name, exchange.HeadersExchange, durable)
def create_topic_exchange(uri=None, exchange_name=None, durable=True):
"""Create an exchange from RabbitMQ. This should only be used for one-off
operations.
:param str uri: AMQP URI to connect to
:param str exchange_name: The exchange name to create
:param bool durable: Exchange should survive server restarts
:raises: :py:class:`ValueError`
:raises: :py:class:`rabbitpy.RemoteClosedException`
"""
_create_exchange(uri, exchange_name, exchange.TopicExchange, durable)
def delete_exchange(uri=None, exchange_name=None):
"""Delete an exchange from RabbitMQ. This should only be used for one-off
operations.
:param str uri: AMQP URI to connect to
:param str exchange_name: The exchange name to delete
:raises: :py:class:`ValueError`
:raises: :py:class:`rabbitpy.RemoteClosedException`
"""
_validate_name(exchange_name, 'exchange')
with SimpleChannel(uri) as channel:
exchange.Exchange(channel, exchange_name).delete()
def _create_exchange(uri, exchange_name, exchange_class, durable):
"""Create an exchange from RabbitMQ. This should only be used for one-off
operations.
:param str uri: AMQP URI to connect to
:param str exchange_name: The exchange name to create
:param bool durable: Exchange should survive server restarts
:raises: :py:class:`ValueError`
:raises: :py:class:`rabbitpy.RemoteClosedException`
"""
_validate_name(exchange_name, 'exchange')
with SimpleChannel(uri) as channel:
exchange_class(channel, exchange_name, durable=durable).declare()
def _validate_name(value, obj_type):
"""Validate the specified name is set.
:param str value: The value to validate
:param str obj_type: The object type for the error message if needed
:raises: ValueError
"""
if not value:
raise ValueError('You must specify the {} name'.format(obj_type))
|
|
"""
Custom formats used to return data in different formats.
These are used from the query itself when calling the fetch_as_format()
method.
For example, returning a LogQuery as a table::
query = LogQuery(fetch_size=200)
for log in query.fetch_batch(): # Default is TableFormat
print(log)
As CSV::
query = LogQuery(fetch_size=200)
for log in query.fetch_batch(CSVFormat):
print(log)
Each format also allows the ability to customize the fields that should be
in the output. By default, each query type in :py:mod:`smc_monitoring.monitors`
will have a class attribute ``field_ids`` which specify the default fields.
These can be customized by calling query.format.field_ids([....]) on the
query.
For example, modifying a routing query to return only destination interface
and the route network::
query = RoutingQuery('sg_vm')
query.format.field_ids([LogField.DSTIF, LogField.ROUTENETWORK])
for log in query.fetch_batch():
...
The same field_id customization applies to all query types.
A simple way to view results is to use a RawDictFormat::
query = LogQuery(fetch_size=3)
query.format.field_names(['Src', 'Dst'])
for record in query.fetch_batch(RawDictFormat):
...
It is also possible to provide your own formatter. At a minimum you must
provide a method called ``formatted`` in your class. The custom class should
extend :class:`._Header` to support custom field_ids within the query; a
minimal sketch is included at the end of this module.
.. note:: Constants are defined in :py:mod:`smc_monitoring.models.constants`.
Although there are many field values, not all field values will return
results for every query. It is sometimes useful to log in to the SMC to
verify available fields.
"""
from smc_monitoring.models.constants import LogField
class InvalidFieldFormat(Exception):
"""
If using a complex format type such as combined, formatters
are not supported. These specialized formats must be returned
in raw dict format as they've been customized to return the data
in a specific way.
"""
pass
class _Header(object):
def __init__(self, query):
self.query = query
# Allow custom field_ids to be used
field_ids = query.format.data.get('field_ids')
# Format specified by the query for id to name mapping
field_format = query.format.data.get('field_format')
# If a combined filter is specified, field_format will be None
if not field_format:
raise InvalidFieldFormat('Combined field format specified is not a '
'supported type for formatters and must be returned as a raw dict.')
# No custom fields provided, use the class defined field_ids
if not field_ids:
field_ids = query.field_ids
# Ask for the field parameters so we can create the
# headers based on the field_format (pretty, name, id)
fields = query.resolve_field_ids(field_ids, **query.sockopt)
if not fields:
raise ValueError(
'Unable to resolve field IDs. Call query.format.field_ids() '
'and set valid fields.')
self.headers = []
for ids in field_ids:
for mapping in fields:
if mapping.get('id') == ids:
self.headers.append(mapping.get(field_format))
break
self.header_set = False
class CSVFormat(_Header):
"""
Return the results in CSV format. The first line will be a comma
separated string with the field header. This is an iterable that
will return results in batches of 200 (max) per iteration.
"""
def __init__(self, query):
super(CSVFormat, self).__init__(query)
def formatted(self, alist):
format = ('%s\n') # @ReservedAssignment
formatted_data = ''
if not self.header_set:
formatted_data += format % ','.join(self.headers)
self.header_set = True
for element in alist:
data_to_format = []
for pair in self.headers:
value = element.get(pair, '')
if ',' in value:
value = value.replace(',', ' ')
data_to_format.append(value)
formatted_data += format % ','.join(data_to_format)
return formatted_data
class TableFormat(_Header):
"""
Return the data in a table format. The field_id values will be
used for the table header. Spacing will be calculated for each
batch of results to align the table. The base spacing is determined
by the header width, but adjusted wider if the data returned is wider.
Anytime there is an adjustment to the width, a new table header will also
be printed to visually realign. The query will return a max of 200 batch
results per iteration.
.. note:: Table alignment will likely not be exact between batches
as width is calculated per batch.
"""
def __init__(self, query):
super(TableFormat, self).__init__(query)
# Calculate starting column width
self.column_width = [(header, len(header)) for header in self.headers]
def formatted(self, alist):
column_widths = []
for header in self.headers:
column_widths.append(max(len(str(column.get(header, ''))) for column in alist))
# Create a tuple pair of key and the associated column width for data
key_width_pair = list(zip(self.headers, column_widths))
data_longest_item = dict(key_width_pair)
current_column_width = dict(self.column_width)
for col, width in current_column_width.items():
if data_longest_item.get(col, 0) > width:
self.header_set = False # Add header again to realign
current_column_width[col] = data_longest_item.get(col)
self.column_width = [(key, current_column_width[key]) for key in self.headers]
if not self.header_set:
header_divider = []
#for key in key_width_pair:
for key in self.column_width:
header_divider.append('-' * key[1])
# Create a list of dictionary from the keys and the header and
# insert it at the beginning of the list. Do the same for the
# divider and insert below the header.
header_divider = dict(zip(self.headers, header_divider))
alist.insert(0, header_divider)
header = dict(zip(self.headers, self.headers))
alist.insert(0, header)
self.header_set = True
format = ('%-*s ' * len(self.headers)).strip() + '\n' # @ReservedAssignment
formatted_data = ''
for element in alist:
data_to_format = []
# Create a tuple that will be used for the formatting in
# width, value format
#for pair in key_width_pair:
for pair in self.column_width:
data_to_format.append(pair[1])
data_to_format.append(element.get(pair[0],'-'))
formatted_data += format % tuple(data_to_format)
return formatted_data
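# Illustrative TableFormat output (field names, values and widths are
# hypothetical):
#
#     Src        Dst
#     ---------- ----------
#     10.0.0.1   172.16.0.5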
class RawDictFormat(object):
"""
Return the data as a list in raw dict format. The results are not
filtered with exception of the returned fields based on field_id
filters. This is a convenience format for consistency, although you
can also call the :py:class:`smc_monitoring.models.query.Query.fetch_raw`
method to get the same data.
"""
def __init__(self, query):
pass
def formatted(self, alist):
return alist
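# Minimal sketch of a user-defined formatter, as described in the module
# docstring: any class exposing a ``formatted(alist)`` method can be passed
# to fetch_batch(), and extending _Header keeps custom field_ids honored.
# The class name and JSON output are illustrative, not part of smc_monitoring.
class JSONLinesFormat(_Header):
    """Example custom format: one JSON object per returned record."""
    def formatted(self, alist):
        import json  # local import so the sketch stays self-contained
        return '\n'.join(json.dumps(record) for record in alist) + '\n'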
|
|
"""
Compute Engine definitions for the Pipeline API.
"""
from abc import (
ABCMeta,
abstractmethod,
)
from uuid import uuid4
from six import (
iteritems,
with_metaclass,
)
from numpy import array
from pandas import DataFrame, MultiIndex
from toolz import groupby, juxt
from toolz.curried.operator import getitem
from zipline.lib.adjusted_array import ensure_adjusted_array, ensure_ndarray
from zipline.errors import NoFurtherDataError
from zipline.utils.numpy_utils import (
as_column,
repeat_first_axis,
repeat_last_axis,
)
from zipline.utils.pandas_utils import explode
from .term import AssetExists, InputDates, LoadableTerm
class PipelineEngine(with_metaclass(ABCMeta)):
@abstractmethod
def run_pipeline(self, pipeline, start_date, end_date):
"""
Compute values for `pipeline` between `start_date` and `end_date`.
Returns a DataFrame with a MultiIndex of (date, asset) pairs.
Parameters
----------
pipeline : zipline.pipeline.Pipeline
The pipeline to run.
start_date : pd.Timestamp
Start date of the computed matrix.
end_date : pd.Timestamp
End date of the computed matrix.
Returns
-------
result : pd.DataFrame
A frame of computed results.
The columns of `result` correspond to the entries of
`pipeline.columns`, which should be a dictionary mapping strings to
instances of `zipline.pipeline.term.Term`.
For each date between `start_date` and `end_date`, `result` will
contain a row for each asset that passed `pipeline.screen`. A
screen of None indicates that a row should be returned for each
asset that existed each day.
"""
raise NotImplementedError("run_pipeline")
class NoEngineRegistered(Exception):
"""
Raised if a user tries to call pipeline_output in an algorithm that hasn't
set up a pipeline engine.
"""
class ExplodingPipelineEngine(PipelineEngine):
"""
A PipelineEngine that doesn't do anything.
"""
def run_pipeline(self, pipeline, start_date, end_date):
raise NoEngineRegistered(
"Attempted to run a pipeline but no pipeline "
"resources were registered."
)
class SimplePipelineEngine(object):
"""
PipelineEngine class that computes each term independently.
Parameters
----------
get_loader : callable
A function that is given a loadable term and returns a PipelineLoader
to use to retrieve raw data for that term.
calendar : DatetimeIndex
Array of dates to consider as trading days when computing a range
between a fixed start and end.
asset_finder : zipline.assets.AssetFinder
An AssetFinder instance. We depend on the AssetFinder to determine
which assets are in the top-level universe at any point in time.
"""
__slots__ = (
'_get_loader',
'_calendar',
'_finder',
'_root_mask_term',
'_root_mask_dates_term',
'__weakref__',
)
def __init__(self, get_loader, calendar, asset_finder):
self._get_loader = get_loader
self._calendar = calendar
self._finder = asset_finder
self._root_mask_term = AssetExists()
self._root_mask_dates_term = InputDates()
def run_pipeline(self, pipeline, start_date, end_date):
"""
Compute a pipeline.
Parameters
----------
pipeline : zipline.pipeline.Pipeline
The pipeline to run.
start_date : pd.Timestamp
Start date of the computed matrix.
end_date : pd.Timestamp
End date of the computed matrix.
The algorithm implemented here can be broken down into the following
stages:
0. Build a dependency graph of all terms in `pipeline`. Topologically
sort the graph to determine an order in which we can compute the
terms.
1. Ask our AssetFinder for a "lifetimes matrix", which should contain,
for each date between start_date and end_date, a boolean value for
each known asset indicating whether the asset existed on that date.
2. Compute each term in the dependency order determined in (0), caching
the results in a dictionary so that they can be fed into future
terms.
3. For each date, determine the number of assets passing
pipeline.screen. The sum, N, of all these values is the total
number of rows in our output frame, so we pre-allocate an output
array of length N for each factor in `terms`.
4. Fill in the arrays allocated in (3) by copying computed values from
our output cache into the corresponding rows.
5. Stick the values computed in (4) into a DataFrame and return it.
Step 0 is performed by ``Pipeline.to_graph``.
Step 1 is performed in ``SimplePipelineEngine._compute_root_mask``.
Step 2 is performed in ``SimplePipelineEngine.compute_chunk``.
Steps 3, 4, and 5 are performed in ``SimplePipelineEngine._to_narrow``.
See Also
--------
PipelineEngine.run_pipeline
"""
if end_date < start_date:
raise ValueError(
"start_date must be before or equal to end_date \n"
"start_date=%s, end_date=%s" % (start_date, end_date)
)
screen_name = uuid4().hex
graph = pipeline.to_execution_plan(
screen_name,
self._root_mask_term,
self._calendar,
start_date,
end_date,
)
extra_rows = graph.extra_rows[self._root_mask_term]
root_mask = self._compute_root_mask(start_date, end_date, extra_rows)
dates, assets, root_mask_values = explode(root_mask)
results = self.compute_chunk(
graph,
dates,
assets,
initial_workspace={
self._root_mask_term: root_mask_values,
self._root_mask_dates_term: as_column(dates.values)
},
)
return self._to_narrow(
graph.outputs,
results,
results.pop(screen_name),
dates[extra_rows:],
assets,
)
def _compute_root_mask(self, start_date, end_date, extra_rows):
"""
Compute a lifetimes matrix from our AssetFinder, then drop columns that
didn't exist at all during the query dates.
Parameters
----------
start_date : pd.Timestamp
Base start date for the matrix.
end_date : pd.Timestamp
End date for the matrix.
extra_rows : int
Number of extra rows to compute before `start_date`.
Extra rows are needed by terms like moving averages that require a
trailing window of data.
Returns
-------
lifetimes : pd.DataFrame
Frame of dtype `bool` containing dates from `extra_rows` days
before `start_date`, continuing through to `end_date`. The
returned frame contains as columns all assets in our AssetFinder
that existed for at least one day between `start_date` and
`end_date`.
"""
calendar = self._calendar
finder = self._finder
start_idx, end_idx = self._calendar.slice_locs(start_date, end_date)
if start_idx < extra_rows:
raise NoFurtherDataError.from_lookback_window(
initial_message="Insufficient data to compute Pipeline:",
first_date=calendar[0],
lookback_start=start_date,
lookback_length=extra_rows,
)
# Build lifetimes matrix reaching back to `extra_rows` days before
# `start_date`.
lifetimes = finder.lifetimes(
calendar[start_idx - extra_rows:end_idx],
include_start_date=False
)
assert lifetimes.index[extra_rows] == start_date
assert lifetimes.index[-1] == end_date
if not lifetimes.columns.is_unique:
    columns = lifetimes.columns
    duplicated = columns[columns.duplicated()].unique()
    raise AssertionError("Duplicated sids: %s" % duplicated)
# Filter out columns that didn't exist between the requested start and
# end dates.
existed = lifetimes.iloc[extra_rows:].any()
ret = lifetimes.loc[:, existed]
shape = ret.shape
assert shape[0] * shape[1] != 0, 'root mask cannot be empty'
return ret
def _mask_and_dates_for_term(self, term, workspace, graph, all_dates):
"""
Load mask and mask row labels for term.
"""
mask = term.mask
mask_offset = graph.extra_rows[mask] - graph.extra_rows[term]
# This offset is computed against _root_mask_term because that is what
# determines the shape of the top-level dates array.
dates_offset = (
graph.extra_rows[self._root_mask_term] - graph.extra_rows[term]
)
return workspace[mask][mask_offset:], all_dates[dates_offset:]
@staticmethod
def _inputs_for_term(term, workspace, graph):
"""
Compute inputs for the given term.
This is mostly complicated by the fact that for each input we store as
many rows as will be necessary to serve **any** computation requiring
that input.
"""
offsets = graph.offset
out = []
if term.windowed:
# If term is windowed, then all input data should be instances of
# AdjustedArray.
for input_ in term.inputs:
adjusted_array = ensure_adjusted_array(
workspace[input_], input_.missing_value,
)
out.append(
adjusted_array.traverse(
window_length=term.window_length,
offset=offsets[term, input_],
)
)
else:
# If term is not windowed, input_data may be an AdjustedArray or
# np.ndarray. Coerce the former to the latter.
for input_ in term.inputs:
input_data = ensure_ndarray(workspace[input_])
offset = offsets[term, input_]
# OPTIMIZATION: Don't make a copy by doing input_data[0:] if
# offset is zero.
if offset:
input_data = input_data[offset:]
out.append(input_data)
return out
def get_loader(self, term):
return self._get_loader(term)
def compute_chunk(self, graph, dates, assets, initial_workspace):
"""
Compute the Pipeline terms in the graph for the requested start and end
dates.
Parameters
----------
graph : zipline.pipeline.graph.TermGraph
dates : pd.DatetimeIndex
Row labels for our root mask.
assets : pd.Int64Index
Column labels for our root mask.
initial_workspace : dict
Map from term -> output.
Must contain at least an entry for `self._root_mask_term` whose shape
is `(len(dates), len(assets))`, but may contain additional
pre-computed terms for testing or optimization purposes.
Returns
-------
results : dict
Dictionary mapping requested results to outputs.
"""
self._validate_compute_chunk_params(dates, assets, initial_workspace)
get_loader = self.get_loader
# Copy the supplied initial workspace so we don't mutate it in place.
workspace = initial_workspace.copy()
# If loadable terms share the same loader and extra_rows, load them all
# together.
loader_group_key = juxt(get_loader, getitem(graph.extra_rows))
loader_groups = groupby(loader_group_key, graph.loadable_terms)
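# (Aside: juxt(get_loader, getitem(graph.extra_rows))(term) returns the tuple
# (get_loader(term), graph.extra_rows[term]), so terms are keyed by their
# (loader, extra_rows) pair and each group is served by a single
# load_adjusted_array call below.)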
for term in graph.ordered():
# `term` may have been supplied in `initial_workspace`, and in the
# future we may pre-compute loadable terms coming from the same
# dataset. In either case, we will already have an entry for this
# term, which we shouldn't re-compute.
if term in workspace:
continue
# Asset labels are always the same, but date labels vary by how
# many extra rows are needed.
mask, mask_dates = self._mask_and_dates_for_term(
term, workspace, graph, dates
)
if isinstance(term, LoadableTerm):
to_load = sorted(
loader_groups[loader_group_key(term)],
key=lambda t: t.dataset
)
loader = get_loader(term)
loaded = loader.load_adjusted_array(
to_load, mask_dates, assets, mask,
)
workspace.update(loaded)
else:
workspace[term] = term._compute(
self._inputs_for_term(term, workspace, graph),
mask_dates,
assets,
mask,
)
if term.ndim == 2:
assert workspace[term].shape == mask.shape
else:
assert workspace[term].shape == (mask.shape[0], 1)
out = {}
graph_extra_rows = graph.extra_rows
for name, term in iteritems(graph.outputs):
# Truncate off extra rows from outputs.
out[name] = workspace[term][graph_extra_rows[term]:]
return out
def _to_narrow(self, terms, data, mask, dates, assets):
"""
Convert raw computed pipeline results into a DataFrame for public APIs.
Parameters
----------
terms : dict[str -> Term]
Dict mapping column names to terms.
data : dict[str -> ndarray[ndim=2]]
Dict mapping column names to computed results for those names.
mask : ndarray[bool, ndim=2]
Mask array of values to keep.
dates : ndarray[datetime64, ndim=1]
Row index for arrays `data` and `mask`
assets : ndarray[int64, ndim=1]
Column index for arrays `data` and `mask`
Returns
-------
results : pd.DataFrame
The indices of `results` are as follows:
index : two-tiered MultiIndex of (date, asset).
Contains an entry for each (date, asset) pair corresponding to
a `True` value in `mask`.
columns : Index of str
One column per entry in `data`.
If mask[date, asset] is True, then result.loc[(date, asset), colname]
will contain the value of data[colname][date, asset].
"""
if not mask.any():
# Manually handle the empty DataFrame case. This is a workaround
# to pandas failing to tz_localize an empty dataframe with a
# MultiIndex. It also saves us the work of applying a known-empty
# mask to each array.
#
# Slicing `dates` here to preserve pandas metadata.
empty_dates = dates[:0]
empty_assets = array([], dtype=object)
return DataFrame(
data={
name: array([], dtype=arr.dtype)
for name, arr in iteritems(data)
},
index=MultiIndex.from_arrays([empty_dates, empty_assets]),
)
resolved_assets = array(self._finder.retrieve_all(assets))
dates_kept = repeat_last_axis(dates.values, len(assets))[mask]
assets_kept = repeat_first_axis(resolved_assets, len(dates))[mask]
final_columns = {}
for name in data:
# Each term that computed an output has its postprocess method
# called on the filtered result.
#
# As of Mon May 2 15:38:47 2016, we only use this to convert
# LabelArrays into categoricals.
final_columns[name] = terms[name].postprocess(data[name][mask])
return DataFrame(
data=final_columns,
index=MultiIndex.from_arrays([dates_kept, assets_kept]),
).tz_localize('UTC', level=0)
def _validate_compute_chunk_params(self, dates, assets, initial_workspace):
"""
Verify that the values passed to compute_chunk are well-formed.
"""
root = self._root_mask_term
clsname = type(self).__name__
# Writing this out explicitly so this errors in testing if we change
# the name without updating this line.
compute_chunk_name = self.compute_chunk.__name__
if root not in initial_workspace:
raise AssertionError(
"root_mask values not supplied to {cls}.{method}".format(
cls=clsname,
method=compute_chunk_name,
)
)
shape = initial_workspace[root].shape
implied_shape = len(dates), len(assets)
if shape != implied_shape:
raise AssertionError(
"root_mask shape is {shape}, but received dates/assets "
"imply that shape should be {implied}".format(
shape=shape,
implied=implied_shape,
)
)
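# Hedged usage sketch (names are illustrative; in practice zipline's trading
# algorithm machinery constructs the engine for you):
#
#     engine = SimplePipelineEngine(
#         get_loader=lambda column: my_loader,   # hypothetical PipelineLoader
#         calendar=trading_days,                 # pd.DatetimeIndex of sessions
#         asset_finder=asset_finder,             # zipline.assets.AssetFinder
#     )
#     result = engine.run_pipeline(my_pipeline, start_date, end_date)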
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2008 Artifacts - Fine Software Development
# http://www.artifacts.de
# Author: Martin Borho (martin@borho.net)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import os
import sys
import time
import logging
import mimetypes
import fnmatch
from urlparse import urlparse
from optparse import OptionParser
from zipfile import ZipFile
rfc1123_format = '%a, %d %b %Y %H:%M:%S GMT+00:00'
# add mimetypes
mimetypes.add_type('application/json', '.json', strict=True)
class AFCachePackager(object):
def __init__(self, **kwargs):
self.maxage = kwargs.get('maxage')
self.baseurl = kwargs.get('baseurl')
if not self.baseurl:
self.baseurl = 'afcpkg://localhost'
self.lastmodfile = kwargs.get('lastmodfile')
self.lastmodplus = kwargs.get('lastmodplus')
self.lastmodminus = kwargs.get('lastmodminus')
self.folder = kwargs.get('folder')
self.include_all = kwargs.get('include_all')
self.outfile = kwargs.get('outfile')
if not self.outfile:
self.outfile = 'afcache-archive.zip'
self.max_size = kwargs.get('max_size')
self.excludes = kwargs.get('excludes', [])
self.mime = kwargs.get('mime')
self.errors = []
self.logger = kwargs.get('logger',logging.getLogger(__file__))
self._check_input()
def _check_input(self):
if not self.folder:
self.errors.append('import-folder (--folder) is missing')
elif not os.path.isdir(self.folder):
self.errors.append('import-folder does not exist')
if not self.maxage:
self.errors.append('maxage is missing')
def _get_host(self, baseurl):
p = urlparse(baseurl)
if p.hostname:
return p.hostname
else:
self.errors.append('baseurl invalid')
return None
def build_zipcache(self):
manifest = []
hostname = self._get_host(self.baseurl)
if self.errors:
return None
try:
zip = ZipFile(self.outfile, 'w')
except IOError, e:
self.logger.error('exiting: creation of zipfile failed!')
return None
else:
for dirpath, dirnames, filenames in os.walk(self.folder):
# skip empty dirs
if not filenames:
continue
for name in filenames:
path = os.path.join(dirpath, name)
# skip hidden files unless include_all is set
if not self.include_all:
if name.startswith('.') or path.find('/.') > -1:
self.logger.info("skipping "+path)
continue
# skip files larger than max_size
if self.max_size and (os.path.getsize(path) > self.max_size):
self.logger.info("skipping big file "+path)
continue
# skip paths matching an exclude pattern
if self.excludes:
exclude_file = None
for ex_filter in self.excludes:
if fnmatch.fnmatch(path, ex_filter):
exclude_file = True
self.logger.info("excluded "+path)
break
if exclude_file: continue
# detect mime-type
mime_type = ''
if self.mime:
mime_tuple = mimetypes.guess_type(path, False)
if mime_tuple[0]: mime_type = mime_tuple[0]
else: self.logger.warning("mime-type unknown: "+path)
# handle lastmodified
if self.lastmodfile: lastmod = os.path.getmtime(os.path.join(dirpath, name))
else: lastmod = time.time()
if self.lastmodplus: lastmod += self.lastmodplus
elif self.lastmodminus: lastmod -= self.lastmodminus
# handle path forms
rel_path = os.path.join(dirpath.replace(os.path.normpath(self.folder),''),name)
exported_path = hostname+rel_path
# add data
self.logger.info("adding "+ exported_path)
zip.write(path, exported_path)
# add manifest line
last_mod_date = time.strftime(rfc1123_format,time.gmtime(lastmod))
expire_date = time.strftime(rfc1123_format,time.gmtime(lastmod+self.maxage))
manifest_line = '%s ; %s ; %s' % (self.baseurl+rel_path, last_mod_date, expire_date)
# add mime type
if self.mime:
manifest_line += ' ; '+mime_type
manifest.append(manifest_line)
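# An entry then looks like (illustrative):
#   afcpkg://localhost/img/logo.png ; Sat, 01 Jan 2011 00:00:00 GMT+00:00 ; Sat, 08 Jan 2011 00:00:00 GMT+00:00 ; image/png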
# add manifest to zip
self.logger.info("adding manifest")
zip.writestr("manifest.afcache", "\n".join(manifest))
return True
def main():
logging.basicConfig(level=logging.DEBUG,format='%(asctime)s %(levelname)-2s %(message)s')
logger = logging.getLogger(__file__)
usage = "Usage: %prog [options]"
parser = OptionParser(usage)
parser.add_option("--maxage", dest="maxage", type="int", help="max-age in seconds")
parser.add_option("--baseurl", dest="baseurl",
help="base url, e.g. http://www.foo.bar (WITHOUT trailig slash)")
parser.add_option("--lastmodifiedfile", dest="lastmodfile", action="store_true",
help="use lastmodified from file instead of now")
parser.add_option("--lastmodifiedplus", dest="lastmodplus", type="int",
help="add n seconds to file's lastmodfied date")
parser.add_option("--lastmodifiedminus", dest="lastmodminus", type="int",
help="substract n seconds from file's lastmodfied date")
parser.add_option("--folder", dest="folder",
help="folder containing resources")
parser.add_option("-a", dest="include_all", action="store_true",
help="include all files. By default, files starting with a dot are excluded.")
parser.add_option("--outfile", dest="outfile",
help="Output filename. Default: afcache-archive.zip")
parser.add_option("--maxItemFileSize", dest="max_size", type="int",
help="Maximum filesize of a cacheable item.")
parser.add_option("--exclude", dest="excludes",action="append",
help="Regexp filter for filepaths. Add one --exclude for every pattern.")
parser.add_option("--mime", dest="mime", action="store_true",
help="add file mime types to manifest.afcache")
(options, args) = parser.parse_args()
packager = AFCachePackager(
maxage=options.maxage,
baseurl=options.baseurl,
lastmodfile=options.lastmodfile,
lastmodplus=options.lastmodplus,
lastmodminus=options.lastmodminus,
folder=options.folder,
include_all=options.include_all,
outfile=options.outfile,
max_size=options.max_size,
excludes=options.excludes,
mime=options.mime,
logger=logger
)
packager.build_zipcache()
if packager.errors:
print "Error: "+"\nError: ".join(packager.errors)
if __name__ == "__main__":
main()
|
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django.db.models import Q
import HTMLParser
html_parser = HTMLParser.HTMLParser()
class Migration(DataMigration):
def forwards(self, orm):
wrong = orm['website.Jurisdiction'].objects.filter(Q(name__contains = '&') |
Q(city__contains = '&'))
for jurisdiction in wrong:
jurisdiction.name = html_parser.unescape(jurisdiction.name)
jurisdiction.city = html_parser.unescape(jurisdiction.city)
jurisdiction.save()
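# e.g. html_parser.unescape('Smith &amp; Jones') -> 'Smith & Jones'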
def backwards(self, orm):
"no backwards migration"
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'website.action': {
'Meta': {'object_name': 'Action'},
'action_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.ActionCategory']", 'null': 'True', 'blank': 'True'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'entity_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'entity_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '32', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Jurisdiction']", 'null': 'True', 'blank': 'True'}),
'level': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.RatingLevel']", 'null': 'True', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Organization']", 'null': 'True', 'blank': 'True'}),
'question_category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.QuestionCategory']", 'null': 'True', 'blank': 'True'}),
'scale': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'website.actioncategory': {
'Meta': {'object_name': 'ActionCategory'},
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'points': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'rating_category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.RatingCategory']", 'null': 'True', 'blank': 'True'})
},
'website.address': {
'Meta': {'object_name': 'Address'},
'address1': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'address2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'longitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'}),
'zip_code': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'})
},
'website.answerattachment': {
'Meta': {'object_name': 'AnswerAttachment'},
'answer_reference': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.AnswerReference']", 'null': 'True', 'blank': 'True'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'file_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'file_upload': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'website.answerchoice': {
'Meta': {'object_name': 'AnswerChoice'},
'answer_choice_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.AnswerChoiceGroup']"}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'display_order': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'website.answerchoicegroup': {
'Meta': {'object_name': 'AnswerChoiceGroup'},
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'})
},
'website.answerreference': {
'Meta': {'object_name': 'AnswerReference'},
'approval_status': ('django.db.models.fields.CharField', [], {'default': "'P'", 'max_length': '8', 'db_index': 'True', 'blank': 'True'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'file_upload': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_callout': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_current': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Jurisdiction']", 'null': 'True', 'blank': 'True'}),
'migrated_answer_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Organization']", 'null': 'True', 'blank': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Question']"}),
'rating': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'rating_status': ('django.db.models.fields.CharField', [], {'default': "'U'", 'max_length': '8', 'db_index': 'True', 'blank': 'True'}),
'status_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'website.api_keys': {
'Meta': {'object_name': 'API_Keys'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.TextField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'website.applicability': {
'Meta': {'object_name': 'Applicability'},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'reviewed': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'website.article': {
'Meta': {'object_name': 'Article'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'published': ('django.db.models.fields.DateField', [], {'db_index': 'True'}),
'publisher': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.TextField', [], {}),
'url': ('django.db.models.fields.TextField', [], {})
},
'website.comment': {
'Meta': {'object_name': 'Comment'},
'approval_status': ('django.db.models.fields.CharField', [], {'default': "'P'", 'max_length': '8', 'db_index': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'comment_type': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '8', 'db_index': 'True', 'blank': 'True'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'entity_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'entity_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '32', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Jurisdiction']", 'null': 'True', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'parent_comment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'parent_reference'", 'null': 'True', 'to': "orm['website.Comment']"}),
'rating': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'rating_status': ('django.db.models.fields.CharField', [], {'default': "'U'", 'max_length': '8', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'website.entityview': {
'Meta': {'object_name': 'EntityView'},
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'entity_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'entity_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '32', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latest_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'session_key': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '40', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'website.entityviewcount': {
'Meta': {'object_name': 'EntityViewCount'},
'count_30_days': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'entity_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'entity_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '32', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'total_count': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'})
},
'website.event': {
'Meta': {'object_name': 'Event'},
'end': ('django.db.models.fields.DateTimeField', [], {}),
'expiration': ('django.db.models.fields.DateField', [], {'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Jurisdiction']"}),
'published': ('django.db.models.fields.DateField', [], {'db_index': 'True'}),
'start': ('django.db.models.fields.DateTimeField', [], {}),
'title': ('django.db.models.fields.TextField', [], {}),
'url': ('django.db.models.fields.TextField', [], {})
},
'website.geographicarea': {
'Meta': {'object_name': 'GeographicArea'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'filter_name': ('django.db.models.fields.TextField', [], {'default': "''"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jurisdictions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['website.Jurisdiction']", 'symmetrical': 'False'}),
'states': ('website.models.reporting.PythonDataField', [], {})
},
'website.jurisdiction': {
'Meta': {'object_name': 'Jurisdiction'},
'city': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'county': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jurisdiction_type': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'}),
'last_contributed': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'last_contributed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'last_contributed_by_org': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'_org_contributor'", 'null': 'True', 'to': "orm['website.Organization']"}),
'latitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'longitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'name_for_url': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'_org_jurisdiction'", 'null': 'True', 'to': "orm['website.Organization']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'parent_jurisdiction'", 'null': 'True', 'to': "orm['website.Jurisdiction']"}),
'state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '8', 'null': 'True', 'blank': 'True'})
},
'website.jurisdictioncontributor': {
'Meta': {'object_name': 'JurisdictionContributor'},
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Jurisdiction']", 'null': 'True', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Organization']", 'null': 'True', 'blank': 'True'}),
'points': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'question_category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.QuestionCategory']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'website.jurisdictionrating': {
'Meta': {'object_name': 'JurisdictionRating'},
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Jurisdiction']", 'null': 'True', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'rank': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'rating_type': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'website.migrationhistory': {
'Meta': {'object_name': 'MigrationHistory'},
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jurisdiction_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'notes2': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'source_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'source_table': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'target_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'target_table': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'website.organization': {
'Meta': {'object_name': 'Organization'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.OrganizationCategory']", 'null': 'True', 'blank': 'True'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'fax': ('django.contrib.localflavor.us.models.PhoneNumberField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'logo_scaled': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'parent_org': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Organization']", 'null': 'True', 'blank': 'True'}),
'phone': ('django.contrib.localflavor.us.models.PhoneNumberField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'A'", 'max_length': '8', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'status_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'website.organizationaddress': {
'Meta': {'object_name': 'OrganizationAddress'},
'address': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Address']", 'null': 'True', 'blank': 'True'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'display_order': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Organization']", 'null': 'True', 'blank': 'True'})
},
'website.organizationcategory': {
'Meta': {'object_name': 'OrganizationCategory'},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'website.organizationmember': {
'Meta': {'object_name': 'OrganizationMember'},
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'display_order': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invitation_key': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'invitor': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'_member_invitor'", 'null': 'True', 'to': u"orm['auth.User']"}),
'join_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Organization']", 'null': 'True', 'blank': 'True'}),
'requested_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.RoleType']", 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'P'", 'max_length': '8', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'_member_user'", 'null': 'True', 'to': u"orm['auth.User']"})
},
'website.organizationrating': {
'Meta': {'object_name': 'OrganizationRating'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.RatingCategory']", 'null': 'True', 'blank': 'True'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.RatingLevel']", 'null': 'True', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Organization']", 'null': 'True', 'blank': 'True'}),
'scale': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'updated_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'website.pressrelease': {
'Meta': {'object_name': 'PressRelease'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'published': ('django.db.models.fields.DateField', [], {'db_index': 'True'}),
'title': ('django.db.models.fields.TextField', [], {}),
'url': ('django.db.models.fields.TextField', [], {})
},
'website.question': {
'Meta': {'object_name': 'Question'},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'answer_choice_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.AnswerChoiceGroup']", 'null': 'True', 'blank': 'True'}),
'applicability': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Applicability']", 'null': 'True', 'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.QuestionCategory']", 'null': 'True', 'blank': 'True'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'default_value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'display_order': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'display_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'field_attributes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'field_suffix': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'form_type': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'}),
'has_multivalues': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instruction': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'js': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'label': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'migration_type': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'qtemplate': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Template']", 'null': 'True'}),
'question': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'reviewed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'state_exclusive': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'support_attachments': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'terminology': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'validation_class': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'website.questioncategory': {
'Meta': {'object_name': 'QuestionCategory'},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'display_order': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'reviewed': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'website.ratingcategory': {
'Meta': {'object_name': 'RatingCategory'},
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'rating_type': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'})
},
'website.ratinglevel': {
'Meta': {'object_name': 'RatingLevel'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.RatingCategory']", 'null': 'True', 'blank': 'True'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'rank': ('django.db.models.fields.PositiveSmallIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'})
},
'website.reaction': {
'Meta': {'object_name': 'Reaction'},
'action': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Action']", 'null': 'True', 'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.ReactionCategory']", 'null': 'True', 'blank': 'True'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Jurisdiction']", 'null': 'True', 'blank': 'True'}),
'level': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.RatingLevel']", 'null': 'True', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'question_category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.QuestionCategory']", 'null': 'True', 'blank': 'True'}),
'reaction_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'scale': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'website.reactioncategory': {
'Meta': {'object_name': 'ReactionCategory'},
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'points': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'rating_category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.RatingCategory']", 'null': 'True', 'blank': 'True'})
},
'website.rewardcategory': {
'Meta': {'object_name': 'RewardCategory'},
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'website.roletype': {
'Meta': {'object_name': 'RoleType'},
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'website.servervariable': {
'Meta': {'object_name': 'ServerVariable'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'website.template': {
'Meta': {'object_name': 'Template'},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Jurisdiction']", 'null': 'True', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'reviewed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'template_type': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '8', 'db_index': 'True', 'blank': 'True'})
},
'website.templatequestion': {
'Meta': {'object_name': 'TemplateQuestion'},
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Question']"}),
'template': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Template']"})
},
'website.usercommentview': {
'Meta': {'object_name': 'UserCommentView'},
'comments_count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'entity_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'entity_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '32', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Jurisdiction']", 'null': 'True', 'blank': 'True'}),
'last_comment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Comment']", 'null': 'True', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'view_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'})
},
'website.userdetail': {
'Meta': {'object_name': 'UserDetail'},
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'display_preference': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'longitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'migrated_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'notification_preference': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'old_password': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'reset_password_key': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '124', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'website.userfavorite': {
'Meta': {'object_name': 'UserFavorite'},
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'display_order': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'entity_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'entity_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'website.userpageview': {
'Meta': {'object_name': 'UserPageView'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_page_view_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'website.userrating': {
'Meta': {'object_name': 'UserRating'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.RatingCategory']", 'null': 'True', 'blank': 'True'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.RatingLevel']", 'null': 'True', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'scale': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'updated_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'website.usersearch': {
'Meta': {'object_name': 'UserSearch'},
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'entity_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'entity_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'search_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'website.view': {
'Meta': {'object_name': 'View'},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jurisdiction': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Jurisdiction']", 'null': 'True', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'reviewed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'view_type': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '8', 'db_index': 'True', 'blank': 'True'})
},
'website.vieworgs': {
'Meta': {'object_name': 'ViewOrgs'},
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Organization']"}),
'view': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.View']"})
},
'website.viewquestions': {
'Meta': {'object_name': 'ViewQuestions'},
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'display_order': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Question']"}),
'view': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.View']"})
},
'website.zipcode': {
'Meta': {'object_name': 'Zipcode'},
'city': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'county': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'create_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latitude': ('django.db.models.fields.DecimalField', [], {'db_index': 'True', 'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'longitude': ('django.db.models.fields.DecimalField', [], {'db_index': 'True', 'null': 'True', 'max_digits': '10', 'decimal_places': '7', 'blank': 'True'}),
'modify_datetime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '2', 'null': 'True', 'blank': 'True'}),
'zip_code': ('django.db.models.fields.CharField', [], {'max_length': '10', 'db_index': 'True'})
}
}
complete_apps = ['website']
symmetrical = True
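    # Note (assumption): this looks like a South-style frozen-ORM migration
    # for the 'website' app; with South installed it would typically be
    # applied with:
    #     ./manage.py migrate website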
|
|
import re
from literal import *
from cscreator import *
from sanatize import *
#try to match the line with the implication pattern
def implies(line):
#######################################################
# ~A -> ~B
    m = re.match(r'NOT (\w+) implies NOT (\w+)$', line, re.I)
if m:
clause = []
#CNF: A v ~B
clause.append(Literal(m.group(1), False))
clause.append(Literal(m.group(2), True))
return clause
#######################################################
# ~A -> B
    m = re.match(r'NOT (\w+) implies (\w+)$', line, re.I)
if m:
clause = []
#CNF: A v B
clause.append(Literal(m.group(1), False))
clause.append(Literal(m.group(2), False))
return clause
#######################################################
# A -> ~B
    m = re.match(r'(\w+) implies NOT (\w+)$', line, re.I)
if m:
clause = []
#CNF: ~A v ~B
clause.append(Literal(m.group(1), True))
clause.append(Literal(m.group(2), True))
return clause
#######################################################
# A -> B
    m = re.match(r'(\w+) implies (\w+)$', line, re.I)
if m:
clause = []
#CNF: ~A v B
clause.append(Literal(m.group(1), True))
clause.append(Literal(m.group(2), False))
return clause
def iff(line):
clauses = []
#######################################################
# ~A <-> ~B
    m = re.match(r'NOT (\w+) xnor NOT (\w+)$', line, re.I)
    if m:
        #CNF: (A v ~B) and (B v ~A)
l = Literal(m.group(1), False)
notl = Literal(m.group(1), True)
k = Literal(m.group(2), False)
notk = Literal(m.group(2), True)
clause1 = []
clause2 = []
clause1.append(l)
clause1.append(notk)
clause2.append(k)
clause2.append(notl)
clauses.append(clause1)
clauses.append(clause2)
return clauses
#######################################################
# ~A <-> B
    m = re.match(r'NOT (\w+) xnor (\w+)$', line, re.I)
    if m:
        #CNF: (~A v ~B) and (A v B)
l = Literal(m.group(1), False)
notl = Literal(m.group(1), True)
k = Literal(m.group(2), False)
notk = Literal(m.group(2), True)
clause1 = []
clause2 = []
clause1.append(l)
clause1.append(k)
clause2.append(notl)
clause2.append(notk)
clauses.append(clause1)
clauses.append(clause2)
return clauses
#######################################################
# A <-> ~B
    m = re.match(r'(\w+) xnor NOT (\w+)$', line, re.I)
    if m:
        #CNF: (~A v ~B) and (A v B)
l = Literal(m.group(1), False)
notl = Literal(m.group(1), True)
k = Literal(m.group(2), False)
notk = Literal(m.group(2), True)
clause1 = []
clause2 = []
clause1.append(l)
clause1.append(k)
clause2.append(notl)
clause2.append(notk)
clauses.append(clause1)
clauses.append(clause2)
return clauses
#######################################################
# A <-> B
    m = re.match(r'(\w+) xnor (\w+)$', line, re.I)
    if m:
        #CNF: (A v ~B) and (B v ~A)
l = Literal(m.group(1), False)
notl = Literal(m.group(1), True)
k = Literal(m.group(2), False)
notk = Literal(m.group(2), True)
clause1 = []
clause2 = []
clause1.append(l)
clause1.append(notk)
clause2.append(k)
clause2.append(notl)
clauses.append(clause1)
clauses.append(clause2)
return clauses
def lit(line):
#######################################################
# A
    m = re.match(r'(\w)$', line, re.I)
if m:
l = Literal(m.group(1), False)
return [l]
#######################################################
# ~A
    m = re.match(r'NOT (\w)$', line, re.I)
if m:
l = Literal(m.group(1), True)
return [l]
def conjunction(line):
clauses = []
#######################################################
# A ^ B
    m = re.match(r'(\w) and (\w)$', line, re.I)
if m:
l = Literal(m.group(1), False)
k = Literal(m.group(2), False)
clauses.append([l])
clauses.append([k])
return clauses
#######################################################
# ~A ^ B
    m = re.match(r'not (\w) and (\w)$', line, re.I)
if m:
l = Literal(m.group(1), True)
k = Literal(m.group(2), False)
clauses.append([l])
clauses.append([k])
return clauses
#######################################################
# A ^ ~B
    m = re.match(r'(\w) and not (\w)$', line, re.I)
if m:
l = Literal(m.group(1), False)
k = Literal(m.group(2), True)
clauses.append([l])
clauses.append([k])
return clauses
#######################################################
# ~A ^ ~B
    m = re.match(r'not (\w) and not (\w)$', line, re.I)
if m:
l = Literal(m.group(1), True)
k = Literal(m.group(2), True)
clauses.append([l])
clauses.append([k])
return clauses
def disjunction(line):
#######################################################
# A v B
    m = re.match(r'(\w) or (\w)$', line, re.I)
if m:
l = Literal(m.group(1), False)
k = Literal(m.group(2), False)
clause = []
clause.append(l)
clause.append(k)
return clause
#######################################################
# ~A v B
    m = re.match(r'not (\w) or (\w)$', line, re.I)
if m:
l = Literal(m.group(1), True)
k = Literal(m.group(2), False)
clause = []
clause.append(l)
clause.append(k)
return clause
#######################################################
# A v ~B
    m = re.match(r'(\w) or not (\w)$', line, re.I)
if m:
l = Literal(m.group(1), False)
k = Literal(m.group(2), True)
clause = []
clause.append(l)
clause.append(k)
return clause
#######################################################
# ~A v ~B
    m = re.match(r'not (\w) or not (\w)$', line, re.I)
if m:
l = Literal(m.group(1), True)
k = Literal(m.group(2), True)
clause = []
clause.append(l)
clause.append(k)
return clause
#attempts to match the given line against a pattern whose CNF form is
# already known and simple to parse, so we can avoid the costly (and
# slow) Wolfram API calls
def tryRegex(line, cs_creator):
line = sanatize(line)
line = line.replace("+", " ")
#try to match the line against any literals (e.g. A, ~A)
m = lit(line)
if m:
        cs_creator.append(m)
        return True
#try to match the line with the implication pattern (e.g. A -> B)
m = implies(line)
if m:
cs_creator.append(m)
return True
#try to match the line with the iff pattern (e.g. A <-> B)
m = iff(line)
if m:
for n in m:
cs_creator.append(n)
return True
#try to match the line with the and pattern (e.g. A ^ B)
m = conjunction(line)
if m:
for n in m:
cs_creator.append(n)
return True
#try to match the line with the or pattern (e.g. A v B)
m = disjunction(line)
if m:
cs_creator.append(m)
return True
#we didn't find any matches, return false
return False
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VirtualNetworkGatewaysOperations(object):
"""VirtualNetworkGatewaysOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_09_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _create_or_update_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.VirtualNetworkGateway"
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualNetworkGateway"
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json, text/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VirtualNetworkGateway')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.VirtualNetworkGateway"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VirtualNetworkGateway"]
"""Creates or updates a virtual network gateway in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Parameters supplied to create or update virtual network gateway operation.
:type parameters: ~azure.mgmt.network.v2016_09_01.models.VirtualNetworkGateway
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualNetworkGateway or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2016_09_01.models.VirtualNetworkGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
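    # Usage sketch (hedged): with an authenticated NetworkManagementClient
    # instance `client` (assumed, not shown), the long-running operation is
    # typically driven like:
    #     poller = client.virtual_network_gateways.begin_create_or_update(
    #         "my-rg", "my-gateway", gateway_params)
    #     gateway = poller.result()  # blocks until provisioning finishes
    # "my-rg", "my-gateway", and gateway_params are illustrative names only.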
def get(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualNetworkGateway"
"""Gets the specified virtual network gateway by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualNetworkGateway, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_09_01.models.VirtualNetworkGateway
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
accept = "application/json, text/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
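    # Usage sketch (hedged): unlike the begin_* methods, get() is a plain
    # synchronous call, e.g.
    #     gw = client.virtual_network_gateways.get("my-rg", "my-gateway")
    # with `client` an assumed NetworkManagementClient instance.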
def _delete_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified virtual network gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
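    # Usage sketch (hedged): begin_delete returns an LROPoller[None], so a
    # caller that only needs completion can simply wait on it:
    #     client.virtual_network_gateways.begin_delete("my-rg", "my-gateway").wait()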
def list(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.VirtualNetworkGatewayListResult"]
"""Gets all virtual network gateways by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualNetworkGatewayListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2016_09_01.models.VirtualNetworkGatewayListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGatewayListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
accept = "application/json, text/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGatewayListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways'} # type: ignore
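    # Usage sketch (illustrative, not generated code): list() returns an
    # ItemPaged iterator, so callers can loop over it directly and paging is
    # handled transparently; "client" and the group name are placeholders.
    #
    #     for gateway in client.virtual_network_gateways.list("example-rg"):
    #         print(gateway.name)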
def _reset_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
gateway_vip=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> Optional["_models.VirtualNetworkGateway"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.VirtualNetworkGateway"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
accept = "application/json, text/json"
# Construct URL
url = self._reset_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if gateway_vip is not None:
query_parameters['gatewayVip'] = self._serialize.query("gateway_vip", gateway_vip, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_reset_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/reset'} # type: ignore
def begin_reset(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
gateway_vip=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VirtualNetworkGateway"]
"""Resets the primary of the virtual network gateway in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param gateway_vip: Virtual network gateway vip address supplied to the begin reset of the
active-active feature enabled gateway.
:type gateway_vip: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualNetworkGateway or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2016_09_01.models.VirtualNetworkGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._reset_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
gateway_vip=gateway_vip,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reset.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/reset'} # type: ignore
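    # Usage sketch (illustrative, not generated code): every begin_* method
    # returns an LROPoller whose state can be persisted and resumed via the
    # continuation_token keyword documented above; all names are placeholders.
    #
    #     poller = client.virtual_network_gateways.begin_reset("example-rg", "example-gw")
    #     token = poller.continuation_token()   # save somewhere durable
    #     resumed = client.virtual_network_gateways.begin_reset(
    #         "example-rg", "example-gw", continuation_token=token)
    #     gateway = resumed.result()            # blocks until the reset finishes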
def _generatevpnclientpackage_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.VpnClientParameters"
**kwargs # type: Any
):
# type: (...) -> str
cls = kwargs.pop('cls', None) # type: ClsType[str]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json, text/json"
# Construct URL
url = self._generatevpnclientpackage_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VpnClientParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_generatevpnclientpackage_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnclientpackage'} # type: ignore
def begin_generatevpnclientpackage(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.VpnClientParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller[str]
"""Generates VPN client package for P2S client of the virtual network gateway in the specified
resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Parameters supplied to the generate virtual network gateway VPN client
package operation.
:type parameters: ~azure.mgmt.network.v2016_09_01.models.VpnClientParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either str or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[str]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[str]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._generatevpnclientpackage_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_generatevpnclientpackage.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnclientpackage'} # type: ignore
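    # Note: the result of this LRO is a plain string; for this API version it
    # is expected to be the URL from which the generated VPN client package
    # can be downloaded (assumption based on the documented service behavior).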
def _get_bgp_peer_status_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
peer=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> Optional["_models.BgpPeerStatusListResult"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.BgpPeerStatusListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
accept = "application/json, text/json"
# Construct URL
url = self._get_bgp_peer_status_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if peer is not None:
query_parameters['peer'] = self._serialize.query("peer", peer, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('BgpPeerStatusListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_bgp_peer_status_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getBgpPeerStatus'} # type: ignore
def begin_get_bgp_peer_status(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
peer=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.BgpPeerStatusListResult"]
"""The GetBgpPeerStatus operation retrieves the status of all BGP peers.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param peer: The IP address of the peer to retrieve the status of.
:type peer: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either BgpPeerStatusListResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2016_09_01.models.BgpPeerStatusListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.BgpPeerStatusListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_bgp_peer_status_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
peer=peer,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('BgpPeerStatusListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_bgp_peer_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getBgpPeerStatus'} # type: ignore
def _get_learned_routes_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional["_models.GatewayRouteListResult"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.GatewayRouteListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
accept = "application/json, text/json"
# Construct URL
url = self._get_learned_routes_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_learned_routes_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getLearnedRoutes'} # type: ignore
def begin_get_learned_routes(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.GatewayRouteListResult"]
"""This operation retrieves a list of routes the virtual network gateway has learned, including
routes learned from BGP peers.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either GatewayRouteListResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2016_09_01.models.GatewayRouteListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.GatewayRouteListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_learned_routes_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_learned_routes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getLearnedRoutes'} # type: ignore
def _get_advertised_routes_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
peer, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional["_models.GatewayRouteListResult"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.GatewayRouteListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
accept = "application/json, text/json"
# Construct URL
url = self._get_advertised_routes_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['peer'] = self._serialize.query("peer", peer, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_advertised_routes_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getAdvertisedRoutes'} # type: ignore
def begin_get_advertised_routes(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
peer, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.GatewayRouteListResult"]
"""This operation retrieves a list of routes the virtual network gateway is advertising to the
specified peer.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param peer: The IP address of the peer.
:type peer: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either GatewayRouteListResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2016_09_01.models.GatewayRouteListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.GatewayRouteListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_advertised_routes_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
peer=peer,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_advertised_routes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getAdvertisedRoutes'} # type: ignore
|
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: bigip_user
short_description: Manage user accounts and user attributes on a BIG-IP
description:
- Manage user accounts and user attributes on a BIG-IP
version_added: "2.1"
options:
append:
description:
      - If C(yes), the specified C(partition_access) entries will be added to
        the user's existing ones instead of replacing them.
choices:
- yes
- no
default: no
full_name:
description:
- Full name of the user
required: false
connection:
description:
- The connection used to interface with the BIG-IP
required: false
default: soap
choices:
- rest
- soap
server:
description:
- BIG-IP host
required: true
password:
description:
- BIG-IP password
required: true
user:
description:
- BIG-IP username
required: true
username_credential:
description:
- Name of the user to create, remove or modify.
required: true
aliases:
- user
password_credential:
description:
      - Optionally set the user's password to this unencrypted value. One of
either C(password_credential) or C(encrypted_credential) is required
when creating a new account.
default: None
required: false
encrypted_credential:
description:
      - Optionally set the user's password to this encrypted value. One of either
C(password_credential) or C(encrypted_credential) is required when
creating a new account. The password should be encrypted using crypt(3).
default: None
required: false
shell:
description:
      - Optionally set the user's shell.
required: false
default: None
choices:
- bash
- none
- tmsh
partition:
description:
      - Partition to create the user on. Ignored during updates.
required: false
default: 'Common'
partition_access:
description:
      - Specifies the administrative partition to which the user has access.
        Should be in the form "partition:role". Valid roles include
        C(acceleration-policy-editor), C(admin), C(application-editor), C(auditor),
        C(certificate-manager), C(guest), C(irule-manager), C(manager), C(no-access),
        C(operator), C(resource-admin), C(user-manager), C(web-application-security-administrator),
        and C(web-application-security-editor). The partition portion of the tuple
        should be an existing partition or the value C(all).
required: false
default: "all:no-access"
choices: []
state:
description:
- Whether the account should exist or not, taking action if the state is
different from what is stated.
required: false
default: present
choices:
- present
- absent
update_password:
description:
- C(always) will update passwords if they differ. C(on_create) will only
set the password for newly created users.
required: false
default: always
choices:
- always
- on_create
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be
used on personally controlled sites using self-signed certificates.
required: false
default: true
notes:
- Requires the bigsuds Python package on the host if using the iControl
interface. This is as easy as pip install bigsuds
- Requires the requests Python package on the host. This is as easy as
pip install requests
  - For BIG-IP versions < 11.6.0, multiple roles on different partitions
    are not supported. Instead, the last specified role wins.
- Specifying a C(partition) to create the account on is only supported
via the C(soap) connection type (the default) due to missing
functionality in BIG-IP versions <= 12.1.0
requirements:
- bigsuds
- requests
author:
- Matt Hite (@mhite)
- Tim Rupp (@caphrim007)
'''
EXAMPLES = '''
- name: Add the user 'johnd' as an admin
bigip_user:
server: "lb.mydomain.com"
user: "admin"
password: "secret"
username_credential: "johnd"
password_credential: "password"
full_name: "John Doe"
partition_access: "all:admin"
state: "present"
delegate_to: localhost
- name: Change the user "johnd's" role and shell
bigip_user:
server: "lb.mydomain.com"
user: "admin"
password: "secret"
username_credential: "johnd"
partition_access: "NewPartition:manager"
shell: "tmsh"
state: "present"
delegate_to: localhost
- name: Make the user 'johnd' an admin and set to advanced shell
bigip_user:
server: "lb.mydomain.com"
user: "admin"
password: "secret"
name: "johnd"
partition_access: "all:admin"
shell: "bash"
state: "present"
delegate_to: localhost
- name: Remove the user 'johnd'
bigip_user:
server: "lb.mydomain.com"
user: "admin"
password: "secret"
name: "johnd"
state: "absent"
delegate_to: localhost
- name: Update password
bigip_user:
server: "lb.mydomain.com"
user: "admin"
password: "secret"
state: "present"
username_credential: "johnd"
password_credential: "newsupersecretpassword"
'''
RETURN = '''
full_name:
description: Full name of the user
returned: changed and success
type: string
sample: "John Doe"
partition_access:
description:
- List of strings containing the user's roles and which partitions they
are applied to. They are specified in the form "partition:role".
returned: changed and success
type: list
sample: "['all:admin']"
shell:
description: The shell assigned to the user account
returned: changed and success
type: string
sample: "tmsh"
'''
import json
try:
    import bigsuds
    bigsuds_found = True
except ImportError:
    bigsuds_found = False
try:
    import requests
    requests_found = True
except ImportError:
    requests_found = False
# These are the roles that are available to be set in the BIG-IP
ROLES = [
'acceleration-policy-editor', 'application-editor', 'auditor',
'certificate-manager', 'guest', 'irule-manager', 'manager',
'no-access', 'operator', 'resource-admin', 'user-manager',
'web-application-security-administrator', 'web-application-security-editor',
'admin'
]
SHELLS = ['bash', 'none', 'tmsh']
STATES = ['absent', 'present']
class F5ModuleError(Exception):
    pass
# Error conditions raised throughout the module and handled in main(); these
# subclasses were referenced below but never defined.
class AdminRoleNoModifyError(F5ModuleError):
    pass
class CurrentUserNoRoleModifyError(F5ModuleError):
    pass
class CreateUserError(F5ModuleError):
    pass
class DeleteUserError(F5ModuleError):
    pass
class CustomShellError(F5ModuleError):
    pass
class PasswordRequiredError(F5ModuleError):
    pass
class PartitionAccessMalformedError(F5ModuleError):
    pass
class InvalidRoleError(F5ModuleError):
    pass
class RestrictedToSinglePartitionError(F5ModuleError):
    pass
class BigIpApiFactory(object):
def factory(module):
connection = module.params.get('connection')
pa = module.params.get('partition_access')
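        # Creating accounts on a specific partition is only supported via the
        # SOAP API (see the notes section), so force SOAP whenever
        # partition_access is unset or scoped to the Common partition.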
if pa is None or 'Common:' in pa:
connection = 'soap'
if connection == 'rest':
if not requests_found:
raise Exception("The python requests module is required")
return BigIpRestApi(check_mode=module.check_mode, **module.params)
elif connection == 'soap':
if not bigsuds_found:
raise Exception("The python bigsuds module is required")
return BigIpSoapApi(check_mode=module.check_mode, **module.params)
factory = staticmethod(factory)
class BigIpCommon(object):
ALL_PARTITIONS = [
'resource-admin', 'auditor', 'admin',
'web-application-security-administrator', 'no-access',
'USER_ROLE_ADMINISTRATOR', 'USER_ROLE_INVALID'
]
RESERVED_NAMES = [
'admin'
]
RESERVED_ROLES = [
'admin', 'USER_ROLE_ADMINISTRATOR'
]
ADVANCED_SHELL = [
'admin', 'resource-admin',
'USER_ROLE_ADMINISTRATOR', 'USER_ROLE_RESOURCE_ADMINISTRATOR'
]
SHELL_BASH = 'bash'
SHELL_NONE = 'none'
def __init__(self, *args, **kwargs):
self.result = dict(changed=False, changes=dict())
self.params = kwargs
if self.params['partition_access'] is None:
pass
elif not isinstance(self.params['partition_access'], list):
self.params['partition_access'] = [kwargs['partition_access']]
self.current = dict()
def can_have_advanced_shell(self):
current = self.read()
for acl in current['partition_access']:
permission = acl.split(':')
if permission[0] in self.ADVANCED_SHELL:
return True
return False
def _determine_updates(self):
result = dict(
full_name=False,
password=False,
shell=False,
partition_access=False
)
current = self.read()
full_name = self.params['full_name']
password_credential = self.params['password_credential']
username_credential = self.params['username_credential']
shell = self.params['shell']
partition_access = self.params['partition_access']
update_password = self.params['update_password']
encrypted_credential = self.params['encrypted_credential']
if full_name:
if current['full_name'] != full_name:
result['full_name'] = True
if encrypted_credential and username_credential:
if update_password == 'always':
result['password'] = True
elif password_credential and username_credential:
if update_password == 'always' and self.did_password_change():
result['password'] = True
if shell:
if shell == 'bash':
if not self.can_have_advanced_shell():
raise CustomShellError()
if shell == current['shell']:
result['shell'] = False
else:
result['shell'] = True
if partition_access:
if partition_access == current['partition_access']:
result['partition_access'] = False
else:
result['partition_access'] = True
return result
def _determine_partition_access(self):
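        # Normalize the 'partition:role' strings into dicts understood by the
        # API layers, e.g. ['Common:guest', 'all:admin'] becomes
        # [{'role': 'admin', 'partition': 'all'}], since an all-partition role
        # supersedes everything specified before it.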
result = []
has_all = False
if self.params['partition_access'] is None:
return result
for permission in self.params['partition_access']:
acl = permission.split(':')
if len(acl) != 2:
raise PartitionAccessMalformedError
elif acl[1] not in ROLES:
raise InvalidRoleError
partition = acl[0]
role = acl[1]
# These roles affect all partitions. There can be no other roles
# assigned to any other partitions if they are set.
#
# Likewise, if the specified role is being placed in the 'all' partition
# then existing permissions must be cleared
if role in self.ALL_PARTITIONS or partition == 'all':
result = []
has_all = True
elif self.params['append']:
if permission in self._current['partition_access']:
continue
permissions = dict(
role=role,
partition=partition
)
result.append(permissions)
if has_all:
break
for permission in result:
role = permission['role']
partition = permission['partition']
if role in self.ALL_PARTITIONS and partition != 'all':
raise RestrictedToSinglePartitionError
return result
def flush(self):
result = dict()
encrypted_credential = self.params['encrypted_credential']
password_credential = self.params['password_credential']
state = self.params['state']
user = self.params['user']
username_credential = self.params['username_credential']
if password_credential:
self.params['is_encrypted'] = False
else:
self.params['is_encrypted'] = True
self.params['password_credential'] = encrypted_credential
if state == "present":
changed = self.present()
if not self.params['check_mode']:
current = self.read()
result.update(current)
else:
if username_credential in self.RESERVED_NAMES:
raise Exception('The specified user cannot be removed because it is a system account')
elif user == username_credential:
raise Exception('The current user cannot remove themselves')
changed = self.absent()
result.update(dict(changed=changed))
return result
class BigIpSoapApi(BigIpCommon):
"""Manipulate user accounts via SOAP
"""
ROLE_MAP = {
'acceleration-policy-editor': 'USER_ROLE_ACCELERATION_POLICY_EDITOR',
'admin': 'USER_ROLE_ADMINISTRATOR',
'application-editor': 'USER_ROLE_APPLICATION_EDITOR',
'auditor': 'USER_ROLE_AUDITOR',
'certificate-manager': 'USER_ROLE_CERTIFICATE_MANAGER',
'guest': 'USER_ROLE_GUEST',
'irule-manager': 'USER_ROLE_IRULE_MANAGER',
'manager': 'USER_ROLE_MANAGER',
'no-access': 'USER_ROLE_INVALID',
'operator': 'USER_ROLE_TRAFFIC_MANAGER',
'resource-admin': 'USER_ROLE_RESOURCE_ADMINISTRATOR',
'user-manager': 'USER_ROLE_USER_MANAGER',
'web-application-security-administrator': 'USER_ROLE_ASM_POLICY_EDITOR',
'web-application-security-editor': 'USER_ROLE_ASM_EDITOR'
}
SHELL_MAP = {
'bash': '/bin/bash',
'none': '/sbin/nologin',
'tmsh': '/usr/bin/tmsh',
}
SHELL_RMAP = {
'/bin/bash': 'bash',
'/sbin/nologin': 'none',
'/usr/bin/tmsh': 'tmsh',
'/bin/false': 'none'
}
ALL_PARTITION = '[All]'
ADMIN_ROLE = 'USER_ROLE_ADMINISTRATOR'
ROLE_DEFAULT = 'USER_ROLE_INVALID'
def __init__(self, *args, **kwargs):
super(BigIpSoapApi, self).__init__(*args, **kwargs)
self.api = bigip_api(kwargs['server'],
kwargs['user'],
kwargs['password'],
kwargs['validate_certs'])
def did_password_change(self):
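        # Probe the device with the candidate credentials: a successful login
        # means the stored password already matches, while an authorization
        # failure means the password would change.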
server = self.params['server']
user = self.params['username_credential']
password = self.params['password_credential']
validate_certs = self.params['validate_certs']
try:
api = bigip_api(server,
user,
password,
validate_certs)
api.Management.UserManagement.get_fullname(
user_names=[user]
)
return False
        except bigsuds.ConnectionError as e:
            # An authorization failure means the supplied password no longer
            # matches the one stored on the device.
            if 'Authorization Required' in str(e):
                return True
            return False
def get_fullname(self):
username_credential = self.params['username_credential']
resp = self.api.Management.UserManagement.get_fullname(
user_names=[username_credential]
)
return resp[0]
def get_login_shell(self):
username_credential = self.params['username_credential']
resp = self.api.Management.UserManagement.get_login_shell(
user_names=[username_credential]
)
return resp[0]
def get_user_permission(self):
result = {}
username_credential = self.params['username_credential']
resp = self.api.Management.UserManagement.get_user_permission(
user_names=[username_credential]
)
for part in resp[0]:
partition = part['partition']
role = part['role']
result[partition] = role
return result
def delete_all_permissions(self):
username_credential = self.params['username_credential']
permissions = self.get_user_permission()
for partition, role in permissions.iteritems():
permission = dict(role=role, partition=partition)
self.api.Management.UserManagement.delete_user_permission(
user_names=[username_credential],
permissions=[[permission]]
)
def set_permission(self, role, partition):
username_credential = self.params['username_credential']
permission = dict(role=role, partition=partition)
self.api.Management.UserManagement.set_user_permission(
user_names=[username_credential],
permissions=[[permission]]
)
def set_fullname(self):
username_credential = self.params['username_credential']
full_name = self.params['full_name']
self.api.Management.UserManagement.set_fullname(
user_names=[username_credential],
fullnames=[full_name]
)
return True
def set_login_shell(self):
username_credential = self.params['username_credential']
shell = self.params['shell']
shell = self.SHELL_MAP[shell]
self.api.Management.UserManagement.set_login_shell(
user_names=[username_credential],
shells=[shell]
)
return True
def start_transaction(self):
self.api.System.Session.start_transaction()
# need to switch to root, set recursive query state
self.current_folder = self.api.System.Session.get_active_folder()
if self.current_folder != '/' + self.params['partition']:
self.api.System.Session.set_active_folder(folder='/' + self.params['partition'])
self.current_query_state = self.api.System.Session.get_recursive_query_state()
if self.current_query_state == 'STATE_DISABLED':
self.api.System.Session.set_recursive_query_state('STATE_ENABLED')
def submit_transaction(self):
# set everything back
if self.current_query_state == 'STATE_DISABLED':
self.api.System.Session.set_recursive_query_state('STATE_DISABLED')
if self.current_folder != '/' + self.params['partition']:
self.api.System.Session.set_active_folder(folder=self.current_folder)
self.api.System.Session.submit_transaction()
def set_password(self):
is_encrypted = self.params['is_encrypted']
password_credential = self.params['password_credential']
username_credential = self.params['username_credential']
passwords = dict(
is_encrypted=is_encrypted,
password=password_credential
)
self.api.Management.UserManagement.change_password_2(
user_names=[username_credential],
passwords=[passwords]
)
return True
def exists(self):
result = False
username_credential = self.params['username_credential']
self.start_transaction()
users = self.api.Management.UserManagement.get_list()
for user in users:
if user['name'] == username_credential:
result = True
self.submit_transaction()
return result
def read(self):
result = {}
ROLE_RMAP = dict((v, k) for k, v in self.ROLE_MAP.iteritems())
result['full_name'] = self.get_fullname()
result['partition_access'] = []
result['shell'] = ''
shell = self.get_login_shell()
result['shell'] = self.SHELL_RMAP[shell]
acls = self.get_user_permission()
for partition, role in acls.iteritems():
if partition == self.ALL_PARTITION:
partition = 'all'
role = ROLE_RMAP[role]
partition_access = '%s:%s' % (partition, role)
result['partition_access'].append(partition_access)
self._current = result
return result
def absent(self):
username_credential = self.params['username_credential']
if not self.exists():
return False
if self.params['check_mode']:
return True
self.start_transaction()
self.api.Management.UserManagement.delete_user(
user_names=[username_credential]
)
self.submit_transaction()
if self.exists():
raise DeleteUserError()
else:
return True
def update(self):
changed = False
updates = self._determine_updates()
shell = self.params['shell']
self.start_transaction()
if updates['full_name']:
if self.params['check_mode']:
changed = True
else:
changed = self.set_fullname()
if updates['password']:
if self.params['check_mode']:
changed = True
else:
changed = self.set_password()
if updates['shell']:
if shell == self.SHELL_BASH and not self.can_have_advanced_shell():
raise CustomShellError()
else:
if self.params['check_mode']:
changed = True
else:
changed = self.set_login_shell()
if updates['partition_access']:
permissions = self.determine_partition_access()
if self.params['check_mode']:
changed = True
else:
# Start by zeroing out the permissions
self.delete_all_permissions()
# Now, add all the permissions to the user account
for permission in permissions:
self.set_permission(permission['role'], permission['partition'])
changed = True
self.submit_transaction()
return changed
def determine_partition_access(self):
result = []
access = self._determine_partition_access()
for permission in access:
role = permission['role']
partition = permission['partition']
if partition == 'all':
partition = self.ALL_PARTITION
permissions = dict(
role=self.ROLE_MAP[role],
partition=partition
)
result.append(permissions)
return result
def create(self):
advanced_allowed = False
is_encrypted = self.params['is_encrypted']
password_credential = self.params['password_credential']
shell = self.params['shell']
username_credential = self.params['username_credential']
user_id = dict(
name=username_credential,
full_name=''
)
password_info = dict(
is_encrypted=is_encrypted,
password=password_credential
)
users = dict(
user=user_id,
password=password_info,
)
user_permission = self.determine_partition_access()
if user_permission:
users['permissions'] = user_permission
else:
users['permissions'] = [dict(
role=self.ROLE_DEFAULT,
partition=self.ALL_PARTITION
)]
if shell and shell != self.SHELL_NONE:
for x in user_permission:
if x['role'] in self.ADVANCED_SHELL:
advanced_allowed = True
if not advanced_allowed and shell == self.SHELL_BASH:
raise CustomShellError()
else:
users['login_shell'] = self.SHELL_MAP[shell]
self.start_transaction()
self.api.Management.UserManagement.create_user_3(
users=[users]
)
self.submit_transaction()
if self.exists():
return True
else:
raise CreateUserError()
def present(self):
password_credential = self.params['password_credential']
if self.exists():
return self.update()
else:
if self.params['check_mode']:
return True
elif password_credential is None:
raise PasswordRequiredError
return self.create()
class BigIpRestApi(BigIpCommon):
"""Manipulate user accounts via REST
"""
ALL_PARTITION = 'all-partitions'
ADMIN_ROLE = 'admin'
ROLE_DEFAULT = 'no-access'
def __init__(self, *args, **kwargs):
super(BigIpRestApi, self).__init__(*args, **kwargs)
server = self.params['server']
self._uri = 'https://%s/mgmt/tm/auth/user' % (server)
self._headers = {
'Content-Type': 'application/json'
}
def did_password_change(self):
user = self.params['username_credential']
password = self.params['password_credential']
validate_certs = self.params['validate_certs']
try:
url = "%s/%s" % (self._uri, user)
resp = requests.get(url,
auth=(user, password),
verify=validate_certs)
            if resp.status_code == 200:
                return False
        except Exception:
            return True
        # A non-200 response without an exception also means the candidate
        # credentials no longer work.
        return True
def read(self):
result = {}
tmp = []
user = self.params['user']
username_credential = self.params['username_credential']
password = self.params['password']
validate_certs = self.params['validate_certs']
url = "%s/%s" % (self._uri, username_credential)
resp = requests.get(url,
auth=(user, password),
verify=validate_certs)
if resp.status_code == 200:
res = resp.json()
if 'description' in res:
result['full_name'] = res['description']
else:
result['full_name'] = ''
if 'shell' in res:
result['shell'] = res['shell']
else:
result['shell'] = self.SHELL_NONE
if 'partitionAccess' in res:
for part in res['partitionAccess']:
if part['name'] == self.ALL_PARTITION:
part['name'] = 'all'
partition = '%s:%s' % (part['name'], part['role'])
tmp.append(partition)
result['partition_access'] = tmp
return result
def exists(self):
user = self.params['user']
username_credential = self.params['username_credential']
password = self.params['password']
validate_certs = self.params['validate_certs']
url = "%s/%s" % (self._uri, username_credential)
resp = requests.get(url,
auth=(user, password),
verify=validate_certs)
if resp.status_code != 200:
return False
else:
return True
def present(self):
password_credential = self.params['password_credential']
if self.exists():
return self.update()
else:
if self.params['check_mode']:
return True
elif password_credential is None:
raise PasswordRequiredError
return self.create()
def update(self):
payload = {}
updates = self._determine_updates()
is_encrypted = self.params['is_encrypted']
user = self.params['user']
username_credential = self.params['username_credential']
password = self.params['password']
password_credential = self.params['password_credential']
shell = self.params['shell']
validate_certs = self.params['validate_certs']
if updates['full_name']:
payload['description'] = self.params['full_name']
if updates['password']:
if is_encrypted:
payload['encryptedPassword'] = password_credential
else:
payload['password'] = password_credential
if updates['shell']:
if shell == self.SHELL_BASH and not self.can_have_advanced_shell():
raise CustomShellError()
else:
payload['shell'] = shell
if updates['partition_access']:
payload['partitionAccess'] = self.determine_partition_access()
if payload:
if self.params['check_mode']:
return True
uri = "%s/%s" % (self._uri, username_credential)
resp = requests.patch(uri,
auth=(user, password),
data=json.dumps(payload),
verify=validate_certs,
headers=self._headers)
if resp.status_code == 200:
return True
else:
res = resp.json()
raise Exception(res['message'])
else:
return False
def determine_partition_access(self):
result = []
access = self._determine_partition_access()
for permission in access:
role = permission['role']
partition = permission['partition']
if partition == 'all':
partition = self.ALL_PARTITION
permissions = dict(
role=role,
name=partition
)
result.append(permissions)
return result
def create(self):
full_name = self.params['full_name']
user = self.params['user']
username_credential = self.params['username_credential']
password = self.params['password']
password_credential = self.params['password_credential']
validate_certs = self.params['validate_certs']
is_encrypted = self.params['is_encrypted']
partition_access = self.params['partition_access']
shell = self.params['shell']
payload = dict(
name=username_credential
)
if partition_access is None:
paccess = dict(
name=self.ALL_PARTITION,
role=self.ROLE_DEFAULT
)
payload['partitionAccess'] = [paccess]
else:
payload['partitionAccess'] = self.determine_partition_access()
if full_name:
payload['description'] = full_name
if is_encrypted:
payload['encryptedPassword'] = password_credential
else:
payload['password'] = password_credential
        if shell and shell != self.SHELL_NONE:
            advanced_allowed = False
            for x in payload['partitionAccess']:
if x['role'] in self.ADVANCED_SHELL:
advanced_allowed = True
if not advanced_allowed and shell == self.SHELL_BASH:
raise CustomShellError()
else:
payload['shell'] = shell
resp = requests.post(self._uri,
auth=(user, password),
data=json.dumps(payload),
verify=validate_certs,
headers=self._headers)
if resp.status_code == 200:
return True
else:
res = resp.json()
raise Exception(res['message'])
def absent(self):
user = self.params['user']
username_credential = self.params['username_credential']
password = self.params['password']
validate_certs = self.params['validate_certs']
if not self.exists():
return False
if self.params['check_mode']:
return True
uri = "%s/%s" % (self._uri, username_credential)
resp = requests.delete(uri,
auth=(user, password),
verify=validate_certs)
if resp.status_code == 200:
return True
else:
res = resp.json()
raise Exception(res['message'])
def main():
argument_spec = f5_argument_spec()
meta_args = dict(
        append=dict(default=False, type='bool', choices=BOOLEANS),
        connection=dict(required=False, default='soap', choices=['soap', 'rest']),
full_name=dict(),
encrypted_credential=dict(required=False, default=None, no_log=True),
partition_access=dict(required=False, default=None),
password_credential=dict(required=False, default=None, no_log=True),
shell=dict(default=None, choices=SHELLS),
state=dict(default='present', choices=STATES),
username_credential=dict(required=True, aliases=['name']),
update_password=dict(required=False, default='always', choices=['always', 'on_create'])
)
argument_spec.update(meta_args)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
mutually_exclusive=[
['password_credential', 'encrypted_credential']
]
)
try:
obj = BigIpApiFactory.factory(module)
result = obj.flush()
module.exit_json(**result)
except bigsuds.ConnectionError:
module.fail_json(msg="Could not connect to BIG-IP host")
    except bigsuds.ServerError as e:
if 'folder not found' in str(e):
module.fail_json(msg="Partition not found")
else:
module.fail_json(msg=str(e))
except AdminRoleNoModifyError:
module.fail_json(msg="The admin user's role cannot be changed")
except CurrentUserNoRoleModifyError:
module.fail_json(msg='The login_user user cannot change their own role')
except CreateUserError:
module.fail_json(msg='Failed to create the user!')
except DeleteUserError:
module.fail_json(msg='Failed to delete the user!')
except CustomShellError:
module.fail_json(msg='Custom shells are only available to administrators')
except PasswordRequiredError:
module.fail_json(msg='At least one of password_credential or encrypted_credential must be specified')
except PartitionAccessMalformedError:
module.fail_json(msg='partition_access must be one or more role:partition tuples')
except InvalidRoleError:
module.fail_json(msg='Value of role must be one of: %s' % ','.join(ROLES))
except RestrictedToSinglePartitionError:
module.fail_json(msg='The specified role may not be restricted to a single partition')
except requests.exceptions.SSLError:
module.fail_json(msg='Certificate verification failed. Consider using validate_certs=no')
from ansible.module_utils.basic import *
from ansible.module_utils.f5 import *
if __name__ == '__main__':
main()
|
|
# Copyright 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import re
from oslo.config import cfg
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common.i18n import _LW
from ironic.common import utils
from ironic.openstack.common import log as logging
from ironic.openstack.common import loopingcall
from ironic.openstack.common import processutils
opts = [
cfg.IntOpt('check_device_interval',
default=1,
               help='After Ironic has completed creating the partition table, '
                    'it continues to check for activity on the attached iSCSI '
                    'device at this interval prior to copying the image '
                    'to the node, in seconds'),
cfg.IntOpt('check_device_max_retries',
default=20,
help='The maximum number of times to check that the device is '
'not accessed by another process. If the device is still '
'busy after that, the disk partitioning will be treated as'
' having failed.'),
]
CONF = cfg.CONF
opt_group = cfg.OptGroup(name='disk_partitioner',
title='Options for the disk partitioner')
CONF.register_group(opt_group)
CONF.register_opts(opts, opt_group)
LOG = logging.getLogger(__name__)
class DiskPartitioner(object):
def __init__(self, device, disk_label='msdos', alignment='optimal'):
"""A convenient wrapper around the parted tool.
:param device: The device path.
:param disk_label: The type of the partition table. Valid types are:
"bsd", "dvh", "gpt", "loop", "mac", "msdos",
"pc98", or "sun".
:param alignment: Set alignment for newly created partitions.
Valid types are: none, cylinder, minimal and
optimal.
"""
self._device = device
self._disk_label = disk_label
self._alignment = alignment
self._partitions = []
self._fuser_pids_re = re.compile(r'((\d)+\s*)+')
def _exec(self, *args):
# NOTE(lucasagomes): utils.execute() is already a wrapper on top
# of processutils.execute() which raises specific
# exceptions. It also logs any failure so we don't
# need to log it again here.
utils.execute('parted', '-a', self._alignment, '-s', self._device,
'--', 'unit', 'MiB', *args, check_exit_code=[0],
run_as_root=True)
def add_partition(self, size, part_type='primary', fs_type='',
bootable=False):
"""Add a partition.
:param size: The size of the partition in MiB.
:param part_type: The type of the partition. Valid values are:
primary, logical, or extended.
:param fs_type: The filesystem type. Valid types are: ext2, fat32,
fat16, HFS, linux-swap, NTFS, reiserfs, ufs.
If blank (''), it will create a Linux native
partition (83).
:param bootable: Boolean value; whether the partition is bootable
or not.
:returns: The partition number.
"""
self._partitions.append({'size': size,
'type': part_type,
'fs_type': fs_type,
'bootable': bootable})
return len(self._partitions)
def get_partitions(self):
"""Get the partitioning layout.
:returns: An iterator with the partition number and the
partition layout.
"""
return enumerate(self._partitions, 1)
def _wait_for_disk_to_become_available(self, retries, max_retries, pids,
stderr):
retries[0] += 1
if retries[0] > max_retries:
raise loopingcall.LoopingCallDone()
try:
# NOTE(ifarkas): fuser returns a non-zero return code if none of
# the specified files is accessed
out, err = utils.execute('fuser', self._device,
check_exit_code=[0, 1], run_as_root=True)
if not out and not err:
raise loopingcall.LoopingCallDone()
else:
if err:
stderr[0] = err
if out:
pids_match = re.search(self._fuser_pids_re, out)
pids[0] = pids_match.group()
except processutils.ProcessExecutionError as exc:
LOG.warning(_LW('Failed to check the device %(device)s with fuser:'
' %(err)s'), {'device': self._device, 'err': exc})
def commit(self):
"""Write to the disk."""
cmd_args = ['mklabel', self._disk_label]
# NOTE(lucasagomes): Lead in with 1MiB to allow room for the
# partition table itself.
start = 1
for num, part in self.get_partitions():
end = start + part['size']
cmd_args.extend(['mkpart', part['type'], part['fs_type'],
str(start), str(end)])
if part['bootable']:
cmd_args.extend(['set', str(num), 'boot', 'on'])
start = end
self._exec(*cmd_args)
retries = [0]
pids = ['']
fuser_err = ['']
interval = CONF.disk_partitioner.check_device_interval
max_retries = CONF.disk_partitioner.check_device_max_retries
timer = loopingcall.FixedIntervalLoopingCall(
self._wait_for_disk_to_become_available,
retries, max_retries, pids, fuser_err)
timer.start(interval=interval).wait()
if retries[0] > max_retries:
if pids[0]:
raise exception.InstanceDeployFailure(
_('Disk partitioning failed on device %(device)s. '
'Processes with the following PIDs are holding it: '
'%(pids)s. Time out waiting for completion.')
% {'device': self._device, 'pids': pids[0]})
else:
raise exception.InstanceDeployFailure(
_('Disk partitioning failed on device %(device)s. Fuser '
'exited with "%(fuser_err)s". Time out waiting for '
'completion.')
% {'device': self._device, 'fuser_err': fuser_err[0]})
_PARTED_PRINT_RE = re.compile(r"^\d+:([\d\.]+)MiB:"
                              r"([\d\.]+)MiB:([\d\.]+)MiB:(\w*)::(\w*)")
def list_partitions(device):
"""Get partitions information from given device.
:param device: The device path.
:returns: list of dictionaries (one per partition) with keys:
start, end, size (in MiB), filesystem, flags
"""
env = os.environ.copy()
env['LC_ALL'] = 'C'
output = utils.execute('parted', '-s', '-m', device, 'unit', 'MiB',
'print', env_variables=env)[0]
lines = [line for line in output.split('\n') if line.strip()][2:]
# Example of line: 1:1.00MiB:501MiB:500MiB:ext4::boot
fields = ('start', 'end', 'size', 'filesystem', 'flags')
result = []
for line in lines:
match = _PARTED_PRINT_RE.match(line)
if match is None:
LOG.warn(_LW("Partition information from parted for device "
"%(device)s does not match "
"expected format: %(line)s"),
dict(device=device, line=line))
continue
# Cast int fields to ints (some are floats and we round them down)
groups = [int(float(x)) if i < 3 else x
for i, x in enumerate(match.groups())]
result.append(dict(zip(fields, groups)))
return result
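# A minimal parsing sketch (not part of the original module; it assumes the
# module's imports resolve): feed a sample 'parted -s -m <dev> unit MiB print'
# line through the same regex and field handling used by list_partitions()
# above. The sample line is hypothetical.
if __name__ == '__main__':
    _sample = '1:1.00MiB:501MiB:500MiB:ext4::boot'
    _match = _PARTED_PRINT_RE.match(_sample)
    _fields = ('start', 'end', 'size', 'filesystem', 'flags')
    # The first three fields come back rounded down from parted's MiB floats,
    # mirroring the cast in list_partitions().
    _groups = [int(float(x)) if i < 3 else x
               for i, x in enumerate(_match.groups())]
    print(dict(zip(_fields, _groups)))
    # -> {'start': 1, 'end': 501, 'size': 500,
    #     'filesystem': 'ext4', 'flags': 'boot'}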
|
|
# coding: utf-8
from sqlalchemy import Column
from sqlalchemy import exc
from sqlalchemy import Integer
from sqlalchemy import schema
from sqlalchemy import sql
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import testing
from sqlalchemy import types as sqltypes
from sqlalchemy.dialects.postgresql import insert
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.assertions import assert_raises
from sqlalchemy.testing.assertions import eq_
class OnConflictTest(fixtures.TablesTest):
__only_on__ = ("postgresql >= 9.5",)
__backend__ = True
run_define_tables = "each"
@classmethod
def define_tables(cls, metadata):
Table(
"users",
metadata,
Column("id", Integer, primary_key=True),
Column("name", String(50)),
)
class SpecialType(sqltypes.TypeDecorator):
impl = String
def process_bind_param(self, value, dialect):
return value + " processed"
Table(
"bind_targets",
metadata,
Column("id", Integer, primary_key=True),
Column("data", SpecialType()),
)
users_xtra = Table(
"users_xtra",
metadata,
Column("id", Integer, primary_key=True),
Column("name", String(50)),
Column("login_email", String(50)),
Column("lets_index_this", String(50)),
)
cls.unique_partial_index = schema.Index(
"idx_unique_partial_name",
users_xtra.c.name,
users_xtra.c.lets_index_this,
unique=True,
postgresql_where=users_xtra.c.lets_index_this == "unique_name",
)
cls.unique_constraint = schema.UniqueConstraint(
users_xtra.c.login_email, name="uq_login_email"
)
cls.bogus_index = schema.Index(
"idx_special_ops",
users_xtra.c.lets_index_this,
postgresql_where=users_xtra.c.lets_index_this > "m",
)
def test_bad_args(self):
assert_raises(
ValueError,
insert(self.tables.users).on_conflict_do_nothing,
constraint="id",
index_elements=["id"],
)
assert_raises(
ValueError,
insert(self.tables.users).on_conflict_do_update,
constraint="id",
index_elements=["id"],
)
assert_raises(
ValueError,
insert(self.tables.users).on_conflict_do_update,
constraint="id",
)
assert_raises(
ValueError, insert(self.tables.users).on_conflict_do_update
)
def test_on_conflict_do_nothing(self):
users = self.tables.users
with testing.db.connect() as conn:
result = conn.execute(
insert(users).on_conflict_do_nothing(),
dict(id=1, name="name1"),
)
eq_(result.inserted_primary_key, [1])
eq_(result.returned_defaults, None)
result = conn.execute(
insert(users).on_conflict_do_nothing(),
dict(id=1, name="name2"),
)
eq_(result.inserted_primary_key, [1])
eq_(result.returned_defaults, None)
eq_(
conn.execute(users.select().where(users.c.id == 1)).fetchall(),
[(1, "name1")],
)
def test_on_conflict_do_nothing_connectionless(self, connection):
users = self.tables.users_xtra
result = connection.execute(
insert(users).on_conflict_do_nothing(constraint="uq_login_email"),
dict(name="name1", login_email="email1"),
)
eq_(result.inserted_primary_key, [1])
eq_(result.returned_defaults, (1,))
result = connection.execute(
insert(users).on_conflict_do_nothing(constraint="uq_login_email"),
dict(name="name2", login_email="email1"),
)
eq_(result.inserted_primary_key, None)
eq_(result.returned_defaults, None)
eq_(
connection.execute(
users.select().where(users.c.id == 1)
).fetchall(),
[(1, "name1", "email1", None)],
)
@testing.provide_metadata
def test_on_conflict_do_nothing_target(self):
users = self.tables.users
with testing.db.connect() as conn:
result = conn.execute(
insert(users).on_conflict_do_nothing(
index_elements=users.primary_key.columns
),
dict(id=1, name="name1"),
)
eq_(result.inserted_primary_key, [1])
eq_(result.returned_defaults, None)
result = conn.execute(
insert(users).on_conflict_do_nothing(
index_elements=users.primary_key.columns
),
dict(id=1, name="name2"),
)
eq_(result.inserted_primary_key, [1])
eq_(result.returned_defaults, None)
eq_(
conn.execute(users.select().where(users.c.id == 1)).fetchall(),
[(1, "name1")],
)
def test_on_conflict_do_update_one(self):
users = self.tables.users
with testing.db.connect() as conn:
conn.execute(users.insert(), dict(id=1, name="name1"))
i = insert(users)
i = i.on_conflict_do_update(
index_elements=[users.c.id], set_=dict(name=i.excluded.name)
)
result = conn.execute(i, dict(id=1, name="name1"))
eq_(result.inserted_primary_key, [1])
eq_(result.returned_defaults, None)
eq_(
conn.execute(users.select().where(users.c.id == 1)).fetchall(),
[(1, "name1")],
)
def test_on_conflict_do_update_two(self):
users = self.tables.users
with testing.db.connect() as conn:
conn.execute(users.insert(), dict(id=1, name="name1"))
i = insert(users)
i = i.on_conflict_do_update(
index_elements=[users.c.id],
set_=dict(id=i.excluded.id, name=i.excluded.name),
)
result = conn.execute(i, dict(id=1, name="name2"))
eq_(result.inserted_primary_key, [1])
eq_(result.returned_defaults, None)
eq_(
conn.execute(users.select().where(users.c.id == 1)).fetchall(),
[(1, "name2")],
)
def test_on_conflict_do_update_three(self):
users = self.tables.users
with testing.db.connect() as conn:
conn.execute(users.insert(), dict(id=1, name="name1"))
i = insert(users)
i = i.on_conflict_do_update(
index_elements=users.primary_key.columns,
set_=dict(name=i.excluded.name),
)
result = conn.execute(i, dict(id=1, name="name3"))
eq_(result.inserted_primary_key, [1])
eq_(result.returned_defaults, None)
eq_(
conn.execute(users.select().where(users.c.id == 1)).fetchall(),
[(1, "name3")],
)
def test_on_conflict_do_update_four(self):
users = self.tables.users
with testing.db.connect() as conn:
conn.execute(users.insert(), dict(id=1, name="name1"))
i = insert(users)
i = i.on_conflict_do_update(
index_elements=users.primary_key.columns,
set_=dict(id=i.excluded.id, name=i.excluded.name),
).values(id=1, name="name4")
result = conn.execute(i)
eq_(result.inserted_primary_key, [1])
eq_(result.returned_defaults, None)
eq_(
conn.execute(users.select().where(users.c.id == 1)).fetchall(),
[(1, "name4")],
)
def test_on_conflict_do_update_five(self):
users = self.tables.users
with testing.db.connect() as conn:
conn.execute(users.insert(), dict(id=1, name="name1"))
i = insert(users)
i = i.on_conflict_do_update(
index_elements=users.primary_key.columns,
set_=dict(id=10, name="I'm a name"),
).values(id=1, name="name4")
result = conn.execute(i)
eq_(result.inserted_primary_key, [1])
eq_(result.returned_defaults, None)
eq_(
conn.execute(
users.select().where(users.c.id == 10)
).fetchall(),
[(10, "I'm a name")],
)
def test_on_conflict_do_update_multivalues(self):
users = self.tables.users
with testing.db.connect() as conn:
conn.execute(users.insert(), dict(id=1, name="name1"))
conn.execute(users.insert(), dict(id=2, name="name2"))
i = insert(users)
i = i.on_conflict_do_update(
index_elements=users.primary_key.columns,
set_=dict(name="updated"),
where=(i.excluded.name != "name12"),
).values(
[
dict(id=1, name="name11"),
dict(id=2, name="name12"),
dict(id=3, name="name13"),
dict(id=4, name="name14"),
]
)
result = conn.execute(i)
eq_(result.inserted_primary_key, [None])
eq_(result.returned_defaults, None)
eq_(
conn.execute(users.select().order_by(users.c.id)).fetchall(),
[(1, "updated"), (2, "name2"), (3, "name13"), (4, "name14")],
)
def _exotic_targets_fixture(self, conn):
users = self.tables.users_xtra
conn.execute(
insert(users),
dict(
id=1,
name="name1",
login_email="name1@gmail.com",
lets_index_this="not",
),
)
conn.execute(
users.insert(),
dict(
id=2,
name="name2",
login_email="name2@gmail.com",
lets_index_this="not",
),
)
eq_(
conn.execute(users.select().where(users.c.id == 1)).fetchall(),
[(1, "name1", "name1@gmail.com", "not")],
)
def test_on_conflict_do_update_exotic_targets_two(self):
users = self.tables.users_xtra
with testing.db.connect() as conn:
self._exotic_targets_fixture(conn)
# try primary key constraint: cause an upsert on unique id column
i = insert(users)
i = i.on_conflict_do_update(
index_elements=users.primary_key.columns,
set_=dict(
name=i.excluded.name, login_email=i.excluded.login_email
),
)
result = conn.execute(
i,
dict(
id=1,
name="name2",
login_email="name1@gmail.com",
lets_index_this="not",
),
)
eq_(result.inserted_primary_key, [1])
eq_(result.returned_defaults, None)
eq_(
conn.execute(users.select().where(users.c.id == 1)).fetchall(),
[(1, "name2", "name1@gmail.com", "not")],
)
def test_on_conflict_do_update_exotic_targets_three(self):
users = self.tables.users_xtra
with testing.db.connect() as conn:
self._exotic_targets_fixture(conn)
# try unique constraint: cause an upsert on target
# login_email, not id
i = insert(users)
i = i.on_conflict_do_update(
constraint=self.unique_constraint,
set_=dict(
id=i.excluded.id,
name=i.excluded.name,
login_email=i.excluded.login_email,
),
)
# note: lets_index_this value totally ignored in SET clause.
result = conn.execute(
i,
dict(
id=42,
name="nameunique",
login_email="name2@gmail.com",
lets_index_this="unique",
),
)
eq_(result.inserted_primary_key, [42])
eq_(result.returned_defaults, None)
eq_(
conn.execute(
users.select().where(
users.c.login_email == "name2@gmail.com"
)
).fetchall(),
[(42, "nameunique", "name2@gmail.com", "not")],
)
def test_on_conflict_do_update_exotic_targets_four(self):
users = self.tables.users_xtra
with testing.db.connect() as conn:
self._exotic_targets_fixture(conn)
# try unique constraint by name: cause an
# upsert on target login_email, not id
i = insert(users)
i = i.on_conflict_do_update(
constraint=self.unique_constraint.name,
set_=dict(
id=i.excluded.id,
name=i.excluded.name,
login_email=i.excluded.login_email,
),
)
# note: lets_index_this value totally ignored in SET clause.
result = conn.execute(
i,
dict(
id=43,
name="nameunique2",
login_email="name2@gmail.com",
lets_index_this="unique",
),
)
eq_(result.inserted_primary_key, [43])
eq_(result.returned_defaults, None)
eq_(
conn.execute(
users.select().where(
users.c.login_email == "name2@gmail.com"
)
).fetchall(),
[(43, "nameunique2", "name2@gmail.com", "not")],
)
def test_on_conflict_do_update_exotic_targets_four_no_pk(self):
users = self.tables.users_xtra
with testing.db.connect() as conn:
self._exotic_targets_fixture(conn)
# try unique constraint by name: cause an
# upsert on target login_email, not id
i = insert(users)
i = i.on_conflict_do_update(
index_elements=[users.c.login_email],
set_=dict(
id=i.excluded.id,
name=i.excluded.name,
login_email=i.excluded.login_email,
),
)
result = conn.execute(
i, dict(name="name3", login_email="name1@gmail.com")
)
eq_(result.inserted_primary_key, [1])
eq_(result.returned_defaults, (1,))
eq_(
conn.execute(users.select().order_by(users.c.id)).fetchall(),
[
(1, "name3", "name1@gmail.com", "not"),
(2, "name2", "name2@gmail.com", "not"),
],
)
def test_on_conflict_do_update_exotic_targets_five(self):
users = self.tables.users_xtra
with testing.db.connect() as conn:
self._exotic_targets_fixture(conn)
# try bogus index
i = insert(users)
i = i.on_conflict_do_update(
index_elements=self.bogus_index.columns,
index_where=self.bogus_index.dialect_options["postgresql"][
"where"
],
set_=dict(
name=i.excluded.name, login_email=i.excluded.login_email
),
)
assert_raises(
exc.ProgrammingError,
conn.execute,
i,
dict(
id=1,
name="namebogus",
login_email="bogus@gmail.com",
lets_index_this="bogus",
),
)
def test_on_conflict_do_update_exotic_targets_six(self):
users = self.tables.users_xtra
with testing.db.connect() as conn:
conn.execute(
insert(users),
dict(
id=1,
name="name1",
login_email="mail1@gmail.com",
lets_index_this="unique_name",
),
)
i = insert(users)
i = i.on_conflict_do_update(
index_elements=self.unique_partial_index.columns,
index_where=self.unique_partial_index.dialect_options[
"postgresql"
]["where"],
set_=dict(
name=i.excluded.name, login_email=i.excluded.login_email
),
)
conn.execute(
i,
[
dict(
name="name1",
login_email="mail2@gmail.com",
lets_index_this="unique_name",
)
],
)
eq_(
conn.execute(users.select()).fetchall(),
[(1, "name1", "mail2@gmail.com", "unique_name")],
)
def test_on_conflict_do_update_no_row_actually_affected(self):
users = self.tables.users_xtra
with testing.db.connect() as conn:
self._exotic_targets_fixture(conn)
i = insert(users)
i = i.on_conflict_do_update(
index_elements=[users.c.login_email],
set_=dict(name="new_name"),
where=(i.excluded.name == "other_name"),
)
result = conn.execute(
i, dict(name="name2", login_email="name1@gmail.com")
)
eq_(result.returned_defaults, None)
eq_(result.inserted_primary_key, None)
eq_(
conn.execute(users.select()).fetchall(),
[
(1, "name1", "name1@gmail.com", "not"),
(2, "name2", "name2@gmail.com", "not"),
],
)
def test_on_conflict_do_update_special_types_in_set(self):
bind_targets = self.tables.bind_targets
with testing.db.connect() as conn:
i = insert(bind_targets)
conn.execute(i, {"id": 1, "data": "initial data"})
eq_(
conn.scalar(sql.select([bind_targets.c.data])),
"initial data processed",
)
i = insert(bind_targets)
i = i.on_conflict_do_update(
index_elements=[bind_targets.c.id],
set_=dict(data="new updated data"),
)
conn.execute(i, {"id": 1, "data": "new inserted data"})
eq_(
conn.scalar(sql.select([bind_targets.c.data])),
"new updated data processed",
)
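# A compile-only sketch (not part of the test suite): show the SQL that the
# on_conflict_do_update() construct exercised throughout these tests renders
# to. No live database is needed, only the postgresql dialect.
if __name__ == "__main__":
    from sqlalchemy import MetaData
    from sqlalchemy.dialects import postgresql

    m = MetaData()
    t = Table(
        "users",
        m,
        Column("id", Integer, primary_key=True),
        Column("name", String(50)),
    )
    stmt = insert(t).values(id=1, name="name1")
    stmt = stmt.on_conflict_do_update(
        index_elements=[t.c.id], set_=dict(name=stmt.excluded.name)
    )
    # Roughly: INSERT INTO users (id, name) VALUES (%(id)s, %(name)s)
    #          ON CONFLICT (id) DO UPDATE SET name = excluded.name
    print(stmt.compile(dialect=postgresql.dialect()))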
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
JSONType = Any
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_by_agent_request(
resource_group_name: str,
server_name: str,
job_agent_name: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-11-01-preview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/jobAgents/{jobAgentName}/credentials')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"serverName": _SERIALIZER.url("server_name", server_name, 'str'),
"jobAgentName": _SERIALIZER.url("job_agent_name", job_agent_name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
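# With hypothetical values plugged in, the builder above yields a GET request
# whose URL expands along the lines of:
#   /subscriptions/<sub-id>/resourceGroups/<rg>/providers/Microsoft.Sql/
#   servers/<server>/jobAgents/<agent>/credentials?api-version=2020-11-01-preview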
def build_get_request(
resource_group_name: str,
server_name: str,
job_agent_name: str,
credential_name: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-11-01-preview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/jobAgents/{jobAgentName}/credentials/{credentialName}')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"serverName": _SERIALIZER.url("server_name", server_name, 'str'),
"jobAgentName": _SERIALIZER.url("job_agent_name", job_agent_name, 'str'),
"credentialName": _SERIALIZER.url("credential_name", credential_name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_create_or_update_request(
resource_group_name: str,
server_name: str,
job_agent_name: str,
credential_name: str,
subscription_id: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2020-11-01-preview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/jobAgents/{jobAgentName}/credentials/{credentialName}')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"serverName": _SERIALIZER.url("server_name", server_name, 'str'),
"jobAgentName": _SERIALIZER.url("job_agent_name", job_agent_name, 'str'),
"credentialName": _SERIALIZER.url("credential_name", credential_name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PUT",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_delete_request(
resource_group_name: str,
server_name: str,
job_agent_name: str,
credential_name: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-11-01-preview"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/jobAgents/{jobAgentName}/credentials/{credentialName}')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"serverName": _SERIALIZER.url("server_name", server_name, 'str'),
"jobAgentName": _SERIALIZER.url("job_agent_name", job_agent_name, 'str'),
"credentialName": _SERIALIZER.url("credential_name", credential_name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
return HttpRequest(
method="DELETE",
url=url,
params=query_parameters,
**kwargs
)
class JobCredentialsOperations(object):
"""JobCredentialsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.sql.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list_by_agent(
self,
resource_group_name: str,
server_name: str,
job_agent_name: str,
**kwargs: Any
) -> Iterable["_models.JobCredentialListResult"]:
"""Gets a list of jobs credentials.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal.
:type resource_group_name: str
:param server_name: The name of the server.
:type server_name: str
:param job_agent_name: The name of the job agent.
:type job_agent_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either JobCredentialListResult or the result of
cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.sql.models.JobCredentialListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.JobCredentialListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_agent_request(
resource_group_name=resource_group_name,
server_name=server_name,
job_agent_name=job_agent_name,
subscription_id=self._config.subscription_id,
template_url=self.list_by_agent.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_by_agent_request(
resource_group_name=resource_group_name,
server_name=server_name,
job_agent_name=job_agent_name,
subscription_id=self._config.subscription_id,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("JobCredentialListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_agent.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/jobAgents/{jobAgentName}/credentials'} # type: ignore
@distributed_trace
def get(
self,
resource_group_name: str,
server_name: str,
job_agent_name: str,
credential_name: str,
**kwargs: Any
) -> "_models.JobCredential":
"""Gets a jobs credential.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal.
:type resource_group_name: str
:param server_name: The name of the server.
:type server_name: str
:param job_agent_name: The name of the job agent.
:type job_agent_name: str
:param credential_name: The name of the credential.
:type credential_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: JobCredential, or the result of cls(response)
:rtype: ~azure.mgmt.sql.models.JobCredential
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.JobCredential"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
resource_group_name=resource_group_name,
server_name=server_name,
job_agent_name=job_agent_name,
credential_name=credential_name,
subscription_id=self._config.subscription_id,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('JobCredential', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/jobAgents/{jobAgentName}/credentials/{credentialName}'} # type: ignore
@distributed_trace
def create_or_update(
self,
resource_group_name: str,
server_name: str,
job_agent_name: str,
credential_name: str,
parameters: "_models.JobCredential",
**kwargs: Any
) -> "_models.JobCredential":
"""Creates or updates a job credential.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal.
:type resource_group_name: str
:param server_name: The name of the server.
:type server_name: str
:param job_agent_name: The name of the job agent.
:type job_agent_name: str
:param credential_name: The name of the credential.
:type credential_name: str
:param parameters: The requested job credential state.
:type parameters: ~azure.mgmt.sql.models.JobCredential
:keyword callable cls: A custom type or function that will be passed the direct response
:return: JobCredential, or the result of cls(response)
:rtype: ~azure.mgmt.sql.models.JobCredential
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.JobCredential"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'JobCredential')
request = build_create_or_update_request(
resource_group_name=resource_group_name,
server_name=server_name,
job_agent_name=job_agent_name,
credential_name=credential_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.create_or_update.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('JobCredential', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('JobCredential', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/jobAgents/{jobAgentName}/credentials/{credentialName}'} # type: ignore
@distributed_trace
def delete(
self,
resource_group_name: str,
server_name: str,
job_agent_name: str,
credential_name: str,
**kwargs: Any
) -> None:
"""Deletes a job credential.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal.
:type resource_group_name: str
:param server_name: The name of the server.
:type server_name: str
:param job_agent_name: The name of the job agent.
:type job_agent_name: str
:param credential_name: The name of the credential.
:type credential_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request(
resource_group_name=resource_group_name,
server_name=server_name,
job_agent_name=job_agent_name,
credential_name=credential_name,
subscription_id=self._config.subscription_id,
template_url=self.delete.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/jobAgents/{jobAgentName}/credentials/{credentialName}'} # type: ignore
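# A minimal usage sketch (not part of the generated file). It assumes the
# azure-identity package and the azure-mgmt-sql client that attaches this
# operation group as 'job_credentials'; all resource names are placeholders.
if __name__ == '__main__':
    from azure.identity import DefaultAzureCredential
    from azure.mgmt.sql import SqlManagementClient

    client = SqlManagementClient(DefaultAzureCredential(), "<subscription-id>")
    for credential in client.job_credentials.list_by_agent(
            "<resource-group>", "<server-name>", "<job-agent-name>"):
        print(credential.name)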
|
|
##########################################################################
#
# Copyright (c) 2012, John Haddon. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import inspect
import IECore
import Gaffer
import GafferTest
class ContextVariablesTest( GafferTest.TestCase ) :
def test( self ) :
n = GafferTest.StringInOutNode()
self.assertHashesValid( n )
c = Gaffer.ContextVariables()
c.setup( Gaffer.StringPlug() )
c["in"].setInput( n["out"] )
n["in"].setValue( "$a" )
self.assertEqual( c["out"].getValue(), "" )
c["variables"].addChild( Gaffer.NameValuePlug( "a", IECore.StringData( "A" ), flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic ) )
self.assertEqual( c["out"].getValue(), "A" )
def testDirtyPropagation( self ) :
n = GafferTest.StringInOutNode()
c = Gaffer.ContextVariables()
c.setup( Gaffer.StringPlug() )
c["in"].setInput( n["out"] )
# adding a variable should dirty the output:
dirtied = GafferTest.CapturingSlot( c.plugDirtiedSignal() )
c["variables"].addChild( Gaffer.NameValuePlug( "a", IECore.StringData( "A" ), "member1", flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic ) )
self.assertIn( c["out"], [ p[0] for p in dirtied ] )
# modifying the variable should dirty the output:
dirtied = GafferTest.CapturingSlot( c.plugDirtiedSignal() )
c["variables"]["member1"]["value"].setValue("b")
self.assertIn( c["out"], [ p[0] for p in dirtied ] )
# removing the variable should also dirty the output:
dirtied = GafferTest.CapturingSlot( c.plugDirtiedSignal() )
c["variables"].removeChild(c["variables"]["member1"])
self.assertIn( c["out"], [ p[0] for p in dirtied ] )
def testSerialisation( self ) :
s = Gaffer.ScriptNode()
s["n"] = GafferTest.StringInOutNode()
s["c"] = Gaffer.ContextVariables()
s["c"].setup( Gaffer.StringPlug() )
s["c"]["in"].setInput( s["n"]["out"] )
s["n"]["in"].setValue( "$a" )
self.assertEqual( s["c"]["out"].getValue(), "" )
s["c"]["variables"].addChild( Gaffer.NameValuePlug( "a", IECore.StringData( "A" ), flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic ) )
self.assertEqual( s["c"]["out"].getValue(), "A" )
s2 = Gaffer.ScriptNode()
s2.execute( s.serialise() )
self.assertEqual( s2["c"].keys(), s["c"].keys() )
self.assertEqual( s2["c"]["out"].getValue(), "A" )
def testExtraVariables( self ) :
s = Gaffer.ScriptNode()
s["n"] = GafferTest.StringInOutNode()
s["c"] = Gaffer.ContextVariables()
s["c"].setup( Gaffer.StringPlug() )
s["c"]["in"].setInput( s["n"]["out"] )
s["n"]["in"].setValue( "$a" )
self.assertEqual( s["c"]["out"].getValue(), "" )
dirtied = GafferTest.CapturingSlot( s["c"].plugDirtiedSignal() )
s["c"]["extraVariables"].setValue( IECore.CompoundData( { "a" : "A" } ) )
self.assertIn( s["c"]["out"], { p[0] for p in dirtied } )
self.assertEqual( s["c"]["out"].getValue(), "A" )
# Extra variables trump regular variables of the same name
s["c"]["variables"].addChild( Gaffer.NameValuePlug( "a", IECore.StringData( "B" ) ) )
self.assertEqual( s["c"]["out"].getValue(), "A" )
s2 = Gaffer.ScriptNode()
s2.execute( s.serialise() )
self.assertEqual( s2["c"]["out"].getValue(), "A" )
def testExtraVariablesExpression( self ) :
s = Gaffer.ScriptNode()
s["n"] = GafferTest.StringInOutNode()
s["c"] = Gaffer.ContextVariables()
s["c"].setup( Gaffer.StringPlug() )
s["c"]["in"].setInput( s["n"]["out"] )
s["n"]["in"].setValue( "$a$b$c" )
self.assertEqual( s["c"]["out"].getValue(), "" )
s["e"] = Gaffer.Expression()
s["e"].setExpression( inspect.cleandoc(
"""
result = IECore.CompoundData()
if context.getFrame() > 1 :
result["a"] = "A"
if context.getFrame() > 2 :
result["b"] = "B"
if context.getFrame() > 3 :
result["c"] = "C"
parent["c"]["extraVariables"] = result
"""
) )
with Gaffer.Context() as c :
self.assertEqual( s["c"]["out"].getValue(), "" )
c.setFrame( 2 )
self.assertEqual( s["c"]["out"].getValue(), "A" )
c.setFrame( 3 )
self.assertEqual( s["c"]["out"].getValue(), "AB" )
c.setFrame( 4 )
self.assertEqual( s["c"]["out"].getValue(), "ABC" )
def testEnabledPlugAffectsOutput( self ) :
c = Gaffer.ContextVariables()
c.setup( Gaffer.StringPlug() )
cs = GafferTest.CapturingSlot( c.plugDirtiedSignal() )
c["enabled"].setValue( False )
self.assertEqual( len( cs ), 2 )
self.assertEqual( { x[0] for x in cs }, { c["enabled"], c["out"] } )
def testSerialisationUsesSetup( self ) :
s1 = Gaffer.ScriptNode()
s1["c"] = Gaffer.ContextVariables()
s1["c"].setup( Gaffer.IntPlug() )
ss = s1.serialise()
self.assertIn( "setup", ss )
self.assertEqual( ss.count( "addChild" ), 1 )
self.assertNotIn( "Dynamic", ss )
self.assertNotIn( "setInput", ss )
s2 = Gaffer.ScriptNode()
s2.execute( ss )
self.assertIn( "in", s2["c"] )
self.assertIn( "out", s2["c"] )
self.assertIsInstance( s2["c"]["in"], Gaffer.IntPlug )
self.assertIsInstance( s2["c"]["out"], Gaffer.IntPlug )
@GafferTest.TestRunner.PerformanceTestMethod()
def testPerformance( self ):
c = Gaffer.ContextVariables()
c.setup( Gaffer.IntPlug() )
for i in range( 10 ):
c["variables"].addChild( Gaffer.NameValuePlug( "a%i"%i, IECore.StringData( "A" * 100 ), flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic ) )
c["variables"].addChild( Gaffer.NameValuePlug( "intName", IECore.IntData( 100 ), flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic ) )
		# This would be a bit more representative if our source node were actually affected by the
		# context variables, but without access to OSL in this test we don't have an efficient way
		# to read context variables at hand, and we're mostly just interested in the overhead anyway.
n = GafferTest.MultiplyNode()
c["in"].setInput( n["product"] )
GafferTest.parallelGetValue( c["out"], 1000000, "iter" )
if __name__ == "__main__":
unittest.main()
|
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Yeukhon Wong <yeukhon@acm.org>
# (c) 2014, Nate Coraor <nate@bx.psu.edu>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: hg
short_description: Manages Mercurial (hg) repositories.
description:
    - Manages Mercurial (hg) repositories. Supports SSH, HTTP/S and local addresses.
version_added: "1.0"
author: "Yeukhon Wong (@yeukhon)"
options:
repo:
description:
- The repository address.
required: true
default: null
aliases: [ name ]
dest:
description:
            - Absolute path of where the repository should be cloned to.
              This parameter is required, unless C(clone) and C(update) are set to C(no).
required: true
default: null
revision:
description:
            - Equivalent to the C(-r) option of the hg command, which can be a changeset,
              revision number, branch name or even a tag.
required: false
default: null
aliases: [ version ]
force:
description:
            - Discards uncommitted changes. Runs C(hg update -C). Prior to
              1.9, the default was C(yes).
required: false
default: "no"
choices: [ "yes", "no" ]
purge:
description:
- Deletes untracked files. Runs C(hg purge).
required: false
default: "no"
choices: [ "yes", "no" ]
update:
required: false
default: "yes"
choices: [ "yes", "no" ]
version_added: "2.0"
description:
- If C(no), do not retrieve new revisions from the origin repository
clone:
required: false
default: "yes"
choices: [ "yes", "no" ]
version_added: "2.3"
description:
- If C(no), do not clone the repository if it does not exist locally.
executable:
required: false
default: null
version_added: "1.4"
description:
- Path to hg executable to use. If not supplied,
the normal mechanism for resolving binary paths will be used.
notes:
- "If the task seems to be hanging, first verify remote host is in C(known_hosts).
SSH will prompt user to authorize the first contact with a remote host. To avoid this prompt,
one solution is to add the remote host public key in C(/etc/ssh/ssh_known_hosts) before calling
the hg module, with the following command: ssh-keyscan remote_host.com >> /etc/ssh/ssh_known_hosts."
requirements: [ ]
'''
EXAMPLES = '''
# Ensure the current working copy is inside the stable branch and deletes untracked files if any.
- hg:
repo: https://bitbucket.org/user/repo1
dest: /home/user/repo1
revision: stable
purge: yes
# Get information about the repository, whether or not it has
# already been cloned locally.
- hg:
    repo: https://bitbucket.org/user/repo
dest: /srv/checkout
clone: no
update: no
'''
import os
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
class Hg(object):
def __init__(self, module, dest, repo, revision, hg_path):
self.module = module
self.dest = dest
self.repo = repo
self.revision = revision
self.hg_path = hg_path
def _command(self, args_list):
(rc, out, err) = self.module.run_command([self.hg_path] + args_list)
return (rc, out, err)
def _list_untracked(self):
args = ['purge', '--config', 'extensions.purge=', '-R', self.dest, '--print']
return self._command(args)
def get_revision(self):
"""
hg id -b -i -t returns a string in the format:
"<changeset>[+] <branch_name> <tag>"
        This format describes the state of the current working copy;
        a plus sign indicates uncommitted changes, and the sign is
        omitted otherwise.
        Read the full description via hg id --help.
"""
(rc, out, err) = self._command(['id', '-b', '-i', '-t', '-R', self.dest])
if rc != 0:
self.module.fail_json(msg=err)
else:
return to_native(out).strip('\n')
def get_remote_revision(self):
(rc, out, err) = self._command(['id', self.repo])
if rc != 0:
self.module.fail_json(msg=err)
else:
return to_native(out).strip('\n')
def has_local_mods(self):
now = self.get_revision()
if '+' in now:
return True
else:
return False
def discard(self):
before = self.has_local_mods()
if not before:
return False
args = ['update', '-C', '-R', self.dest, '-r', '.']
(rc, out, err) = self._command(args)
if rc != 0:
self.module.fail_json(msg=err)
after = self.has_local_mods()
if before != after and not after: # no more local modification
return True
def purge(self):
# before purge, find out if there are any untracked files
(rc1, out1, err1) = self._list_untracked()
if rc1 != 0:
self.module.fail_json(msg=err1)
        # there are some untracked files
if out1 != '':
args = ['purge', '--config', 'extensions.purge=', '-R', self.dest]
(rc2, out2, err2) = self._command(args)
if rc2 != 0:
self.module.fail_json(msg=err2)
return True
else:
return False
def cleanup(self, force, purge):
discarded = False
purged = False
if force:
discarded = self.discard()
if purge:
purged = self.purge()
if discarded or purged:
return True
else:
return False
def pull(self):
return self._command(
['pull', '-R', self.dest, self.repo])
def update(self):
if self.revision is not None:
return self._command(['update', '-r', self.revision, '-R', self.dest])
return self._command(['update', '-R', self.dest])
def clone(self):
if self.revision is not None:
return self._command(['clone', self.repo, self.dest, '-r', self.revision])
return self._command(['clone', self.repo, self.dest])
@property
def at_revision(self):
"""
There is no point in pulling from a potentially down/slow remote site
if the desired changeset is already the current changeset.
"""
if self.revision is None or len(self.revision) < 7:
# Assume it's a rev number, tag, or branch
return False
(rc, out, err) = self._command(['--debug', 'id', '-i', '-R', self.dest])
if rc != 0:
self.module.fail_json(msg=err)
if out.startswith(self.revision):
return True
return False
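# Summary of the hg invocations issued by the methods above (derived from
# their argument lists; not part of the original module):
#   clone()   -> hg clone <repo> <dest> [-r <revision>]
#   pull()    -> hg pull -R <dest> <repo>
#   update()  -> hg update [-r <revision>] -R <dest>
#   discard() -> hg update -C -R <dest> -r .
#   purge()   -> hg purge --config extensions.purge= -R <dest>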
# ===========================================
def main():
module = AnsibleModule(
        argument_spec=dict(
            repo=dict(required=True, aliases=['name']),
            dest=dict(type='path'),
            revision=dict(default=None, aliases=['version']),
            force=dict(default='no', type='bool'),
            purge=dict(default='no', type='bool'),
            update=dict(default='yes', type='bool'),
            clone=dict(default='yes', type='bool'),
            executable=dict(default=None),
        ),
)
repo = module.params['repo']
dest = module.params['dest']
revision = module.params['revision']
force = module.params['force']
purge = module.params['purge']
update = module.params['update']
clone = module.params['clone']
hg_path = module.params['executable'] or module.get_bin_path('hg', True)
if dest is not None:
hgrc = os.path.join(dest, '.hg/hgrc')
# initial states
before = ''
changed = False
cleaned = False
if not dest and (clone or update):
module.fail_json(msg="the destination directory must be specified unless clone=no and update=no")
hg = Hg(module, dest, repo, revision, hg_path)
# If there is no hgrc file, then assume repo is absent
# and perform clone. Otherwise, perform pull and update.
if not clone and not update:
out = hg.get_remote_revision()
module.exit_json(after=out, changed=False)
if not os.path.exists(hgrc):
if clone:
(rc, out, err) = hg.clone()
if rc != 0:
module.fail_json(msg=err)
else:
module.exit_json(changed=False)
elif not update:
# Just return having found a repo already in the dest path
before = hg.get_revision()
elif hg.at_revision:
# no update needed, don't pull
before = hg.get_revision()
# but force and purge if desired
cleaned = hg.cleanup(force, purge)
else:
        # get the current state before pulling
before = hg.get_revision()
# can perform force and purge
cleaned = hg.cleanup(force, purge)
(rc, out, err) = hg.pull()
if rc != 0:
module.fail_json(msg=err)
(rc, out, err) = hg.update()
if rc != 0:
module.fail_json(msg=err)
after = hg.get_revision()
if before != after or cleaned:
changed = True
module.exit_json(before=before, after=after, changed=changed, cleaned=cleaned)
if __name__ == '__main__':
main()
|
|
# Copyright 2022 The MT3 Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dense attention classes and mask/weighting functions."""
# pylint: disable=attribute-defined-outside-init,g-bare-generic
import dataclasses
import functools
import operator
from typing import Any, Callable, Iterable, Optional, Sequence, Tuple, Union
from flax import linen as nn
from flax.linen import partitioning as nn_partitioning
import jax
from jax import lax
from jax import random
import jax.numpy as jnp
import numpy as np
# from flax.linen.partitioning import param_with_axes, with_sharding_constraint
param_with_axes = nn_partitioning.param_with_axes
with_sharding_constraint = nn_partitioning.with_sharding_constraint
# Type annotations
Array = jnp.ndarray
DType = jnp.dtype
PRNGKey = jnp.ndarray
Shape = Iterable[int]
Activation = Callable[..., Array]
# Parameter initializers.
Initializer = Callable[[PRNGKey, Shape, DType], Array]
default_embed_init = nn.initializers.variance_scaling(
1.0, 'fan_in', 'normal', out_axis=0)
def sinusoidal(min_scale: float = 1.0,
max_scale: float = 10000.0,
dtype: DType = jnp.float32) -> Initializer:
"""Creates 1D Sinusoidal Position Embedding Initializer.
Args:
min_scale: Minimum frequency-scale in sine grating.
max_scale: Maximum frequency-scale in sine grating.
dtype: The DType of the returned values.
Returns:
The sinusoidal initialization function.
"""
def init(key: PRNGKey, shape: Shape, dtype: DType = dtype) -> Array:
"""Sinusoidal init."""
del key
if dtype != np.float32:
raise ValueError('The sinusoidal initializer only supports float32.')
if len(list(shape)) != 2:
raise ValueError(
f'Expected a 2D shape (max_len, features), but got {shape}.')
max_len, features = shape
pe = np.zeros((max_len, features), dtype=dtype)
position = np.arange(0, max_len)[:, np.newaxis]
scale_factor = -np.log(max_scale / min_scale) / (features // 2 - 1)
div_term = min_scale * np.exp(np.arange(0, features // 2) * scale_factor)
pe[:, :features // 2] = np.sin(position * div_term)
pe[:, features // 2:2 * (features // 2)] = np.cos(position * div_term)
return jnp.array(pe)
return init
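# A quick sanity sketch (not part of the original file): materialize a small
# sinusoidal table via the initializer above. The PRNGKey is unused by the
# returned init function, so any key works.
if __name__ == '__main__':
  _init = sinusoidal()
  _table = _init(random.PRNGKey(0), (8, 4), jnp.float32)
  print(_table.shape)  # -> (8, 4); rows are positions, columns sin/cos pairs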
def dot_product_attention(query: Array,
key: Array,
value: Array,
bias: Optional[Array] = None,
dropout_rng: Optional[PRNGKey] = None,
dropout_rate: float = 0.,
deterministic: bool = False,
dtype: DType = jnp.float32,
float32_logits: bool = False):
"""Computes dot-product attention given query, key, and value.
This is the core function for applying attention based on
https://arxiv.org/abs/1706.03762. It calculates the attention weights given
query and key and combines the values using the attention weights.
Args:
query: queries for calculating attention with shape of `[batch, q_length,
num_heads, qk_depth_per_head]`.
key: keys for calculating attention with shape of `[batch, kv_length,
num_heads, qk_depth_per_head]`.
value: values to be used in attention with shape of `[batch, kv_length,
num_heads, v_depth_per_head]`.
bias: bias for the attention weights. This should be broadcastable to the
shape `[batch, num_heads, q_length, kv_length]` This can be used for
incorporating causal masks, padding masks, proximity bias, etc.
dropout_rng: JAX PRNGKey: to be used for dropout
dropout_rate: dropout rate
deterministic: bool, deterministic or not (to apply dropout)
dtype: the dtype of the computation (default: float32)
float32_logits: bool, if True then compute logits in float32 to avoid
numerical issues with bfloat16.
Returns:
Output of shape `[batch, length, num_heads, v_depth_per_head]`.
"""
assert key.ndim == query.ndim == value.ndim, 'q, k, v must have same rank.'
assert query.shape[:-3] == key.shape[:-3] == value.shape[:-3], (
'q, k, v batch dims must match.')
assert query.shape[-2] == key.shape[-2] == value.shape[-2], (
'q, k, v num_heads must match.')
assert key.shape[-3] == value.shape[-3], 'k, v lengths must match.'
assert query.shape[-1] == key.shape[-1], 'q, k depths must match.'
# Casting logits and softmax computation for float32 for model stability.
if float32_logits:
query = query.astype(jnp.float32)
key = key.astype(jnp.float32)
# `attn_weights`: [batch, num_heads, q_length, kv_length]
attn_weights = jnp.einsum('bqhd,bkhd->bhqk', query, key)
# Apply attention bias: masking, dropout, proximity bias, etc.
if bias is not None:
attn_weights = attn_weights + bias.astype(attn_weights.dtype)
# Normalize the attention weights across `kv_length` dimension.
attn_weights = jax.nn.softmax(attn_weights).astype(dtype)
# Apply attention dropout.
if not deterministic and dropout_rate > 0.:
keep_prob = 1.0 - dropout_rate
# T5 broadcasts along the "length" dim, but unclear which one that
# corresponds to in positional dimensions here, assuming query dim.
dropout_shape = list(attn_weights.shape)
dropout_shape[-2] = 1
keep = random.bernoulli(dropout_rng, keep_prob, dropout_shape)
keep = jnp.broadcast_to(keep, attn_weights.shape)
multiplier = (
keep.astype(attn_weights.dtype) / jnp.asarray(keep_prob, dtype=dtype))
attn_weights = attn_weights * multiplier
# Take the linear combination of `value`.
return jnp.einsum('bhqk,bkhd->bqhd', attn_weights, value)
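# A shape sanity sketch (not part of the original file): random q/k/v with
# arbitrary sizes (q_length=5, kv_length=7) run through the function above.
if __name__ == '__main__':
  _rng = random.PRNGKey(0)
  _q = random.normal(_rng, (2, 5, 4, 8))   # [batch, q_length, heads, depth]
  _kv = random.normal(_rng, (2, 7, 4, 8))  # [batch, kv_length, heads, depth]
  _out = dot_product_attention(_q, _kv, _kv, deterministic=True)
  print(_out.shape)  # -> (2, 5, 4, 8)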
dynamic_vector_slice_in_dim = jax.vmap(
lax.dynamic_slice_in_dim, in_axes=(None, 0, None, None))
class MultiHeadDotProductAttention(nn.Module):
"""Multi-head dot-product attention.
Attributes:
num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])
should be divisible by the number of heads.
head_dim: dimension of each head.
dtype: the dtype of the computation.
dropout_rate: dropout rate
kernel_init: initializer for the kernel of the Dense layers.
float32_logits: bool, if True then compute logits in float32 to avoid
numerical issues with bfloat16.
"""
num_heads: int
head_dim: int
dtype: DType = jnp.float32
dropout_rate: float = 0.
kernel_init: Initializer = nn.initializers.variance_scaling(
1.0, 'fan_in', 'normal')
float32_logits: bool = False # computes logits in float32 for stability.
@nn.compact
def __call__(self,
inputs_q: Array,
inputs_kv: Array,
mask: Optional[Array] = None,
bias: Optional[Array] = None,
*,
decode: bool = False,
deterministic: bool = False) -> Array:
"""Applies multi-head dot product attention on the input data.
Projects the inputs into multi-headed query, key, and value vectors,
    applies dot-product attention, and projects the results to an output vector.
There are two modes: decoding and non-decoding (e.g., training). The mode is
determined by `decode` argument. For decoding, this method is called twice,
first to initialize the cache and then for an actual decoding process. The
two calls are differentiated by the presence of 'cached_key' in the variable
dict. In the cache initialization stage, the cache variables are initialized
as zeros and will be filled in the subsequent decoding process.
In the cache initialization call, `inputs_q` has a shape [batch, length,
q_features] and `inputs_kv`: [batch, length, kv_features]. During the
incremental decoding stage, query, key and value all have the shape [batch,
1, qkv_features] corresponding to a single step.
Args:
inputs_q: input queries of shape `[batch, q_length, q_features]`.
inputs_kv: key/values of shape `[batch, kv_length, kv_features]`.
mask: attention mask of shape `[batch, num_heads, q_length, kv_length]`.
bias: attention bias of shape `[batch, num_heads, q_length, kv_length]`.
decode: Whether to prepare and use an autoregressive cache.
deterministic: Disables dropout if set to True.
Returns:
output of shape `[batch, length, q_features]`.
"""
projection = functools.partial(
DenseGeneral,
axis=-1,
features=(self.num_heads, self.head_dim),
kernel_axes=('embed', 'joined_kv'),
dtype=self.dtype)
# NOTE: T5 does not explicitly rescale the attention logits by
# 1/sqrt(depth_kq)! This is folded into the initializers of the
# linear transformations, which is equivalent under Adafactor.
depth_scaling = jnp.sqrt(self.head_dim).astype(self.dtype)
query_init = lambda *args: self.kernel_init(*args) / depth_scaling
# Project inputs_q to multi-headed q/k/v
# dimensions are then [batch, length, num_heads, head_dim]
query = projection(kernel_init=query_init, name='query')(inputs_q)
key = projection(kernel_init=self.kernel_init, name='key')(inputs_kv)
value = projection(kernel_init=self.kernel_init, name='value')(inputs_kv)
query = with_sharding_constraint(query, ('batch', 'length', 'heads', 'kv'))
key = with_sharding_constraint(key, ('batch', 'length', 'heads', 'kv'))
value = with_sharding_constraint(value, ('batch', 'length', 'heads', 'kv'))
if decode:
# Detect if we're initializing by absence of existing cache data.
is_initialized = self.has_variable('cache', 'cached_key')
# The key and value have dimension [batch, length, num_heads, head_dim],
# but we cache them as [batch, num_heads, head_dim, length] as a TPU
# fusion optimization. This also enables the "scatter via one-hot
# broadcast" trick, which means we do a one-hot broadcast instead of a
      # scatter/gather operation, resulting in a 3-4x speedup in practice.
swap_dims = lambda x: x[:-3] + tuple(x[i] for i in [-2, -1, -3])
cached_key = self.variable('cache', 'cached_key', jnp.zeros,
swap_dims(key.shape), key.dtype)
cached_value = self.variable('cache', 'cached_value', jnp.zeros,
swap_dims(value.shape), value.dtype)
cache_index = self.variable('cache', 'cache_index',
lambda: jnp.array(0, dtype=jnp.int32))
if is_initialized:
        batch, num_heads, head_dim, length = cached_key.value.shape
# During fast autoregressive decoding, we feed one position at a time,
# and cache the keys and values step by step.
# Sanity shape check of cached key against input query.
expected_shape = (batch, 1, num_heads, head_dim)
if expected_shape != query.shape:
raise ValueError('Autoregressive cache shape error, '
'expected query shape %s instead got %s.' %
(expected_shape, query.shape))
        # Create a one-hot encoding of the current index.
        # NOTE: the index is increased below.
cur_index = cache_index.value
one_hot_indices = jax.nn.one_hot(cur_index, length, dtype=key.dtype)
# In order to update the key, value caches with the current key and
# value, we move the length axis to the back, similar to what we did for
# the cached ones above.
# Note these are currently the key and value of a single position, since
# we feed one position at a time.
one_token_key = jnp.moveaxis(key, -3, -1)
one_token_value = jnp.moveaxis(value, -3, -1)
# Update key, value caches with our new 1d spatial slices.
# We implement an efficient scatter into the cache via one-hot
# broadcast and addition.
key = cached_key.value + one_token_key * one_hot_indices
value = cached_value.value + one_token_value * one_hot_indices
cached_key.value = key
cached_value.value = value
cache_index.value = cache_index.value + 1
# Move the keys and values back to their original shapes.
key = jnp.moveaxis(key, -1, -3)
value = jnp.moveaxis(value, -1, -3)
# Causal mask for cached decoder self-attention: our single query
# position should only attend to those key positions that have already
# been generated and cached, not the remaining zero elements.
mask = combine_masks(
mask,
jnp.broadcast_to(
jnp.arange(length) <= cur_index,
# (1, 1, length) represent (head dim, query length, key length)
# query length is 1 because during decoding we deal with one
# index.
# The same mask is applied to all batch elements and heads.
(batch, 1, 1, length)))
# Grab the correct relative attention bias during decoding. This is
# only required during single step decoding.
if bias is not None:
# The bias is a full attention matrix, but during decoding we only
# have to take a slice of it.
# This is equivalent to bias[..., cur_index:cur_index+1, :].
bias = dynamic_vector_slice_in_dim(
jnp.squeeze(bias, axis=0), jnp.reshape(cur_index, (-1)), 1, -2)
# Convert the boolean attention mask to an attention bias.
if mask is not None:
# attention mask in the form of attention bias
attention_bias = lax.select(
mask > 0,
jnp.full(mask.shape, 0.).astype(self.dtype),
jnp.full(mask.shape, -1e10).astype(self.dtype))
else:
attention_bias = None
# Add provided bias term (e.g. relative position embedding).
if bias is not None:
attention_bias = combine_biases(attention_bias, bias)
dropout_rng = None
if not deterministic and self.dropout_rate > 0.:
dropout_rng = self.make_rng('dropout')
# Apply attention.
x = dot_product_attention(
query,
key,
value,
bias=attention_bias,
dropout_rng=dropout_rng,
dropout_rate=self.dropout_rate,
deterministic=deterministic,
dtype=self.dtype,
float32_logits=self.float32_logits)
# Back to the original inputs dimensions.
out = DenseGeneral(
features=inputs_q.shape[-1], # output dim is set to the input dim.
axis=(-2, -1),
kernel_init=self.kernel_init,
kernel_axes=('joined_kv', 'embed'),
dtype=self.dtype,
name='out')(
x)
return out
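# Illustrative sketch (added for exposition; names are local assumptions) of
# the "scatter via one-hot broadcast" cache update used in `decode` mode
# above: writing position `cur_index` of a [batch, num_heads, head_dim,
# length] cache reduces to a broadcasted multiply-add instead of a scatter.
def _example_one_hot_cache_update():
  batch, num_heads, head_dim, length = 1, 2, 3, 5
  cache = jnp.zeros((batch, num_heads, head_dim, length))
  one_token = jnp.ones((batch, num_heads, head_dim, 1))  # one decoded step
  cur_index = jnp.array(2)
  one_hot_indices = jax.nn.one_hot(cur_index, length, dtype=cache.dtype)
  # Only column 2 of the length axis receives the new token.
  return cache + one_token * one_hot_indices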
def _normalize_axes(axes: Iterable[int], ndim: int) -> Tuple[int, ...]:
# A tuple by convention. len(axes_tuple) then also gives the rank efficiently.
return tuple([ax if ax >= 0 else ndim + ax for ax in axes])
def _canonicalize_tuple(x):
if isinstance(x, Iterable):
return tuple(x)
else:
return (x,)
#------------------------------------------------------------------------------
# DenseGeneral for attention layers.
#------------------------------------------------------------------------------
class DenseGeneral(nn.Module):
"""A linear transformation (without bias) with flexible axes.
Attributes:
features: tuple with numbers of output features.
axis: tuple with axes to apply the transformation on.
dtype: the dtype of the computation (default: float32).
kernel_init: initializer function for the weight matrix.
"""
features: Union[Iterable[int], int]
axis: Union[Iterable[int], int] = -1
dtype: DType = jnp.float32
kernel_init: Initializer = nn.initializers.variance_scaling(
1.0, 'fan_in', 'truncated_normal')
kernel_axes: Tuple[str, ...] = ()
@nn.compact
def __call__(self, inputs: Array) -> Array:
"""Applies a linear transformation to the inputs along multiple dimensions.
Args:
inputs: The nd-array to be transformed.
Returns:
The transformed input.
"""
features = _canonicalize_tuple(self.features)
axis = _canonicalize_tuple(self.axis)
inputs = jnp.asarray(inputs, self.dtype)
axis = _normalize_axes(axis, inputs.ndim)
kernel_shape = tuple([inputs.shape[ax] for ax in axis]) + features
kernel_param_shape = (np.prod([inputs.shape[ax] for ax in axis]),
np.prod(features))
kernel = param_with_axes(
'kernel',
self.kernel_init,
kernel_param_shape,
jnp.float32,
axes=self.kernel_axes)
kernel = jnp.asarray(kernel, self.dtype)
kernel = jnp.reshape(kernel, kernel_shape)
contract_ind = tuple(range(0, len(axis)))
return lax.dot_general(inputs, kernel, ((axis, contract_ind), ((), ())))
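# Minimal sketch (added illustration): the reshape + `lax.dot_general` above
# is an einsum-style contraction over the selected axes. For the common
# attention projection (axis=-1, features=(num_heads, head_dim)):
def _example_dense_general_contraction():
  inputs = jnp.ones((2, 4, 8))   # [batch, length, embed]
  kernel = jnp.ones((8, 3, 5))   # [embed, num_heads, head_dim]
  out = lax.dot_general(inputs, kernel, (((2,), (0,)), ((), ())))
  assert out.shape == (2, 4, 3, 5)
  assert jnp.allclose(out, jnp.einsum('ble,ehd->blhd', inputs, kernel))
  return out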
def _convert_to_activation_function(
fn_or_string: Union[str, Callable]) -> Callable:
"""Convert a string to an activation function."""
if fn_or_string == 'linear':
return lambda x: x
elif isinstance(fn_or_string, str):
return getattr(nn, fn_or_string)
elif callable(fn_or_string):
return fn_or_string
else:
raise ValueError("don't know how to convert %s to an activation function" %
(fn_or_string,))
class MlpBlock(nn.Module):
"""Transformer MLP / feed-forward block.
Attributes:
intermediate_dim: Shared dimension of hidden layers.
activations: Type of activations for each layer. Each element is either
'linear', a string function name in flax.linen, or a function.
kernel_init: Kernel function, passed to the dense layers.
deterministic: Whether the dropout layers should be deterministic.
intermediate_dropout_rate: Dropout rate used after the intermediate layers.
dtype: Type for the dense layer.
"""
intermediate_dim: int = 2048
activations: Sequence[Union[str, Callable]] = ('relu',)
kernel_init: Initializer = nn.initializers.variance_scaling(
1.0, 'fan_in', 'truncated_normal')
intermediate_dropout_rate: float = 0.1
dtype: Any = jnp.float32
@nn.compact
def __call__(self, inputs, decode: bool = False, deterministic: bool = False):
"""Applies Transformer MlpBlock module."""
# Iterate over specified MLP input activation functions.
# e.g. ('relu',) or ('gelu', 'linear') for gated-gelu.
activations = []
for idx, act_fn in enumerate(self.activations):
dense_name = 'wi' if len(self.activations) == 1 else f'wi_{idx}'
x = DenseGeneral(
self.intermediate_dim,
dtype=self.dtype,
kernel_init=self.kernel_init,
kernel_axes=('embed', 'mlp'),
name=dense_name)(
inputs)
x = _convert_to_activation_function(act_fn)(x)
activations.append(x)
# Take elementwise product of above intermediate activations.
x = functools.reduce(operator.mul, activations)
# Apply dropout and final dense output projection.
x = nn.Dropout(
rate=self.intermediate_dropout_rate, broadcast_dims=(-2,))(
x, deterministic=deterministic) # Broadcast along length.
x = with_sharding_constraint(x, ('batch', 'length', 'mlp'))
output = DenseGeneral(
inputs.shape[-1],
dtype=self.dtype,
kernel_init=self.kernel_init,
kernel_axes=('mlp', 'embed'),
name='wo')(
x)
return output
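# Illustrative sketch (added, not original code): with
# activations=('gelu', 'linear') the block above is a gated-GELU feed-forward,
# i.e. the elementwise product of a GELU branch and a linear branch before the
# 'wo' output projection. The helper below assumes both branches have already
# been projected by their respective 'wi_*' dense layers.
def _example_gated_activation(gelu_branch, linear_branch):
  return nn.gelu(gelu_branch) * linear_branch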
class Embed(nn.Module):
"""A parameterized function from integers [0, n) to d-dimensional vectors.
Attributes:
num_embeddings: number of embeddings.
features: number of feature dimensions for each embedding.
dtype: the dtype of the embedding vectors (default: float32).
embedding_init: embedding initializer.
one_hot: performs the gather with a one-hot contraction rather than a true
gather. This is currently needed for SPMD partitioning.
"""
num_embeddings: int
features: int
cast_input_dtype: Optional[DType] = None
dtype: DType = jnp.float32
attend_dtype: Optional[DType] = None
embedding_init: Initializer = default_embed_init
one_hot: bool = False
embedding: Array = dataclasses.field(init=False)
def setup(self):
self.embedding = param_with_axes(
'embedding',
self.embedding_init, (self.num_embeddings, self.features),
jnp.float32,
axes=('vocab', 'embed'))
def __call__(self, inputs: Array) -> Array:
"""Embeds the inputs along the last dimension.
Args:
inputs: input data, all dimensions are considered batch dimensions.
Returns:
Output which is embedded input data. The output shape follows the input,
with an additional `features` dimension appended.
"""
if self.cast_input_dtype:
inputs = inputs.astype(self.cast_input_dtype)
if not jnp.issubdtype(inputs.dtype, jnp.integer):
raise ValueError('Input type must be an integer or unsigned integer.')
if self.one_hot:
iota = lax.iota(jnp.int32, self.num_embeddings)
one_hot = jnp.array(inputs[..., jnp.newaxis] == iota, dtype=self.dtype)
output = jnp.dot(one_hot, jnp.asarray(self.embedding, self.dtype))
else:
output = jnp.asarray(self.embedding, self.dtype)[inputs]
output = with_sharding_constraint(output, ('batch', 'length', 'embed'))
return output
def attend(self, query: Array) -> Array:
"""Attend over the embedding using a query array.
Args:
query: array with last dimension equal the feature depth `features` of the
embedding.
Returns:
An array with final dim `num_embeddings` corresponding to the batched
inner-product of the array of query vectors against each embedding.
Commonly used for weight-sharing between embeddings and logit transform
in NLP models.
"""
dtype = self.attend_dtype if self.attend_dtype is not None else self.dtype
return jnp.dot(query, jnp.asarray(self.embedding, dtype).T)
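# Minimal sketch (added illustration): the `one_hot=True` path above is a
# matmul formulation of a gather and yields the same values as plain integer
# indexing, which is what makes it friendly to SPMD partitioning.
def _example_one_hot_embed_lookup():
  embedding = jnp.arange(12, dtype=jnp.float32).reshape(4, 3)  # [vocab, features]
  inputs = jnp.array([[0, 3, 1]])                              # [batch, length]
  iota = lax.iota(jnp.int32, 4)
  one_hot = jnp.array(inputs[..., jnp.newaxis] == iota, dtype=jnp.float32)
  assert jnp.allclose(jnp.dot(one_hot, embedding), embedding[inputs])
  return jnp.dot(one_hot, embedding)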
class FixedEmbed(nn.Module):
"""Fixed (not learnable) embeddings specified by the initializer function.
Attributes:
    features: The number of feature dimensions for each embedding.
    max_length: The maximum supported length.
    embedding_init: The initializer function that defines the embeddings.
    dtype: The DType to use for the embeddings.
"""
features: int
max_length: int = 2048
embedding_init: Initializer = sinusoidal()
dtype: jnp.dtype = jnp.float32
def setup(self):
# The key is set to None because sinusoid init is deterministic.
shape = (self.max_length, self.features)
self.embedding = self.embedding_init(None, shape, self.dtype) # pylint: disable=too-many-function-args
@nn.compact
def __call__(self,
inputs,
*,
decode: bool = False):
"""Returns the fixed position embeddings specified by the initializer.
Args:
inputs: <int>[batch_size, seq_len] input position indices.
decode: True if running in single-position autoregressive decode mode.
Returns:
The fixed position embeddings <float32>[batch_size, seq_len, features].
"""
# We use a cache position index for tracking decoding position.
if decode:
position_embedder_index = self.variable(
'cache', 'position_embedder_index',
lambda: jnp.array(-1, dtype=jnp.uint32))
i = position_embedder_index.value
position_embedder_index.value = i + 1
return jax.lax.dynamic_slice(self.embedding, jnp.array((i, 0)),
np.array((1, self.features)))
return jnp.take(self.embedding, inputs, axis=0)
#------------------------------------------------------------------------------
# T5 Layernorm - no subtraction of mean or bias.
#------------------------------------------------------------------------------
class LayerNorm(nn.Module):
"""T5 Layer normalization operating on the last axis of the input data."""
epsilon: float = 1e-6
dtype: Any = jnp.float32
scale_init: Initializer = nn.initializers.ones
@nn.compact
def __call__(self, x: jnp.ndarray) -> jnp.ndarray:
"""Applies layer normalization on the input."""
x = jnp.asarray(x, jnp.float32)
features = x.shape[-1]
mean2 = jnp.mean(lax.square(x), axis=-1, keepdims=True)
y = jnp.asarray(x * lax.rsqrt(mean2 + self.epsilon), self.dtype)
scale = param_with_axes(
'scale', self.scale_init, (features,), jnp.float32, axes=('embed',))
scale = jnp.asarray(scale, self.dtype)
return y * scale
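# Illustrative sketch (added): the layer above is an RMSNorm (no mean
# subtraction, no bias), so for a feature vector x it computes
#   y = scale * x / sqrt(mean(x**2) + epsilon)
def _example_rms_norm(x, scale, epsilon=1e-6):
  mean2 = jnp.mean(lax.square(x), axis=-1, keepdims=True)
  return scale * x * lax.rsqrt(mean2 + epsilon)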
#------------------------------------------------------------------------------
# Mask-making utility functions.
#------------------------------------------------------------------------------
def make_attention_mask(query_input: Array,
key_input: Array,
pairwise_fn: Callable = jnp.multiply,
extra_batch_dims: int = 0,
dtype: DType = jnp.float32) -> Array:
"""Mask-making helper for attention weights.
  In case of 1d inputs (i.e., `[batch, len_q]`, `[batch, len_kv]`), the
attention weights will be `[batch, heads, len_q, len_kv]` and this
function will produce `[batch, 1, len_q, len_kv]`.
Args:
query_input: a batched, flat input of query_length size
key_input: a batched, flat input of key_length size
pairwise_fn: broadcasting elementwise comparison function
extra_batch_dims: number of extra batch dims to add singleton axes for, none
by default
dtype: mask return dtype
Returns:
A `[batch, 1, len_q, len_kv]` shaped mask for 1d attention.
"""
# [batch, len_q, len_kv]
mask = pairwise_fn(
# [batch, len_q] -> [batch, len_q, 1]
jnp.expand_dims(query_input, axis=-1),
      # [batch, len_kv] -> [batch, 1, len_kv]
jnp.expand_dims(key_input, axis=-2))
# [batch, 1, len_q, len_kv]. This creates the head dim.
mask = jnp.expand_dims(mask, axis=-3)
mask = jnp.expand_dims(mask, axis=tuple(range(extra_batch_dims)))
return mask.astype(dtype)
def make_causal_mask(x: Array,
extra_batch_dims: int = 0,
dtype: DType = jnp.float32) -> Array:
"""Make a causal mask for self-attention.
  In case of 1d inputs (i.e., `[batch, len]`), the self-attention weights
will be `[batch, heads, len, len]` and this function will produce a
causal mask of shape `[batch, 1, len, len]`.
Note that a causal mask does not depend on the values of x; it only depends on
the shape. If x has padding elements, they will not be treated in a special
manner.
Args:
x: input array of shape `[batch, len]`
extra_batch_dims: number of batch dims to add singleton axes for, none by
default
dtype: mask return dtype
Returns:
A `[batch, 1, len, len]` shaped causal mask for 1d attention.
"""
idxs = jnp.broadcast_to(jnp.arange(x.shape[-1], dtype=jnp.int32), x.shape)
return make_attention_mask(
idxs,
idxs,
jnp.greater_equal,
extra_batch_dims=extra_batch_dims,
dtype=dtype)
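# Minimal sketch (added illustration): a length-4 causal mask. Query row i
# may attend to key columns j <= i, giving a lower-triangular pattern.
def _example_causal_mask():
  mask = make_causal_mask(jnp.zeros((1, 4)))
  # mask[0, 0] ==
  # [[1., 0., 0., 0.],
  #  [1., 1., 0., 0.],
  #  [1., 1., 1., 0.],
  #  [1., 1., 1., 1.]]
  return mask  # shape [1, 1, 4, 4]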
def combine_masks(*masks: Optional[Array], dtype: DType = jnp.float32):
"""Combine attention masks.
Args:
*masks: set of attention mask arguments to combine, some can be None.
dtype: final mask dtype
Returns:
Combined mask, reduced by logical and, returns None if no masks given.
"""
masks = [m for m in masks if m is not None]
if not masks:
return None
assert all(map(lambda x: x.ndim == masks[0].ndim, masks)), (
f'masks must have same rank: {tuple(map(lambda x: x.ndim, masks))}')
mask, *other_masks = masks
for other_mask in other_masks:
mask = jnp.logical_and(mask, other_mask)
return mask.astype(dtype)
def combine_biases(*masks: Optional[Array]):
"""Combine attention biases.
Args:
*masks: set of attention bias arguments to combine, some can be None.
Returns:
Combined mask, reduced by summation, returns None if no masks given.
"""
masks = [m for m in masks if m is not None]
if not masks:
return None
assert all(map(lambda x: x.ndim == masks[0].ndim, masks)), (
f'masks must have same rank: {tuple(map(lambda x: x.ndim, masks))}')
mask, *other_masks = masks
for other_mask in other_masks:
mask = mask + other_mask
return mask
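# Minimal sketch (added illustration): masks combine by logical AND while
# biases combine by summation, matching how a mask-derived bias (0 / -1e10)
# and a relative-position bias are added before the softmax.
def _example_combine():
  m1 = jnp.array([[1., 1., 0.]])
  m2 = jnp.array([[1., 0., 1.]])
  assert (combine_masks(m1, m2) == jnp.array([[1., 0., 0.]])).all()
  assert (combine_biases(m1, m2) == jnp.array([[2., 1., 1.]])).all()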
def make_decoder_mask(decoder_target_tokens: Array,
dtype: DType,
decoder_causal_attention: Optional[Array] = None,
decoder_segment_ids: Optional[Array] = None) -> Array:
"""Compute the self-attention mask for a decoder.
Decoder mask is formed by combining a causal mask, a padding mask and an
optional packing mask. If decoder_causal_attention is passed, it makes the
masking non-causal for positions that have value of 1.
A prefix LM is applied to a dataset which has a notion of "inputs" and
"targets", e.g., a machine translation task. The inputs and targets are
concatenated to form a new target. `decoder_target_tokens` is the concatenated
decoder output tokens.
The "inputs" portion of the concatenated sequence can attend to other "inputs"
tokens even for those at a later time steps. In order to control this
behavior, `decoder_causal_attention` is necessary. This is a binary mask with
a value of 1 indicating that the position belonged to "inputs" portion of the
original dataset.
Example:
Suppose we have a dataset with two examples.
ds = [{"inputs": [6, 7], "targets": [8]},
{"inputs": [3, 4], "targets": [5]}]
After the data preprocessing with packing, the two examples are packed into
one example with the following three fields (some fields are skipped for
simplicity).
decoder_target_tokens = [[6, 7, 8, 3, 4, 5, 0]]
decoder_segment_ids = [[1, 1, 1, 2, 2, 2, 0]]
decoder_causal_attention = [[1, 1, 0, 1, 1, 0, 0]]
where each array has [batch, length] shape with batch size being 1. Then,
this function computes the following mask.
mask = [[[[1, 1, 0, 0, 0, 0, 0],
[1, 1, 0, 0, 0, 0, 0],
[1, 1, 1, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 0, 0],
[0, 0, 0, 1, 1, 0, 0],
[0, 0, 0, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0]]]]
  mask[b, 0, :, :] represents the mask for the example `b` in the batch.
  Because mask is for a self-attention layer, the mask's shape is a square of
  shape [query length, key length].
  mask[b, 0, i, j] = 1 means that the query token at position i can attend to
  the key token at position j.
Args:
decoder_target_tokens: decoder output tokens. [batch, length]
dtype: dtype of the output mask.
decoder_causal_attention: a binary mask indicating which position should
only attend to earlier positions in the sequence. Others will attend
bidirectionally. [batch, length]
decoder_segment_ids: decoder segmentation info for packed examples. [batch,
length]
Returns:
the combined decoder mask.
"""
masks = []
# The same mask is applied to all attention heads. So the head dimension is 1,
# i.e., the mask will be broadcast along the heads dim.
# [batch, 1, length, length]
causal_mask = make_causal_mask(decoder_target_tokens, dtype=dtype)
  # Positions with value 1 in `decoder_causal_attention` can attend
# bidirectionally.
if decoder_causal_attention is not None:
# [batch, 1, length, length]
inputs_mask = make_attention_mask(
decoder_causal_attention,
decoder_causal_attention,
jnp.logical_and,
dtype=dtype)
masks.append(jnp.logical_or(causal_mask, inputs_mask).astype(dtype))
else:
masks.append(causal_mask)
# Padding mask.
masks.append(
make_attention_mask(
decoder_target_tokens > 0, decoder_target_tokens > 0, dtype=dtype))
# Packing mask
if decoder_segment_ids is not None:
masks.append(
make_attention_mask(
decoder_segment_ids, decoder_segment_ids, jnp.equal, dtype=dtype))
return combine_masks(*masks, dtype=dtype)
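# Illustrative sketch (added): reproduces the packed prefix-LM example from
# the docstring above; the returned mask equals the 7x7 matrix shown there.
def _example_make_decoder_mask():
  decoder_target_tokens = jnp.array([[6, 7, 8, 3, 4, 5, 0]])
  decoder_segment_ids = jnp.array([[1, 1, 1, 2, 2, 2, 0]])
  decoder_causal_attention = jnp.array([[1, 1, 0, 1, 1, 0, 0]])
  return make_decoder_mask(
      decoder_target_tokens,
      jnp.float32,
      decoder_causal_attention=decoder_causal_attention,
      decoder_segment_ids=decoder_segment_ids)  # shape [1, 1, 7, 7]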
|
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import logging as orig_logging
import os
import re
import urlparse
import boto
from boto import ec2
from boto import exception
from boto import s3
import keystoneclient.exceptions
import six
import tempest.clients
from tempest.common.utils import file_utils
from tempest import config
from tempest import exceptions
from tempest.openstack.common import log as logging
import tempest.test
from tempest.thirdparty.boto.utils import wait
CONF = config.CONF
LOG = logging.getLogger(__name__)
def decision_maker():
A_I_IMAGES_READY = True # ari,ami,aki
S3_CAN_CONNECT_ERROR = None
EC2_CAN_CONNECT_ERROR = None
    secret_matcher = re.compile("[A-Za-z0-9+/]{32,}")  # 40 in other systems
id_matcher = re.compile("[A-Za-z0-9]{20,}")
def all_read(*args):
return all(map(file_utils.have_effective_read_access, args))
materials_path = CONF.boto.s3_materials_path
ami_path = materials_path + os.sep + CONF.boto.ami_manifest
aki_path = materials_path + os.sep + CONF.boto.aki_manifest
ari_path = materials_path + os.sep + CONF.boto.ari_manifest
A_I_IMAGES_READY = all_read(ami_path, aki_path, ari_path)
boto_logger = logging.getLogger('boto')
level = boto_logger.logger.level
# suppress logging for boto
boto_logger.logger.setLevel(orig_logging.CRITICAL)
def _cred_sub_check(connection_data):
if not id_matcher.match(connection_data["aws_access_key_id"]):
raise Exception("Invalid AWS access Key")
if not secret_matcher.match(connection_data["aws_secret_access_key"]):
raise Exception("Invalid AWS secret Key")
raise Exception("Unknown (Authentication?) Error")
openstack = tempest.clients.Manager()
try:
if urlparse.urlparse(CONF.boto.ec2_url).hostname is None:
raise Exception("Failed to get hostname from the ec2_url")
ec2client = openstack.ec2api_client
try:
ec2client.get_all_regions()
except exception.BotoServerError as exc:
if exc.error_code is None:
raise Exception("EC2 target does not looks EC2 service")
_cred_sub_check(ec2client.connection_data)
except keystoneclient.exceptions.Unauthorized:
        EC2_CAN_CONNECT_ERROR = "AWS credentials not set," +\
            " failed to get them even by keystoneclient"
except Exception as exc:
EC2_CAN_CONNECT_ERROR = str(exc)
try:
if urlparse.urlparse(CONF.boto.s3_url).hostname is None:
raise Exception("Failed to get hostname from the s3_url")
s3client = openstack.s3_client
try:
s3client.get_bucket("^INVALID*#()@INVALID.")
except exception.BotoServerError as exc:
if exc.status == 403:
_cred_sub_check(s3client.connection_data)
except Exception as exc:
S3_CAN_CONNECT_ERROR = str(exc)
except keystoneclient.exceptions.Unauthorized:
        S3_CAN_CONNECT_ERROR = "AWS credentials not set," +\
            " failed to get them even by keystoneclient"
boto_logger.logger.setLevel(level)
return {'A_I_IMAGES_READY': A_I_IMAGES_READY,
'S3_CAN_CONNECT_ERROR': S3_CAN_CONNECT_ERROR,
'EC2_CAN_CONNECT_ERROR': EC2_CAN_CONNECT_ERROR}
class BotoExceptionMatcher(object):
STATUS_RE = r'[45]\d\d'
CODE_RE = '.*' # regexp makes sense in group match
def match(self, exc):
""":returns: Returns with an error string if it does not match,
returns with None when it matches.
"""
if not isinstance(exc, exception.BotoServerError):
return "%r not an BotoServerError instance" % exc
LOG.info("Status: %s , error_code: %s", exc.status, exc.error_code)
if re.match(self.STATUS_RE, str(exc.status)) is None:
return ("Status code (%s) does not match"
"the expected re pattern \"%s\""
% (exc.status, self.STATUS_RE))
if re.match(self.CODE_RE, str(exc.error_code)) is None:
return ("Error code (%s) does not match" +
"the expected re pattern \"%s\"") %\
(exc.error_code, self.CODE_RE)
return None
class ClientError(BotoExceptionMatcher):
STATUS_RE = r'4\d\d'
class ServerError(BotoExceptionMatcher):
STATUS_RE = r'5\d\d'
def _add_matcher_class(error_cls, error_data, base=BotoExceptionMatcher):
"""
    Usable for adding ExceptionMatcher(s) into the exception tree.
    Non-leaf elements do a wildcard match.
"""
    # only literal and '.' characters are expected in error_code
if not isinstance(error_data, six.string_types):
(error_code, status_code) = map(str, error_data)
else:
status_code = None
error_code = error_data
parts = error_code.split('.')
basematch = ""
num_parts = len(parts)
max_index = num_parts - 1
add_cls = error_cls
for i_part in six.moves.xrange(num_parts):
part = parts[i_part]
leaf = i_part == max_index
if not leaf:
match = basematch + part + "[.].*"
else:
match = basematch + part
basematch += part + "[.]"
if not hasattr(add_cls, part):
cls_dict = {"CODE_RE": match}
if leaf and status_code is not None:
cls_dict["STATUS_RE"] = status_code
cls = type(part, (base, ), cls_dict)
setattr(add_cls, part, cls())
add_cls = cls
elif leaf:
            raise LookupError("Attempt to redefine error code \"%s\"" % part)
else:
add_cls = getattr(add_cls, part)
# TODO(afazekas): classmethod handling
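# Illustrative usage sketch (added, not original code): after the
# _add_matcher_class calls at the bottom of this module, matchers are
# reachable as attributes of the matcher tree, e.g.:
#   matcher = BotoTestCase.ec2_error_code.client.InvalidInstanceID.NotFound
#   matcher.match(exc)  # None on match, an error string otherwise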
def friendly_function_name_simple(call_able):
name = ""
if hasattr(call_able, "im_class"):
name += call_able.im_class.__name__ + "."
name += call_able.__name__
return name
def friendly_function_call_str(call_able, *args, **kwargs):
string = friendly_function_name_simple(call_able)
string += "(" + ", ".join(map(str, args))
if len(kwargs):
if len(args):
string += ", "
string += ", ".join("=".join(map(str, (key, value)))
for (key, value) in kwargs.items())
return string + ")"
class BotoTestCase(tempest.test.BaseTestCase):
"""Recommended to use as base class for boto related test."""
@classmethod
def resource_setup(cls):
super(BotoTestCase, cls).resource_setup()
cls.conclusion = decision_maker()
cls.os = cls.get_client_manager()
        # The trash contains cleanup functions and parameters in tuples
# (function, *args, **kwargs)
cls._resource_trash_bin = {}
cls._sequence = -1
if (hasattr(cls, "EC2") and
cls.conclusion['EC2_CAN_CONNECT_ERROR'] is not None):
raise cls.skipException("EC2 " + cls.__name__ + ": " +
cls.conclusion['EC2_CAN_CONNECT_ERROR'])
if (hasattr(cls, "S3") and
cls.conclusion['S3_CAN_CONNECT_ERROR'] is not None):
raise cls.skipException("S3 " + cls.__name__ + ": " +
cls.conclusion['S3_CAN_CONNECT_ERROR'])
@classmethod
def addResourceCleanUp(cls, function, *args, **kwargs):
"""Adds CleanUp callable, used by tearDownClass.
Recommended to a use (deep)copy on the mutable args.
"""
cls._sequence = cls._sequence + 1
cls._resource_trash_bin[cls._sequence] = (function, args, kwargs)
return cls._sequence
@classmethod
def cancelResourceCleanUp(cls, key):
"""Cancel Clean up request."""
del cls._resource_trash_bin[key]
# TODO(afazekas): Add "with" context handling
def assertBotoError(self, excMatcher, callableObj,
*args, **kwargs):
"""Example usage:
self.assertBotoError(self.ec2_error_code.client.
InvalidKeyPair.Duplicate,
self.client.create_keypair,
key_name)
"""
try:
callableObj(*args, **kwargs)
except exception.BotoServerError as exc:
error_msg = excMatcher.match(exc)
if error_msg is not None:
raise self.failureException, error_msg
else:
raise self.failureException, "BotoServerError not raised"
@classmethod
def resource_cleanup(cls):
"""Calls the callables added by addResourceCleanUp,
when you overwrite this function don't forget to call this too.
"""
fail_count = 0
trash_keys = sorted(cls._resource_trash_bin, reverse=True)
for key in trash_keys:
(function, pos_args, kw_args) = cls._resource_trash_bin[key]
try:
func_name = friendly_function_call_str(function, *pos_args,
**kw_args)
LOG.debug("Cleaning up: %s" % func_name)
function(*pos_args, **kw_args)
except BaseException:
fail_count += 1
LOG.exception("Cleanup failed %s" % func_name)
finally:
del cls._resource_trash_bin[key]
cls.clear_isolated_creds()
super(BotoTestCase, cls).resource_cleanup()
# NOTE(afazekas): let the super called even on exceptions
# The real exceptions already logged, if the super throws another,
# does not causes hidden issues
if fail_count:
raise exceptions.TearDownException(num=fail_count)
ec2_error_code = BotoExceptionMatcher()
# InsufficientInstanceCapacity can be both server and client error
ec2_error_code.server = ServerError()
ec2_error_code.client = ClientError()
s3_error_code = BotoExceptionMatcher()
s3_error_code.server = ServerError()
s3_error_code.client = ClientError()
valid_image_state = set(('available', 'pending', 'failed'))
    # NOTE(afazekas): 'paused' is not a valid status in EC2, but it does not
    # have a good mapping: a paused instance still uses memory, but it is not
    # really a running machine
valid_instance_state = set(('pending', 'running', 'shutting-down',
'terminated', 'stopping', 'stopped', 'paused'))
valid_volume_status = set(('creating', 'available', 'in-use',
'deleting', 'deleted', 'error'))
valid_snapshot_status = set(('pending', 'completed', 'error'))
gone_set = set(('_GONE',))
@classmethod
def get_lfunction_gone(cls, obj):
"""If the object is instance of a well know type returns back with
with the correspoding function otherwise it assumes the obj itself
is the function.
"""
ec = cls.ec2_error_code
if isinstance(obj, ec2.instance.Instance):
            closure_matcher = ec.client.InvalidInstanceID.NotFound
status_attr = "state"
elif isinstance(obj, ec2.image.Image):
            closure_matcher = ec.client.InvalidAMIID.NotFound
status_attr = "state"
elif isinstance(obj, ec2.snapshot.Snapshot):
            closure_matcher = ec.client.InvalidSnapshot.NotFound
status_attr = "status"
elif isinstance(obj, ec2.volume.Volume):
            closure_matcher = ec.client.InvalidVolume.NotFound
status_attr = "status"
else:
return obj
def _status():
try:
obj.update(validate=True)
except ValueError:
return "_GONE"
except exception.EC2ResponseError as exc:
                if closure_matcher.match(exc) is None:
return "_GONE"
else:
raise
return getattr(obj, status_attr)
return _status
def state_wait_gone(self, lfunction, final_set, valid_set):
if not isinstance(final_set, set):
final_set = set((final_set,))
final_set |= self.gone_set
lfunction = self.get_lfunction_gone(lfunction)
state = wait.state_wait(lfunction, final_set, valid_set)
self.assertIn(state, valid_set | self.gone_set)
return state
def waitImageState(self, lfunction, wait_for):
return self.state_wait_gone(lfunction, wait_for,
self.valid_image_state)
def waitInstanceState(self, lfunction, wait_for):
return self.state_wait_gone(lfunction, wait_for,
self.valid_instance_state)
def waitSnapshotStatus(self, lfunction, wait_for):
return self.state_wait_gone(lfunction, wait_for,
self.valid_snapshot_status)
def waitVolumeStatus(self, lfunction, wait_for):
return self.state_wait_gone(lfunction, wait_for,
self.valid_volume_status)
def assertImageStateWait(self, lfunction, wait_for):
state = self.waitImageState(lfunction, wait_for)
self.assertIn(state, wait_for)
def assertInstanceStateWait(self, lfunction, wait_for):
state = self.waitInstanceState(lfunction, wait_for)
self.assertIn(state, wait_for)
def assertVolumeStatusWait(self, lfunction, wait_for):
state = self.waitVolumeStatus(lfunction, wait_for)
self.assertIn(state, wait_for)
def assertSnapshotStatusWait(self, lfunction, wait_for):
state = self.waitSnapshotStatus(lfunction, wait_for)
self.assertIn(state, wait_for)
def assertAddressDissasociatedWait(self, address):
def _disassociate():
cli = self.ec2_client
addresses = cli.get_all_addresses(addresses=(address.public_ip,))
if len(addresses) != 1:
return "INVALID"
if addresses[0].instance_id:
LOG.info("%s associated to %s",
address.public_ip,
addresses[0].instance_id)
return "ASSOCIATED"
return "DISASSOCIATED"
state = wait.state_wait(_disassociate, "DISASSOCIATED",
set(("ASSOCIATED", "DISASSOCIATED")))
self.assertEqual(state, "DISASSOCIATED")
def assertAddressReleasedWait(self, address):
def _address_delete():
            # NOTE(afazekas): the filter gives back the IP
            # even if it is not associated with my tenant
if (address.public_ip not in map(lambda a: a.public_ip,
self.ec2_client.get_all_addresses())):
return "DELETED"
return "NOTDELETED"
state = wait.state_wait(_address_delete, "DELETED")
self.assertEqual(state, "DELETED")
def assertReSearch(self, regexp, string):
if re.search(regexp, string) is None:
raise self.failureException("regexp: '%s' not found in '%s'" %
(regexp, string))
def assertNotReSearch(self, regexp, string):
if re.search(regexp, string) is not None:
raise self.failureException("regexp: '%s' found in '%s'" %
(regexp, string))
def assertReMatch(self, regexp, string):
if re.match(regexp, string) is None:
raise self.failureException("regexp: '%s' not matches on '%s'" %
(regexp, string))
def assertNotReMatch(self, regexp, string):
if re.match(regexp, string) is not None:
raise self.failureException("regexp: '%s' matches on '%s'" %
(regexp, string))
@classmethod
def destroy_bucket(cls, connection_data, bucket):
"""Destroys the bucket and its content, just for teardown."""
exc_num = 0
try:
with contextlib.closing(
boto.connect_s3(**connection_data)) as conn:
if isinstance(bucket, basestring):
bucket = conn.lookup(bucket)
assert isinstance(bucket, s3.bucket.Bucket)
for obj in bucket.list():
try:
bucket.delete_key(obj.key)
obj.close()
except BaseException:
LOG.exception("Failed to delete key %s " % obj.key)
exc_num += 1
conn.delete_bucket(bucket)
except BaseException:
LOG.exception("Failed to destroy bucket %s " % bucket)
exc_num += 1
if exc_num:
raise exceptions.TearDownException(num=exc_num)
@classmethod
def destroy_reservation(cls, reservation):
"""Terminate instances in a reservation, just for teardown."""
exc_num = 0
def _instance_state():
try:
instance.update(validate=True)
except ValueError:
return "_GONE"
except exception.EC2ResponseError as exc:
if cls.ec2_error_code.\
client.InvalidInstanceID.NotFound.match(exc) is None:
return "_GONE"
                # NOTE(afazekas): incorrect code,
                # but the resource must be destroyed
if exc.error_code == "InstanceNotFound":
return "_GONE"
return instance.state
for instance in reservation.instances:
try:
instance.terminate()
wait.re_search_wait(_instance_state, "_GONE")
except BaseException:
LOG.exception("Failed to terminate instance %s " % instance)
exc_num += 1
if exc_num:
raise exceptions.TearDownException(num=exc_num)
    # NOTE(afazekas): The incorrect ErrorCodes make it very, very difficult
    # to write a better teardown
@classmethod
def destroy_security_group_wait(cls, group):
"""Delete group.
Use just for teardown!
"""
# NOTE(afazekas): should wait/try until all related instance terminates
group.delete()
@classmethod
def destroy_volume_wait(cls, volume):
"""Delete volume, tries to detach first.
Use just for teardown!
"""
exc_num = 0
snaps = volume.snapshots()
if len(snaps):
            LOG.critical("%s Volume has %s snapshot(s)", volume.id,
                         [snap.id for snap in snaps])
        # NOTE(afazekas): detaching/attaching are not valid EC2 statuses
def _volume_state():
volume.update(validate=True)
try:
# NOTE(gmann): Make sure volume is attached.
# Checking status as 'not "available"' is not enough to make
# sure volume is attached as it can be in "error" state
if volume.status == "in-use":
volume.detach(force=True)
except BaseException:
LOG.exception("Failed to detach volume %s" % volume)
                # exc_num += 1 ("nonlocal" is not available in Python 2)
return volume.status
try:
wait.re_search_wait(_volume_state, "available")
            # does not validate the status
LOG.info(_volume_state())
volume.delete()
except BaseException:
LOG.exception("Failed to delete volume %s" % volume)
exc_num += 1
if exc_num:
raise exceptions.TearDownException(num=exc_num)
@classmethod
def destroy_snapshot_wait(cls, snapshot):
"""delete snapshot, wait until it ceases to exist."""
snapshot.delete()
def _update():
snapshot.update(validate=True)
wait.wait_exception(_update)
# you can specify tuples if you want to specify the status pattern
for code in ('AddressLimitExceeded', 'AttachmentLimitExceeded', 'AuthFailure',
'Blocked', 'CustomerGatewayLimitExceeded', 'DependencyViolation',
'DiskImageSizeTooLarge', 'FilterLimitExceeded',
'Gateway.NotAttached', 'IdempotentParameterMismatch',
'IncorrectInstanceState', 'IncorrectState',
'InstanceLimitExceeded', 'InsufficientInstanceCapacity',
'InsufficientReservedInstancesCapacity',
'InternetGatewayLimitExceeded', 'InvalidAMIAttributeItemValue',
'InvalidAMIID.Malformed', 'InvalidAMIID.NotFound',
'InvalidAMIID.Unavailable', 'InvalidAssociationID.NotFound',
'InvalidAttachment.NotFound', 'InvalidConversionTaskId',
'InvalidCustomerGateway.DuplicateIpAddress',
'InvalidCustomerGatewayID.NotFound', 'InvalidDevice.InUse',
'InvalidDhcpOptionsID.NotFound', 'InvalidFormat',
'InvalidFilter', 'InvalidGatewayID.NotFound',
'InvalidGroup.Duplicate', 'InvalidGroupId.Malformed',
'InvalidGroup.InUse', 'InvalidGroup.NotFound',
'InvalidGroup.Reserved', 'InvalidInstanceID.Malformed',
'InvalidInstanceID.NotFound',
'InvalidInternetGatewayID.NotFound', 'InvalidIPAddress.InUse',
'InvalidKeyPair.Duplicate', 'InvalidKeyPair.Format',
'InvalidKeyPair.NotFound', 'InvalidManifest',
'InvalidNetworkAclEntry.NotFound',
'InvalidNetworkAclID.NotFound', 'InvalidParameterCombination',
'InvalidParameterValue', 'InvalidPermission.Duplicate',
'InvalidPermission.Malformed', 'InvalidReservationID.Malformed',
'InvalidReservationID.NotFound', 'InvalidRoute.NotFound',
'InvalidRouteTableID.NotFound',
'InvalidSecurity.RequestHasExpired',
'InvalidSnapshotID.Malformed', 'InvalidSnapshot.NotFound',
'InvalidUserID.Malformed', 'InvalidReservedInstancesId',
'InvalidReservedInstancesOfferingId',
'InvalidSubnetID.NotFound', 'InvalidVolumeID.Duplicate',
'InvalidVolumeID.Malformed', 'InvalidVolumeID.ZoneMismatch',
'InvalidVolume.NotFound', 'InvalidVpcID.NotFound',
'InvalidVpnConnectionID.NotFound',
'InvalidVpnGatewayID.NotFound',
'InvalidZone.NotFound', 'LegacySecurityGroup',
'MissingParameter', 'NetworkAclEntryAlreadyExists',
'NetworkAclEntryLimitExceeded', 'NetworkAclLimitExceeded',
'NonEBSInstance', 'PendingSnapshotLimitExceeded',
'PendingVerification', 'OptInRequired', 'RequestLimitExceeded',
'ReservedInstancesLimitExceeded', 'Resource.AlreadyAssociated',
'ResourceLimitExceeded', 'RouteAlreadyExists',
'RouteLimitExceeded', 'RouteTableLimitExceeded',
'RulesPerSecurityGroupLimitExceeded',
'SecurityGroupLimitExceeded',
'SecurityGroupsPerInstanceLimitExceeded',
'SnapshotLimitExceeded', 'SubnetLimitExceeded',
'UnknownParameter', 'UnsupportedOperation',
'VolumeLimitExceeded', 'VpcLimitExceeded',
'VpnConnectionLimitExceeded',
'VpnGatewayAttachmentLimitExceeded', 'VpnGatewayLimitExceeded'):
_add_matcher_class(BotoTestCase.ec2_error_code.client,
code, base=ClientError)
for code in ('InsufficientAddressCapacity', 'InsufficientInstanceCapacity',
'InsufficientReservedInstanceCapacity', 'InternalError',
'Unavailable'):
_add_matcher_class(BotoTestCase.ec2_error_code.server,
code, base=ServerError)
for code in (('AccessDenied', 403),
('AccountProblem', 403),
('AmbiguousGrantByEmailAddress', 400),
('BadDigest', 400),
('BucketAlreadyExists', 409),
('BucketAlreadyOwnedByYou', 409),
('BucketNotEmpty', 409),
('CredentialsNotSupported', 400),
('CrossLocationLoggingProhibited', 403),
('EntityTooSmall', 400),
('EntityTooLarge', 400),
('ExpiredToken', 400),
('IllegalVersioningConfigurationException', 400),
('IncompleteBody', 400),
('IncorrectNumberOfFilesInPostRequest', 400),
('InlineDataTooLarge', 400),
('InvalidAccessKeyId', 403),
'InvalidAddressingHeader',
('InvalidArgument', 400),
('InvalidBucketName', 400),
('InvalidBucketState', 409),
('InvalidDigest', 400),
('InvalidLocationConstraint', 400),
('InvalidPart', 400),
('InvalidPartOrder', 400),
('InvalidPayer', 403),
('InvalidPolicyDocument', 400),
('InvalidRange', 416),
('InvalidRequest', 400),
('InvalidSecurity', 403),
('InvalidSOAPRequest', 400),
('InvalidStorageClass', 400),
('InvalidTargetBucketForLogging', 400),
('InvalidToken', 400),
('InvalidURI', 400),
('KeyTooLong', 400),
('MalformedACLError', 400),
('MalformedPOSTRequest', 400),
('MalformedXML', 400),
('MaxMessageLengthExceeded', 400),
('MaxPostPreDataLengthExceededError', 400),
('MetadataTooLarge', 400),
('MethodNotAllowed', 405),
             'MissingAttachment',
('MissingContentLength', 411),
('MissingRequestBodyError', 400),
('MissingSecurityElement', 400),
('MissingSecurityHeader', 400),
('NoLoggingStatusForKey', 400),
('NoSuchBucket', 404),
('NoSuchKey', 404),
('NoSuchLifecycleConfiguration', 404),
('NoSuchUpload', 404),
('NoSuchVersion', 404),
('NotSignedUp', 403),
('NotSuchBucketPolicy', 404),
('OperationAborted', 409),
('PermanentRedirect', 301),
('PreconditionFailed', 412),
('Redirect', 307),
('RequestIsNotMultiPartContent', 400),
('RequestTimeout', 400),
('RequestTimeTooSkewed', 403),
('RequestTorrentOfBucketError', 400),
('SignatureDoesNotMatch', 403),
('TemporaryRedirect', 307),
('TokenRefreshRequired', 400),
('TooManyBuckets', 400),
('UnexpectedContent', 400),
('UnresolvableGrantByEmailAddress', 400),
('UserKeyMustBeSpecified', 400)):
_add_matcher_class(BotoTestCase.s3_error_code.client,
code, base=ClientError)
for code in (('InternalError', 500),
('NotImplemented', 501),
('ServiceUnavailable', 503),
('SlowDown', 503)):
_add_matcher_class(BotoTestCase.s3_error_code.server,
code, base=ServerError)
|
|
#!/usr/bin/python
import argparse
import boto
import json
import datetime
import time
from boto.sqs.message import RawMessage
from boto.dynamodb2.table import Table
from boto.dynamodb2.fields import HashKey
from boto.dynamodb2.fields import RangeKey
import pprint
import sys
import os
import jinja2
import jinja2_time
MEMON_VERSION = '0.0.1'
class Notification:
Unknown, Down, Up, Late, ConfigError = range(0, 5)
class PeriodType:
Fixed = 'fixed'
Rolling = 'rolling'
class Schema:
Name = 'Name'
Enabled = 'Enabled'
ErrorCount = 'ErrorCount'
Period = 'Period'
Description = 'Description'
Type = 'Type'
LastBlockTime = 'LastBlockTime'
NextBlockTime = 'NextBlockTime'
LastSuccessTime = 'LastSuccessTime'
class MEMon(object):
# Default constructor of the class.
def __init__(self):
self.queue = "memon"
self.table_name = "memon"
self.table = None
self.sns = "memon"
self.sns_email = None
self.debug = False
self.max_notify_count = 3
self.server_time = True
self.sqs = boto.connect_sqs()
self.sns_conn = boto.connect_sns()
self.db = boto.connect_dynamodb()
self.pp = pprint.PrettyPrinter()
self.now = int(time.time())
def aws_init(self):
print 'Creating sqs queue %s' % (self.queue)
self.sqs.create_queue(self.queue)
print 'Creating dynamodb table %s' % (self.table_name)
try:
Table.create(self.table_name,
schema=[HashKey(Schema.Name)],
throughput={'read': 1, 'write': 1})
except boto.exception.JSONResponseError as e:
print e
print 'Creating sns topic %s' % (self.sns)
self.sns_conn.create_topic(self.sns)
if self.sns_email:
            print ('Subscribing %s to the sns topic %s '
                   '(click confirmation link in email)' %
                   (self.sns_email, self.sns))
self.sns_conn.subscribe(self.get_topic_arn(),
'email',
self.sns_email)
else:
print 'Remember to subscribe to the sns topic %s' % (self.sns)
def send(self, name):
q = self.sqs.get_queue(self.queue)
m = RawMessage()
msg = {
'name': name,
'time': self.now,
}
if self.debug:
self.pp.pprint(msg)
m.set_body(json.dumps(msg))
q.write(m)
def poll(self):
q = self.sqs.get_queue(self.queue)
results = q.get_messages(10)
for result in results:
msg = json.loads(result.get_body())
if self.debug:
print msg
self.record(msg['name'], msg['time'])
q.delete_message(result)
def show(self, name=None, error_only=False):
results = sorted(list(self.table.scan()), key=(
lambda event:
int(event[Schema.NextBlockTime]) if Schema.NextBlockTime in event
else 0))
for event in results:
if (error_only and
(not Schema.ErrorCount in event or
event[Schema.ErrorCount] == 0)):
continue
if name is None or name == event[Schema.Name]:
print "\n%s\n---" % (event[Schema.Name])
try:
if not event[Schema.Enabled]:
print "***DISABLED***"
if (Schema.ErrorCount in event and
event[Schema.ErrorCount] > 0):
print ("***ERRORS (%d)***" %
(event[Schema.ErrorCount]))
if Schema.Description in event:
print "Desc: %s" % event[Schema.Description]
print "Type: %s" % event[Schema.Type]
period_sec = datetime.timedelta(
seconds=event[Schema.Period])
print ("Period: %s (%ds)" %
(str(period_sec), event[Schema.Period]))
if Schema.NextBlockTime in event:
print ("Due: %s" %
time.ctime(event[Schema.NextBlockTime]))
if Schema.LastSuccessTime in event:
print ("Last Ran: %s" %
time.ctime(event[Schema.LastSuccessTime]))
if self.debug:
self.pp.pprint(dict(event))
except Exception:
self.pp.pprint(dict(event))
print "\n"
def return_html(self, error_only=False):
if os.path.islink(__file__):
path = os.path.dirname(os.path.realpath(__file__))
else:
path = os.path.dirname(__file__)
results = sorted(list(self.table.scan()), key=(
lambda event:
int(event[Schema.NextBlockTime]) if Schema.NextBlockTime in event
else 0))
j2_env = jinja2.Environment(loader=jinja2.FileSystemLoader(path),
trim_blocks=True,
extensions=['jinja2_time.TimeExtension'],
)
def datetimeformat(value, format='%H:%M %d/%m/%Y'):
return datetime.datetime.fromtimestamp(value).strftime(format)
j2_env.filters['datetimeformat'] = datetimeformat
print j2_env.get_template('dashboard.html.j2').render({'results':results})
def notify_down_events(self):
results = self.table.scan()
for event in results:
# we only want to notify based on the period,
# so we're not notifying every minute
error_count = 0
if Schema.ErrorCount in event and event[Schema.ErrorCount]:
error_count = event[Schema.ErrorCount]
next_notify = None
if Schema.NextBlockTime in event:
next_notify = (event[Schema.NextBlockTime] +
error_count * event[Schema.Period])
is_enabled = Schema.Enabled in event and event[Schema.Enabled]
if next_notify and next_notify <= self.now and is_enabled:
if self.debug:
print "%s\n---" % (event[Schema.Name])
self.pp.pprint(dict(event))
if error_count < int(self.max_notify_count):
self.notify(event[Schema.Name], Notification.Down, event)
elif self.debug:
print "Exceeded notify count for %s" % (event[Schema.Name])
event[Schema.ErrorCount] = error_count + 1
event.save()
def get_topic_arn(self):
# todo: handle pagination of topics
all_topics = self.sns_conn.get_all_topics()
topics = all_topics['ListTopicsResponse']['ListTopicsResult']['Topics']
for topic in topics:
if topic['TopicArn'].endswith(':' + self.sns):
# todo: cache arn
return topic['TopicArn']
raise Exception('Unable to locate topic arn for %s' % (self.sns))
def notify(self, name, notification, event=None):
topicArn = self.get_topic_arn()
message = None
if notification == Notification.Down:
message = 'Down: %s' % (name)
elif notification == Notification.Up:
message = 'Up: %s' % (name)
elif notification == Notification.Late:
message = 'Late: %s' % (name)
elif notification == Notification.ConfigError:
message = 'Config: %s has a configuration error' % (name)
else:
raise Exception('Invalid notification type')
if self.debug:
print message
subject = "[MEMon] %s" % (message)
message = "MEMon Alert\n-----------\n\n%s" % (message)
if event:
if Schema.Description in event:
message = ("%s\n%s: %s" %
(message, name, event[Schema.Description]))
message = "%s\n\n--\nMEMon" % (message)
self.sns_conn.publish(topicArn, message, subject)
def record(self, name, event_time):
if self.server_time:
event_time = self.now
else:
event_time = int(event_time)
try:
event = self.table.get_item(name)
if not Schema.Period in event:
return self.notify(name, Notification.ConfigError)
# If we're processing an older sqs message, we can just ignore it
if (Schema.LastSuccessTime in event and
event_time < event[Schema.LastSuccessTime]):
return
if (Schema.NextBlockTime in event and
event[Schema.NextBlockTime] < event_time and
event[Schema.Enabled]):
if event[Schema.ErrorCount] == 0:
self.notify(name, Notification.Late, event)
else:
self.notify(name, Notification.Up, event)
event[Schema.LastSuccessTime] = event_time
if event[Schema.Type] == PeriodType.Rolling:
event[Schema.LastBlockTime] = event_time
event[Schema.NextBlockTime] = (event_time +
int(event[Schema.Period]))
elif event[Schema.Type] == PeriodType.Fixed:
periods = 1
while (event_time > event[Schema.LastBlockTime]
+ int(event[Schema.Period]) * periods):
periods += 1
event[Schema.LastBlockTime] = (event[Schema.LastBlockTime] +
int(event[Schema.Period]) *
periods)
event[Schema.NextBlockTime] = (event[Schema.LastBlockTime] +
int(event[Schema.Period]))
else:
return self.notify(name, Notification.ConfigError)
event[Schema.ErrorCount] = int(0)
event.save()
except Exception:
self.notify(name, Notification.ConfigError)
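    # Illustrative sketch (added, not original code): how NextBlockTime
    # advances for the two period types handled in record() above. A rolling
    # event restarts its window at the observed event time; a fixed event
    # advances by whole periods from LastBlockTime so the schedule stays
    # anchored. The helper name is an assumption.
    def _example_next_block_time(self, event_time, last_block_time,
                                 period, rolling):
        if rolling:
            return event_time + period
        periods = 1
        while event_time > last_block_time + period * periods:
            periods += 1
        return last_block_time + period * periods + period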
def config(self, name, period, enabled, event_type=None,
description=None, initial_date=None, initial_time=None):
set_date = False
date = datetime.datetime.now().date()
if not initial_date is None:
date = initial_date
print date
set_date = True
if not initial_time is None:
# Add 1 day if earlier time to avoid initial alerts for new events
if datetime.datetime.now().time() > initial_time:
date = date + datetime.timedelta(days=1)
date = datetime.datetime.combine(date, initial_time)
set_date = True
try:
event = self.table.get_item(hash_key=name)
if self.debug:
print 'Get Event %s' % event
if not period is None:
event[Schema.Period] = period
if not enabled is None:
event[Schema.Enabled] = enabled
if not event_type is None:
event[Schema.Type] = event_type
if not description is None:
event[Schema.Description] = description
if set_date:
event[Schema.NextBlockTime] = int(date.strftime('%s'))
event[Schema.LastBlockTime] = (int(date.strftime('%s')) -
event[Schema.Period])
event.save()
except boto.dynamodb.exceptions.DynamoDBValidationError as v_err:
raise v_err
except Exception as e:
if self.debug:
print e
if not period:
raise Exception('Period is required for new events')
if not event_type:
raise Exception('Event Type is required for new events')
# ErrorCount should only be set when adding new items
data = {
Schema.Period: period,
Schema.Enabled: enabled,
Schema.ErrorCount: 0,
Schema.Type: event_type
}
if description:
data[Schema.Description] = description
if set_date:
data[Schema.NextBlockTime] = int(date.strftime('%s'))
data[Schema.LastBlockTime] = int(date.strftime('%s')) - period
print data
# self.table.put_item(data)
item = self.table.new_item(hash_key=name, attrs=data)
item.put()
if self.debug:
self.show(name)
def main(self):
parser = argparse.ArgumentParser(description='Missing Event Monitor')
parser.add_argument('--region',
default='us-east-1',
help=('Region to use (default: %(default)s)'))
parser.add_argument('--queue',
default=self.queue,
help='MEMon SQS Name (default: %(default)s)')
parser.add_argument('--table',
default=self.table_name,
help=('MEMon DynamoDb Table Name '
'(default: %(default)s)'))
parser.add_argument('--sns',
default=self.sns,
help='MEMon SNS Topic Name (default: %(default)s)')
parser.add_argument('--sns-email',
help=('Init only: Subscribe email to '
'sns notifications'))
parser.add_argument('--use-server-time',
dest='server_time',
action='store_true',
help=('Base time calculations on when '
'the server processes the event'))
parser.add_argument('--use-client-time',
dest='server_time',
action='store_false',
help=('Base time calculations on when '
'the client sent the event'))
parser.set_defaults(server_time=True)
parser.add_argument('--debug',
default=False,
action='store_true',
help='Print debug statements')
parser.add_argument('--max-notify-count',
type=int,
default=3,
help='Max # of sns notify events per name')
parser.add_argument('--poll-count',
type=int,
default=3,
help='Number of times to poll in period')
parser.add_argument('--period',
type=int,
default=None,
                            help=('Config only: Notification '
                                  'period in seconds'))
parser.add_argument('--initial-date',
type=mkdate,
                            help=('Config only: Initial date (yyyy-mm-dd) '
'when the first run should be expected'))
parser.add_argument('--initial-time',
type=mktime,
help=('Config only: Initial time (HH:MM) '
'when the first run should be expected'))
parser.add_argument('--description',
default=None,
help=('Config only: '
'Optional description for event'))
parser.add_argument('--type',
choices=[PeriodType.Fixed, PeriodType.Rolling],
default=None,
help='Config only')
parser.add_argument('--enabled',
dest='enabled',
action='store_true',
help='Config only: enable/disable event')
parser.add_argument('--disabled',
dest='enabled',
action='store_false',
help='Config only: enable/disable event')
parser.add_argument('--only-errors',
default=False,
action='store_true',
help='Show only: Only show events in error')
parser.set_defaults(enabled=True)
parser.add_argument('action',
choices=['init', 'send', 'poll',
                                     'config', 'show', 'version', 'html'],
help='Action to perform')
parser.add_argument('name',
nargs='?',
help='Event name (required for send and config)')
args = parser.parse_args()
self.table_name = args.table
self.queue = args.queue
self.sns = args.sns
self.sns_email = args.sns_email
self.debug = args.debug
self.max_notify_count = args.max_notify_count
# set region
self.db = boto.dynamodb.connect_to_region(args.region)
self.sqs = boto.sqs.connect_to_region(args.region)
self.sns_conn = boto.sns.connect_to_region(args.region)
if args.action == 'init':
self.aws_init()
sys.exit(0)
# get_table needs to be after init or init will fail
self.table = self.db.get_table(self.table_name)
if args.action == 'send':
if not args.name:
raise Exception('Missing event name')
self.send(args.name)
elif args.action == 'poll':
for i in xrange(args.poll_count):
if self.debug:
print "Poll attempt %d" % (i)
self.poll()
self.notify_down_events()
elif args.action == 'config':
if not args.name:
raise Exception('Missing event name')
print 'Setting config for %s' % (args.name)
self.config(args.name,
args.period,
args.enabled,
args.type,
args.description,
args.initial_date,
args.initial_time)
elif args.action == 'show':
self.show(args.name, args.only_errors)
elif args.action == 'html':
self.return_html(args.only_errors)
elif args.action == 'version':
print "MEMon Version %s" % (MEMON_VERSION)
sys.exit(0)
else:
raise Exception('Unknown action')
def mkdate(datestring):
try:
dt = datetime.datetime.strptime(datestring, '%Y-%m-%d').date()
print dt
return dt
except Exception as e:
print e
raise e
def mktime(timestring):
try:
ts = datetime.datetime.strptime(timestring, '%H:%M').time()
return ts
except Exception as e:
print e
raise e
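# Example invocations (added for reference; the actions and flags are defined
# in MEMon.main() above, and the event name and email are placeholders):
#   memon.py init --sns-email ops@example.com
#   memon.py config nightly-backup --period 86400 --type fixed
#   memon.py send nightly-backup
#   memon.py poll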
if __name__ == '__main__':
memon = MEMon()
memon.main()
|
|
#!/usr/bin/env python
#a Imports
from OpenGL.GLUT import *
from OpenGL.GLU import *
from OpenGL.GL import *
from OpenGL.GL import shaders
import sys
import math
from gjslib.math import quaternion, matrix, vectors
import OpenGL.arrays.vbo as vbo
import numpy
#a Default shaders
shader_code={}
shader_code["standard_vertex"] = """
#version 330 core
layout(location = 0) in vec3 V_m;
layout(location = 1) in vec2 V_UV;
layout(location = 2) in vec3 N_m;
out vec2 UV;
out vec3 V_w;
out vec3 V_c;
uniform mat4 M;
uniform mat4 V;
uniform mat4 P;
void main(){
    V_w = (M * vec4(V_m,1)).xyz + 0*N_m; // reference N_m so the attribute is not optimized out
V_c = (V * M * vec4(V_m,1)).xyz;
gl_Position = P * V * M * vec4(V_m,1);
UV = V_UV;
}
"""
shader_code["standard_fragment"] = """
#version 330 core
in vec3 V_w;
in vec2 UV;
in vec3 V_c;
out vec4 color;
uniform vec3 C;
void main(){
color = vec4(C,1);
}
"""
shader_code["texture_fragment"] = """
#version 330 core
in vec3 V_w;
in vec2 UV;
in vec3 V_c;
out vec3 color;
uniform sampler2D sampler;
void main(){
color = texture(sampler,UV).rgb*0.7;
}
"""
shader_code["font_fragment"] = """
#version 330 core
in vec3 V_w;
in vec2 UV;
in vec3 V_c;
out vec4 color;
uniform sampler2D sampler;
uniform vec3 C;
void main(){
color = texture(sampler,UV).r * vec4(C,1.0);
if (texture(sampler,UV).r<0.1) discard;
}
"""
#a Shader classes - move to opengl_shader
class c_opengl_shader(object):
#f __init__
def __init__(self):
pass
#f compile
def compile(self):
self.vao = glGenVertexArrays(1)
glBindVertexArray(self.vao)
self.vertex_shader = shaders.compileShader(self.vertex_src, GL_VERTEX_SHADER)
self.fragment_shader = shaders.compileShader(self.fragment_src, GL_FRAGMENT_SHADER)
self.program = shaders.compileProgram(self.vertex_shader, self.fragment_shader)
self.attrib_ids = {}
for k in self.attribs:
self.attrib_ids[k] = glGetAttribLocation(self.program,k)
pass
self.uniform_ids = {}
for k in self.uniforms:
self.uniform_ids[k] = glGetUniformLocation(self.program,k)
pass
for k in self.attrib_ids:
if self.attrib_ids[k]==-1:
raise Exception("Failed to create attribute",k)
pass
for k in self.uniform_ids:
if self.uniform_ids[k]==-1:
raise Exception("Failed to create uniform",k)
pass
pass
#f use
def use(self):
shaders.glUseProgram(self.program)
pass
#f bind_vbo
def bind_vbo(self, t=None, v=None, n=None, uv=None, **kwargs):
from ctypes import sizeof, c_float, c_void_p, c_uint
for (d,k,s) in ( (v,"V_m",3), (n,"N_m",3), (uv,"V_UV",2) ):
if d is not None and k in self.attrib_ids:
glEnableVertexAttribArray(self.attrib_ids[k])
glVertexAttribPointer(self.attrib_ids[k], s, GL_FLOAT, GL_FALSE, t*sizeof(c_float), c_void_p(d*sizeof(c_float)) )
pass
for (k,v) in kwargs.iteritems():
if k in self.uniform_ids:
if type(v)==float:
                glUniform1f(self.uniform_ids[k],v) # scalar uniform (glUniformMatrix1f does not exist)
pass
elif len(v)==3:
glUniform3f(self.uniform_ids[k],v[0],v[1],v[2])
pass
elif len(v)==4:
glUniform4f(self.uniform_ids[k],v[0],v[1],v[2],v[3])
pass
pass
pass
pass
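    # Calling convention (inferred from draw_simple_object/draw_lines below):
    # t is the vertex stride in floats; v/n/uv are float offsets of the
    # position/normal/texcoord attributes within each record; any extra
    # keyword such as C=(r,g,b) is routed to the matching uniform by the
    # type and length of its value.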
#f set_matrices
def set_matrices(self, matrix_stacks):
glUniformMatrix4fv(self.uniform_ids["M"],1,GL_TRUE,matrix_stacks["model"][-1].get_matrix())
glUniformMatrix4fv(self.uniform_ids["V"],1,GL_TRUE,matrix_stacks["view"][-1].get_matrix())
glUniformMatrix4fv(self.uniform_ids["P"],1,GL_TRUE,matrix_stacks["project"][-1].get_matrix())
pass
#f All done
pass
#c c_opengl_shader_color_standard
class c_opengl_shader_color_standard(c_opengl_shader):
vertex_src = shader_code["standard_vertex"]
fragment_src = shader_code["standard_fragment"]
attribs = ("V_m", "V_UV", "N_m")
uniforms = ("M", "V", "P", "C")
pass
#c c_opengl_shader_texture_standard
class c_opengl_shader_texture_standard(c_opengl_shader):
vertex_src = shader_code["standard_vertex"]
fragment_src = shader_code["texture_fragment"]
attribs = ("V_m", "V_UV", "N_m")
uniforms = ("M", "V", "P")
pass
#c c_opengl_shader_font_standard
class c_opengl_shader_font_standard(c_opengl_shader):
vertex_src = shader_code["standard_vertex"]
fragment_src = shader_code["font_fragment"]
attribs = ("V_m", "V_UV", "N_m")
uniforms = ("M", "V", "P", "C")
pass
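#c (sketch) adding another shader variant
# A minimal sketch, not part of the original file: a new variant only needs
# its two GLSL sources plus the attribute/uniform name tuples that
# c_opengl_shader.compile() resolves into locations.
#class c_opengl_shader_my_variant(c_opengl_shader):
#    vertex_src = shader_code["standard_vertex"]
#    fragment_src = shader_code["standard_fragment"]
#    attribs = ("V_m", "V_UV", "N_m")
#    uniforms = ("M", "V", "P", "C")
#    pass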
#a Class for c_opengl
#c c_opengl_app
class c_opengl_app(object):
window_title = "opengl_main"
#f __init__
def __init__(self, window_size):
self.window_size = window_size
self.display_has_errored = False
self.fonts = {}
self.display_matrices = {"model": [matrix.c_matrixNxN(order=4).identity()],
"view": [matrix.c_matrixNxN(order=4).identity()],
"project":[matrix.c_matrixNxN(order=4).identity()],
}
self.clips = []
self.selected_shader = None
self.simple_object = {}
self.simple_object["cross"] = {"vectors":vbo.VBO(data=numpy.array([1.0,0.2,0, -1.0,0.2,0, 1.0,-0.2,0, -1.0,-0.2,0,
0.2,1.0,0, 0.2,-1.0,0, -0.2,1.0,0, -0.2,-1.0,0, ],
dtype=numpy.float32), target=GL_ARRAY_BUFFER ),
"indices":vbo.VBO(data=numpy.array([0,1,2,1,2,3,4,5,6,5,6,7],
dtype=numpy.uint8), target=GL_ELEMENT_ARRAY_BUFFER ),
}
self.simple_object["diamond"] = {"vectors":vbo.VBO(data=numpy.array([1,0,0, -1,0,0, 0,1,0, 0,-1,0, 0,0,1, 0,0,-1],
dtype=numpy.float32), target=GL_ARRAY_BUFFER ),
"indices":vbo.VBO(data=numpy.array([0,2,4, 0,2,5, 0,3,4, 0,3,5,
1,2,4, 1,2,5, 1,3,4, 1,3,5],
dtype=numpy.uint8), target=GL_ELEMENT_ARRAY_BUFFER ),
}
pass
#f window_xy
def window_xy(self, xy):
return ((xy[0]+1.0)*self.window_size[0]/2, (xy[1]+1.0)*self.window_size[1]/2)
#f uniform_xy
def uniform_xy(self, xy):
return (-1.0+2*float(xy[0])/self.window_size[0], -1.0+2*float(xy[1])/self.window_size[1])
#f attach_menu
def attach_menu(self, menu, name):
glutSetMenu(menu.glut_id(name))
glutAttachMenu(GLUT_RIGHT_BUTTON)
pass
#f clip_push
def clip_push(self, x,y,w,h):
"""
Can do clipping also by giving portions of the depth buffer space to different levels; each time you push you go farther back into the depth buffer
Can do clipping using a clip volume in the fragment shader using a clip transformation (of MVP vector to clip volume - discard if outside unit cube)
Can do clipping by overwriting the depth buffer
"""
x,y,w,h = int(x),int(y),int(w),int(h)
self.clips.append((x,y,w,h))
glViewport(x,y,w,h)
glScissor(x,y,w,h)
glEnable(GL_SCISSOR_TEST)
pass
#f clip_pop
def clip_pop(self, matrix="model"):
self.clips.pop()
if len(self.clips)==0:
(x,y,w,h) = (0,0,self.window_size[0],self.window_size[1])
glDisable(GL_SCISSOR_TEST)
pass
else:
(x,y,w,h) = self.clips[-1]
pass
glViewport(x,y,w,h)
glScissor(x,y,w,h)
pass
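    # Usage sketch (hypothetical): pushes nest, pops restore the outer clip.
    #   self.clip_push(0,0,512,512)     # outer pane
    #   self.clip_push(64,64,128,128)   # inner pane
    #   ...draw the inner pane...
    #   self.clip_pop()                 # back to the 512x512 clip
    #   self.clip_pop()                 # scissor test disabled again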
#f matrix_push
def matrix_push(self, matrix="model"):
m = self.display_matrices[matrix][-1].copy()
self.display_matrices[matrix].append(m)
if len(self.display_matrices[matrix])>100:
raise Exception("Too many matrices pushed")
pass
#f matrix_pop
def matrix_pop(self, matrix="model"):
m = self.display_matrices[matrix].pop()
pass
#f matrix_mult
def matrix_mult(self, by, matrix="model"):
self.display_matrices[matrix][-1].postmult(by)
pass
#f matrix_scale
def matrix_scale(self, scale=1.0, matrix="model"):
if type(scale)==float:
scale = (scale,scale,scale,1.0)
pass
self.display_matrices[matrix][-1].scale(scale)
pass
#f matrix_rotate
def matrix_rotate(self, angle, axis, matrix="model"):
q = quaternion.c_quaternion.of_rotation(angle=angle, axis=axis, degrees=True)
self.display_matrices[matrix][-1].postmult(q.get_matrixn(order=4))
pass
#f matrix_translate
def matrix_translate(self, translate, matrix="model"):
self.display_matrices[matrix][-1].translate(translate)
pass
#f matrix_set
def matrix_set(self, m, matrix="project"):
self.display_matrices[matrix][-1] = m
pass
#f matrix_identity
def matrix_identity(self, matrix="model"):
self.display_matrices[matrix][-1].identity()
pass
#f matrix_perspective
def matrix_perspective(self, fovx=None, fovy=None, aspect=1.0, zNear=None, zFar=None, matrix="project"):
m = self.display_matrices[matrix][-1]
for r in range(4):
for c in range(4):
m[r,c] = 0.0
pass
pass
if fovx is None:
fy = 1/math.tan(math.radians(fovy)/2)
fx = fy/aspect
pass
else:
fx = 1/math.tan(math.radians(fovx)/2)
if fovy is None:
fy = fx*aspect
pass
else:
fy = 1/math.tan(math.radians(fovy)/2)
pass
pass
m[0,0] = fx
m[1,1] = fy
m[2,2] = (zNear+zFar)/(zNear-zFar)
m[2,3] = 2*zNear*zFar/(zNear-zFar)
m[3,2] = -1.0
pass
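    # The entries above form the standard OpenGL perspective matrix
    # (row-major here; set_matrices uploads it with transpose=GL_TRUE):
    #   [ fx  0    0                   0               ]
    #   [ 0   fy   0                   0               ]
    #   [ 0   0   (zN+zF)/(zN-zF)   2*zN*zF/(zN-zF)    ]
    #   [ 0   0   -1                   0               ]
    # with fx,fy = 1/tan(fov/2), mapping eye-space z in [-zNear,-zFar]
    # to NDC [-1,1].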
#f matrix_use
def matrix_use(self):
self.selected_shader.set_matrices(self.display_matrices)
pass
#f shaders_compile
def shaders_compile(self):
self.shaders = {}
self.shaders["color_standard"] = c_opengl_shader_color_standard()
self.shaders["texture_standard"] = c_opengl_shader_texture_standard()
self.shaders["font_standard"] = c_opengl_shader_font_standard()
for k in self.shaders:
self.shaders[k].compile()
pass
#f shader_set_attributes
def shader_set_attributes(self, **kwargs):
self.selected_shader.bind_vbo(**kwargs)
pass
#f shader_use
def shader_use(self,shader_name="color_standard"):
self.selected_shader = self.shaders[shader_name]
self.selected_shader.use()
pass
#f draw_simple_object
def draw_simple_object(self, obj, c, xyz, sc, angle=0, axis=(0,0,1)):
self.matrix_push()
self.matrix_translate(xyz)
self.matrix_rotate(angle, axis)
self.matrix_scale(sc)
self.matrix_use()
self.simple_object[obj]["vectors"].bind()
self.simple_object[obj]["indices"].bind()
self.shader_set_attributes( t=3, v=0, C=c )
glDrawElements(GL_TRIANGLES,len(self.simple_object[obj]["indices"]),GL_UNSIGNED_BYTE, None)
self.simple_object[obj]["vectors"].unbind()
self.simple_object[obj]["indices"].unbind()
self.matrix_pop()
pass
#f draw_lines
def draw_lines(self, line_data):
vectors = vbo.VBO(data=numpy.array(line_data, dtype=numpy.float32), target=GL_ARRAY_BUFFER )
vectors.bind()
self.shader_set_attributes(t=3, v=0)
        glDrawArrays(GL_LINES,0,len(line_data)//3) # count is vertices; line_data holds three floats per vertex
vectors.unbind()
pass
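    # line_data is a flat (x,y,z) float sequence, two vertices per segment;
    # a hypothetical unit X axis would be self.draw_lines((0,0,0, 1,0,0)).
    # A fresh VBO is built per call, so this suits occasional overlays, not
    # per-frame bulk geometry.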
#f init_opengl
def init_opengl(self):
glutInit(sys.argv)
glutInitDisplayMode(GLUT_3_2_CORE_PROFILE |GLUT_DOUBLE | GLUT_RGB | GLUT_DEPTH)
glutInitWindowSize(self.window_size[0],self.window_size[1])
glutCreateWindow(self.window_title)
#print glGetString(GL_VERSION)
self.shaders_compile()
self.shader_use()
glClearColor(0.,0.,0.,1.)
#glShadeModel(GL_SMOOTH)
#glEnable(GL_CULL_FACE)
glEnable(GL_DEPTH_TEST)
self.opengl_post_init()
pass
#f opengl_post_init
def opengl_post_init(self):
"""Subclass should provide this"""
pass
#f main_loop
def main_loop(self):
glutKeyboardFunc(self.keypress_callback)
glutKeyboardUpFunc(self.keyrelease_callback)
glutMouseFunc(self.mouse_callback)
glutMotionFunc(self.motion_callback)
glutDisplayFunc(self.display_callback)
glutIdleFunc(self.idle_callback)
glutIgnoreKeyRepeat(True)
glutMainLoop()
return
#f display_callback
def display_callback(self):
if (not self.display_has_errored):
try:
self.display()
except SystemExit as e:
raise
except:
traceback.print_exc()
self.display_has_errored = True
pass
pass
pass
#f keypress_callback
def keypress_callback(self, key,x,y):
w = glutGet(GLUT_WINDOW_WIDTH)
h = glutGet(GLUT_WINDOW_HEIGHT)
        y = h-y # Invert y as OpenGL wants it from bottom-left
m = glutGetModifiers()
if self.keypress(key,m,x,y):
return
if ord(key)==17: # ctrl-Q
sys.exit()
pass
#f keyrelease_callback
def keyrelease_callback(self, key,x,y):
w = glutGet(GLUT_WINDOW_WIDTH)
h = glutGet(GLUT_WINDOW_HEIGHT)
        y = h-y # Invert y as OpenGL wants it from bottom-left
m = glutGetModifiers()
if self.keyrelease(key,m,x,y):
return
if ord(key)==17: # ctrl-Q
sys.exit()
pass
#f mouse_callback
def mouse_callback(self, button,state,x,y):
w = glutGet(GLUT_WINDOW_WIDTH)
h = glutGet(GLUT_WINDOW_HEIGHT)
        y = h-y # Invert y as OpenGL wants it from bottom-left
m = glutGetModifiers()
b = "left"
s = "up"
if state == GLUT_UP: s="up"
if state == GLUT_DOWN: s="down"
if button == GLUT_LEFT_BUTTON: b="left"
if button == GLUT_MIDDLE_BUTTON: b="middle"
if button == GLUT_RIGHT_BUTTON: b="right"
self.mouse(b,s,m,x,y)
pass
#f motion_callback
def motion_callback(self, x,y):
w = glutGet(GLUT_WINDOW_WIDTH)
h = glutGet(GLUT_WINDOW_HEIGHT)
        y = h-y # Invert y as OpenGL wants it from bottom-left
self.motion(x,y)
pass
#f idle_callback
def idle_callback(self):
self.idle()
glutPostRedisplay()
pass
#f display
def display(self):
"""
Should be provided by the subclass
"""
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT)
glutSwapBuffers()
pass
#f keypress
def keypress(self, k, m, x, y):
"""
Should be provided by the subclass
"""
pass
#f keyrelease
def keyrelease(self, k, m, x, y):
"""
Should be provided by the subclass
"""
pass
#f mouse
def mouse(self, b, s, m, x, y):
"""
Should be provided by the subclass
"""
pass
#f motion
def motion(self, x, y):
"""
Should be provided by the subclass
"""
pass
#f idle
def idle(self):
"""
Should be provided by the subclass
"""
pass
#f get_font
def get_font(self, fontname):
if fontname not in self.fonts:
fontname = self.fonts.keys()[0]
pass
return self.fonts[fontname]
#f load_font
def load_font(self, bitmap_filename):
import numpy
from gjslib.graphics.font import c_bitmap_font
bf = c_bitmap_font()
bf.load(bitmap_filename)
png_data = numpy.array(list(bf.image.getdata()), numpy.uint8)
texture = glGenTextures(1)
glPixelStorei(GL_UNPACK_ALIGNMENT,1)
glBindTexture(GL_TEXTURE_2D, texture)
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_MIRRORED_REPEAT)
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_MIRRORED_REPEAT)
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR)
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR)
glTexImage2D(GL_TEXTURE_2D, 0, GL_R8, bf.image_size[0], bf.image_size[1], 0, GL_RED, GL_UNSIGNED_BYTE, png_data)
glFlush()
self.fonts[bf.fontname] = (bf, texture)
return bf
#f debug
def debug(self, reason, options=None):
print "*"*80
print "opengl_app.debug",reason
print "*"*80
print self.clips
print self.display_matrices["project"][-1]
print self.display_matrices["view"][-1]
print self.display_matrices["model"][-1]
pass
#f All done
pass
#c c_opengl_camera_app
class c_opengl_camera_app(c_opengl_app):
camera_control_keys = { "x":(("roll",1,0),),
"z":(("roll",2,0),),
"s":(("pitch",1,0),),
"a":(("pitch",2,0),),
".":(("yaw",1,0),),
";":(("yaw",2,0),),
"[":(("fov",1,0),),
"]":(("fov",2,0),),
"/":(("speed",1,0),),
"'":(("speed",2,0),),
" ":(("roll",0,-1),("yaw",0,-1),("pitch",0,-1),("speed",4,3),),
}
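    # Each entry maps a key to (action, set_bits, clear_bits) tuples; idle()
    # ORs these together per action while keys are held and applies the
    # camera_quats rotation (or speed/fov change) selected by the bits that
    # survive set &~ clear.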
#f __init__
def __init__(self, **kwargs):
c_opengl_app.__init__(self, **kwargs)
self.camera = {"position":[0,0,-10],
"facing":quaternion.c_quaternion.identity(),
"rpy":[0,0,0],
"speed":0,
"fov":90,
}
self.mvp = None
self.aspect = 1.0
self.zNear=1.0
self.zFar=40.0
self.camera_controls = set()
self.camera_quats = {("roll",1):quaternion.c_quaternion.roll(+0.002),
("roll",2):quaternion.c_quaternion.roll(-0.002),
("yaw",1):quaternion.c_quaternion.yaw(+0.002),
("yaw",2):quaternion.c_quaternion.yaw(-0.002),
("pitch",1):quaternion.c_quaternion.pitch(+0.002),
("pitch",2):quaternion.c_quaternion.pitch(-0.002),
}
pass
#f set_camera
def set_camera(self, camera=None, orientation=None, yfov=None):
if camera is not None:
self.camera["position"] = list(camera)
pass
if orientation is not None:
self.camera["facing"] = orientation
pass
if yfov is not None:
self.camera["fov"] = yfov
pass
#f change_angle
def change_angle(self, angle, dirn, angle_delta=0.01 ):
if (self.camera["rpy"][angle]*dirn)<0:
self.camera["rpy"][angle]=0
pass
else:
self.camera["rpy"][angle] += dirn*angle_delta
pass
pass
#f change_position
def change_position(self, x,y,z ):
scale = 0.1+self.camera["speed"]*5
self.camera["position"] = [self.camera["position"][0]+x*scale,
self.camera["position"][1]+y*scale,
self.camera["position"][2]+z*scale
]
pass
#f change_fov
def change_fov(self, fov):
self.camera["fov"] += fov
if self.camera["fov"]<10: self.camera["fov"]=10
if self.camera["fov"]>140: self.camera["fov"]=140
pass
#f idle
def idle(self):
acceleration = 0.02
self.camera["speed"] = self.camera["speed"]*0.9
actions = {}
        for key in self.camera_controls:
            for action in self.camera_control_keys[key]:
(a,s,c) = action
if a in actions:
s = s | actions[a][0]
c = c | actions[a][1]
pass
actions[a] = (s,c)
pass
pass
for a in actions:
(s,c) = actions[a]
controls = s &~ c
if controls!=0:
if (a,controls) in self.camera_quats:
self.camera["facing"] = self.camera_quats[(a,controls)].copy().multiply(self.camera["facing"])
elif a=="speed":
self.camera["speed"] += acceleration*(2*controls-3)
if controls&4: self.camera["speed"]=0
elif a=="fov":
self.camera["fov"] *= 1+0.1*(2*controls-3)
pass
pass
if self.camera["speed"]!=0:
m = self.camera["facing"].get_matrix()
self.camera["position"][0] += self.camera["speed"]*m[0][2]
self.camera["position"][1] += self.camera["speed"]*m[1][2]
self.camera["position"][2] += self.camera["speed"]*m[2][2]
pass
pass
#f key_updown
def key_updown(self, key,m,x,y,key_down):
if key in self.camera_control_keys:
if key_down:
self.camera_controls.add(key)
pass
else:
self.camera_controls.discard(key)
pass
return True
pass
#f keyrelease
def keyrelease(self, key,m,x,y):
if self.key_updown(key,m,x,y,False):
return
pass
#f keypress
def keypress(self, key,m,x,y):
if self.key_updown(key,m,x,y,True):
return
        if key==' ': self.camera["speed"] = 0
        if key=='e': self.camera["rpy"] = [0,0,0]
        if key=='r': # reset camera pose
            self.camera["position"] = [0,0,-10]
            self.camera["facing"] = quaternion.c_quaternion.identity()
            self.camera["fov"] = 90
            pass
pass
#f opengl_post_init
def opengl_post_init(self):
pass
#f display
def display(self, show_crosshairs=False, focus_xxyyzz=None):
self.matrix_perspective(fovy=self.camera["fov"], aspect=self.aspect, zNear=self.zNear, zFar=self.zFar, matrix="project")
if self.mvp is not None:
self.mvp.perspective(self.camera["fov"],self.aspect,self.zNear,self.zFar)
pass
self.camera["facing"] = quaternion.c_quaternion.roll(self.camera["rpy"][0]).multiply(self.camera["facing"])
self.camera["facing"] = quaternion.c_quaternion.pitch(self.camera["rpy"][1]).multiply(self.camera["facing"])
self.camera["facing"] = quaternion.c_quaternion.yaw(self.camera["rpy"][2]).multiply(self.camera["facing"])
m = self.camera["facing"].get_matrixn(order=4)
self.camera["position"][0] += self.camera["speed"]*m[0,2]
self.camera["position"][1] += self.camera["speed"]*m[1,2]
self.camera["position"][2] += self.camera["speed"]*m[2,2]
if focus_xxyyzz is not None:
m2 = m.copy()
#m2.transpose()
#self.camera["position"] = vectors.vector_add((0,-1,0),m2.apply((0,0,-10,1))[0:3])
self.camera["position"] = vectors.vector_add((focus_xxyyzz[0],focus_xxyyzz[2],focus_xxyyzz[4]),
m2.apply((focus_xxyyzz[1],focus_xxyyzz[3],focus_xxyyzz[5],1))[0:3])
pass
self.matrix_set(m.transpose(), matrix="view")
self.matrix_translate(self.camera["position"], matrix="view")
self.matrix_identity(matrix="model")
if self.mvp is not None:
m3 = self.camera["facing"].get_matrix3()
self.mvp.mult3x3(m9=m3.matrix)
self.mvp.translate(self.camera["position"])
pass
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT)
if show_crosshairs: # Draw crosshairs
self.matrix_push("project")
self.matrix_push("view")
self.matrix_push("model")
self.matrix_identity("project")
self.matrix_identity("view")
self.matrix_identity("model")
self.shader_use("color_standard")
self.shader_set_attributes(C=(0.7,0.7,0.9))
self.matrix_use()
self.draw_lines((-1,0,-1,1,0,-1, 0,-1,-1,0,1,-1))
self.matrix_pop("project")
self.matrix_pop("view")
self.matrix_pop("model")
pass
pass
#f All done
pass
#a Test app
class c_opengl_test_app(c_opengl_camera_app):
use_primitive_restart = False
patches = { "flat_xy_square": ( (0,0,0), (1/3.0,0,0), (2/3.0,0,0), (1,0,0),
(0,1/3.0,0), (1/3.0,1/3.0,0), (2/3.0,1/3.0,0), (1,1/3.0,0),
(0,2/3.0,0), (1/3.0,2/3.0,0), (2/3.0,2/3.0,0), (1,2/3.0,0),
(0,1,0), (1/3.0,1,0), (2/3.0,1,0), (1,1,0),
),
"bump_one": ( (0,0,0), (0.1,0,0.1), (0.9,0,0.1), (1,0,0),
(0,0.1,0.1), (0.1,0.1,0.1), (0.9,0.1,0.1), (1,0.1,0.1),
(0,0.9,0.1), (0.1,0.9,0.1), (0.9,0.9,0.1), (1,0.9,0.1),
(0,1,0), (0.1,1,0.1), (0.9,1,0.1), (1,1,0),
),
"bump_two": ( (0,0,0), (0.2,-0.2,0.2), (0.8,-0.2,0.2), (1,0,0),
(-0.2,0.2,0.2), (0.2,0.2,-0.1), (0.8,0.2,-0.1), (1.2,0.2,0.2),
(-0.2,0.8,0.2), (0.2,0.8,-0.1), (0.8,0.8,-0.1), (1.2,0.8,0.2),
(0,1,0), (0.2,1.2,0.2), (0.8,1.2,0.2), (1,1,0),
),
}
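    # Each patch is a 4x4 grid of Bezier control points listed row-major;
    # opengl_post_init feeds it to gjslib's c_bezier_patch and tessellates
    # it into the triangle strip drawn by draw_object.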
#f __init__
def __init__(self, patch_name, **kwargs):
c_opengl_camera_app.__init__(self, **kwargs)
self.patch = self.patches[patch_name]
self.opengl_surface = {}
self.xxx = 0.0
self.yyy = 0.0
self.window_title = "OpenGL Test app '%s'"%patch_name
pass
#f opengl_post_init
def opengl_post_init(self):
from gjslib.math import bezier
from ctypes import sizeof, c_float, c_void_p, c_uint
pts = []
for coords in self.patch:
pts.append( bezier.c_point(coords=coords) )
pass
bp = bezier.c_bezier_patch( pts=pts )
float_size = sizeof(c_float)
vertex_offset = c_void_p(0 * float_size)
normal_offset = c_void_p(3 * float_size)
record_len = 6 * float_size
data_array = []
n = 14
for i in range(n+1):
for j in range(n+1):
data_array.append( bp.coord(i/(n+0.0),j/(n+0.0)).get_coords(scale=(2.0,2.0,2.0),offset=(-1.,-1.0,.0)) )
data_array.append( bp.normal(i/(n+0.0),j/(n+0.0)).get_coords() )
pass
pass
vertices = vbo.VBO( data=numpy.array(data_array, dtype=numpy.float32) )
index_list = []
        if self.use_primitive_restart:
            glEnable(GL_PRIMITIVE_RESTART)
            glPrimitiveRestartIndex(255) # matches the 255 sentinel appended below
            pass
for j in range(n):
for i in range(n+1):
index_list.append( i+j*(n+1) )
index_list.append( i+(j+1)*(n+1) )
pass
if j<(n-1):
if self.use_primitive_restart:
index_list.append( 255 )
pass
else:
index_list.append( (n)+(j+1)*(n+1) )
index_list.append( (n)+(j+1)*(n+1) )
index_list.append( (j+1)*(n+1) )
index_list.append( (j+1)*(n+1) )
pass
pass
        #print index_list
indices = vbo.VBO( data=numpy.array( index_list, dtype=numpy.uint8),
target=GL_ELEMENT_ARRAY_BUFFER )
vertices.bind()
indices.bind()
self.opengl_surface["vertices"] = vertices
self.opengl_surface["indices"] = indices
self.opengl_surface["vertex_offset"] = vertex_offset
self.opengl_surface["normal_offset"] = normal_offset
self.opengl_surface["record_len"] = record_len
pass
#f display
def display(self):
c_opengl_camera_app.display(self)
self.yyy += 0.03
lightZeroPosition = [4.+3*math.sin(self.yyy),4.,4.-3*math.cos(self.yyy),1.]
        lightZeroColor = [0.7,1.0,0.7,1.0] #green tinged
        ambient_lightZeroColor = [1.0,1.0,1.0,1.0] #white
glLightfv(GL_LIGHT0, GL_POSITION, lightZeroPosition)
glLightfv(GL_LIGHT0, GL_DIFFUSE, lightZeroColor)
glLightf(GL_LIGHT0, GL_CONSTANT_ATTENUATION, 0.1)
glLightf(GL_LIGHT0, GL_LINEAR_ATTENUATION, 0.05)
glEnable(GL_LIGHT0)
glLightfv(GL_LIGHT1, GL_AMBIENT, ambient_lightZeroColor)
glEnable(GL_LIGHT1)
glPushMatrix()
color = [1.0,0.,0.,1.]
glMaterialfv(GL_FRONT,GL_DIFFUSE,[1.0,1.0,1.0,1.0])
glMaterialfv(GL_FRONT,GL_AMBIENT,[1.0,1.0,1.0,1.0])
glTranslate(lightZeroPosition[0],lightZeroPosition[1],lightZeroPosition[2])
glScale(0.3,0.3,0.3)
glutSolidSphere(2,40,40)
glPopMatrix()
glMaterialfv(GL_FRONT,GL_AMBIENT,[0.1,0.1,0.1,1.0])
glPushMatrix()
#glTranslate(0.0 ,2.75, 0.0)
        color = [0.5,0.,0.,1.] # RGBA
glMaterialfv(GL_FRONT,GL_DIFFUSE,color)
#glutSolidSphere(2,40,40)
glutSolidOctahedron()
glPopMatrix()
glPushMatrix()
self.xxx += 0.3
brightness = 0.4
glRotate(self.xxx,1,1,0)
glTranslate(0.0 ,-0.75, 0.0)
glMaterialfv(GL_FRONT,GL_DIFFUSE,[brightness*1.0,brightness*1.,brightness*0.,1.])
glPushMatrix()
glTranslate(0,0,1)
self.draw_object()
glPopMatrix()
glPushMatrix()
glRotate(180,0,1,0)
glTranslate(0,0,1)
self.draw_object()
glPopMatrix()
glMaterialfv(GL_FRONT,GL_DIFFUSE,[brightness*0.5,brightness*1.,brightness*0.,1.])
glPushMatrix()
glRotate(-90,0,1,0)
glTranslate(0,0,1)
self.draw_object()
glPopMatrix()
glPushMatrix()
glRotate(90,0,1,0)
glTranslate(0,0,1)
self.draw_object()
glPopMatrix()
glMaterialfv(GL_FRONT,GL_DIFFUSE,[brightness*0,brightness*0.5,brightness*0.5,1.])
glPushMatrix()
glRotate(-90,1,0,0)
glTranslate(0,0,1)
self.draw_object()
glPopMatrix()
glPushMatrix()
glRotate(90,1,0,0)
glTranslate(0,0,1)
self.draw_object()
glPopMatrix()
glPopMatrix()
glutSwapBuffers()
pass
#f draw_object
def draw_object(self):
self.opengl_surface["vertices"].bind()
        glEnableClientState(GL_VERTEX_ARRAY)
glEnableClientState(GL_NORMAL_ARRAY)
glVertexPointer( 3, GL_FLOAT, self.opengl_surface["record_len"], self.opengl_surface["vertex_offset"] )
glNormalPointer( GL_FLOAT, self.opengl_surface["record_len"], self.opengl_surface["normal_offset"])
self.opengl_surface["indices"].bind()
        glDrawElements( GL_TRIANGLE_STRIP,
                        len(self.opengl_surface["indices"]),
                        GL_UNSIGNED_BYTE,
                        None ) # element array buffer is bound, so pass a null offset
pass
#f All done
pass
#a Toplevel
if __name__ == '__main__':
a = c_opengl_test_app(patch_name="bump_one", window_size=(1000,1000))
a.init_opengl()
a.main_loop()
pass
|
|
# -*- coding: utf8 -*-
import os
import bitcoin
from secp256k1 import PrivateKey
import pytest
import serpent
from rlp.utils import decode_hex
from ethereum import tester, utils, abi
from ethereum.utils import safe_ord, big_endian_to_int
# Test EVM contracts
serpent_code = '''
def main(a,b):
return(a ^ b)
'''
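# Note: in serpent, '^' is exponentiation, so main(2, 5) returns 2**5 == 32.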
def test_evm():
evm_code = serpent.compile(serpent_code)
translator = abi.ContractTranslator(serpent.mk_full_signature(
serpent_code))
data = translator.encode('main', [2, 5])
s = tester.state()
c = s.evm(evm_code)
o = translator.decode('main', s.send(tester.k0, c, 0, data))
assert o == [32]
# Test serpent compilation of variables using _with_, doing a simple
# arithmetic calculation 20 * 30 + 10 = 610
sixten_code =\
'''
(with 'x 10
(with 'y 20
(with 'z 30
(seq
(set 'a (add (mul (get 'y) (get 'z)) (get 'x)))
(return (ref 'a) 32)
)
)
)
)
'''
def test_sixten():
s = tester.state()
c = decode_hex('1231231231231234564564564564561231231231')
s.block.set_code(c, serpent.compile_lll(sixten_code))
o1 = s.send(tester.k0, c, 0)
assert utils.big_endian_to_int(o1) == 610
with_code = \
"""
def f1():
o = array(4)
with x = 5:
o[0] = x
with y = 7:
o[1] = y
with x = 8:
o[2] = x
o[3] = x
return(o:arr)
def f2():
with x = 5:
with y = 7:
x = 2
return(x)
def f3():
with x = 5:
with y = seq(x = 7, 2):
return(x)
def f4():
o = array(4)
with x = 5:
o[0] = x
with y = 7:
o[1] = y
with x = x:
o[2] = x
with y = x:
o[3] = y
return(o:arr)
"""
def test_with():
s = tester.state()
c = s.abi_contract(with_code)
assert c.f1() == [5, 7, 8, 5]
assert c.f2() == 2
assert c.f3() == 7
assert c.f4() == [5, 7, 5, 5]
# Test Serpent's import mechanism
mul2_code = \
'''
def double(v):
log(v)
return(v*2)
'''
filename = "mul2_qwertyuioplkjhgfdsa.se"
returnten_code = \
'''
extern mul2: [double:i]
x = create("%s")
log(x)
return(x.double(5))
''' % filename
def test_returnten():
s = tester.state()
open(filename, 'w').write(mul2_code)
c = s.contract(returnten_code)
o1 = s.send(tester.k0, c, 0)
os.remove(filename)
assert utils.big_endian_to_int(o1) == 10
# Test inset
inset_inner_code = \
'''
def g(n):
return(n + 10)
def f(n):
return n*2
'''
filename2 = "inner_qwertyuioplkjhgfdsa.se"
inset_outer_code = \
'''
inset("%s")
def foo():
res = self.g(12)
return res
''' % filename2
def test_inset():
s = tester.state()
open(filename2, 'w').write(inset_inner_code)
c = s.abi_contract(inset_outer_code)
assert c.foo() == 22
os.remove(filename2)
# Inset at the end instead
inset_inner_code2 = \
'''
def g(n):
return(n + 10)
def f(n):
return n*2
'''
filename25 = "inner_qwertyuioplkjhgfdsa.se"
inset_outer_code2 = \
'''
def foo():
res = self.g(12)
return res
inset("%s")
''' % filename25
def test_inset2():
s = tester.state()
open(filename25, 'w').write(inset_inner_code2)
c = s.abi_contract(inset_outer_code2)
assert c.foo() == 22
os.remove(filename25)
# Test a simple namecoin implementation
namecoin_code =\
'''
def main(k, v):
if !self.storage[k]:
self.storage[k] = v
return(1)
else:
return(0)
'''
def test_namecoin():
s = tester.state()
c = s.abi_contract(namecoin_code)
o1 = c.main("george", 45)
assert o1 == 1
o2 = c.main("george", 20)
assert o2 == 0
o3 = c.main("harry", 60)
assert o3 == 1
assert s.block.to_dict()
# Test a simple currency implementation
currency_code = '''
data balances[2^160]
def init():
self.balances[msg.sender] = 1000
def query(addr):
return(self.balances[addr])
def send(to, value):
from = msg.sender
fromvalue = self.balances[from]
if fromvalue >= value:
self.balances[from] = fromvalue - value
self.balances[to] = self.balances[to] + value
log(from, to, value)
return(1)
else:
return(0)
'''
def test_currency():
s = tester.state()
c = s.abi_contract(currency_code, sender=tester.k0)
o1 = c.send(tester.a2, 200)
assert o1 == 1
o2 = c.send(tester.a2, 900)
assert o2 == 0
o3 = c.query(tester.a0)
assert o3 == 800
o4 = c.query(tester.a2)
assert o4 == 200
# Test a data feed
data_feed_code = '''
data creator
data values[]
def init():
self.creator = msg.sender
def set(k, v):
if msg.sender == self.creator:
self.values[k] = v
return(1)
else:
return(0)
def get(k):
return(self.values[k])
'''
def test_data_feeds():
s = tester.state()
c = s.abi_contract(data_feed_code, sender=tester.k0)
o2 = c.get(500)
assert o2 == 0
o3 = c.set(500, 19)
assert o3 == 1
o4 = c.get(500)
assert o4 == 19
o5 = c.set(500, 726, sender=tester.k1)
assert o5 == 0
o6 = c.set(500, 726)
assert o6 == 1
return s, c
# Test an example hedging contract, using the data feed. This tests
# contracts calling other contracts
hedge_code = '''
extern datafeed: [set:ii, get:i]
data partyone
data partytwo
data hedgeValue
data datafeed
data index
data fiatValue
data maturity
def main(datafeed, index):
if !self.partyone:
self.partyone = msg.sender
self.hedgeValue = msg.value
self.datafeed = datafeed
self.index = index
return(1)
elif !self.partytwo:
ethvalue = self.hedgeValue
if msg.value >= ethvalue:
self.partytwo = msg.sender
c = self.datafeed.get(self.index)
othervalue = ethvalue * c
self.fiatValue = othervalue
self.maturity = block.timestamp + 500
return(othervalue)
else:
othervalue = self.fiatValue
ethvalue = othervalue / self.datafeed.get(self.index)
if ethvalue >= self.balance:
send(self.partyone, self.balance)
return(3)
elif block.timestamp > self.maturity:
send(self.partytwo, self.balance - ethvalue)
send(self.partyone, ethvalue)
return(4)
else:
return(5)
'''
def test_hedge():
s, c = test_data_feeds()
c2 = s.abi_contract(hedge_code, sender=tester.k0)
# Have the first party register, sending 10^16 wei and
# asking for a hedge using currency code 500
o1 = c2.main(c.address, 500, value=10 ** 16)
assert o1 == 1
# Have the second party register. It should receive the
# amount of units of the second currency that it is
# entitled to. Note that from the previous test this is
# set to 726
o2 = c2.main(0, 0, value=10 ** 16, sender=tester.k2)
assert o2 == 7260000000000000000
snapshot = s.snapshot()
# Set the price of the asset down to 300 wei
o3 = c.set(500, 300)
assert o3 == 1
# Finalize the contract. Expect code 3, meaning a margin call
o4 = c2.main(0, 0)
assert o4 == 3
s.revert(snapshot)
# Don't change the price. Finalize, and expect code 5, meaning
# the time has not expired yet
o5 = c2.main(0, 0)
assert o5 == 5
s.mine(100, tester.a3)
    # Mine 100 blocks, and try. Expect code 4, meaning a normal execution
# where both get their share
o6 = c2.main(0, 0)
assert o6 == 4
# Test the LIFO nature of call
arither_code = '''
def init():
self.storage[0] = 10
def f1():
self.storage[0] += 1
def f2():
self.storage[0] *= 10
self.f1()
self.storage[0] *= 10
def f3():
return(self.storage[0])
'''
def test_lifo():
s = tester.state()
c = s.abi_contract(arither_code)
c.f2()
assert c.f3() == 1010
# Test suicides and suicide reverts
suicider_code = '''
def mainloop(rounds):
self.storage[15] = 40
self.suicide()
i = 0
while i < rounds:
i += 1
self.storage[i] = i
def entry(rounds):
self.storage[15] = 20
self.mainloop(rounds, gas=msg.gas - 600)
def ping_ten():
return(10)
def suicide():
suicide(0)
def ping_storage15():
return(self.storage[15])
'''
def test_suicider():
s = tester.state()
c = s.abi_contract(suicider_code)
prev_gas_limit = tester.gas_limit
tester.gas_limit = 200000
# Run normally: suicide processes, so the attempt to ping the
# contract fails
c.entry(5)
o2 = c.ping_ten()
assert o2 is None
c = s.abi_contract(suicider_code)
# Run the suicider in such a way that it suicides in a sub-call,
# then runs out of gas, leading to a revert of the suicide and the
# storage mutation
c.entry(8000)
# Check that the suicide got reverted
o2 = c.ping_ten()
assert o2 == 10
# Check that the storage op got reverted
o3 = c.ping_storage15()
assert o3 == 20
tester.gas_limit = prev_gas_limit
# Test reverts
reverter_code = '''
def entry():
self.non_recurse(gas=100000)
self.recurse(gas=100000)
def non_recurse():
send(7, 9)
self.storage[8080] = 4040
self.storage[160160] = 2020
def recurse():
send(8, 9)
self.storage[8081] = 4039
self.storage[160161] = 2019
self.recurse()
while msg.gas > 0:
self.storage["waste_some_gas"] = 0
'''
def test_reverter():
s = tester.state()
c = s.abi_contract(reverter_code, endowment=10 ** 15)
c.entry()
assert s.block.get_storage_data(c.address, 8080) == 4040
assert s.block.get_balance(decode_hex('0' * 39 + '7')) == 9
assert s.block.get_storage_data(c.address, 8081) == 0
assert s.block.get_balance(decode_hex('0' * 39 + '8')) == 0
# Test stateless contracts
add1_code = \
'''
def main(x):
self.storage[1] += x
'''
filename3 = "stateless_qwertyuioplkjhgfdsa.se"
callcode_test_code = \
'''
extern add1: [main:i]
x = create("%s")
x.main(6)
x.main(4, call=code)
x.main(60, call=code)
x.main(40)
return(self.storage[1])
''' % filename3
def test_callcode():
s = tester.state()
open(filename3, 'w').write(add1_code)
c = s.contract(callcode_test_code)
o1 = s.send(tester.k0, c, 0)
os.remove(filename3)
assert utils.big_endian_to_int(o1) == 64
# https://github.com/ethereum/serpent/issues/8
array_code = '''
def main():
a = array(1)
a[0] = 1
return(a, items=1)
'''
def test_array():
s = tester.state()
c = s.abi_contract(array_code)
assert c.main() == [1]
array_code2 = '''
def main():
a = array(1)
something = 2
a[0] = 1
return(a, items=1)
'''
def test_array2():
s = tester.state()
c = s.abi_contract(array_code2)
assert c.main() == [1]
array_code3 = """
def main():
a = array(3)
return(a, items=3)
"""
def test_array3():
s = tester.state()
c = s.abi_contract(array_code3)
assert c.main() == [0, 0, 0]
calltest_code = """
def main():
self.first(1, 2, 3, 4, 5)
self.second(2, 3, 4, 5, 6)
self.third(3, 4, 5, 6, 7)
def first(a, b, c, d, e):
self.storage[1] = a * 10000 + b * 1000 + c * 100 + d * 10 + e
def second(a, b, c, d, e):
self.storage[2] = a * 10000 + b * 1000 + c * 100 + d * 10 + e
def third(a, b, c, d, e):
self.storage[3] = a * 10000 + b * 1000 + c * 100 + d * 10 + e
def get(k):
return(self.storage[k])
"""
def test_calls():
s = tester.state()
c = s.abi_contract(calltest_code)
c.main()
assert 12345 == c.get(1)
assert 23456 == c.get(2)
assert 34567 == c.get(3)
c.first(4, 5, 6, 7, 8)
assert 45678 == c.get(1)
c.second(5, 6, 7, 8, 9)
assert 56789 == c.get(2)
storage_object_test_code = """
extern moo: [ping, query_chessboard:ii, query_items:ii, query_person, query_stats:i, testping:ii, testping2:i]
data chessboard[8][8]
data users[100](health, x, y, items[5])
data person(head, arms[2](elbow, fingers[5]), legs[2])
def ping():
self.chessboard[0][0] = 1
self.chessboard[0][1] = 2
self.chessboard[3][0] = 3
self.users[0].health = 100
self.users[1].x = 15
self.users[1].y = 12
self.users[1].items[2] = 9
self.users[80].health = self
self.users[80].items[3] = self
self.person.head = 555
self.person.arms[0].elbow = 556
self.person.arms[0].fingers[0] = 557
self.person.arms[0].fingers[4] = 558
self.person.legs[0] = 559
self.person.arms[1].elbow = 656
self.person.arms[1].fingers[0] = 657
self.person.arms[1].fingers[4] = 658
self.person.legs[1] = 659
self.person.legs[1] += 1000
def query_chessboard(x, y):
return(self.chessboard[x][y])
def query_stats(u):
return([self.users[u].health, self.users[u].x, self.users[u].y]:arr)
def query_items(u, i):
return(self.users[u].items[i])
def query_person():
a = array(15)
a[0] = self.person.head
a[1] = self.person.arms[0].elbow
a[2] = self.person.arms[1].elbow
a[3] = self.person.legs[0]
a[4] = self.person.legs[1]
i = 0
while i < 5:
a[5 + i] = self.person.arms[0].fingers[i]
a[10 + i] = self.person.arms[1].fingers[i]
i += 1
return(a:arr)
def testping(x, y):
return([self.users[80].health.testping2(x), self.users[80].items[3].testping2(y)]:arr)
def testping2(x):
return(x*x)
"""
def test_storage_objects():
s = tester.state()
c = s.abi_contract(storage_object_test_code)
c.ping()
assert 1 == c.query_chessboard(0, 0)
assert 2 == c.query_chessboard(0, 1)
assert 3 == c.query_chessboard(3, 0)
assert [100, 0, 0] == c.query_stats(0)
assert [0, 15, 12] == c.query_stats(1)
assert 0 == c.query_items(1, 3)
assert 0 == c.query_items(0, 2)
assert 9 == c.query_items(1, 2)
assert [555, 556, 656, 559, 1659,
557, 0, 0, 0, 558,
657, 0, 0, 0, 658] == c.query_person()
assert [361, 441] == c.testping(19, 21)
infinite_storage_object_test_code = """
data chessboard[][8]
data users[100](health, x, y, items[])
data person(head, arms[](elbow, fingers[5]), legs[2])
def ping():
self.chessboard[0][0] = 1
self.chessboard[0][1] = 2
self.chessboard[3][0] = 3
self.users[0].health = 100
self.users[1].x = 15
self.users[1].y = 12
self.users[1].items[2] = 9
self.person.head = 555
self.person.arms[0].elbow = 556
self.person.arms[0].fingers[0] = 557
self.person.arms[0].fingers[4] = 558
self.person.legs[0] = 559
self.person.arms[1].elbow = 656
self.person.arms[1].fingers[0] = 657
self.person.arms[1].fingers[4] = 658
self.person.legs[1] = 659
self.person.legs[1] += 1000
def query_chessboard(x, y):
return(self.chessboard[x][y])
def query_stats(u):
return([self.users[u].health, self.users[u].x, self.users[u].y]:arr)
def query_items(u, i):
return(self.users[u].items[i])
def query_person():
a = array(15)
a[0] = self.person.head
a[1] = self.person.arms[0].elbow
a[2] = self.person.arms[1].elbow
a[3] = self.person.legs[0]
a[4] = self.person.legs[1]
i = 0
while i < 5:
a[5 + i] = self.person.arms[0].fingers[i]
a[10 + i] = self.person.arms[1].fingers[i]
i += 1
return(a:arr)
"""
def test_infinite_storage_objects():
s = tester.state()
c = s.abi_contract(infinite_storage_object_test_code)
c.ping()
assert 1 == c.query_chessboard(0, 0)
assert 2 == c.query_chessboard(0, 1)
assert 3 == c.query_chessboard(3, 0)
assert [100, 0, 0] == c.query_stats(0)
assert [0, 15, 12] == c.query_stats(1)
assert 0 == c.query_items(1, 3)
assert 0 == c.query_items(0, 2)
assert 9 == c.query_items(1, 2)
assert [555, 556, 656, 559, 1659,
557, 0, 0, 0, 558,
657, 0, 0, 0, 658] == c.query_person()
fail1 = """
data person(head, arms[2](elbow, fingers[5]), legs[2])
x = self.person.arms[0]
"""
fail2 = """
data person(head, arms[2](elbow, fingers[5]), legs[2])
x = self.person.arms[0].fingers
"""
fail3 = """
data person(head, arms[2](elbow, fingers[5]), legs[2])
x = self.person.arms[0].fingers[4][3]
"""
fail4 = """
data person(head, arms[2](elbow, fingers[5]), legs[2])
x = self.person.arms.elbow[0].fingers[4]
"""
fail5 = """
data person(head, arms[2](elbow, fingers[5]), legs[2])
x = self.person.arms[0].fingers[4].nail
"""
fail6 = """
data person(head, arms[2](elbow, fingers[5]), legs[2])
x = self.person.arms[0].elbow.skin
"""
fail7 = """
def return_array():
return([1,2,3], items=3)
def main():
return(self.return_array())
"""
def test_storagevar_fails():
s = tester.state()
success1, success2, success3, success4, success5, success6 = \
0, 0, 0, 0, 0, 0
try:
s.contract(fail1)
except Exception as e:
success1 = "Storage variable access not deep enough" in str(e)
assert success1, e
try:
s.contract(fail2)
except Exception as e:
success2 = "Too few array index lookups" in str(e)
assert success2, e
try:
s.contract(fail3)
except Exception as e:
success3 = "Too many array index lookups" in str(e)
assert success3, e
try:
s.contract(fail4)
except Exception as e:
success4 = "Too few array index lookups" in str(e)
assert success4, e
try:
s.contract(fail5)
except Exception as e:
success5 = "Invalid object member" in str(e)
assert success5, e
try:
s.contract(fail6)
except Exception as e:
success6 = "Invalid object member" in str(e)
assert success6, e
def test_type_system_fails():
s = tester.state()
success7 = False
try:
s.contract(fail7)
except Exception as e:
success7 = "Please specify maximum" in str(e)
assert success7, e
working_returnarray_code = """
def return_array():
return([1,2,3], items=3)
def main():
return(self.return_array(outitems=3):arr)
"""
def test_returnarray_code():
s = tester.state()
c = s.abi_contract(working_returnarray_code)
assert c.main() == [1, 2, 3]
crowdfund_code = """
data campaigns[2^80](recipient, goal, deadline, contrib_total, contrib_count, contribs[2^50](sender, value))
def create_campaign(id, recipient, goal, timelimit):
if self.campaigns[id].recipient:
return(0)
self.campaigns[id].recipient = recipient
self.campaigns[id].goal = goal
self.campaigns[id].deadline = block.timestamp + timelimit
def contribute(id):
# Update contribution total
total_contributed = self.campaigns[id].contrib_total + msg.value
self.campaigns[id].contrib_total = total_contributed
# Record new contribution
sub_index = self.campaigns[id].contrib_count
self.campaigns[id].contribs[sub_index].sender = msg.sender
self.campaigns[id].contribs[sub_index].value = msg.value
self.campaigns[id].contrib_count = sub_index + 1
# Enough funding?
if total_contributed >= self.campaigns[id].goal:
send(self.campaigns[id].recipient, total_contributed)
self.clear(id)
return(1)
# Expired?
if block.timestamp > self.campaigns[id].deadline:
i = 0
c = self.campaigns[id].contrib_count
while i < c:
send(self.campaigns[id].contribs[i].sender, self.campaigns[id].contribs[i].value)
i += 1
self.clear(id)
return(2)
# Progress report [2, id]
def progress_report(id):
return(self.campaigns[id].contrib_total)
# Clearing function for internal use
def clear(self, id):
if self == msg.sender:
self.campaigns[id].recipient = 0
self.campaigns[id].goal = 0
self.campaigns[id].deadline = 0
c = self.campaigns[id].contrib_count
self.campaigns[id].contrib_count = 0
self.campaigns[id].contrib_total = 0
i = 0
while i < c:
self.campaigns[id].contribs[i].sender = 0
self.campaigns[id].contribs[i].value = 0
i += 1
"""
def test_crowdfund():
s = tester.state()
c = s.abi_contract(crowdfund_code)
# Create a campaign with id 100
c.create_campaign(100, 45, 100000, 2)
# Create a campaign with id 200
c.create_campaign(200, 48, 100000, 2)
# Make some contributions
c.contribute(100, value=1, sender=tester.k1)
assert 1 == c.progress_report(100)
c.contribute(200, value=30000, sender=tester.k2)
c.contribute(100, value=59049, sender=tester.k3)
assert 59050 == c.progress_report(100)
c.contribute(200, value=70001, sender=tester.k4)
# Expect the 100001 units to be delivered to the destination
# account for campaign 2
assert 100001 == s.block.get_balance(utils.int_to_addr(48))
mida1 = s.block.get_balance(tester.a1)
mida3 = s.block.get_balance(tester.a3)
# Mine 5 blocks to expire the campaign
s.mine(5)
# Ping the campaign after expiry
c.contribute(100, value=1)
# Expect refunds
assert mida1 + 1 == s.block.get_balance(tester.a1)
assert mida3 + 59049 == s.block.get_balance(tester.a3)
saveload_code = """
data store[1000]
def kall():
a = text("sir bobalot to the rescue !!1!1!!1!1")
save(self.store[0], a, chars=60)
b = load(self.store[0], chars=60)
c = load(self.store[0], chars=33)
return([a[0], a[1], b[0], b[1], c[0], c[1]]:arr)
"""
def test_saveload():
s = tester.state()
c = s.abi_contract(saveload_code)
o = c.kall()
assert o[0] == 0x73697220626f62616c6f7420746f207468652072657363756520212131213121, bitcoin.encode(o[0], 16)
assert o[1] == 0x2131213100000000000000000000000000000000000000000000000000000000, bitcoin.encode(o[1], 16)
assert o[2] == 0x73697220626f62616c6f7420746f207468652072657363756520212131213121, bitcoin.encode(o[2], 16)
assert o[3] == 0x2131213100000000000000000000000000000000000000000000000000000000, bitcoin.encode(o[3], 16)
assert o[4] == 0x73697220626f62616c6f7420746f207468652072657363756520212131213121, bitcoin.encode(o[4], 16)
assert o[5] == 0x2100000000000000000000000000000000000000000000000000000000000000, bitcoin.encode(o[5], 16)
saveload_code2 = """
data buf
data buf2
mystr = text("01ab")
save(self.buf, mystr:str)
save(self.buf2, mystr, chars=4)
"""
def test_saveload2():
s = tester.state()
c = s.contract(saveload_code2)
s.send(tester.k0, c, 0)
assert bitcoin.encode(s.block.get_storage_data(c, 0), 256) == b'01ab' + b'\x00' * 28
assert bitcoin.encode(s.block.get_storage_data(c, 1), 256) == b'01ab' + b'\x00' * 28
sdiv_code = """
def kall():
return([2^255 / 2^253, 2^255 % 3]:arr)
"""
def test_sdiv():
s = tester.state()
c = s.abi_contract(sdiv_code)
assert [-4, -2] == c.kall()
basic_argcall_code = """
def argcall(args:arr):
log(1)
o = (args[0] + args[1] * 10 + args[2] * 100)
log(4)
return o
def argkall(args:arr):
log(2)
o = self.argcall(args)
log(3)
return o
"""
def test_argcall():
s = tester.state()
c = s.abi_contract(basic_argcall_code)
assert 375 == c.argcall([5, 7, 3])
assert 376 == c.argkall([6, 7, 3])
more_complex_argcall_code = """
def argcall(args:arr):
args[0] *= 2
args[1] *= 2
return(args:arr)
def argkall(args:arr):
return(self.argcall(args, outsz=2):arr)
"""
def test_argcall2():
s = tester.state()
c = s.abi_contract(more_complex_argcall_code)
assert [4, 8] == c.argcall([2, 4])
assert [6, 10] == c.argkall([3, 5])
sort_code = """
def sort(args:arr):
if len(args) < 2:
return(args:arr)
h = array(len(args))
hpos = 0
l = array(len(args))
lpos = 0
i = 1
while i < len(args):
if args[i] < args[0]:
l[lpos] = args[i]
lpos += 1
else:
h[hpos] = args[i]
hpos += 1
i += 1
x = slice(h, items=0, items=hpos)
h = self.sort(x, outsz=hpos)
l = self.sort(slice(l, items=0, items=lpos), outsz=lpos)
o = array(len(args))
i = 0
while i < lpos:
o[i] = l[i]
i += 1
o[lpos] = args[0]
i = 0
while i < hpos:
o[lpos + 1 + i] = h[i]
i += 1
return(o:arr)
"""
@pytest.mark.timeout(100)
def test_sort():
s = tester.state()
c = s.abi_contract(sort_code)
assert c.sort([9]) == [9]
assert c.sort([9, 5]) == [5, 9]
assert c.sort([9, 3, 5]) == [3, 5, 9]
assert c.sort([80, 234, 112, 112, 29]) == [29, 80, 112, 112, 234]
filename9 = "mul2_qwertyuioplkjhgfdsabarbar.se"
sort_tester_code = \
'''
extern sorter: [sort:a]
data sorter
def init():
self.sorter = create("%s")
def test(args:arr):
return(self.sorter.sort(args, outsz=len(args)):arr)
''' % filename9
@pytest.mark.timeout(100)
def test_indirect_sort():
s = tester.state()
open(filename9, 'w').write(sort_code)
c = s.abi_contract(sort_tester_code)
os.remove(filename9)
assert c.test([80, 234, 112, 112, 29]) == [29, 80, 112, 112, 234]
multiarg_code = """
def kall(a:arr, b, c:arr, d:str, e):
x = a[0] + 10 * b + 100 * c[0] + 1000 * a[1] + 10000 * c[1] + 100000 * e
return([x, getch(d, 0) + getch(d, 1) + getch(d, 2), len(d)]:arr)
"""
def test_multiarg_code():
s = tester.state()
c = s.abi_contract(multiarg_code)
o = c.kall([1, 2, 3], 4, [5, 6, 7], "doge", 8)
assert o == [862541, safe_ord('d') + safe_ord('o') + safe_ord('g'), 4]
peano_code = """
macro padd($x, psuc($y)):
psuc(padd($x, $y))
macro padd($x, z()):
$x
macro dec(psuc($x)):
dec($x) + 1
macro dec(z()):
0
macro pmul($x, z()):
z()
macro pmul($x, psuc($y)):
padd(pmul($x, $y), $x)
macro pexp($x, z()):
one()
macro pexp($x, psuc($y)):
pmul($x, pexp($x, $y))
macro fac(z()):
one()
macro fac(psuc($x)):
pmul(psuc($x), fac($x))
macro one():
psuc(z())
macro two():
psuc(psuc(z()))
macro three():
psuc(psuc(psuc(z())))
macro five():
padd(three(), two())
def main():
return([dec(pmul(three(), pmul(three(), three()))), dec(fac(five()))]:arr)
"""
def test_macros():
s = tester.state()
c = s.abi_contract(peano_code)
assert c.main() == [27, 120]
type_code = """
type f: [a, b, c, d, e]
macro f($a) + f($b):
f(add($a, $b))
macro f($a) - f($b):
f(sub($a, $b))
macro f($a) * f($b):
f(mul($a, $b) / 10000)
macro f($a) / f($b):
f(sdiv($a * 10000, $b))
macro f($a) % f($b):
f(smod($a, $b))
macro f($v) = f($w):
$v = $w
macro(10) f($a):
$a / 10000
macro fify($a):
f($a * 10000)
a = fify(5)
b = fify(2)
c = a / b
e = c + (a / b)
return(e)
"""
def test_types():
s = tester.state()
c = s.contract(type_code)
assert utils.big_endian_to_int(s.send(tester.k0, c, 0)) == 5
ecrecover_code = """
def test_ecrecover(h:uint256, v:uint256, r:uint256, s:uint256):
return(ecrecover(h, v, r, s))
"""
def test_ecrecover():
s = tester.state()
c = s.abi_contract(ecrecover_code)
priv = utils.sha3('some big long brainwallet password')
pub = bitcoin.privtopub(priv)
msghash = utils.sha3('the quick brown fox jumps over the lazy dog')
pk = PrivateKey(priv, raw=True)
signature = pk.ecdsa_recoverable_serialize(
pk.ecdsa_sign_recoverable(msghash, raw=True)
)
signature = signature[0] + chr(signature[1])
V = ord(signature[64]) + 27
R = big_endian_to_int(signature[0:32])
S = big_endian_to_int(signature[32:64])
assert bitcoin.ecdsa_raw_verify(msghash, (V, R, S), pub)
addr = utils.big_endian_to_int(utils.sha3(bitcoin.encode_pubkey(pub, 'bin')[1:])[12:])
assert utils.big_endian_to_int(utils.privtoaddr(priv)) == addr
result = c.test_ecrecover(utils.big_endian_to_int(msghash), V, R, S)
assert result == addr
sha256_code = """
def main():
return([sha256(0, chars=0), sha256(3), sha256(text("doge"), chars=3), sha256(text("dog"):str), sha256([0,0,0,0,0]:arr), sha256([0,0,0,0,0,0], items=5)]:arr)
"""
def test_sha256():
s = tester.state()
c = s.abi_contract(sha256_code)
assert c.main() == [
0xe3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 - 2 ** 256,
0xd9147961436944f43cd99d28b2bbddbf452ef872b30c8279e255e7daafc7f946 - 2 ** 256,
0xcd6357efdd966de8c0cb2f876cc89ec74ce35f0968e11743987084bd42fb8944 - 2 ** 256,
0xcd6357efdd966de8c0cb2f876cc89ec74ce35f0968e11743987084bd42fb8944 - 2 ** 256,
0xb393978842a0fa3d3e1470196f098f473f9678e72463cb65ec4ab5581856c2e4 - 2 ** 256,
0xb393978842a0fa3d3e1470196f098f473f9678e72463cb65ec4ab5581856c2e4 - 2 ** 256
]
ripemd160_code = """
def main():
return([ripemd160(0, chars=0), ripemd160(3), ripemd160(text("doge"), chars=3), ripemd160(text("dog"):str), ripemd160([0,0,0,0,0]:arr), ripemd160([0,0,0,0,0,0], items=5)]:arr)
"""
def test_ripemd160():
s = tester.state()
c = s.abi_contract(ripemd160_code)
assert c.main() == [
0x9c1185a5c5e9fc54612808977ee8f548b2258d31,
0x44d90e2d3714c8663b632fcf0f9d5f22192cc4c8,
0x2a5756a3da3bc6e4c66a65028f43d31a1290bb75,
0x2a5756a3da3bc6e4c66a65028f43d31a1290bb75,
0x9164cab7f680fd7a790080f2e76e049811074349,
0x9164cab7f680fd7a790080f2e76e049811074349]
sha3_code = """
def main():
return([sha3(0, chars=0), sha3(3), sha3(text("doge"), chars=3), sha3(text("dog"):str), sha3([0,0,0,0,0]:arr), sha3([0,0,0,0,0,0], items=5)]:arr)
"""
def test_sha3():
s = tester.state()
c = s.abi_contract(sha3_code)
assert c.main() == [
0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470 - 2 ** 256,
0xc2575a0e9e593c00f959f8c92f12db2869c3395a3b0502d05e2516446f71f85b - 2 ** 256,
0x41791102999c339c844880b23950704cc43aa840f3739e365323cda4dfa89e7a,
0x41791102999c339c844880b23950704cc43aa840f3739e365323cda4dfa89e7a,
0xdfded4ed5ac76ba7379cfe7b3b0f53e768dca8d45a34854e649cfc3c18cbd9cd - 2 ** 256,
0xdfded4ed5ac76ba7379cfe7b3b0f53e768dca8d45a34854e649cfc3c18cbd9cd - 2 ** 256
]
types_in_functions_code = """
type fixedp: [a, b]
macro fixedp($x) * fixedp($y):
fixedp($x * $y / 2^64)
macro fixedp($x) / fixedp($y):
fixedp($x * 2^64 / $y)
macro raw_unfixedp(fixedp($x)):
$x / 2^64
macro set(fixedp($x), $y):
$x = 2^64 * $y
macro fixedp($x) = fixedp($y):
$x = $y
def sqrdiv(a, b):
return(raw_unfixedp((a / b) * (a / b)))
"""
def test_types_in_functions():
s = tester.state()
c = s.abi_contract(types_in_functions_code)
assert c.sqrdiv(25, 2) == 156
more_infinites_code = """
data a[](b, c)
def testVerifyTx():
self.a[0].b = 33
self.a[0].c = 55
return(self.a[0].b)
"""
def test_more_infinites():
s = tester.state()
c = s.abi_contract(more_infinites_code)
assert c.testVerifyTx() == 33
prevhashes_code = """
def get_prevhashes(k):
o = array(k)
i = 0
while i < k:
o[i] = block.prevhash(i)
i += 1
return(o:arr)
"""
@pytest.mark.timeout(100)
def test_prevhashes():
s = tester.state()
c = s.abi_contract(prevhashes_code)
s.mine(7)
# Hashes of last 14 blocks including existing one
o1 = [x % 2 ** 256 for x in c.get_prevhashes(14)]
# hash of self = 0, hash of blocks back to genesis block as is, hash of
# blocks before genesis block = 0
t1 = [0] + [utils.big_endian_to_int(b.hash) for b in s.blocks[-2::-1]] \
+ [0] * 6
assert o1 == t1
s.mine(256)
    # Test the 256 limit: only ancestors between 1 and 256 generations back get non-zero hashes
o2 = [x % 2 ** 256 for x in c.get_prevhashes(270)]
t2 = [0] + [utils.big_endian_to_int(b.hash) for b in s.blocks[-2:-258:-1]] \
+ [0] * 13
assert o2 == t2
abi_contract_code = """
def mul2(a):
return(a * 2)
def returnten():
return(10)
"""
def test_abi_contract():
s = tester.state()
c = s.abi_contract(abi_contract_code)
assert c.mul2(3) == 6
assert c.returnten() == 10
mcopy_code = """
def mcopy_test(foo:str, a, b, c):
info = string(32*3 + len(foo))
info[0] = a
info[1] = b
info[2] = c
mcopy(info+(items=3), foo, len(foo))
return(info:str)
"""
def test_mcopy():
s = tester.state()
c = s.abi_contract(mcopy_code)
assert c.mcopy_test("123", 5, 6, 259) == \
b'\x00'*31+b'\x05'+b'\x00'*31+b'\x06'+b'\x00'*30+b'\x01\x03'+b'123'
mcopy_code_2 = """
def mcopy_test():
myarr = array(3)
myarr[0] = 99
myarr[1] = 111
myarr[2] = 119
mystr = string(96)
mcopy(mystr, myarr, items=3)
return(mystr:str)
"""
def test_mcopy2():
s = tester.state()
c = s.abi_contract(mcopy_code_2)
assert c.mcopy_test() == \
b''.join([utils.zpad(utils.int_to_big_endian(x), 32) for x in [99, 111, 119]])
array_saveload_code = """
data a[5]
def array_saveload():
a = [1,2,3,4,5]
save(self.a[0], a, items=5)
a = load(self.a[0], items=4)
log(len(a))
return(load(self.a[0], items=4):arr)
"""
def test_saveload3():
s = tester.state()
c = s.abi_contract(array_saveload_code)
assert c.array_saveload() == [1, 2, 3, 4]
string_manipulation_code = """
def f1(istring:str):
setch(istring, 0, "a")
setch(istring, 1, "b")
return(istring:str)
def t1():
istring = text("cd")
res = self.f1(istring, outchars=2)
return([getch(res,0), getch(res,1)]:arr) # should return [97,98]
"""
def test_string_manipulation():
s = tester.state()
c = s.abi_contract(string_manipulation_code)
assert c.t1() == [97, 98]
more_infinite_storage_object_code = """
data block[2^256](_blockHeader(_prevBlock))
data numAncestorDepths
data logs[2]
def initAncestorDepths():
self.numAncestorDepths = 2
def testStoreB(number, blockHash, hashPrevBlock, i):
self.block[blockHash]._blockHeader._prevBlock = hashPrevBlock
self.logs[i] = self.numAncestorDepths
def test2():
self.initAncestorDepths()
self.testStoreB(45, 45, 44, 0)
self.testStoreB(46, 46, 45, 1)
return ([self.logs[0], self.logs[1]]:arr)
"""
def test_more_infinite_storage():
s = tester.state()
c = s.abi_contract(more_infinite_storage_object_code)
assert c.test2() == [2, 2]
double_array_code = """
def foo(a:arr, b:arr):
i = 0
tot = 0
while i < len(a):
tot = tot * 10 + a[i]
i += 1
j = 0
tot2 = 0
while j < len(b):
tot2 = tot2 * 10 + b[j]
j += 1
return ([tot, tot2]:arr)
def bar(a:arr, m:str, b:arr):
return(self.foo(a, b, outitems=2):arr)
"""
def test_double_array():
s = tester.state()
c = s.abi_contract(double_array_code)
assert c.foo([1, 2, 3], [4, 5, 6, 7]) == [123, 4567]
assert c.bar([1, 2, 3], "moo", [4, 5, 6, 7]) == [123, 4567]
abi_logging_code = """
event rabbit(x)
event frog(y:indexed)
event moose(a, b:str, c:indexed, d:arr)
event chicken(m:address:indexed)
def test_rabbit(eks):
log(type=rabbit, eks)
def test_frog(why):
log(type=frog, why)
def test_moose(eh, bee:str, see, dee:arr):
log(type=moose, eh, bee, see, dee)
def test_chicken(em:address):
log(type=chicken, em)
"""
def test_abi_logging():
s = tester.state()
c = s.abi_contract(abi_logging_code)
o = []
s.block.log_listeners.append(lambda x: o.append(c.translator.listen(x)))
c.test_rabbit(3)
assert o == [{"_event_type": b"rabbit", "x": 3}]
o.pop()
c.test_frog(5)
assert o == [{"_event_type": b"frog", "y": 5}]
o.pop()
c.test_moose(7, "nine", 11, [13, 15, 17])
assert o == [{"_event_type": b"moose", "a": 7, "b": b"nine",
"c": 11, "d": [13, 15, 17]}]
o.pop()
c.test_chicken(tester.a0)
assert o == [{"_event_type": b"chicken",
"m": utils.encode_hex(tester.a0)}]
o.pop()
new_format_inner_test_code = """
def foo(a, b:arr, c:str):
return a * 10 + b[1]
"""
filename4 = "nfitc2635987162498621846198246.se"
new_format_outer_test_code = """
extern blah: [foo:[int256,int256[],bytes]:int256]
def bar():
x = create("%s")
return x.foo(17, [3, 5, 7], text("dog"))
""" % filename4
def test_new_format():
s = tester.state()
open(filename4, 'w').write(new_format_inner_test_code)
c = s.abi_contract(new_format_outer_test_code)
assert c.bar() == 175
abi_address_output_test_code = """
data addrs[]
def get_address(key):
return(self.addrs[key]:address)
def register(key, addr:address):
if not self.addrs[key]:
self.addrs[key] = addr
"""
def test_abi_address_output():
s = tester.state()
c = s.abi_contract(abi_address_output_test_code)
c.register(123, b'1212121212121212121212121212121212121212')
c.register(123, b'3434343434343434343434343434343434343434')
c.register(125, b'5656565656565656565656565656565656565656')
assert c.get_address(123) == b'1212121212121212121212121212121212121212'
assert c.get_address(125) == b'5656565656565656565656565656565656565656'
filename5 = 'abi_output_tester_1264876521746198724124'
abi_address_caller_code = """
extern foo: [get_address:[int256]:address, register:[int256,address]:_]
data sub
def init():
self.sub = create("%s")
def get_address(key):
return(self.sub.get_address(key):address)
def register(key, addr:address):
self.sub.register(key, addr)
""" % filename5
def test_inner_abi_address_output():
s = tester.state()
open(filename5, 'w').write(abi_address_output_test_code)
c = s.abi_contract(abi_address_caller_code)
c.register(123, b'1212121212121212121212121212121212121212')
c.register(123, b'3434343434343434343434343434343434343434')
c.register(125, b'5656565656565656565656565656565656565656')
assert c.get_address(123) == b'1212121212121212121212121212121212121212'
assert c.get_address(125) == b'5656565656565656565656565656565656565656'
string_logging_code = """
event foo(x:string:indexed, y:bytes:indexed, z:str:indexed)
def moo():
log(type=foo, text("bob"), text("cow"), text("dog"))
"""
def test_string_logging():
s = tester.state()
c = s.abi_contract(string_logging_code)
o = []
s.block.log_listeners.append(lambda x: o.append(c.translator.listen(x)))
c.moo()
assert o == [{"_event_type": "foo", "x": "bob", "__hash_x": utils.sha3("bob"),
"y": "cow", "__hash_y": utils.sha3("cow"), "z": "dog",
"__hash_z": utils.sha3("dog")}]
params_code = """
data blah
def init():
self.blah = $FOO
def garble():
return(self.blah)
def marble():
return(text($BAR):str)
"""
def test_params_contract():
s = tester.state()
c = s.abi_contract(params_code, FOO=4, BAR='horse')
assert c.garble() == 4
assert c.marble() == 'horse'
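# Note: $FOO and $BAR in params_code above appear to be compile-time
# substitution parameters: the keyword arguments passed to abi_contract()
# are spliced into the source before compilation, which is why garble()
# returns 4 and marble() returns the literal 'horse'.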
prefix_types_in_functions_code = """
type fixedp: fp_
macro fixedp($x) * fixedp($y):
fixedp($x * $y / 2^64)
macro fixedp($x) / fixedp($y):
fixedp($x * 2^64 / $y)
macro raw_unfixedp(fixedp($x)):
$x / 2^64
macro set(fixedp($x), $y):
$x = 2^64 * $y
macro fixedp($x) = fixedp($y):
$x = $y
def sqrdiv(fp_a, fp_b):
return(raw_unfixedp((fp_a / fp_b) * (fp_a / fp_b)))
"""
def test_prefix_types_in_functions():
s = tester.state()
c = s.abi_contract(prefix_types_in_functions_code)
assert c.sqrdiv(25, 2) == 156
# test_evm = None
# test_sixten = None
# test_with = None
# test_returnten = None
# test_namecoin = None
# test_inset = None
# test_currency = None
# test_data_feeds = None
# test_hedge = None
# test_lifo = None
# test_suicider = None
# test_reverter = None
# test_callcode = None
# test_array = None
# test_array2 = None
# test_array3 = None
# test_calls = None
# test_storage_objects = None
# test_infinite_storage_objects = None
# test_storagevar_fails = None
# test_type_system_fails = None
# test_returnarray_code = None
# test_saveload = None
# test_saveload2 = None
# test_crowdfund = None
# test_sdiv = None
# test_argcall = None
# test_argcall2 = None
# test_sort = None
# test_indirect_sort = None
# test_multiarg_code = None
# test_macros = None
# test_types = None
# test_sha256 = None
# test_sha3 = None
# test_types_in_functions = None
# test_more_infinites = None
# test_prevhashes = None
# test_abi_contract = None
# test_mcopy = None
# test_saveload3 = None
# test_string_manipulation = None
# test_more_infinite_storage = None
# test_double_array = None
# test_abi_logging = None
# test_new_format = None
# test_abi_address_output = None
# test_string_logging = None
# test_params_contract = None
# test_prefix_types_in_functions = None
|
|
# Copyright 2014 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import importlib
import mock
import unittest
from cloudbaseinit import exception as cbinit_exception
class WindowsNetworkUtilsTests(unittest.TestCase):
def setUp(self):
self._ctypes_mock = mock.MagicMock()
self._moves_mock = mock.MagicMock()
self._module_patcher = mock.patch.dict(
'sys.modules',
{'ctypes': self._ctypes_mock,
'six.moves': self._moves_mock})
self._module_patcher.start()
self.network = importlib.import_module(
'cloudbaseinit.utils.windows.network')
self.network.iphlpapi = mock.MagicMock()
self.network.kernel32 = mock.MagicMock()
self.network.ws2_32 = mock.MagicMock()
def tearDown(self):
self._module_patcher.stop()
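    # The mock.patch.dict('sys.modules', ...) dance in setUp() is the usual
    # trick for testing Windows-only modules on any platform: the mocks are
    # installed *before* importlib imports the module under test, so its
    # top-level 'import ctypes' binds to a MagicMock instead of the real
    # library. A minimal standalone sketch of the same pattern (the module
    # name below is illustrative, not part of cloudbase-init):
    #
    #   patcher = mock.patch.dict('sys.modules', {'ctypes': mock.MagicMock()})
    #   patcher.start()
    #   mod = importlib.import_module('pkg.windows_only_module')
    #   ...exercise mod against the mocked ctypes...
    #   patcher.stop()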
def test_format_mac_address(self):
        phys_address = [0x00, 0x00, 0x00, 0x00]
response = self.network._format_mac_address(phys_address=phys_address,
phys_address_len=4)
self.assertEqual("00:00:00:00", response)
def _test_socket_addr_to_str(self, ret_val):
mock_socket_addr = mock.MagicMock()
mock_create_unicode_buffer = self._ctypes_mock.create_unicode_buffer
mock_byref = self._ctypes_mock.byref
self.network.ws2_32.WSAAddressToStringW.return_value = ret_val
if ret_val:
self.assertRaises(cbinit_exception.CloudbaseInitException,
self.network._socket_addr_to_str,
mock_socket_addr)
self.network.ws2_32.WSAGetLastError.assert_called_once_with()
else:
response = self.network._socket_addr_to_str(mock_socket_addr)
self.assertEqual(mock_create_unicode_buffer.return_value.value,
response)
self._ctypes_mock.wintypes.DWORD.assert_called_once_with(256)
mock_create_unicode_buffer.assert_called_once_with(256)
self.network.ws2_32.WSAAddressToStringW.assert_called_once_with(
mock_socket_addr.lpSockaddr, mock_socket_addr.iSockaddrLength,
None, mock_create_unicode_buffer.return_value,
mock_byref.return_value)
mock_byref.assert_called_once_with(
self._ctypes_mock.wintypes.DWORD.return_value)
def test_socket_addr_to_str(self):
self._test_socket_addr_to_str(ret_val=None)
def test_socket_addr_to_str_fail(self):
self._test_socket_addr_to_str(ret_val=1)
def _test_get_registry_dhcp_server(self, dhcp_server, exception=None):
fake_adapter = mock.sentinel.fake_adapter_name
self._moves_mock.winreg.QueryValueEx.return_value = [dhcp_server]
if exception:
self._moves_mock.winreg.QueryValueEx.side_effect = [exception]
if exception.errno != 2:
self.assertRaises(cbinit_exception.CloudbaseInitException,
self.network._get_registry_dhcp_server,
fake_adapter)
else:
response = self.network._get_registry_dhcp_server(fake_adapter)
if dhcp_server == "255.255.255.255":
self.assertEqual(None, response)
else:
self.assertEqual(dhcp_server, response)
self._moves_mock.winreg.OpenKey.assert_called_once_with(
self._moves_mock.winreg.HKEY_LOCAL_MACHINE,
"SYSTEM\\CurrentControlSet\\Services\\Tcpip\\Parameters\\"
"Interfaces\\%s" % fake_adapter, 0,
self._moves_mock.winreg.KEY_READ)
self._moves_mock.winreg.QueryValueEx.assert_called_once_with(
self._moves_mock.winreg.OpenKey.return_value.__enter__(),
"DhcpServer")
def test_get_registry_dhcp_server(self):
self._test_get_registry_dhcp_server(
dhcp_server=mock.sentinel.dhcp_server)
def test_get_registry_dhcp_server_expected(self):
self._test_get_registry_dhcp_server(dhcp_server="255.255.255.255")
    def test_get_registry_dhcp_server_exception_not_found(self):
ex = cbinit_exception.CloudbaseInitException()
ex.errno = 2
self._test_get_registry_dhcp_server(dhcp_server="", exception=ex)
    def test_get_registry_dhcp_server_exception_other(self):
ex = cbinit_exception.CloudbaseInitException()
ex.errno = 3
self._test_get_registry_dhcp_server(dhcp_server="", exception=ex)
@mock.patch('cloudbaseinit.utils.windows.network._format_mac_address')
@mock.patch('cloudbaseinit.utils.windows.network._socket_addr_to_str')
@mock.patch('cloudbaseinit.utils.windows.network'
'._get_registry_dhcp_server')
def _test_get_adapter_addresses(self, mock_get_registry_dhcp_server,
mock_socket_addr_to_str,
mock_format_mac_address,
ret_val, p, ret_val2, xp_data_length):
self.maxDiff = None
mock_byref = self._ctypes_mock.byref
mock_cast = self._ctypes_mock.cast
mock_POINTER = self._ctypes_mock.POINTER
self.network.iphlpapi.GetAdaptersAddresses.side_effect = [ret_val,
ret_val2]
self.network.kernel32.HeapAlloc.return_value = p
self.network.iphlpapi.IP_ADAPTER_DHCP_ENABLED = True
self.network.iphlpapi.IP_ADAPTER_IPV4_ENABLED = True
self.network.iphlpapi.IP_ADAPTER_ADDRESSES_SIZE_2003 = xp_data_length
p_curr_addr = mock.MagicMock()
compare_cast = []
net_adapters = []
compare_socket_addr_to_str = []
mock_cast.side_effect = [p_curr_addr, None, None]
curr_addr = p_curr_addr.contents
curr_addr.Flags = True
curr_addr.Union1.Struct1.Length = 2
curr_addr.Dhcpv4Server.iSockaddrLength = True
p_unicast_addr = curr_addr.FirstUnicastAddress
unicast_addr = p_unicast_addr.contents
unicast_addresses = [
(mock_socket_addr_to_str.return_value,
unicast_addr.Address.lpSockaddr.contents.sa_family)]
compare_GetAdaptersAddresses = [mock.call(
self.network.ws2_32.AF_UNSPEC,
self.network.iphlpapi.GAA_FLAG_SKIP_ANYCAST,
None, None, mock_byref.return_value)]
if not p:
self.assertRaises(cbinit_exception.CloudbaseInitException,
self.network.get_adapter_addresses)
if ret_val2 and ret_val2 != self.network.kernel32.ERROR_NO_DATA:
self.assertRaises(cbinit_exception.CloudbaseInitException,
self.network.get_adapter_addresses)
compare_cast.append(mock.call(p, mock_POINTER.return_value))
compare_GetAdaptersAddresses.append(mock.call(
self.network.ws2_32.AF_UNSPEC,
self.network.iphlpapi.GAA_FLAG_SKIP_ANYCAST, None,
p_curr_addr, mock_byref.return_value))
else:
response = self.network.get_adapter_addresses()
if ret_val == self.network.kernel32.ERROR_NO_DATA:
self.assertEqual([], response)
elif ret_val == self.network.kernel32.ERROR_BUFFER_OVERFLOW:
self.network.kernel32.GetProcessHeap.assert_called_once_with()
self.network.kernel32.HeapAlloc.assert_called_once_with(
self.network.kernel32.GetProcessHeap.return_value, 0,
self._ctypes_mock.wintypes.ULONG.return_value.value)
self.network.ws2_32.init_wsa.assert_called_once_with()
compare_cast.append(mock.call(p, mock_POINTER.return_value))
compare_GetAdaptersAddresses.append(mock.call(
self.network.ws2_32.AF_UNSPEC,
self.network.iphlpapi.GAA_FLAG_SKIP_ANYCAST, None,
p_curr_addr, mock_byref.return_value))
if ret_val2 == self.network.kernel32.ERROR_NO_DATA:
self.assertEqual([], response)
else:
compare_cast.append(mock.call(p_unicast_addr.contents.Next,
mock_POINTER.return_value))
mock_format_mac_address.assert_called_once_with(
p_curr_addr.contents.PhysicalAddress,
p_curr_addr.contents.PhysicalAddressLength)
if not curr_addr.Union1.Struct1.Length <= xp_data_length:
dhcp_server = mock_socket_addr_to_str.return_value
compare_socket_addr_to_str.append(
mock.call(curr_addr.Dhcpv4Server |
curr_addr.Dhcpv6Server))
else:
dhcp_server = \
mock_get_registry_dhcp_server.return_value
mock_get_registry_dhcp_server.assert_called_once_with(
curr_addr.AdapterName)
compare_cast.append(mock.call(curr_addr.Next,
mock_POINTER.return_value))
self.network.kernel32.HeapFree.assert_called_once_with(
self.network.kernel32.GetProcessHeap.return_value, 0,
p)
self.network.ws2_32.WSACleanup.assert_called_once_with()
compare_socket_addr_to_str.append(mock.call(
unicast_addr.Address))
net_adapters.append(
{"interface_index": curr_addr.Union1.Struct1.IfIndex,
"adapter_name": curr_addr.AdapterName,
"friendly_name": curr_addr.FriendlyName,
"description": curr_addr.Description,
"mtu": curr_addr.Mtu,
"mac_address": mock_format_mac_address.return_value,
"dhcp_enabled": True,
"dhcp_server": dhcp_server,
"interface_type": curr_addr.IfType,
"unicast_addresses": unicast_addresses})
self.assertEqual(net_adapters, response)
self.assertEqual(compare_cast, mock_cast.call_args_list)
self.assertEqual(
compare_GetAdaptersAddresses,
self.network.iphlpapi.GetAdaptersAddresses.call_args_list)
def test_get_adapter_addresses_no_data(self):
self._test_get_adapter_addresses(
ret_val=self.network.kernel32.ERROR_NO_DATA,
p=True, ret_val2=self.network.kernel32.ERROR_NO_DATA,
xp_data_length=3)
def test_get_adapter_addresses_overflow_and_no_data(self):
self._test_get_adapter_addresses(
ret_val=self.network.kernel32.ERROR_BUFFER_OVERFLOW,
p=True, ret_val2=self.network.kernel32.ERROR_NO_DATA,
xp_data_length=3)
def test_get_adapter_addresses_overflow_other_ret_val(self):
self._test_get_adapter_addresses(
ret_val=self.network.kernel32.ERROR_BUFFER_OVERFLOW,
p=True, ret_val2=mock.sentinel.other_return_value,
xp_data_length=3)
def test_get_adapter_addresses_overflow(self):
self._test_get_adapter_addresses(
ret_val=self.network.kernel32.ERROR_BUFFER_OVERFLOW,
p=True, ret_val2=None,
xp_data_length=3)
def test_get_adapter_addresses_overflow_xp_data(self):
self._test_get_adapter_addresses(
ret_val=self.network.kernel32.ERROR_BUFFER_OVERFLOW,
p=True, ret_val2=None,
xp_data_length=0)
|
|
#!/usr/bin/env python
# Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
PURPOSE:
A script that performs basic check operations against the provided
package. This package should be a standard .rpm package that is
pre-built. Further, it should already be present on the local filesystem
    along with the repo code mounted at the /var/wdir directory.
USAGE:
This script is meant to be executed on a server that natively would be
used to install the provided package. It is also meant to be executed
from a docker container, or at the very least, an expendable, virtual
server in case something is not handled properly.
THIS SCRIPT IS NOT MEANT to be used as an install script! It is assumed
that this script is used exclusively for testing purposes.
EXECUTION:
    ./fetch_and_install_deps.py <working_dir> <rpm package>
"""
import errno
import glob
import json
import os
import re
import shutil
import subprocess
import sys
from collections import deque
from collections import namedtuple
from inspect import currentframe as cf
from inspect import getframeinfo as gfi
from inspect import getouterframes as gof
ReqDetails = namedtuple('ReqDetails', 'name, oper, version')
dep_match_re = re.compile(r'^([\w\-\d]+)\s?([<=>]+)\s?(\S+)')
f5_dependency_re = re.compile(r'(f5[\-_].+)')
class InstallError(Exception):
"""InstallError
This is an exception-class object. This object can be used to generate logs
for subsequent reporting and use. It can also be raised in and of itself.
"""
default_msg = "An unknown error has occurred"
default_errnum = errno.ESPIPE
def __init__(self, *args, **kargs):
self._set_errnum(kargs)
self._set_frame(kargs)
self._set_msg(args, kargs)
super(self.__class__, self).__init__(self.msg)
def _set_errnum(self, kargs):
if 'errnum' in kargs:
self.errnum = kargs['errnum']
elif 'errno' in kargs:
self.errnum = kargs['errno']
else:
self.errnum = self.default_errnum
def _set_frame(self, kargs):
if 'frame' in kargs:
self.frame = kargs['frame']
else:
# gof gets a stack of [inner->outer] tuples. tuple[0] is frame
self.frame = gfi(gof(cf())[2][0])
    def _set_msg(self, args, kargs):
        frame = self.frame
        # Base message comes from the 'message'/'msg' keyword (or the
        # default); any positional args are appended as detail so the
        # keyword message is not silently dropped when both are given.
        msg = kargs.get('message', kargs.get('msg', self.default_msg))
        if args:
            # join() requires strings; callers also pass tuples and ints
            msg = msg + ': %s' % ', '.join(str(a) for a in args)
        self.msg = "(%s) %s [%s:%s]" % (str(self.errnum), msg, frame.filename,
                                        str(frame.lineno))
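# A minimal usage sketch for InstallError (command and values illustrative):
#
#   try:
#       raise InstallError("yum install -y foo", msg="Unable to install dep",
#                          errno=errno.ESPIPE, frame=gfi(cf()))
#   except InstallError as err:
#       # err.msg is roughly:
#       # "(29) Unable to install dep: yum install -y foo [<file>:<line>]"
#       print(err.msg)
#       sys.exit(err.errnum)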
class Dependency(object):
"""Dependency
Creates a dependency object instance. This object instance will
    self-orchestrate the actions needed to retrieve the dependency's
    requirements for installation. It depends on yum to retrieve subsequent
dependencies.
"""
install_cmd = "yum install -y %s"
    def __init__(self, req):
        match = dep_match_re.search(str(req))
        if match:
            # A raw requirement string; normalize it into a ReqDetails tuple
            # so the property setters below can unpack it uniformly
            req = ReqDetails(*match.groups())
        self._set_name = req
        self._set_version = req
        self._set_oper = req
        self._set_req = req
@property
def name(self):
return self.__name
@property
def oper(self):
return self.__oper
@property
def req(self):
return self.__req
@property
def version(self):
return self.__version
@name.setter
def _set_name(self, req):
self.__name = req.name
@oper.setter
def _set_oper(self, req):
self.__oper = req.oper
@req.setter
def _set_req(self, req):
self.__req = req
@version.setter
def _set_version(self, req):
self.__version = req.version
def install_req(self):
"""install_req
This object method will install the attribute-defined package yielded at
object creation. This requires running commands at the command prompt.
"""
if '(' in self.name:
return
print("Installing %s(v%s)" % (self.name, self.version))
name = self.pkg_location if hasattr(self, 'pkg_location') \
else self.name
name = 'python-' + name if 'python-' not in name and \
'.rpm' not in name else name
results, status = runCommand(self.install_cmd % name)
if status:
raise InstallError(self.install_cmd % name, str(self.req),
msg="Unable to install dep",
frame=gfi(cf()), errno=errno.ESPIPE)
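# A minimal usage sketch for Dependency (the package name is illustrative):
#
#   dep = Dependency('requests>=2.9.1')   # raw string or a ReqDetails tuple
#   dep.name, dep.oper, dep.version       # -> ('requests', '>=', '2.9.1')
#   dep.install_req()                     # runs 'yum install -y python-requests'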
class F5Dependency(Dependency):
"""F5Dependency
Creates a F5Dependency object. This object will retrieve all relevant
information in regards to what is necessary for the dependency and all of its
subsequent dependencies.
The F5 packages often require further dependencies; thus, there are further
actions to perform for its automated installation.
"""
cmd = "rpm -qRp %s"
install_cmd = "rpm -i %s"
def __init__(self, req):
super(F5Dependency, self).__init__(req)
self._set_url()
self._download_pkg()
self._consolidate_deps()
def _consolidate_deps(self):
try:
dependencies = read_pkg_reqs(self.pkg_location)
f5_reqs, other_reqs = categorize_requirements(dependencies)
handle_f5_dependencies(f5_reqs)
handle_other_dependencies(other_reqs)
except Exception:
raise InstallError(str(self.req), msg="Unable to install req",
frame=gfi(cf()), errno=errno.ESPIPE)
def _download_pkg(self):
url = self.url
pkg_name = self.pkg_name
deps = "/tmp/deps"
pkg_tmp = deps + "/" + pkg_name
try:
os.mkdir(deps)
except OSError:
if not os.path.isdir(deps):
raise InstallError(deps, msg="Unable to create",
frame=gfi(cf()), errno=errno.EIO)
cmd = "curl -L -o %s %s" % (pkg_tmp, url)
output, status = runCommand(cmd)
if status:
raise InstallError(cmd, msg="Failed to download pkg",
errno=errno.ENODATA, frame=gfi(cf()))
self.pkg_location = "/tmp/deps/%s" % self.pkg_name
def _set_url(self):
self.url = "https://github.com/F5Networks/"
if 'f5-sdk' in self.name:
self.url = self.url + "f5-common-python/"
elif 'f5-icontrol-rest' in self.name:
self.url = self.url + "f5-icontrol-rest/"
elif 'f5-openstack-agent' in self.name:
self.url = self.url + "f5-openstack-agent/"
else:
self.url = self.url + re.sub('^python-', '', self.name)
self.url = self.url + "releases/download/v%s/" % \
re.sub('-\d+', '', self.version)
if '-1' in self.version:
pkg_name = "%s-%s.el7.noarch.rpm" % (self.name, self.version)
else:
pkg_name = "%s-%s-1.el7.noarch.rpm" % (self.name, self.version)
self.pkg_name = pkg_name
self.url = self.url + pkg_name
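# Example of the URL _set_url builds (the version number is illustrative):
# for a requirement 'f5-sdk = 2.0.2' the download target becomes
#   https://github.com/F5Networks/f5-common-python/releases/download/v2.0.2/f5-sdk-2.0.2-1.el7.noarch.rpm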
def usage():
"""usage
    A function that prints a usage statement for the script to the CLI.
"""
print("fetch_dependencies.py working_dir package")
def runCommand(cmd):
"""runCommand
Executes the command provided and returns the stdout and the resulting return
code. In the event that the execution fails, a return code of 99 is yielded
    and the output is reduced to an empty string.
"""
output = ""
try:
output = subprocess.check_output(cmd, shell=True)
except OSError as e:
print("Execution failed: [%s:%s] " %
(cmd, os.listdir('/var/wdir')), str(e))
except subprocess.CalledProcessError as Error:
print("exceution failed: [{}]".format(Error))
return(output, errno.ESPIPE)
else:
return (output, 0)
return ('', 99)
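# A minimal usage sketch for runCommand (the command is illustrative):
#
#   output, status = runCommand("rpm -qa")
#   if status:
#       raise InstallError("rpm -qa", msg="Command failed",
#                          errnum=status, frame=gfi(cf()))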
def parse_req(location):
    mod_read = re.compile(r'^([^<=>]+)([<=>]+)(\S+)')
    skip_re = re.compile(r'^\s*#')
listing = list()
with open(location, 'r') as fh:
line = fh.readline()
while line:
if skip_re.search(line):
line = fh.readline()
continue
match = mod_read.search(line)
if match:
listing.append(ReqDetails(*match.groups()))
line = fh.readline()
return listing
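# Example of the line format parse_req() understands (contents illustrative):
# a requirements line such as 'f5-icontrol-rest>=1.0.9' yields
#   ReqDetails(name='f5-icontrol-rest', oper='>=', version='1.0.9')
# while lines beginning with '#' are skipped as comments.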
def load_requirements(cfg):
"""load_requirements
Loads the requirements from the file that the 'setup_requirements' KVP
    contains. The argument should provide the dictionary that contains this KVP.
"""
reqs = parse_req(cfg['setup_requirements'])
return reqs
def categorize_requirements(reqs):
"""categorize_requirements
Takes in a list of dependencies and sorts them between F5 generated items and
    standard packages. It then returns two separate deques: the F5 items
    first, then the others.
"""
f5_specific = deque()
other = deque()
for req in reqs:
if f5_dependency_re.search(req.name):
f5_specific.append(req)
else:
other.append(req)
return f5_specific, other
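# Example (requirement names illustrative): given
#   reqs = [ReqDetails('f5-sdk', '>=', '2.0.2'),
#           ReqDetails('requests', '>=', '2.9.1')]
# categorize_requirements(reqs) puts the f5-sdk entry in the first deque
# (matched by f5_dependency_re) and the requests entry in the second.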
def read_pkg_reqs(pkg_name):
"""read_pkg_reqs
Reads in from `rpm -qRp` of the package file and parses the output for package
dependencies.
"""
requires = deque()
# Get the sdk requirement.
requiresCmd = "rpm -qRp %s" % pkg_name
(output, status) = runCommand(requiresCmd)
if status:
print("Failed to read")
raise InstallError(pkg_name, msg="Could not read deps from pkg",
errnum=errno.EIO, frame=gfi(cf()))
for line in output.split('\n'):
match = dep_match_re.match(line)
if match:
groups = list(match.groups())
my_dep = ReqDetails(*groups)
requires.append(my_dep)
return requires
def compare_reqs(reqs_from_pkg, requirements):
"""compare_reqs
This function will compare the requirements extracted from the rpm command's
output against the requirements parsed from the setup_requirements.py. It
assumes that the rpm contents are in the first argument and the setup's in the
second.
    When a discrepancy is found, an InstallError exception is thrown.
"""
for setup_req in requirements:
accounted = False
for pkg_req in reqs_from_pkg:
if pkg_req.name == str(setup_req.name):
accounted = True
elif 'python-' + setup_req.name == pkg_req.name:
accounted = True
if not accounted:
raise \
InstallError(str(setup_req), msg="Could not find req in pkg",
errno=errno.ESPIPE, frame=gfi(cf()))
for pkg_req in reqs_from_pkg:
accounted = False
if '(' in pkg_req.name:
continue
for setup_req in requirements:
if str(setup_req.name) == pkg_req.name:
accounted = True
elif 'python-' + setup_req.name == pkg_req.name:
accounted = True
if not accounted:
raise InstallError(str(pkg_req), msg="Additional req in pkg",
errno=errno.ESPIPE, frame=gfi(cf()))
def handle_f5_dependencies(f5_reqs):
"""handle_f5_dependencies
This function orchestrates the proper installation of F5 packages. As such,
it will attempt to install the most relevant version of the current package.
"""
    version_re = re.compile(r'(\d+)\.(\d+)\.(\d+)')
VersionBreakout = namedtuple('VersionBreakout', 'high, medium, low')
installed = dict()
pending_check = dict()
for item in f5_reqs:
        version = item.version if version_re.search(item.version) else \
            item.version + ".0.0"
        # Compare version components as ints; comparing the raw strings
        # would order '9' after '10'
        version_breakout = \
            VersionBreakout(*map(int, version_re.search(version).groups()))
if item.name not in installed and '=' in item.oper:
req = F5Dependency(item)
installed[item.name] = version_breakout
req.install_req()
elif item.name in installed:
present = installed[item.name]
if present.high < version_breakout.high and '<' in item.oper:
pass
elif '<' not in item.oper:
pass
else:
raise InstallError(present, item, msg="Version mismatch!",
errnum=errno.ESPIPE, frame=gfi(cf()))
else:
pending_check[item.name] = version_breakout # could expand...
return
def handle_other_dependencies(other_reqs):
"""handle_other_dependencies
This function orchestrates the installation of non-F5 packages and modules.
"""
for item in other_reqs:
req = Dependency(item)
req.install_req()
return
def fetch_pkg_dependencies(config, pkg_name):
"""fetch_pkg_dependencies
This function will attempt to install all missing dependencies. Also, to
assure that the build was successful, the setup_requirements.txt will be
compared to the requirements laid out in the package.
    Upon failure, it will return the exception that was thrown at that time;
    otherwise it returns None. For simplicity of main(), this method heavily
    depends upon the InstallError exception for known scenarios where things
    may break.
"""
requirements = load_requirements(config)
f5_reqs, other_reqs = categorize_requirements(requirements)
# Copy pkg package to /tmp
print("Copying package to /tmp install directory")
try:
tmp_pkg_name = "/tmp/" + os.path.basename(pkg_name)
shutil.copyfile(pkg_name, tmp_pkg_name)
except Exception as error:
print("Failed")
return InstallError(str(error), pkg_name, tmp_pkg_name,
frame=gfi(cf()),
errnum=errno.EIO,
msg="Failed to copy f5-sdk package!")
print("Success")
print("Compare structured pkg dependencies against what was built")
try:
reqs_from_pkg = read_pkg_reqs(tmp_pkg_name)
compare_reqs(reqs_from_pkg, requirements)
except InstallError as error:
print("Failed")
return error
# handle dependency installation:
print("Installing Dependencies:")
try:
handle_f5_dependencies(f5_reqs)
handle_other_dependencies(other_reqs)
except InstallError as error:
print("Failed")
return error
print("Installing Self - %s" % pkg_name)
try:
output, result = runCommand('rpm -i %s 2>&1' % tmp_pkg_name)
        if result != 0:
raise InstallError("Exit status was {}".format(result))
except InstallError as error:
print("Failed to get requirements for %s." % (pkg_name))
return error
print("Success")
def load_config(config_json):
"""load_config
This loads a dist_dir/scripts/config.JSON file that contains the appropriate
mappings required to build the packages and test them.
"""
data = None
try:
with open(config_json, 'r') as fh:
data = json.loads(fh.read())
except Exception as Error:
raise InstallError(str(Error), config_json, frame=gfi(cf()),
errno=errno.ESPIPE,
msg="Could not laod config.JSON")
return data
def get_args(argv):
"""get_args
Attempts to map out the input arguments into the anticipated format. If an
incorrect number of arguments are provided, or if the arguments provided do
not map out properly, then an InstallError will be raised.
"""
error = None
    try:
        working_dir, package_name = argv
    except ValueError:
        # Unpacking raises ValueError (not IndexError) when the wrong number
        # of arguments is supplied; bail out before touching working_dir
        usage()
        sys.exit(errno.EINVAL)
if not os.path.isdir(working_dir) or not os.access(working_dir, os.R_OK):
error = InstallError(working_dir, errnum=errno.EIO, frame=gfi(cf()),
msg="Directory given cannot be accessed")
if error:
InstallError(str(error), msg="Improper list of input arguments",
frame=gfi(cf()))
usage()
sys.exit(errno.EINVAL)
return working_dir, package_name
def check_dist_dir(dist_dir):
"""check_dist_dir
This checks the given dist_dir for validity and raises if it is not valid.
"""
try:
dist_dir = glob.glob(dist_dir)[0]
except IndexError:
raise InstallError(dist_dir, frame=gfi(cf()), errnum=errno.ENOSYS,
msg="No dist dir found under the working")
return dist_dir
def main(args):
"""main
    The entrypoint to the script. The script will exit with a non-zero value
    on error; an exit value of zero indicates success.
"""
error = None
working_dir, pkg_name = get_args(sys.argv[1:])
os.chdir("/var/wdir")
dist_dir = check_dist_dir(working_dir + "/*-dist")
try:
config = load_config(dist_dir + "/scripts/config.JSON")
    except InstallError as load_error:
        # Stash the exception: in Python 3 the 'as' name is unbound when the
        # except block exits, so 'error' must be assigned explicitly
        error = load_error
# Get all files for the package.
error = fetch_pkg_dependencies(config, pkg_name) if not error else error
    # Install from the tmp directory.
if error:
sys.exit(error.errnum)
else:
# last attempt to detect an error:
cmd = "rpm -qa | grep {}".format(config['project'])
output, status = runCommand(cmd)
if status == 0:
print("Passed last check:\n{}".format(output))
sys.exit(0)
print("Failed last level of verification:\n{}".format(cmd))
sys.exit(29)
if __name__ == '__main__':
main(sys.argv)
# vim: set fileencoding=utf-8
|
|
import sys
import traceback
import datetime
import unittest
from tcmessages import TeamcityServiceMessages
from tcunittest import strclass
from tcunittest import TeamcityTestResult
try:
from nose.util import isclass # backwards compat
from nose.config import Config
from nose.result import TextTestResult
from nose import SkipTest
from nose.plugins.errorclass import ErrorClassPlugin
except Exception:
    e = sys.exc_info()[1]
    raise NameError(
        "Something went wrong, do you have nose installed? I got this error: %s" % e)
class TeamcityPlugin(ErrorClassPlugin, TextTestResult, TeamcityTestResult):
"""
TeamcityTest plugin for nose tests
"""
name = "TeamcityPlugin"
enabled = True
def __init__(self, stream=sys.stderr, descriptions=None, verbosity=1,
config=None, errorClasses=None):
super(TeamcityPlugin, self).__init__()
if errorClasses is None:
errorClasses = {}
self.errorClasses = errorClasses
if config is None:
config = Config()
self.config = config
self.output = stream
self.messages = TeamcityServiceMessages(self.output,
prepend_linebreak=True)
self.messages.testMatrixEntered()
self.current_suite = None
TextTestResult.__init__(self, stream, descriptions, verbosity, config,
errorClasses)
TeamcityTestResult.__init__(self, stream)
def configure(self, options, conf):
if not self.can_configure:
return
self.conf = conf
def addError(self, test, err):
exctype, value, tb = err
err = self.formatErr(err)
if exctype == SkipTest:
self.messages.testIgnored(self.getTestName(test), message='Skip')
else:
self.messages.testError(self.getTestName(test), message='Error', details=err, duration=self.__getDuration(test))
def formatErr(self, err):
exctype, value, tb = err
if isinstance(value, str):
try:
value = exctype(value)
except TypeError:
pass
return ''.join(traceback.format_exception(exctype, value, tb))
def is_gen(self, test):
if hasattr(test, "test") and hasattr(test.test, "descriptor"):
if test.test.descriptor is not None:
return True
return False
def getTestName(self, test):
if hasattr(test, "error_context"):
return test.error_context
test_name_full = str(test)
if self.is_gen(test):
return test_name_full
ind_1 = test_name_full.rfind('(')
if ind_1 != -1:
return test_name_full[:ind_1]
return test_name_full
def addFailure(self, test, err):
err = self.formatErr(err)
self.messages.testFailed(self.getTestName(test),
message='Failure', details=err)
def addSkip(self, test, reason):
self.messages.testIgnored(self.getTestName(test), message=reason)
def _getSuite(self, test):
if hasattr(test, "suite"):
suite = strclass(test.suite)
suite_location = test.suite.location
location = test.suite.abs_location
if hasattr(test, "lineno"):
location = location + ":" + str(test.lineno)
else:
location = location + ":" + str(test.test.lineno)
else:
suite = strclass(test.__class__)
suite_location = "python_nosetestid://" + suite
try:
from nose.util import func_lineno
if hasattr(test.test, "descriptor") and test.test.descriptor:
suite_location = "file://" + self.test_address(
test.test.descriptor)
location = suite_location + ":" + str(
func_lineno(test.test.descriptor))
else:
suite_location = "file://" + self.test_address(
test.test.test)
location = "file://" + self.test_address(
test.test.test) + ":" + str(func_lineno(test.test.test))
            except Exception:
test_id = test.id()
suite_id = test_id[:test_id.rfind(".")]
suite_location = "python_nosetestid://" + str(suite_id)
location = "python_nosetestid://" + str(test_id)
return (location, suite_location)
def test_address(self, test):
if hasattr(test, "address"):
return test.address()[0]
t = type(test)
file = None
import types, os
if (t == types.FunctionType or issubclass(t, type) or t == type
or isclass(test)):
module = getattr(test, '__module__', None)
if module is not None:
m = sys.modules[module]
file = getattr(m, '__file__', None)
if file is not None:
file = os.path.abspath(file)
if file.endswith("pyc"):
file = file[:-1]
return file
raise TypeError("I don't know what %s is (%s)" % (test, t))
def getSuiteName(self, test):
test_name_full = str(test)
ind_1 = test_name_full.rfind('(')
if self.is_gen(test) and ind_1 != -1:
ind = test_name_full[:ind_1].rfind('.')
if ind != -1:
return test_name_full[:ind]
if ind_1 != -1:
return test_name_full[ind_1 + 1: -1]
ind = test_name_full.rfind('.')
if ind != -1:
return test_name_full[:test_name_full.rfind(".")]
return test_name_full
def startTest(self, test):
location, suite_location = self._getSuite(test)
suite = self.getSuiteName(test)
if suite != self.current_suite:
if self.current_suite:
self.messages.testSuiteFinished(self.current_suite)
self.current_suite = suite
self.messages.testSuiteStarted(self.current_suite,
location=suite_location)
setattr(test, "startTime", datetime.datetime.now())
self.messages.testStarted(self.getTestName(test), location=location)
def stopTest(self, test):
duration = self.__getDuration(test)
self.messages.testFinished(self.getTestName(test),
duration=int(duration))
def __getDuration(self, test):
start = getattr(test, "startTime", datetime.datetime.now())
d = datetime.datetime.now() - start
duration = d.microseconds / 1000 + d.seconds * 1000 + d.days * 86400000
return duration
def finalize(self, result):
if self.current_suite:
self.messages.testSuiteFinished(self.current_suite)
self.current_suite = None
class TeamcityNoseRunner(unittest.TextTestRunner):
"""Test runner that supports teamcity output
"""
def __init__(self, stream=sys.stdout, descriptions=1, verbosity=1,
config=None):
if config is None:
config = Config()
self.config = config
unittest.TextTestRunner.__init__(self, stream, descriptions, verbosity)
def _makeResult(self):
return TeamcityPlugin(self.stream,
self.descriptions,
self.verbosity,
self.config)
def run(self, test):
"""Overrides to provide plugin hooks and defer all output to
the test result class.
"""
#for 2.5 compat
plugins = self.config.plugins
plugins.configure(self.config.options, self.config)
plugins.begin()
wrapper = plugins.prepareTest(test)
if wrapper is not None:
test = wrapper
# plugins can decorate or capture the output stream
wrapped = self.config.plugins.setOutputStream(self.stream)
if wrapped is not None:
self.stream = wrapped
result = self._makeResult()
test(result)
result.endLastSuite()
plugins.finalize(result)
return result
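# A minimal usage sketch (test path illustrative): the plugin instance can be
# handed straight to nose, which then emits TeamCity service messages while
# the tests run:
#
#   import nose
#   nose.run(argv=['nosetests', 'tests/'], addplugins=[TeamcityPlugin()])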
|
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# snakemakelib documentation build configuration file, created by
# sphinx-quickstart on Fri May 29 14:05:01 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import re
import sphinx_bootstrap_theme
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.ifconfig',
#'sphinx.ext.viewcode',
'sphinx.ext.napoleon',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'snakemakelib'
copyright = '2015, Per Unneberg'
author = 'Per Unneberg'
# Get the snakemakelib version from versioneer
from snakemakelib import __version__
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = __version__
if version.endswith("dirty"):
version = re.sub("\+[0-9]+\.[a-z0-9]+\.dirty", "", version)
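# e.g. an (illustrative) versioneer string '0.1.0+3.g1a2b3c4.dirty' is
# shortened to '0.1.0' by the substitution above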
# The full version, including alpha/beta/rc tags.
release = __version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'bootstrap'
html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
html_theme_options = {
# Navigation bar title. (Default: ``project`` value)
#'navbar_title': "Demo",
# Tab name for entire site. (Default: "Site")
'navbar_site_name': "Site",
# A list of tuples containing pages or urls to link to.
# Valid tuples should be in the following forms:
# (name, page) # a link to a page
# (name, "/aa/bb", 1) # a link to an arbitrary relative url
# (name, "http://example.com", True) # arbitrary absolute url
# Note the "1" or "True" value above as the third argument to indicate
# an arbitrary url.
'navbar_links': [
("Workflows", "docs/workflows"),
],
# Render the next and previous page links in navbar. (Default: true)
'navbar_sidebarrel': False,
# Render the current pages TOC in the navbar. (Default: true)
'navbar_pagenav': True,
# Global TOC depth for "site" navbar tab. (Default: 1)
# Switching to -1 shows all levels.
'globaltoc_depth': 3,
# Include hidden TOCs in Site navbar?
#
# Note: If this is "false", you cannot have mixed ``:hidden:`` and
# non-hidden ``toctree`` directives in the same page, or else the build
# will break.
#
# Values: "true" (default) or "false"
'globaltoc_includehidden': "true",
# HTML navbar class (Default: "navbar") to attach to <div> element.
# For black navbar, do "navbar navbar-inverse"
'navbar_class': "navbar navbar-inverse",
# Fix navigation bar to top of page?
# Values: "true" (default) or "false"
'navbar_fixed_top': "true",
# Location of link to source.
# Options are "nav" (default), "footer" or anything else to exclude.
'source_link_position': "footer",
# Bootswatch (http://bootswatch.com/) theme.
#
# Options are nothing with "" (default) or the name of a valid theme
# such as "amelia" or "cosmo".
#'bootswatch_theme': "united",
# Choose Bootstrap version.
# Values: "3" (default) or "2" (in quotes)
'bootstrap_version': "3",
}
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
standard_sidebars = ['sidebartoc.html', 'sourcelink.html', 'searchbox.html']
html_sidebars = {
'*': standard_sidebars,
'docs/*': standard_sidebars,
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'snakemakelibdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'snakemakelib.tex', 'snakemakelib Documentation',
'Per Unneberg', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'snakemakelib', 'snakemakelib Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'snakemakelib', 'snakemakelib Documentation',
author, 'snakemakelib', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
#epub_basename = project
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or 'en' if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the Pillow.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'https://docs.python.org/3.4': None,
'http://python.readthedocs.org/en/latest/' : None,
}
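# Note: newer Sphinx releases prefer named intersphinx entries, e.g.
#   intersphinx_mapping = {'python': ('https://docs.python.org/3', None)}
# the positional form above still works on older versions but may warn.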
|
|
# =============================================================================
# HEPHAESTUS VALIDATION 4 - MESHER AND CROSS-SECTIONAL ANALYSIS
# =============================================================================
# IMPORTS:
import sys
import os
sys.path.append(os.path.abspath(os.path.join('..', '..')))
from AeroComBAT.Structures import MaterialLib, Laminate, XSect
from AeroComBAT.Aerodynamics import Airfoil
import numpy as np
# HODGES XSECTION VALIDATION
# Add the material property
matLib = MaterialLib()
matLib.addMat(1,'AS43501-6','trans_iso',[20.6e6,1.42e6,.3,.34,.87e6,0.00177],0.005)
matLib.addMat(2,'AS43501-6*','trans_iso',[20.6e6,1.42e6,.34,.42,.87e6,0.00177],0.005)
matLib.addMat(3,'AL','iso',[71.7e9,.33,2810],.005)
# Box Configuration 2
c2 = 0.53
xdim2 = [-0.8990566037735849,0.8990566037735849]
af2 = Airfoil(c2,name='box')
force = np.array([100,100,10000,-400.89,400.89,0.])
#force = np.array([0,0,0,0,0,50])
'''
# B1 Box beam (0.5 x 0.923 in^2 box with laminate schedule [15]_6)
n_i_B1 = [6]
m_i_B1 = [1]
th_B1 = [-15]
lam1_B1 = Laminate(n_i_B1, m_i_B1, matLib, th=th_B1)
lam2_B1 = Laminate(n_i_B1, m_i_B1, matLib, th=th_B1)
lam3_B1 = Laminate(n_i_B1, m_i_B1, matLib, th=th_B1)
lam4_B1 = Laminate(n_i_B1, m_i_B1, matLib, th=th_B1)
lam1_B1.printSummary()
laminates_B1 = [lam1_B1,lam2_B1,lam3_B1,lam4_B1]
xsect_B1 = XSect(1,af2,xdim2,laminates_B1,matLib,typeXSect='rectBox',meshSize=1)
xsect_B1.xSectionAnalysis()
xsect_B1.printSummary(stiffMat=True)
xsect_B1.calcWarpEffects(force=force)
xsect_B1.plotWarped(figName='Validation Case B1',warpScale=10,contLim=[0,500000])
K = xsect_B1.K_raw
'''
'''
# Layup 1 Box beam (0.5 x 0.923 in^2 box with laminate schedule [0]_6)
n_i_Lay1 = [6]
m_i_Lay1 = [2]
th_Lay1 = [0]
lam1_Lay1 = Laminate(n_i_Lay1, m_i_Lay1, matLib, th=th_Lay1)
lam2_Lay1 = Laminate(n_i_Lay1, m_i_Lay1, matLib, th=th_Lay1)
lam3_Lay1 = Laminate(n_i_Lay1, m_i_Lay1, matLib, th=th_Lay1)
lam4_Lay1 = Laminate(n_i_Lay1, m_i_Lay1, matLib, th=th_Lay1)
lam1_Lay1.printSummary()
laminates_Lay1 = [lam1_Lay1,lam2_Lay1,lam3_Lay1,lam4_Lay1]
xsect_Lay1 = XSect(2,af2,xdim2,laminates_Lay1,matLib,typeXSect='box',meshSize=3)
xsect_Lay1.xSectionAnalysis()
xsect_Lay1.printSummary(stiffMat=True)
xsect_Lay1.calcWarpEffects(force=force)
xsect_Lay1.plotWarped(figName='Validation Case L1',warpScale=10,contLim=[0,500000])
# Layup 2 Box beam (0.5 x 0.923 in^2 box with laminate schedule [30,0]_3)
n_i_Lay2 = [1,1,1,1,1,1]
m_i_Lay2 = [2,2,2,2,2,2]
th_Lay2 = [-30,0,-30,0,-30,0]
lam1_Lay2 = Laminate(n_i_Lay2, m_i_Lay2, matLib, th=th_Lay2)
lam2_Lay2 = Laminate(n_i_Lay2, m_i_Lay2, matLib, th=th_Lay2)
lam3_Lay2 = Laminate(n_i_Lay2, m_i_Lay2, matLib, th=th_Lay2)
lam4_Lay2 = Laminate(n_i_Lay2, m_i_Lay2, matLib, th=th_Lay2)
lam1_Lay2.printSummary()
laminates_Lay2 = [lam1_Lay2,lam2_Lay2,lam3_Lay2,lam4_Lay2]
xsect_Lay2 = XSect(3,af2,xdim2,laminates_Lay2,matLib,typeXSect='box',meshSize=3)
xsect_Lay2.xSectionAnalysis(ref_ax='origin')
xsect_Lay2.printSummary(stiffMat=True)
xsect_Lay2.calcWarpEffects(force=force)
xsect_Lay2.plotWarped(figName='Validation Case L2',warpScale=10,contLim=[0,500000])
'''
'''
# Layup 3 Box beam (0.5 x 0.923 in^2 box with laminate schedule [30,0]_3)
n_i_1 = [1,1,1,1,1,1]
m_i_1 = [2,2,2,2,2,2]
th_1 = [-15,-15,-15,-15,-15,-15]
lam1 = Laminate(n_i_1, m_i_1, matLib, th=th_1)
n_i_2 = [1,1,1,1,1,1]
m_i_2 = [2,2,2,2,2,2]
th_2 = [-15,15,-15,15,-15,15]
lam2 = Laminate(n_i_2, m_i_2, matLib, th=th_2)
n_i_3 = [1,1,1,1,1,1]
m_i_3 = [2,2,2,2,2,2]
th_3 = [15,15,15,15,15,15]
lam3 = Laminate(n_i_3, m_i_3, matLib, th=th_3)
n_i_4 = [1,1,1,1,1,1]
m_i_4 = [2,2,2,2,2,2]
th_4 = [-15,15,-15,15,-15,15]
lam4 = Laminate(n_i_4, m_i_4, matLib, th=th_4)
'''
# AL Box Beam
n_i_1 = [1,1,1,1,1,1]
m_i_1 = [2,2,2,2,2,2]
th_1 = [-15,-15,-15,-15,-15,-15]
lam1 = Laminate(n_i_1, m_i_1, matLib, th=th_1)
n_i_2 = [1,1,1,1,1,1]
m_i_2 = [2,2,2,2,2,2]
th_2 = [15,-15,15,-15,15,-15]
lam2 = Laminate(n_i_2, m_i_2, matLib, th=th_2)
n_i_3 = [1,1,1,1,1,1]
m_i_3 = [2,2,2,2,2,2]
th_3 = [15,15,15,15,15,15]
lam3 = Laminate(n_i_3, m_i_3, matLib, th=th_3)
n_i_4 = [1,1,1,1,1,1]
m_i_4 = [2,2,2,2,2,2]
th_4 = [-15,15,-15,15,-15,15]
lam4 = Laminate(n_i_4, m_i_4, matLib, th=th_4)
'''
laminates_Lay3 = [lam1,lam2,lam3,lam4]
xsect_Lay3 = XSect(4,af2,xdim2,laminates_Lay3,matLib,typeXSect='box',meshSize=1)
xsect_Lay3.xSectionAnalysis()
K = xsect_Lay3.K_raw
force = np.array([1,0,0,0,0,0.])
xsect_Lay3.calcWarpEffects(force=force)
xsect_Lay3.plotWarped(figName='Warping Displacement Fx',warpScale=0,contour='none')
xsect_Lay3.plotWarped(figName='Warping Displacement Fx',warpScale=2e9,contLim=[0,1e-9])
force = np.array([0,1,0,0,0,0.])
xsect_Lay3.calcWarpEffects(force=force)
xsect_Lay3.plotWarped(figName='Warping Displacement Fy',warpScale=0,contour='none')
xsect_Lay3.plotWarped(figName='Warping Displacement Fy',warpScale=1e9,contLim=[0,1e-9])
force = np.array([0,0,1,0,0,0.])
xsect_Lay3.calcWarpEffects(force=force)
xsect_Lay3.plotWarped(figName='Warping Displacement Fz',warpScale=0,contour='none')
xsect_Lay3.plotWarped(figName='Warping Displacement Fz',warpScale=5e9,contLim=[0,1e-9])
force = np.array([0,0,0,1,0,0.])
xsect_Lay3.calcWarpEffects(force=force)
xsect_Lay3.plotWarped(figName='Warping Displacement Mx',warpScale=0,contour='none')
xsect_Lay3.plotWarped(figName='Warping Displacement Mx',warpScale=1e9,contLim=[0,1e-9])
force = np.array([0,0,0,0,1,0.])
xsect_Lay3.calcWarpEffects(force=force)
xsect_Lay3.plotWarped(figName='Warping Displacement My',warpScale=0,contour='none')
xsect_Lay3.plotWarped(figName='Warping Displacement My',warpScale=1e9,contLim=[0,1e-9])
force = np.array([0,0,0,0,0,1.])
xsect_Lay3.calcWarpEffects(force=force)
xsect_Lay3.plotWarped(figName='Warping Displacement Mz',warpScale=0,contour='none')
xsect_Lay3.plotWarped(figName='Warping Displacement Mz',warpScale=5e8,contLim=[0,1e-9])
'''
lam1.printSummary()
lam2.printSummary()
lam3.printSummary()
lam4.printSummary()
laminates_Lay3 = [lam1,lam2,lam3,lam4]
xsect_Lay3 = XSect(4,af2,xdim2,laminates_Lay3,matLib,typeXSect='rectBox',meshSize=1.87)
xsect_Lay3.xSectionAnalysis()
xsect_Lay3.printSummary(stiffMat=True)
xsect_Lay3.calcWarpEffects(force=force)
#import mayavi.mlab as mlab
#xsect_Lay3.plotWarped(figName='Stress sig_11',warpScale=1,contLim=[-500,500],contour='sig_11')
xsect_Lay3.plotWarped(figName='Stress sig_11',warpScale=0,contour='sig_11')
#xsect_Lay3.plotRigid(figName='Stress sig_11')
#mlab.colorbar()
#xsect_Lay3.plotWarped(figName='Stress sig_22',warpScale=1,contLim=[-5075,9365],contour='sig_22')
xsect_Lay3.plotWarped(figName='Stress sig_22',warpScale=0,contour='sig_22')
#mlab.colorbar()
#xsect_Lay3.plotWarped(figName='Stress sig_33',warpScale=1,contLim=[25762,293000],contour='sig_33')
xsect_Lay3.plotWarped(figName='Stress sig_33',warpScale=0,contour='sig_33')
#mlab.colorbar()
#xsect_Lay3.plotWarped(figName='Stress sig_12',warpScale=1,contLim=[-2932,3116],contour='sig_12')
xsect_Lay3.plotWarped(figName='Stress sig_12',warpScale=0,contour='sig_12')
#mlab.colorbar()
#xsect_Lay3.plotWarped(figName='Stress sig_13',warpScale=1,contLim=[-35359,33715],contour='sig_13')
xsect_Lay3.plotWarped(figName='Stress sig_13',warpScale=0,contour='sig_13')
#mlab.colorbar()
#xsect_Lay3.plotWarped(figName='Stress sig_23',warpScale=1,contLim=[-57921,78221],contour='sig_23')
xsect_Lay3.plotWarped(figName='Stress sig_23',warpScale=0,contour='sig_23')
#mlab.colorbar()
# Import EID Mesh CSV files to do element mapping
lam1AeroComBATEIDmesh = np.genfromtxt('lam1AeroComBATEIDmesh.csv', delimiter=',',dtype=int)
lam1NASTRANEIDmesh = np.genfromtxt('lam1NASTRANEIDmesh.csv', delimiter=',',dtype=int)
lam2AeroComBATEIDmesh = np.genfromtxt('lam2AeroComBATEIDmesh.csv', delimiter=',',dtype=int)
lam2NASTRANEIDmesh = np.genfromtxt('lam2NASTRANEIDmesh.csv', delimiter=',',dtype=int)
lam3AeroComBATEIDmesh = np.genfromtxt('lam3AeroComBATEIDmesh.csv', delimiter=',',dtype=int)
lam3NASTRANEIDmesh = np.genfromtxt('lam3NASTRANEIDmesh.csv', delimiter=',',dtype=int)
lam4AeroComBATEIDmesh = np.genfromtxt('lam4AeroComBATEIDmesh.csv', delimiter=',',dtype=int)
lam4NASTRANEIDmesh = np.genfromtxt('lam4NASTRANEIDmesh.csv', delimiter=',',dtype=int)
# Make mapping from NASTRAN EIDs to the corresponding AeroComBAT EIDs,
# laminate by laminate
NAST_2_Aero_EID = {}
eid_mesh_pairs = [(lam1NASTRANEIDmesh, lam1AeroComBATEIDmesh),
                  (lam2NASTRANEIDmesh, lam2AeroComBATEIDmesh),
                  (lam3NASTRANEIDmesh, lam3AeroComBATEIDmesh),
                  (lam4NASTRANEIDmesh, lam4AeroComBATEIDmesh)]
for NASTmesh, AeroMesh in eid_mesh_pairs:
    for i in range(0, np.size(NASTmesh, axis=0)):
        for j in range(0, np.size(NASTmesh, axis=1)):
            NAST_2_Aero_EID[NASTmesh[i, j]] = AeroMesh[i, j]
# Import NASTRAN Stress Data:
NASTRANStress = np.genfromtxt('NASTRANStress.csv', delimiter=',')
# Determine L2 Norms
sigxx_L2 = max(abs(NASTRANStress[:,1]))
sigyy_L2 = max(abs(NASTRANStress[:,2]))
sigzz_L2 = max(abs(NASTRANStress[:,3]))
sigxy_L2 = max(abs(NASTRANStress[:,4]))
sigxz_L2 = max(abs(NASTRANStress[:,6]))
sigyz_L2 = max(abs(NASTRANStress[:,5]))
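# Note: despite the "_L2" suffix these are infinity norms (the max absolute
# NASTRAN stress per component); the percent errors computed below are
# normalized by them so all six components sit on a comparable scale.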
for i in range(0,np.size(NASTRANStress,axis=0)):
AeroEID = NAST_2_Aero_EID[int(NASTRANStress[i,0])]
tmpElem = xsect_Lay3.elemDict[AeroEID]
sigxx = NASTRANStress[i,1]
sigyy = NASTRANStress[i,2]
sigzz = NASTRANStress[i,3]
sigxy = NASTRANStress[i,4]
sigxz = NASTRANStress[i,6]
sigyz = NASTRANStress[i,5]
tmpSigxx = np.mean(tmpElem.Sig[0,:])
tmpSigyy = np.mean(tmpElem.Sig[1,:])
tmpSigxy = np.mean(tmpElem.Sig[2,:])
tmpSigxz = np.mean(tmpElem.Sig[3,:])
tmpSigyz = np.mean(tmpElem.Sig[4,:])
tmpSigzz = np.mean(tmpElem.Sig[5,:])
SigxxError = (sigxx-tmpSigxx)/sigxx_L2*100
SigyyError = (sigyy-tmpSigyy)/sigyy_L2*100
SigzzError = (sigzz-tmpSigzz)/sigzz_L2*100
SigxyError = (sigxy-tmpSigxy)/sigxy_L2*100
SigxzError = (sigxz-tmpSigxz)/sigxz_L2*100
SigyzError = (sigyz-tmpSigyz)/sigyz_L2*100
tmpElem.Sig[0,:] = np.ones((1,4))*SigxxError
tmpElem.Sig[1,:] = np.ones((1,4))*SigyyError
tmpElem.Sig[2,:] = np.ones((1,4))*SigxyError
tmpElem.Sig[3,:] = np.ones((1,4))*SigxzError
tmpElem.Sig[4,:] = np.ones((1,4))*SigyzError
tmpElem.Sig[5,:] = np.ones((1,4))*SigzzError
#xsect_Lay3.plotWarped(figName='Stress Validation sig_11',warpScale=1,contLim=[-15,15],contour='sig_11')
xsect_Lay3.plotWarped(figName='Stress Validation sig_11',warpScale=0,contour='sig_11')
#mlab.colorbar()
#xsect_Lay3.plotWarped(figName='Stress Validation sig_22',warpScale=1,contLim=[-15,15],contour='sig_22')
xsect_Lay3.plotWarped(figName='Stress Validation sig_22',warpScale=0,contour='sig_22')
#mlab.colorbar()
#xsect_Lay3.plotWarped(figName='Stress Validation sig_33',warpScale=1,contLim=[-2,2],contour='sig_33')
xsect_Lay3.plotWarped(figName='Stress Validation sig_33',warpScale=0,contour='sig_33')
#mlab.colorbar()
#xsect_Lay3.plotWarped(figName='Stress Validation sig_12',warpScale=1,contLim=[-15,15],contour='sig_12')
xsect_Lay3.plotWarped(figName='Stress Validation sig_12',warpScale=0,contour='sig_12')
#mlab.colorbar()
#xsect_Lay3.plotWarped(figName='Stress Validation sig_13',warpScale=1,contLim=[-5,5],contour='sig_13')
xsect_Lay3.plotWarped(figName='Stress Validation sig_13',warpScale=0,contour='sig_13')
#mlab.colorbar()
#xsect_Lay3.plotWarped(figName='Stress Validation sig_23',warpScale=1,contLim=[-2,2],contour='sig_23')
xsect_Lay3.plotWarped(figName='Stress Validation sig_23',warpScale=0,contour='sig_23')
#mlab.colorbar()
'''
from AeroComBAT.AircraftParts import Wing
from AeroComBAT.FEM import Model
croot = 0.53
ctip = 0.53
x1 = -0.8990566037735849
x2 = 0.8990566037735849
n_ply = n_i_1+n_i_2+n_i_3+n_i_4
m_ply = m_i_1+m_i_2+m_i_3+m_i_4
th_ply = th_1+th_2+th_3+th_4
p1 = np.array([0.,0.,0.])
p2 = np.array([0.,0.,8.05])
Y_rib = np.linspace(0.,1.,2)
noe_dens = 31
chordVec=np.array([1.,0.,0.])
wing1 = Wing(1,p1,p2,croot,ctip,x1,x2,Y_rib,n_ply,m_ply,matLib,name='box',\
noe=noe_dens,chordVec=chordVec,ref_ax='origin',th_ply=th_ply,typeXSect='rectBox',n_orients=6)
sbeam1 = wing1.wingSects[0].SuperBeams[0]
# Make a FEM model
model = Model()
model.addAircraftParts([wing1])
model.plotRigidModel(numXSects=10)
# Apply the constraint for the model
model.applyConstraints(0,'fix')
# CASE 1:
# Apply the case load
model.resetPointLoads()
tipLoad = np.array([100.,100.,10000.,0.,0.,0.])
F = {249:tipLoad}
model.applyLoads(1,F=F)
# Run the analysis
model.staticAnalysis(1)
model.plotDeformedModel(figName='V8 Case 1',numXSects=10,contLim=[0,293000],\
warpScale=10,displScale=2,contour='sig_33')
# Composite Normal Mode Analysis
model.normalModesAnalysis()
freqs = model.freqs
model.plotDeformedModel(figName='Normal Mode 1',numXSects=10,contLim=[0,293000],\
warpScale=25,displScale=10,contour='none',mode=1)
model.plotDeformedModel(figName='normalMode 2',numXSects=10,contLim=[0,293000],\
warpScale=25,displScale=10,contour='none',mode=2)
model.plotDeformedModel(figName='normalMode 3',numXSects=10,contLim=[0,293000],\
warpScale=25,displScale=10,contour='none',mode=3)
model.plotDeformedModel(figName='normalMode 4',numXSects=10,contLim=[0,293000],\
warpScale=25,displScale=10,contour='none',mode=4)
model.plotDeformedModel(figName='normalMode 5',numXSects=10,contLim=[0,293000],\
warpScale=25,displScale=10,contour='none',mode=5)
# Composite Deflections error
# Import NASTRAN Nodal Displacements
Layup3NastranNodalDisp = np.genfromtxt('Layup3NASTRANNodalDisplConstEnds.csv', delimiter=',')
# Create blank dictionary for displacements
T1 = {}
T2 = {}
T3 = {}
R1 = {}
R2 = {}
R3 = {}
for i in range(1,np.size(Layup3NastranNodalDisp,axis=0)):
currentZ = Layup3NastranNodalDisp[i,3]
if T1.has_key(currentZ):
T1[currentZ] += [Layup3NastranNodalDisp[i,4]]
T2[currentZ] += [Layup3NastranNodalDisp[i,5]]
T3[currentZ] += [Layup3NastranNodalDisp[i,6]]
else:
T1[currentZ] = [Layup3NastranNodalDisp[i,4]]
T2[currentZ] = [Layup3NastranNodalDisp[i,5]]
T3[currentZ] = [Layup3NastranNodalDisp[i,6]]
# Calculate the average displacements
zposvec = T1.keys()
T1avg = []
T2avg = []
T3avg = []
for zpos, disp in T1.iteritems():
T1avg += [np.mean(disp)]
for zpos, disp in T2.iteritems():
T2avg += [np.mean(disp)]
for zpos, disp in T3.iteritems():
T3avg += [np.mean(disp)]
zposvec,T1avg,T2avg,T3avg = zip(*sorted(zip(zposvec,T1avg,T2avg,T3avg)))
# Calculate the average rotations
for i in range(1,np.size(Layup3NastranNodalDisp,axis=0)):
currentZ = Layup3NastranNodalDisp[i,3]
zind = zposvec.index(currentZ)
xold = Layup3NastranNodalDisp[i,1]
yold = Layup3NastranNodalDisp[i,2]
if R1.has_key(currentZ):
if not abs(yold)<1e-6:
R1[currentZ] += [np.arctan(Layup3NastranNodalDisp[i,6]/yold)]
if not abs(xold)<1e-6:
R2[currentZ] += [-np.arctan(Layup3NastranNodalDisp[i,6]/xold)]
xnew = xold+Layup3NastranNodalDisp[i,4]-T1avg[zind]
ynew = yold+Layup3NastranNodalDisp[i,5]-T2avg[zind]
R3[currentZ] += [np.arccos((xnew*xold+ynew*yold)/(np.linalg.norm([xnew,ynew])\
*np.linalg.norm([xold,yold])))]
else:
if not abs(yold)<1e-6:
R1[currentZ] = [np.arctan(Layup3NastranNodalDisp[i,6]/yold)]
if not abs(xold)<1e-6:
R2[currentZ] = [-np.arctan(Layup3NastranNodalDisp[i,6]/xold)]
xnew = xold+Layup3NastranNodalDisp[i,4]-T1avg[zind]
ynew = yold+Layup3NastranNodalDisp[i,5]-T2avg[zind]
R3[currentZ] = [np.arccos((xnew*xold+ynew*yold)/(np.linalg.norm([xnew,ynew])\
*np.linalg.norm([xold,yold])))]
zposvec = R1.keys()
R1avg = []
R2avg = []
R3avg = []
for zpos, disp in R1.iteritems():
R1avg += [np.mean(disp)]
for zpos, disp in R2.iteritems():
R2avg += [np.mean(disp)]
for zpos, disp in R3.iteritems():
R3avg += [np.mean(disp)]
zposvec,R1avg,R2avg,R3avg = zip(*sorted(zip(zposvec,R1avg,R2avg,R3avg)))
zposvec = np.array(zposvec)
T1avg = np.array(T1avg)
T2avg = np.array(T2avg)
T3avg = np.array(T3avg)
R1avg = np.array(R1avg)
R2avg = np.array(R2avg)
R3avg = np.array(R3avg)
AeroComBATData = sbeam1.writeDisplacements(Return=True)
ZposAero = AeroComBATData[:,3]
T1Aero = AeroComBATData[:,4]
T2Aero = AeroComBATData[:,5]
T3Aero = AeroComBATData[:,6]
R1Aero = AeroComBATData[:,7]
R2Aero = AeroComBATData[:,8]
R3Aero = AeroComBATData[:,9]
from matplotlib import pylab as pl
fig1 = pl.figure(1)
pl.hold(True)
pl.plot(ZposAero/8.05,T1Aero/8.05,'b-',label='AeroComBAT T1',linewidth=3)
pl.plot(ZposAero/8.05,T2Aero/8.05,'g-',label='AeroComBAT T2',linewidth=3)
pl.plot(ZposAero/8.05,T3Aero/8.05,'r-',label='AeroComBAT T3',linewidth=3)
pl.plot(zposvec/8.05,T1avg/8.05,'c--',label='NASTRAN T1',linewidth=3)
pl.plot(zposvec/8.05,T2avg/8.05,'m--',label='NASTRAN T2',linewidth=3)
pl.plot(zposvec/8.05,T3avg/8.05,'k--',label='NASTRAN T3',linewidth=3)
pl.grid(True)
pl.legend(loc=2,fontsize=10)
pl.title('Non-dimensional Beam Displacement')
pl.xlabel('Non-dimensional position along the beam')
pl.ylabel('Non-dimensional displacement')
pl.hold(False)
fig2 = pl.figure(2)
pl.hold(True)
pl.plot(ZposAero/8.05,R1Aero,'b-',label='AeroComBAT R1',linewidth=3)
pl.plot(ZposAero/8.05,R2Aero,'g-',label='AeroComBAT R2',linewidth=3)
pl.plot(ZposAero/8.05,R3Aero,'r-',label='AeroComBAT R3',linewidth=3)
pl.plot(zposvec/8.05,R1avg,'c--',label='NASTRAN R1',linewidth=3)
pl.plot(zposvec/8.05,R2avg,'m--',label='NASTRAN R2',linewidth=3)
pl.plot(zposvec/8.05,R3avg,'k--',label='NASTRAN R3',linewidth=3)
pl.grid(True)
pl.legend(loc=2,fontsize=10)
pl.title('Beam Rotation')
pl.xlabel('Non-dimensional position along the beam')
pl.ylabel('Rotation, rad')
pl.hold(False)
'''
'''
def reorder(Q):
scram_vec = [0,1,5,2,4,3]
newMat = np.zeros((6,6))
for i in range(0,np.size(Q,axis=0)):
for j in range(0,np.size(Q,axis=1)):
newMat[i,j] = Q[scram_vec[i],scram_vec[j]]
return newMat
Q1 = xsect_Lay3.elemDict[0].Q
Q3 = xsect_Lay3.elemDict[1692].Q
Q2_p15 = xsect_Lay3.elemDict[1135].Q
Q2_m15 = xsect_Lay3.elemDict[1134].Q
Q4_p15 = xsect_Lay3.elemDict[2830].Q
Q4_m15 = xsect_Lay3.elemDict[2831].Q
Q1_Nast = reorder(Q1)
Q3_Nast = reorder(Q3)
Q2_p15_Nast = reorder(Q2_p15)
Q2_m15_Nast = reorder(Q2_m15)
Q4_p15_Nast = reorder(Q4_p15)
Q4_m15_Nast = reorder(Q4_m15)'''
# True scratch
from matplotlib import pylab as plt
x = np.linspace(0,1,7)
xsub = []
for i in range(0,len(x)-1):
xsub+=[np.linspace(x[i],x[i+1],30)]
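# x splits [0,1] into 6 equal elements; xsub samples each element at 30 points
# so the per-element curves below can be drawn independently.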
plt.figure(1)
for i in range(0,len(x)-1):
plt.plot(xsub[i],xsub[i]**2)
plt.grid(True)
plt.xlabel('Non-dimensional length along the beam, z/L')
plt.ylabel('Non-dimensional displacement, u(z)/L')
plt.title('Displacement of a Cantilever Beam Under Pure Bending')
plt.figure(2)
for i in range(0,len(x)-1):
    plt.plot(xsub[i],2*xsub[0]-max(xsub[0]))  # the same ramp repeats on every element, giving a sawtooth profile
plt.grid(True)
plt.ylim([-1,1])
plt.xlabel('Non-dimensional length along the beam, z/L')
plt.ylabel('Non-dimensional Generalized Beam Shear Strain, Psi(z)/L')
plt.title('Shear Strain of a Cantilever Beam Under Pure Bending')
|
|
##########################################################################
#
# Copyright (c) 2013-2015, Image Engine Design Inc. All rights reserved.
# Copyright (c) 2015, Nvizible Ltd. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import os
import imath
import IECore
import Gaffer
import GafferTest
import GafferImage
import GafferImageTest
class CropTest( GafferImageTest.ImageTestCase ) :
imageFileUndersizeDataWindow = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/blueWithDataWindow.100x100.exr" )
imageFileOversizeDataWindow = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/checkerWithNegWindows.200x150.exr" )
representativeDeepImagePath = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/representativeDeepImage.exr" )
def testDefaultState( self ) :
crop = GafferImage.Crop()
self.assertEqual( crop["areaSource"].getValue(), GafferImage.Crop.AreaSource.Area )
self.assertTrue( crop["area"].getValue().isEmpty() )
self.assertEqual( crop["affectDataWindow"].getValue(), True )
self.assertEqual( crop["affectDisplayWindow"].getValue(), True )
def testCompatibility( self ) :
self.assertEqual( GafferImage.Crop.AreaSource.Custom, GafferImage.Crop.AreaSource.Area )
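	# A crop that leaves the data in place (resetOrigin off) must pass channel
	# data, metadata and channel names through unchanged, while still producing
	# a new format and data window.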
def testPassThrough( self ) :
i = GafferImage.ImageReader()
i["fileName"].setValue( self.imageFileUndersizeDataWindow )
crop = GafferImage.Crop()
crop["in"].setInput(i["out"])
crop["areaSource"].setValue( GafferImage.Crop.AreaSource.Area )
crop["area"].setValue( imath.Box2i( imath.V2i( 40 ), imath.V2i( 50 ) ) )
crop["affectDataWindow"].setValue( True )
crop["affectDisplayWindow"].setValue( True )
crop["resetOrigin"].setValue( False )
self.assertEqual(i['out'].channelDataHash( "R", imath.V2i( 0 ) ), crop['out'].channelDataHash( "R", imath.V2i( 0 ) ) )
self.assertEqual(i['out'].channelDataHash( "G", imath.V2i( 0 ) ), crop['out'].channelDataHash( "G", imath.V2i( 0 ) ) )
self.assertEqual(i['out'].channelDataHash( "B", imath.V2i( 0 ) ), crop['out'].channelDataHash( "B", imath.V2i( 0 ) ) )
self.assertEqual( i["out"]["metadata"].hash(), crop["out"]["metadata"].hash() )
self.assertEqual( i["out"]["channelNames"].hash(), crop["out"]["channelNames"].hash() )
self.assertNotEqual( i["out"]["format"].hash(), crop["out"]["format"].hash() )
self.assertNotEqual( i["out"]["dataWindow"].hash(), crop["out"]["dataWindow"].hash() )
self.assertEqual( i["out"]["metadata"].getValue(), crop["out"]["metadata"].getValue() )
self.assertEqual( i["out"]["channelNames"].getValue(), crop["out"]["channelNames"].getValue() )
self.assertNotEqual( i["out"]["format"].getValue(), crop["out"]["format"].getValue() )
self.assertNotEqual( i["out"]["dataWindow"].getValue(), crop["out"]["dataWindow"].getValue() )
def testEnableBehaviour( self ) :
crop = GafferImage.Crop()
self.assertTrue( crop.enabledPlug().isSame( crop["enabled"] ) )
self.assertTrue( crop.correspondingInput( crop["out"] ).isSame( crop["in"] ) )
self.assertEqual( crop.correspondingInput( crop["in"] ), None )
self.assertEqual( crop.correspondingInput( crop["enabled"] ), None )
def testAreaFormat( self ) :
constant = GafferImage.Constant()
constant['format'].setValue( GafferImage.Format( 1024, 576 ) )
crop1 = GafferImage.Crop()
crop1['in'].setInput( constant['out'] )
crop1['areaSource'].setValue( GafferImage.Crop.AreaSource.Format )
crop1['format'].setValue( GafferImage.Format( 2048, 1152 ) )
crop2 = GafferImage.Crop()
crop2['in'].setInput( constant['out'] )
crop2['areaSource'].setValue( GafferImage.Crop.AreaSource.Area )
crop2['area'].setValue( imath.Box2i( imath.V2i( 0, 0 ), imath.V2i( 2048, 1152 ) ) )
self.assertEqual( crop1['out']['dataWindow'].getValue(), crop2['out']['dataWindow'].getValue() )
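		# With formatCenter enabled the crop area is centred on the input format:
		# a 2048x1152 area on a 1024x576 format becomes (-512,-288)-(1536,864).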
crop1['formatCenter'].setValue( True )
crop2['area'].setValue( imath.Box2i( imath.V2i( -512, -288 ), imath.V2i( 1536, 864 ) ) )
crop2['resetOrigin'].setValue( True )
self.assertEqual( crop1['out']['dataWindow'].getValue(), crop2['out']['dataWindow'].getValue() )
def testAffectDataWindow( self ) :
i = GafferImage.ImageReader()
i["fileName"].setValue( self.imageFileUndersizeDataWindow )
crop = GafferImage.Crop()
crop["in"].setInput(i["out"])
crop["areaSource"].setValue( GafferImage.Crop.AreaSource.Area )
crop["area"].setValue( imath.Box2i( imath.V2i( 40 ), imath.V2i( 50 ) ) )
crop["affectDataWindow"].setValue( True )
crop["affectDisplayWindow"].setValue( False )
self.assertEqual( crop["out"]["dataWindow"].getValue(), imath.Box2i( imath.V2i( 40 ), imath.V2i( 50 ) ) )
self.assertEqual( i["out"]["format"].getValue(), crop["out"]["format"].getValue() )
def testAffectDisplayWindow( self ) :
i = GafferImage.ImageReader()
i["fileName"].setValue( self.imageFileUndersizeDataWindow )
crop = GafferImage.Crop()
crop["in"].setInput(i["out"])
crop["areaSource"].setValue( GafferImage.Crop.AreaSource.Area )
crop["area"].setValue( imath.Box2i( imath.V2i( 40 ), imath.V2i( 50 ) ) )
crop["affectDataWindow"].setValue( False )
crop["affectDisplayWindow"].setValue( True )
crop["resetOrigin"].setValue( False )
self.assertEqual( crop["out"]["format"].getValue().getDisplayWindow(), imath.Box2i( imath.V2i( 40 ), imath.V2i( 50 ) ) )
self.assertEqual( i["out"]["dataWindow"].getValue(), crop["out"]["dataWindow"].getValue() )
crop["resetOrigin"].setValue( True )
self.assertEqual( crop["out"]["format"].getValue().getDisplayWindow(), imath.Box2i( imath.V2i( 0 ), imath.V2i( 10 ) ) )
self.assertEqual( crop["out"]["dataWindow"].getValue(), imath.Box2i( imath.V2i( -10 ), imath.V2i( 40 ) ) )
def testIntersectDataWindow( self ) :
i = GafferImage.ImageReader()
i["fileName"].setValue( self.imageFileUndersizeDataWindow )
crop = GafferImage.Crop()
crop["in"].setInput(i["out"])
crop["areaSource"].setValue( GafferImage.Crop.AreaSource.Area )
crop["area"].setValue( imath.Box2i( imath.V2i( 0 ), imath.V2i( 50 ) ) )
crop["affectDataWindow"].setValue( True )
crop["affectDisplayWindow"].setValue( False )
self.assertEqual( crop["out"]["dataWindow"].getValue(), imath.Box2i( imath.V2i( 30 ), imath.V2i( 50 ) ) )
def testDataWindowToDisplayWindow( self ) :
i = GafferImage.ImageReader()
i["fileName"].setValue( self.imageFileUndersizeDataWindow )
crop = GafferImage.Crop()
crop["in"].setInput(i["out"])
crop["areaSource"].setValue( GafferImage.Crop.AreaSource.DataWindow )
crop["affectDataWindow"].setValue( False )
crop["affectDisplayWindow"].setValue( True )
crop["resetOrigin"].setValue( False )
self.assertEqual( i["out"]["dataWindow"].getValue(), crop["out"]["format"].getValue().getDisplayWindow() )
self.assertEqual( crop["out"]["dataWindow"].getValue(), i["out"]["dataWindow"].getValue() )
crop["resetOrigin"].setValue( True )
self.assertEqual( crop["out"]["format"].getValue().getDisplayWindow(), imath.Box2i( imath.V2i( 0 ), imath.V2i( 50 ) ) )
self.assertEqual( crop["out"]["dataWindow"].getValue(), imath.Box2i( imath.V2i( 0 ), imath.V2i( 50 ) ) )
def testDisplayWindowToDataWindow( self ) :
i = GafferImage.ImageReader()
i["fileName"].setValue( self.imageFileOversizeDataWindow )
crop = GafferImage.Crop()
crop["in"].setInput(i["out"])
crop["areaSource"].setValue( GafferImage.Crop.AreaSource.DisplayWindow )
crop["affectDataWindow"].setValue( True )
crop["affectDisplayWindow"].setValue( False )
self.assertEqual( i["out"]["format"].getValue().getDisplayWindow(), crop["out"]["dataWindow"].getValue() )
def testAffects( self ) :
c = GafferImage.Crop()
self.assertEqual(
set( c.affects( c["affectDisplayWindow"] ) ),
{ c["out"]["format"], c["__offset"]["x"], c["__offset"]["y"] }
)
self.assertEqual(
set( c.affects( c["affectDataWindow"] ) ),
{ c["__cropDataWindow"] }
)
self.assertTrue( c["__cropDataWindow"] in set( c.affects( c["in"]["dataWindow"] ) ) )
self.assertTrue( c["out"]["format"] in set( c.affects( c["in"]["format"] ) ) )
def testResetOrigin( self ) :
constant = GafferImage.Constant()
constant["format"].setValue( GafferImage.Format( 100, 200, 1 ) )
crop = GafferImage.Crop()
crop["in"].setInput( constant["out"] )
self.assertEqual( crop["affectDisplayWindow"].getValue(), True )
self.assertEqual( crop["affectDataWindow"].getValue(), True )
self.assertEqual( crop["resetOrigin"].getValue(), True )
area = imath.Box2i( imath.V2i( 50 ), imath.V2i( 100, 190 ) )
crop["area"].setValue( area )
self.assertEqual( crop["out"]["format"].getValue().getDisplayWindow(), imath.Box2i( imath.V2i( 0 ), area.size() ) )
self.assertEqual( crop["out"]["dataWindow"].getValue(), imath.Box2i( imath.V2i( 0 ), area.size() ) )
crop["resetOrigin"].setValue( False )
self.assertEqual( crop["out"]["format"].getValue().getDisplayWindow(), area )
self.assertEqual( crop["out"]["dataWindow"].getValue(), area )
# If we're not affecting the display window, then the reset origin flag
# should be ignored.
crop["resetOrigin"].setValue( True )
crop["affectDisplayWindow"].setValue( False )
self.assertEqual( crop["out"]["format"].getValue(), crop["in"]["format"].getValue() )
self.assertEqual( crop["out"]["dataWindow"].getValue(), area )
# But if we are affecting the display window, and we are resetting the origin,
# the data window should be offset even if affectDataWindow is off.
crop["affectDisplayWindow"].setValue( True )
crop["affectDataWindow"].setValue( False )
self.assertEqual( crop["out"]["format"].getValue().getDisplayWindow(), imath.Box2i( imath.V2i( 0 ), area.size() ) )
self.assertEqual( crop["out"]["dataWindow"].getValue(), imath.Box2i( imath.V2i( -50 ), imath.V2i( 50, 150 ) ) )
def testEmptyInput( self ) :
crop = GafferImage.Crop()
crop["area"]["min"].setValue( imath.V2i( 20 ) )
self.assertTrue( GafferImage.BufferAlgo.empty( crop["out"]["dataWindow"].getValue() ) )
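	# Cropping a deep image and then flattening it should match flattening
	# first and cropping afterwards, for a range of crop areas.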
def testDeep( self ) :
representativeDeep = GafferImage.ImageReader()
representativeDeep["fileName"].setValue( self.representativeDeepImagePath )
deepCrop = GafferImage.Crop()
deepCrop["in"].setInput( representativeDeep["out"] )
postFlatten = GafferImage.DeepToFlat()
postFlatten["in"].setInput( deepCrop["out"] )
preFlatten = GafferImage.DeepToFlat()
preFlatten["in"].setInput( representativeDeep["out"] )
flatCrop = GafferImage.Crop()
flatCrop["in"].setInput( preFlatten["out"] )
dataWindow = representativeDeep["out"].dataWindow()
		for affectDisplay in [ True, False ]:
			# apply the flag so both display-window modes are actually exercised
			deepCrop["affectDisplayWindow"].setValue( affectDisplay )
			flatCrop["affectDisplayWindow"].setValue( affectDisplay )
for area in [
imath.Box2i( imath.V2i( 0, 0 ), imath.V2i( 150, 100 ) ),
imath.Box2i( imath.V2i( -10, -13 ), imath.V2i( 157, 103 ) ),
imath.Box2i( imath.V2i( 10, 13 ), imath.V2i( 143, 77 ) ),
imath.Box2i( imath.V2i( 37, 65 ), imath.V2i( 101, 67 ) ),
imath.Box2i( imath.V2i( 0, 0 ), imath.V2i( 149, 99 ) )
] :
deepCrop["area"].setValue( area )
flatCrop["area"].setValue( area )
self.assertImagesEqual( postFlatten["out"], flatCrop["out"] )
def testFormatAffectsOutput( self ) :
crop = GafferImage.Crop()
cs = GafferTest.CapturingSlot( crop.plugDirtiedSignal() )
crop["format"].setValue( GafferImage.Format( 100, 200 ) )
self.assertIn( crop["out"]["dataWindow"], { x[0] for x in cs } )
if __name__ == "__main__":
unittest.main()
|
|
#! /usr/bin/env python
"""
arast-client -- command-line client for Assembly RAST
"""
import argparse
import errno
import json
import logging
import os
import sys
import time
from ConfigParser import SafeConfigParser
from assembly import asmtypes
from assembly import auth
from assembly import client
from assembly import config as conf
from assembly import shock
from assembly import utils
from assembly import __version__
CLIENT_VERSION = __version__
CLIENT_NAME = 'CLI'
# Config precedence: command-line args > environment variables > config file
ARAST_URL = os.getenv('ARAST_URL') or conf.URL
ARAST_QUEUE = os.getenv('ARAST_QUEUE')
ARAST_AUTH_TOKEN = os.getenv('ARAST_AUTH_TOKEN') or os.getenv('KB_AUTH_TOKEN')
ARAST_AUTH_USER = os.getenv('ARAST_AUTH_USER') or os.getenv('KB_AUTH_USER_ID') or utils.parse_user_from_token(ARAST_AUTH_TOKEN)
ARAST_AUTH_SERVICE = os.getenv('ARAST_AUTH_SERVICE')
ARAST_ENVIRON = None
if os.getenv('KB_RUNNING_IN_IRIS'):
ARAST_ENVIRON = 'IRIS'
def get_parser():
parser = argparse.ArgumentParser(prog='arast', epilog='Use "arast command -h" for more information about a command.')
parser.add_argument('-s', dest='arast_url', help='arast server url')
parser.add_argument('-c', '--config', action="store", help='Specify config file')
parser.add_argument("-v", "--verbose", action="store_true", help="Verbose")
parser.add_argument('--version', action='version', version='AssemblyRAST Client ' + CLIENT_VERSION)
subparsers = parser.add_subparsers(dest='command', title='The commands are')
p_upload = subparsers.add_parser('upload', description='Upload a read set', help='Upload a read library or set of libraries, returns a data ID for future use')
p_run = subparsers.add_parser('run', description='Run an Assembly RAST job', help='run job')
p_stat = subparsers.add_parser('stat', description='Query status of running jobs', help='list jobs status')
p_avail = subparsers.add_parser('avail', description='List available AssemblyRAST modules', help='list available modules or recipes')
p_kill = subparsers.add_parser('kill', description='Send a kill signal to jobs', help='kill jobs')
p_get = subparsers.add_parser('get', description='Get result data', help='Get data')
p_login = subparsers.add_parser('login', description='Force log in', help='log in')
p_logout = subparsers.add_parser('logout', description='Log out', help='log out')
# upload options
p_upload.add_argument("-f", action="append", dest="single", nargs='*', help="specify sequence file(s)")
p_upload.add_argument("--pair", action="append", dest="pair", nargs='*', help="Specify a paired-end library and parameters")
p_upload.add_argument("--pair_url", action="append", dest="pair_url", nargs='*', help="Specify URLs for a paired-end library and parameters")
p_upload.add_argument("--single", action="append", dest="single", nargs='*', help="Specify a single end file and parameters")
p_upload.add_argument("--single_url", action="append", dest="single_url", nargs='*', help="Specify a URL for a single end file and parameters")
p_upload.add_argument("--reference", action="append", dest="reference", nargs='*', help="specify a reference contig file")
p_upload.add_argument("--reference_url", action="append", dest="reference_url", nargs='*', help="Specify a URL for a reference contig file and parameters")
p_upload.add_argument("--contigs", action="append", dest="contigs", nargs='*', help="specify a contig file")
p_upload.add_argument("-m", "--message", action="store", dest="message", help="Attach a description to job")
p_upload.add_argument("--curl", action="store_true", help="Use curl for http requests")
p_upload.add_argument("--json", action="store_true", help="Print data info json object")
# run options
p_run.add_argument("-f", action="append", dest="single", nargs='*', help="specify sequence file(s)")
p_run.add_argument("-m", "--message", action="store", dest="message", help="Attach a description to job")
p_run.add_argument("-q", "--queue", action="store", dest="queue", help=argparse.SUPPRESS)
p_run.add_argument("--pair", action="append", dest="pair", nargs='*', help="Specify a paired-end library and parameters")
p_run.add_argument("--pair_url", action="append", dest="pair_url", nargs='*', help="Specify URLs for a paired-end library and parameters")
p_run.add_argument("--single", action="append", dest="single", nargs='*', help="Specify a single end file and parameters")
p_run.add_argument("--single_url", action="append", dest="single_url", nargs='*', help="Specify a URL for a single end file and parameters")
p_run.add_argument("--reference", action="append", dest="reference", nargs='*', help="specify sequence file(s)")
p_run.add_argument("--reference_url", action="append", dest="reference_url", nargs='*', help="Specify a URL for a reference contig file and parameters")
p_run.add_argument("--contigs", action="append", dest="contigs", nargs='*', help="specify a contig file")
p_run.add_argument("--curl", action="store_true", help="Use curl for http requests")
data_group = p_run.add_mutually_exclusive_group()
data_group.add_argument("--data", action="store", dest="data_id", help="Reuse uploaded data")
data_group.add_argument("--data-json", action="store", dest="data_json", help="Reuse uploaded data from a json object")
cmd_group = p_run.add_mutually_exclusive_group()
cmd_group.add_argument("-a", "--assemblers", action="store", dest="assemblers", nargs='*', help="specify assemblers to use. None will invoke automatic mode")
cmd_group.add_argument("-p", "--pipeline", action="append", dest="pipeline", nargs='*', help="invoke a pipeline. None will invoke automatic mode")
cmd_group.add_argument("-r", "--recipe", action="store", dest="recipe", nargs='*', help="invoke a recipe")
cmd_group.add_argument("-w", "--wasp", action="store", dest="wasp", nargs='*', help="invoke a wasp expression")
# stat options
p_stat.add_argument("-j", "--job", action="store", help="get status of specific job")
p_stat.add_argument("-w", "--watch", action="store_true", help="monitor in realtime")
p_stat.add_argument("-n", dest="stat_n", action="store", default=10, type=int, help="specify number of records to show")
p_stat.add_argument("-d", "--detail", action="store_true", help="show pipeline/recipe/wasp details in status table")
p_stat.add_argument("-l", "--list-data", action="store_true", dest="list_data", help="list data objects")
p_stat.add_argument("--data-json", action="store", dest="data_id", help="print json string for data object")
# avail options
p_avail.add_argument("-r", "--recipe", action="store_true", help="list recipes")
p_avail.add_argument("-d", "--detail", action="store_true", help="show module or recipe details")
# kill options
p_kill.add_argument("-j", "--job", action="store", help="kill specific job")
p_kill.add_argument("-a", "--all", action="store_true", help="kill all user jobs")
# get options
p_get.add_argument("-j", "--job", action="store", required=True, help="Specify which job data to get")
p_get.add_argument("-a", "--assembly", action="store", nargs='?', default=None, const=True, help="Download an assembly or assemblies")
p_get.add_argument("-p", "--pick", action="store", nargs='?', default=None, const=True, help="Print an assembly")
p_get.add_argument("-r", "--report", action="store_true", help="Print assembly stats report")
p_get.add_argument("-l", "--log", action="store_true", help="Print assembly job log")
p_get.add_argument("-o", "--outdir", action="store", help="Download to specified dir")
p_get.add_argument("-w", "--wait", action="store", nargs='?', const=True, help="Wait until job is done")
# login options
p_login.add_argument("--rast", action="store_true", help="Log in using RAST account")
return parser
def cmd_login(args):
auth_service = 'KBase'
try:
auth_service = conf.AUTH_SERVICE if conf.AUTH_SERVICE else auth_service
except AttributeError:
pass
if ARAST_AUTH_SERVICE:
auth_service = ARAST_AUTH_SERVICE
auth_service = 'RAST' if args.rast else auth_service
auth.authenticate(service=auth_service, save=True)
sys.stderr.write('[.] Logged in\n')
def cmd_logout(args):
auth.remove_stored_token()
sys.stderr.write('[.] Logged out\n')
def cmd_upload(args, aclient, usage, log=None):
data = prepare_assembly_data(args, aclient, usage)
arast_msg = {'assembly_data': data,
'client': CLIENT_NAME,
'version': CLIENT_VERSION}
payload = json.dumps(arast_msg, sort_keys=True)
if log:
log.debug(" [.] Sending upload message: %r" % (payload))
response = aclient.submit_data(payload)
arast_msg.update(json.loads(response))
if args.json:
print payload
else:
print 'Data ID: {}'.format(arast_msg['data_id'])
def cmd_run(args, aclient, usage, log=None):
if args.data_id:
data = None
elif args.data_json:
data = utils.load_json_from_file(args.data_json)
else:
data = prepare_assembly_data(args, aclient, usage)
if args.assemblers:
args.pipeline = [(" ".join(args.assemblers))]
options = vars(args)
options['client'] = CLIENT_NAME
options['version'] = CLIENT_VERSION
queue = args.queue or ARAST_QUEUE
if queue: options['queue'] = queue
keys = ['pipeline', 'recipe', 'wasp', 'message',
'data_id', 'queue', 'version', 'client']
arast_msg = dict((k, options[k]) for k in keys if k in options)
if data:
        if 'file_sets' in data:  # json is an assembly data spec (file_sets)
arast_msg['assembly_data'] = data
elif 'assembly_data' in data: # from: --json data.json
arast_msg['assembly_data'] = data['assembly_data']
else:
arast_msg['kbase_assembly_input'] = data
payload = json.dumps(arast_msg, sort_keys=True)
if log:
log.debug(" [.] Sending run message: %r" % (payload))
response = aclient.submit_job(payload)
print 'Job ID: {}'.format(response)
def cmd_stat(args, aclient):
if args.list_data:
table = aclient.get_data_list_table(args.stat_n)
print table
sys.exit()
if args.data_id:
data_json = aclient.get_data_json(args.data_id)
print data_json
sys.exit()
while True:
response = aclient.get_job_status(args.stat_n, args.job, detail=args.detail)
if args.watch:
os.system('clear')
print response
if not args.watch:
break
else:
print 'Press CTRL-C to quit.'
### Spinner loop
spinners = ['-', '\\', '|', '/']
sleep_seconds = 25
spins_per_sec = 4
for i in range(sleep_seconds * spins_per_sec):
os.system('clear')
                print '[{}] Assembly Service Status'.format(spinners[i%4])
print response
print 'Press CTRL-C to quit.'
time.sleep(1.0/spins_per_sec)
def cmd_get(args, aclient):
aclient.validate_job(args.job)
if args.wait:
if type(args.wait) is str:
stat = aclient.wait_for_job(args.job, args.wait)
else:
stat = aclient.wait_for_job(args.job)
if 'FAIL' in stat:
print 'Job failed: ', stat
sys.exit()
else:
aclient.check_job(args.job)
if args.report:
report = aclient.get_job_report(args.job)
if report: print report
elif args.log:
joblog = aclient.get_job_log(args.job)
if joblog: print joblog
elif args.pick:
# the assembly ID can be supplied by either argument
asm1 = args.pick if type(args.pick) is str else None
asm2 = args.assembly if type(args.assembly) is str else None
# pick the best assembly by default
asm = asm1 or asm2 or 'auto'
aclient.get_assemblies(args.job, asm, stdout=True)
elif args.assembly:
# download all assemblies by default
asm = args.assembly if type(args.assembly) is str else None
aclient.get_assemblies(args.job, asm, outdir=args.outdir)
else:
aclient.get_job_data(job_id=args.job, outdir=args.outdir)
def cmd_avail(args, aclient):
if args.recipe:
recipes = json.loads(aclient.get_available_recipes())
client.print_recipes(recipes, args.detail)
else:
mods = json.loads(aclient.get_available_modules())
mods = sorted(mods, key=lambda mod: mod['module'])
client.print_modules(mods, args.detail)
def prepare_assembly_data(args, aclient, usage):
"""Parses args and uploads files
returns data spec for submission in run/upload commands"""
if not (args.pair or args.single or args.pair_url or args.single_url or args.contigs):
sys.exit(usage)
adata = client.AssemblyData()
curl = args.curl
res_ids = []
files = []
file_sizes = []
file_list = []
file_lists = []
all_lists = [args.pair, args.pair_url, args.single, args.single_url,
args.reference, args.reference_url, args.contigs]
all_types = ['paired', 'paired_url', 'single', 'single_url',
'reference', 'reference_url', 'contigs']
for li in all_lists:
if li is None:
file_lists.append([])
else:
file_lists.append(li)
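    # Reject any input file that appears in more than one library argument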
seen = {}
for f_list in file_lists:
for ls in f_list:
for word in ls:
if '=' not in word:
if word in seen:
sys.exit('Input error: duplicated file: {}'.format(word))
else:
seen[word] = True
for f_list, f_type in zip(file_lists, all_types):
for ls in f_list:
f_infos = []
f_set_args = {}
for word in ls:
if '=' in word:
key, val = word.split('=')
f_set_args[key] = val
elif os.path.isfile(word):
f_info = aclient.upload_data_file_info(word, curl=curl)
f_infos.append(f_info)
elif f_type.endswith('_url'):
file_url = utils.verify_url(word)
f_info = asmtypes.FileInfo(direct_url=file_url)
f_infos.append(f_info)
else:
sys.exit('Invalid input: {}: {}'.format(f_type, word))
f_set = asmtypes.FileSet(f_type, f_infos, **f_set_args)
adata.add_set(f_set)
return adata
def run_command():
parser = get_parser()
args = parser.parse_args()
usage = parser.format_usage()
frmt = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
sh = logging.StreamHandler()
sh.setLevel(logging.DEBUG)
sh.setFormatter(frmt)
clientlog = logging.getLogger('client')
clientlog.setLevel(logging.INFO)
clientlog.addHandler(sh)
if args.verbose:
clientlog.setLevel(logging.DEBUG)
clientlog.debug("Logger Debugging mode")
if args.command == 'login':
cmd_login(args)
sys.exit()
if args.command == 'logout':
cmd_logout(args)
sys.exit()
a_user, a_token = auth.verify_token(ARAST_AUTH_USER,ARAST_AUTH_TOKEN)
if not a_user or not a_token:
if ARAST_ENVIRON:
sys.exit('Please use the {} controls to authenticate'.format(ARAST_ENVIRON))
else:
sys.stderr.write('You can use the login/logout commands to authenticate\n')
a_user, a_token = auth.authenticate()
# main command options
a_url = args.arast_url or ARAST_URL
a_url = utils.verify_url(a_url)
logging.info('ARAST_URL: {}'.format(a_url))
aclient = client.Client(a_url, a_user, a_token)
if args.command == 'upload':
cmd_upload(args, aclient, usage, clientlog)
elif args.command == 'run':
cmd_run(args, aclient, usage, clientlog)
elif args.command == 'stat':
cmd_stat(args, aclient)
elif args.command == 'get':
cmd_get(args, aclient)
elif args.command == 'avail':
cmd_avail(args, aclient)
elif args.command == 'kill':
print aclient.kill_jobs(args.job)
def main():
try:
run_command()
except KeyboardInterrupt:
sys.exit()
except IOError as e:
if e.errno != errno.EPIPE:
raise
except auth.Error as e:
sys.exit('Authentication error: {}'.format(e))
except shock.Error as e:
sys.exit('Shock error: {}'.format(e))
except client.URLError as e:
sys.exit('Invalid URL: {}'.format(e))
except client.ConnectionError as e:
sys.exit('Connection error: {}'.format(e))
except client.HTTPError as e:
sys.exit('HTTP error: {}'.format(e))
except client.Error as e:
sys.exit('Error: {}'.format(e))
# unexpected error types are not caught above, so they print a full stack trace
if __name__ == '__main__':
main()
|
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'PyConTalkProposal.extreme'
db.delete_column(u'pycon_pycontalkproposal', 'extreme')
# Adding field 'PyConTalkProposal.outline'
db.add_column(u'pycon_pycontalkproposal', 'outline',
self.gf('django.db.models.fields.TextField')(default=''),
keep_default=False)
# Adding field 'PyConTalkProposal.audience'
db.add_column(u'pycon_pycontalkproposal', 'audience',
self.gf('django.db.models.fields.CharField')(default='', max_length=150),
keep_default=False)
# Adding field 'PyConTalkProposal.perceived_value'
db.add_column(u'pycon_pycontalkproposal', 'perceived_value',
self.gf('django.db.models.fields.TextField')(default='', max_length=500),
keep_default=False)
# Adding field 'PyConTutorialProposal.audience'
db.add_column(u'pycon_pycontutorialproposal', 'audience',
self.gf('django.db.models.fields.CharField')(default='', max_length=150),
keep_default=False)
# Adding field 'PyConTutorialProposal.perceived_value'
db.add_column(u'pycon_pycontutorialproposal', 'perceived_value',
self.gf('django.db.models.fields.TextField')(default='', max_length=500),
keep_default=False)
def backwards(self, orm):
# Adding field 'PyConTalkProposal.extreme'
db.add_column(u'pycon_pycontalkproposal', 'extreme',
self.gf('django.db.models.fields.BooleanField')(default=False),
keep_default=False)
# Deleting field 'PyConTalkProposal.outline'
db.delete_column(u'pycon_pycontalkproposal', 'outline')
# Deleting field 'PyConTalkProposal.audience'
db.delete_column(u'pycon_pycontalkproposal', 'audience')
# Deleting field 'PyConTalkProposal.perceived_value'
db.delete_column(u'pycon_pycontalkproposal', 'perceived_value')
# Deleting field 'PyConTutorialProposal.audience'
db.delete_column(u'pycon_pycontutorialproposal', 'audience')
# Deleting field 'PyConTutorialProposal.perceived_value'
db.delete_column(u'pycon_pycontutorialproposal', 'perceived_value')
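    # Frozen ORM snapshot: South uses this, not the live models.py, to
    # reconstruct the model state as of this migration.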
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'conference.conference': {
'Meta': {'object_name': 'Conference'},
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'timezone': ('timezones.fields.TimeZoneField', [], {'default': "'US/Eastern'", 'max_length': '100', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'conference.section': {
'Meta': {'object_name': 'Section'},
'conference': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['conference.Conference']"}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'proposals.additionalspeaker': {
'Meta': {'unique_together': "(('speaker', 'proposalbase'),)", 'object_name': 'AdditionalSpeaker', 'db_table': "'proposals_proposalbase_additional_speakers'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'proposalbase': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['proposals.ProposalBase']"}),
'speaker': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['speakers.Speaker']"}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1'})
},
u'proposals.proposalbase': {
'Meta': {'object_name': 'ProposalBase'},
'abstract': ('django.db.models.fields.TextField', [], {}),
'additional_notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'additional_speakers': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['speakers.Speaker']", 'symmetrical': 'False', 'through': u"orm['proposals.AdditionalSpeaker']", 'blank': 'True'}),
'cancelled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '400'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kind': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['proposals.ProposalKind']"}),
'speaker': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'proposals'", 'to': u"orm['speakers.Speaker']"}),
'submitted': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'proposals.proposalkind': {
'Meta': {'object_name': 'ProposalKind'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'section': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'proposal_kinds'", 'to': u"orm['conference.Section']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'})
},
u'pycon.pyconposterproposal': {
'Meta': {'object_name': 'PyConPosterProposal'},
'audience_level': ('django.db.models.fields.IntegerField', [], {}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['pycon.PyConProposalCategory']"}),
'damaged_score': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'overall_status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
u'proposalbase_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['proposals.ProposalBase']", 'unique': 'True', 'primary_key': 'True'}),
'recording_release': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'rejection_status': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'pycon.pyconproposalcategory': {
'Meta': {'object_name': 'PyConProposalCategory'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'})
},
u'pycon.pyconsponsortutorialproposal': {
'Meta': {'object_name': 'PyConSponsorTutorialProposal', '_ormbases': [u'proposals.ProposalBase']},
u'proposalbase_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['proposals.ProposalBase']", 'unique': 'True', 'primary_key': 'True'})
},
u'pycon.pycontalkproposal': {
'Meta': {'object_name': 'PyConTalkProposal'},
'audience': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'audience_level': ('django.db.models.fields.IntegerField', [], {}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['pycon.PyConProposalCategory']"}),
'damaged_score': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'duration': ('django.db.models.fields.IntegerField', [], {}),
'outline': ('django.db.models.fields.TextField', [], {}),
'overall_status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'perceived_value': ('django.db.models.fields.TextField', [], {'max_length': '500'}),
u'proposalbase_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['proposals.ProposalBase']", 'unique': 'True', 'primary_key': 'True'}),
'recording_release': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'rejection_status': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'pycon.pycontutorialproposal': {
'Meta': {'object_name': 'PyConTutorialProposal'},
'audience': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'audience_level': ('django.db.models.fields.IntegerField', [], {}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['pycon.PyConProposalCategory']"}),
'damaged_score': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'domain_level': ('django.db.models.fields.IntegerField', [], {}),
'more_info': ('django.db.models.fields.TextField', [], {}),
'outline': ('django.db.models.fields.TextField', [], {}),
'overall_status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'perceived_value': ('django.db.models.fields.TextField', [], {'max_length': '500'}),
u'proposalbase_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['proposals.ProposalBase']", 'unique': 'True', 'primary_key': 'True'}),
'recording_release': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'rejection_status': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'speakers.speaker': {
'Meta': {'object_name': 'Speaker'},
'annotation': ('django.db.models.fields.TextField', [], {}),
'biography': ('django.db.models.fields.TextField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invite_email': ('django.db.models.fields.CharField', [], {'max_length': '200', 'unique': 'True', 'null': 'True', 'db_index': 'True'}),
'invite_token': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'sessions_preference': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'twitter_username': ('django.db.models.fields.CharField', [], {'max_length': '15', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'speaker_profile'", 'unique': 'True', 'null': 'True', 'to': u"orm['auth.User']"})
}
}
complete_apps = ['pycon']
|
|
# -*- coding: utf-8 -*-
from __future__ import division
import copy
import functools
import logging
import math
import re
import sys
import unicodedata
from framework import sentry
import six
from django.apps import apps
from django.core.paginator import Paginator
from django.db.models import Q
from elasticsearch import (ConnectionError, Elasticsearch, NotFoundError,
RequestError, TransportError, helpers)
from framework.celery_tasks import app as celery_app
from framework.database import paginated
from osf.models import AbstractNode
from osf.models import OSFUser
from osf.models import BaseFileNode
from osf.models import Institution
from osf.models import QuickFilesNode
from osf.utils.sanitize import unescape_entities
from website import settings
from website.filters import profile_image_url
from osf.models.licenses import serialize_node_license_record
from website.search import exceptions
from website.search.util import build_query, clean_splitters
from website.views import validate_page_num
logger = logging.getLogger(__name__)
# These are the doc_types that exist in the search database
ALIASES = {
'project': 'Projects',
'component': 'Components',
'registration': 'Registrations',
'user': 'Users',
'total': 'All OSF Results',
'file': 'Files',
'institution': 'Institutions',
'preprint': 'Preprints',
}
DOC_TYPE_TO_MODEL = {
'component': AbstractNode,
'project': AbstractNode,
'registration': AbstractNode,
'user': OSFUser,
'file': BaseFileNode,
'institution': Institution,
'preprint': AbstractNode,
}
# Prevent tokenizing and stop word removal.
NOT_ANALYZED_PROPERTY = {'type': 'string', 'index': 'not_analyzed'}
# Perform stemming on the field it's applied to.
ENGLISH_ANALYZER_PROPERTY = {'type': 'string', 'analyzer': 'english'}
INDEX = settings.ELASTIC_INDEX
CLIENT = None
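# Lazily create and cache a module-level Elasticsearch client; bail out at
# startup if the cluster cannot be reached.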
def client():
global CLIENT
if CLIENT is None:
try:
CLIENT = Elasticsearch(
settings.ELASTIC_URI,
request_timeout=settings.ELASTIC_TIMEOUT,
retry_on_timeout=True,
**settings.ELASTIC_KWARGS
)
logging.getLogger('elasticsearch').setLevel(logging.WARN)
logging.getLogger('elasticsearch.trace').setLevel(logging.WARN)
logging.getLogger('urllib3').setLevel(logging.WARN)
logging.getLogger('requests').setLevel(logging.WARN)
CLIENT.cluster.health(wait_for_status='yellow')
except ConnectionError:
message = (
'The SEARCH_ENGINE setting is set to "elastic", but there '
'was a problem starting the elasticsearch interface. Is '
'elasticsearch running?'
)
if settings.SENTRY_DSN:
try:
sentry.log_exception()
sentry.log_message(message)
except AssertionError: # App has not yet been initialized
logger.exception(message)
else:
logger.error(message)
            sys.exit(1)
return CLIENT
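# Decorator: run the wrapped call only when an ES client is available, and
# translate low-level elasticsearch errors into website.search exceptions.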
def requires_search(func):
def wrapped(*args, **kwargs):
if client() is not None:
try:
return func(*args, **kwargs)
except ConnectionError:
raise exceptions.SearchUnavailableError('Could not connect to elasticsearch')
except NotFoundError as e:
raise exceptions.IndexNotFoundError(e.error)
except RequestError as e:
if e.error == 'search_phase_execution_exception':
raise exceptions.MalformedQueryError('Failed to parse query')
if 'ParseException' in e.error: # ES 1.5
raise exceptions.MalformedQueryError(e.error)
if type(e.error) == dict: # ES 2.0
try:
root_cause = e.error['root_cause'][0]
if root_cause['type'] == 'query_parsing_exception':
raise exceptions.MalformedQueryError(root_cause['reason'])
except (AttributeError, KeyError):
pass
raise exceptions.SearchException(e.error)
except TransportError as e:
# Catch and wrap generic uncaught ES error codes. TODO: Improve fix for https://openscience.atlassian.net/browse/OSF-4538
raise exceptions.SearchException(e.error)
sentry.log_message('Elastic search action failed. Is elasticsearch running?')
raise exceptions.SearchUnavailableError('Failed to connect to elasticsearch')
return wrapped
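# Aggregation helpers: each issues a count-only search and reshapes the
# bucket output for the search() response below.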
@requires_search
def get_aggregations(query, doc_type):
query['aggregations'] = {
'licenses': {
'terms': {
'field': 'license.id'
}
}
}
res = client().search(index=INDEX, doc_type=doc_type, search_type='count', body=query)
ret = {
doc_type: {
item['key']: item['doc_count']
for item in agg['buckets']
}
for doc_type, agg in res['aggregations'].iteritems()
}
ret['total'] = res['hits']['total']
return ret
@requires_search
def get_counts(count_query, clean=True):
count_query['aggregations'] = {
'counts': {
'terms': {
'field': '_type',
}
}
}
res = client().search(index=INDEX, doc_type=None, search_type='count', body=count_query)
    counts = {x['key']: x['doc_count'] for x in res['aggregations']['counts']['buckets'] if x['key'] in ALIASES}
    counts['total'] = sum(counts.values())
return counts
@requires_search
def get_tags(query, index):
query['aggregations'] = {
'tag_cloud': {
'terms': {'field': 'tags'}
}
}
results = client().search(index=index, doc_type=None, body=query)
tags = results['aggregations']['tag_cloud']['buckets']
return tags
@requires_search
def search(query, index=None, doc_type='_all', raw=False):
"""Search for a query
    :param query: The substring of the username/project name/tag to search for
    :param index: Name of the Elasticsearch index to search (defaults to the module-level INDEX)
    :param doc_type: Restrict the search to this doc_type; '_all' searches every type
    :return: List of dictionaries, each containing the results, counts, tags and typeAliases
        results: All results returned by the query that are within the index and search type
        counts: A dictionary in which keys are types and values are counts for that type, e.g., counts['total'] is the sum of the other counts
        tags: A list of tags that are returned by the search query
        typeAliases: the doc_types that exist in the search database
"""
index = index or INDEX
tag_query = copy.deepcopy(query)
aggs_query = copy.deepcopy(query)
count_query = copy.deepcopy(query)
for key in ['from', 'size', 'sort']:
try:
del tag_query[key]
del aggs_query[key]
del count_query[key]
except KeyError:
pass
tags = get_tags(tag_query, index)
try:
del aggs_query['query']['filtered']['filter']
del count_query['query']['filtered']['filter']
except KeyError:
pass
aggregations = get_aggregations(aggs_query, doc_type=doc_type)
    counts = get_counts(count_query)
# Run the real query and get the results
raw_results = client().search(index=index, doc_type=doc_type, body=query)
results = [hit['_source'] for hit in raw_results['hits']['hits']]
return_value = {
'results': raw_results['hits']['hits'] if raw else format_results(results),
'counts': counts,
'aggs': aggregations,
'tags': tags,
'typeAliases': ALIASES
}
return return_value
def format_results(results):
ret = []
for result in results:
if result.get('category') == 'user':
result['url'] = '/profile/' + result['id']
elif result.get('category') == 'file':
parent_info = load_parent(result.get('parent_id'))
result['parent_url'] = parent_info.get('url') if parent_info else None
result['parent_title'] = parent_info.get('title') if parent_info else None
elif result.get('category') in {'project', 'component', 'registration', 'preprint'}:
result = format_result(result, result.get('parent_id'))
elif not result.get('category'):
continue
ret.append(result)
return ret
def format_result(result, parent_id=None):
parent_info = load_parent(parent_id)
formatted_result = {
'contributors': result['contributors'],
'wiki_link': result['url'] + 'wiki/',
# TODO: Remove unescape_entities when mako html safe comes in
'title': unescape_entities(result['title']),
'url': result['url'],
        'is_component': parent_info is not None,
'parent_title': unescape_entities(parent_info.get('title')) if parent_info else None,
'parent_url': parent_info.get('url') if parent_info is not None else None,
'tags': result['tags'],
'is_registration': (result['is_registration'] if parent_info is None
else parent_info.get('is_registration')),
'is_retracted': result['is_retracted'],
'is_pending_retraction': result['is_pending_retraction'],
'embargo_end_date': result['embargo_end_date'],
'is_pending_embargo': result['is_pending_embargo'],
'description': unescape_entities(result['description']),
'category': result.get('category'),
'date_created': result.get('date_created'),
'date_registered': result.get('registered_date'),
'n_wikis': len(result['wikis'] or []),
'license': result.get('license'),
'affiliated_institutions': result.get('affiliated_institutions'),
'preprint_url': result.get('preprint_url'),
}
return formatted_result
def load_parent(parent_id):
parent = AbstractNode.load(parent_id)
if parent and parent.is_public:
return {
'title': parent.title,
'url': parent.url,
'id': parent._id,
            'is_registration': parent.is_registration,
}
return None
COMPONENT_CATEGORIES = set(settings.NODE_CATEGORY_MAP.keys())
def get_doctype_from_node(node):
if node.is_registration:
return 'registration'
elif node.is_preprint:
return 'preprint'
elif node.parent_node is None:
# ElasticSearch categorizes top-level projects differently than children
return 'project'
elif node.category in COMPONENT_CATEGORIES:
return 'component'
else:
return node.category
@celery_app.task(bind=True, max_retries=5, default_retry_delay=60)
def update_node_async(self, node_id, index=None, bulk=False):
AbstractNode = apps.get_model('osf.AbstractNode')
node = AbstractNode.load(node_id)
try:
        update_node(node=node, index=index, bulk=bulk, async_update=True)
except Exception as exc:
self.retry(exc=exc)
@celery_app.task(bind=True, max_retries=5, default_retry_delay=60)
def update_user_async(self, user_id, index=None):
OSFUser = apps.get_model('osf.OSFUser')
user = OSFUser.objects.get(id=user_id)
try:
update_user(user, index)
except Exception as exc:
        self.retry(exc=exc)
def serialize_node(node, category):
elastic_document = {}
parent_id = node.parent_id
try:
normalized_title = six.u(node.title)
except TypeError:
normalized_title = node.title
normalized_title = unicodedata.normalize('NFKD', normalized_title).encode('ascii', 'ignore')
elastic_document = {
'id': node._id,
'contributors': [
{
'fullname': x['fullname'],
'url': '/{}/'.format(x['guids___id']) if x['is_active'] else None
}
for x in node._contributors.filter(contributor__visible=True).order_by('contributor___order')
.values('fullname', 'guids___id', 'is_active')
],
'title': node.title,
'normalized_title': normalized_title,
'category': category,
'public': node.is_public,
'tags': list(node.tags.filter(system=False).values_list('name', flat=True)),
'description': node.description,
'url': node.url,
'is_registration': node.is_registration,
'is_pending_registration': node.is_pending_registration,
'is_retracted': node.is_retracted,
'is_pending_retraction': node.is_pending_retraction,
'embargo_end_date': node.embargo_end_date.strftime('%A, %b. %d, %Y') if node.embargo_end_date else False,
'is_pending_embargo': node.is_pending_embargo,
'registered_date': node.registered_date,
'wikis': {},
'parent_id': parent_id,
'date_created': node.created,
'license': serialize_node_license_record(node.license),
'affiliated_institutions': list(node.affiliated_institutions.values_list('name', flat=True)),
'boost': int(not node.is_registration) + 1, # This is for making registered projects less relevant
'extra_search_terms': clean_splitters(node.title),
'preprint_url': node.preprint_url,
}
if not node.is_retracted:
for wiki in node.get_wiki_pages_latest():
# '.' is not allowed in field names in ES2
elastic_document['wikis'][wiki.wiki_page.page_name.replace('.', ' ')] = wiki.raw_text(node)
return elastic_document
@requires_search
def update_node(node, index=None, bulk=False, async_update=False):
from addons.osfstorage.models import OsfStorageFile
index = index or INDEX
for file_ in paginated(OsfStorageFile, Q(node=node)):
update_file(file_, index=index)
    is_qa_node = bool(
        set(settings.DO_NOT_INDEX_LIST['tags']).intersection(node.tags.all().values_list('name', flat=True))
    ) or any(substring in node.title for substring in settings.DO_NOT_INDEX_LIST['titles'])
    if (node.is_deleted or not node.is_public or node.archiving
            or (node.is_spammy and settings.SPAM_FLAGGED_REMOVE_FROM_SEARCH)
            or node.is_quickfiles or is_qa_node):
delete_doc(node._id, node, index=index)
else:
category = get_doctype_from_node(node)
elastic_document = serialize_node(node, category)
if bulk:
return elastic_document
else:
client().index(index=index, doc_type=category, id=node._id, body=elastic_document, refresh=True)
def bulk_update_nodes(serialize, nodes, index=None):
"""Updates the list of input projects
:param function Node-> dict serialize:
:param Node[] nodes: Projects, components, registrations, or preprints
:param str index: Index of the nodes
:return:
"""
index = index or INDEX
actions = []
for node in nodes:
serialized = serialize(node)
if serialized:
actions.append({
'_op_type': 'update',
'_index': index,
'_id': node._id,
'_type': get_doctype_from_node(node),
'doc': serialized,
'doc_as_upsert': True,
})
if actions:
return helpers.bulk(client(), actions)
def serialize_contributors(node):
return {
'contributors': [
{
'fullname': x['user__fullname'],
'url': '/{}/'.format(x['user__guids___id'])
} for x in
node.contributor_set.filter(visible=True, user__is_active=True).order_by('_order').values('user__fullname', 'user__guids___id')
]
}
bulk_update_contributors = functools.partial(bulk_update_nodes, serialize_contributors)
@celery_app.task(bind=True, max_retries=5, default_retry_delay=60)
def update_contributors_async(self, user_id):
OSFUser = apps.get_model('osf.OSFUser')
user = OSFUser.objects.get(id=user_id)
p = Paginator(user.visible_contributor_to.order_by('id'), 100)
for page_num in p.page_range:
bulk_update_contributors(p.page(page_num).object_list)
@requires_search
def update_user(user, index=None):
index = index or INDEX
if not user.is_active:
try:
client().delete(index=index, doc_type='user', id=user._id, refresh=True, ignore=[404])
# update files in their quickfiles node if the user has been marked as spam
if 'spam_confirmed' in user.system_tags:
quickfiles = QuickFilesNode.objects.get_for_user(user)
for quickfile_id in quickfiles.files.values_list('_id', flat=True):
client().delete(
index=index,
doc_type='file',
id=quickfile_id,
refresh=True,
ignore=[404]
)
except NotFoundError:
pass
return
names = dict(
fullname=user.fullname,
given_name=user.given_name,
family_name=user.family_name,
middle_names=user.middle_names,
suffix=user.suffix
)
normalized_names = {}
for key, val in names.items():
if val is not None:
try:
val = six.u(val)
except TypeError:
pass # This is fine, will only happen in 2.x if val is already unicode
normalized_names[key] = unicodedata.normalize('NFKD', val).encode('ascii', 'ignore')
user_doc = {
'id': user._id,
'user': user.fullname,
'normalized_user': normalized_names['fullname'],
'normalized_names': normalized_names,
'names': names,
'job': user.jobs[0]['institution'] if user.jobs else '',
'job_title': user.jobs[0]['title'] if user.jobs else '',
'all_jobs': [job['institution'] for job in user.jobs[1:]],
'school': user.schools[0]['institution'] if user.schools else '',
'all_schools': [school['institution'] for school in user.schools],
'category': 'user',
'degree': user.schools[0]['degree'] if user.schools else '',
'social': user.social_links,
'boost': 2, # TODO(fabianvf): Probably should make this a constant or something
}
client().index(index=index, doc_type='user', body=user_doc, id=user._id, refresh=True)
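# Hedged example of the NFKD normalization above: a fullname like
# u'Jos\xe9 N\xfa\xf1ez' decomposes under NFKD into base characters plus
# combining accents, and .encode('ascii', 'ignore') then drops the accents,
# yielding 'Jose Nunez' -- so accented names match plain-ASCII queries.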
@requires_search
def update_file(file_, index=None, delete=False):
index = index or INDEX
# TODO: Can remove 'not file_.name' if we remove all base file nodes with name=None
file_node_is_qa = bool(
set(settings.DO_NOT_INDEX_LIST['tags']).intersection(file_.tags.all().values_list('name', flat=True))
) or bool(
set(settings.DO_NOT_INDEX_LIST['tags']).intersection(file_.node.tags.all().values_list('name', flat=True))
) or any(substring in file_.node.title for substring in settings.DO_NOT_INDEX_LIST['titles'])
if not file_.name or not file_.node.is_public or delete or file_.node.is_deleted or file_.node.archiving or file_node_is_qa:
client().delete(
index=index,
doc_type='file',
id=file_._id,
refresh=True,
ignore=[404]
)
return
# We build URLs manually here so that this function can be
# run outside of a Flask request context (e.g. in a celery task)
file_deep_url = '/{node_id}/files/{provider}{path}/'.format(
node_id=file_.node._id,
provider=file_.provider,
path=file_.path,
)
node_url = '/{node_id}/'.format(node_id=file_.node._id)
guid_url = None
file_guid = file_.get_guid(create=False)
if file_guid:
guid_url = '/{file_guid}/'.format(file_guid=file_guid._id)
file_doc = {
'id': file_._id,
'deep_url': file_deep_url,
'guid_url': guid_url,
'tags': list(file_.tags.filter(system=False).values_list('name', flat=True)),
'name': file_.name,
'category': 'file',
'node_url': node_url,
'node_title': file_.node.title,
'parent_id': file_.node.parent_node._id if file_.node.parent_node else None,
'is_registration': file_.node.is_registration,
'is_retracted': file_.node.is_retracted,
'extra_search_terms': clean_splitters(file_.name),
}
client().index(
index=index,
doc_type='file',
body=file_doc,
id=file_._id,
refresh=True
)
@requires_search
def update_institution(institution, index=None):
index = index or INDEX
id_ = institution._id
if institution.is_deleted:
client().delete(index=index, doc_type='institution', id=id_, refresh=True, ignore=[404])
else:
institution_doc = {
'id': id_,
'url': '/institutions/{}/'.format(institution._id),
'logo_path': institution.logo_path,
'category': 'institution',
'name': institution.name,
}
client().index(index=index, doc_type='institution', body=institution_doc, id=id_, refresh=True)
@requires_search
def delete_all():
delete_index(INDEX)
@requires_search
def delete_index(index):
client().indices.delete(index, ignore=[404])
@requires_search
def create_index(index=None):
    '''Create the search index with explicit mappings.
    A base mapping is applied to every doc type; project-like types
    additionally get English-analyzed title/description fields, and the
    user type gets boosted job/school fields.
    '''
index = index or INDEX
document_types = ['project', 'component', 'registration', 'user', 'file', 'institution', 'preprint']
project_like_types = ['project', 'component', 'registration', 'preprint']
analyzed_fields = ['title', 'description']
client().indices.create(index, ignore=[400]) # HTTP 400 if index already exists
for type_ in document_types:
mapping = {
'properties': {
'tags': NOT_ANALYZED_PROPERTY,
'license': {
'properties': {
'id': NOT_ANALYZED_PROPERTY,
'name': NOT_ANALYZED_PROPERTY,
# Elasticsearch automatically infers mappings from content-type. `year` needs to
# be explicitly mapped as a string to allow date ranges, which break on the inferred type
'year': {'type': 'string'},
}
}
}
}
if type_ in project_like_types:
analyzers = {field: ENGLISH_ANALYZER_PROPERTY
for field in analyzed_fields}
mapping['properties'].update(analyzers)
if type_ == 'user':
fields = {
'job': {
'type': 'string',
'boost': '1',
},
'all_jobs': {
'type': 'string',
'boost': '0.01',
},
'school': {
'type': 'string',
'boost': '1',
},
'all_schools': {
'type': 'string',
'boost': '0.01'
},
}
mapping['properties'].update(fields)
client().indices.put_mapping(index=index, doc_type=type_, body=mapping, ignore=[400, 404])
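# Hedged illustration of the string mapping for license.year above: with
# 'year' mapped as a string, a range filter such as
#
#     {'query': {'range': {'license.year': {'gte': '2000', 'lte': '2010'}}}}
#
# keeps working, whereas letting Elasticsearch infer a numeric/date type
# from the content broke these date-range queries.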
@requires_search
def delete_doc(elastic_document_id, node, index=None, category=None):
index = index or INDEX
if not category:
if node.is_registration:
category = 'registration'
elif node.is_preprint:
category = 'preprint'
else:
category = node.project_or_component
client().delete(index=index, doc_type=category, id=elastic_document_id, refresh=True, ignore=[404])
@requires_search
def search_contributor(query, page=0, size=10, exclude=None, current_user=None):
"""Search for contributors to add to a project using elastic search. Request must
include JSON data with a "query" field.
:param query: The substring of the username to search for
:param page: For pagination, the page number to use for results
:param size: For pagination, the number of results per page
:param exclude: A list of User objects to exclude from the search
:param current_user: A User object of the current user
:return: List of dictionaries, each containing the ID, full name,
most recent employment and education, profile_image URL of an OSF user
"""
start = (page * size)
items = re.split(r'[\s-]+', query)
exclude = exclude or []
normalized_items = []
for item in items:
try:
normalized_item = six.u(item)
except TypeError:
normalized_item = item
normalized_item = unicodedata.normalize('NFKD', normalized_item).encode('ascii', 'ignore')
normalized_items.append(normalized_item)
items = normalized_items
query = ' AND '.join('{}*~'.format(re.escape(item)) for item in items) + \
''.join(' NOT id:"{}"'.format(excluded._id) for excluded in exclude)
results = search(build_query(query, start=start, size=size), index=INDEX, doc_type='user')
docs = results['results']
pages = math.ceil(results['counts'].get('user', 0) / size)
validate_page_num(page, pages)
users = []
for doc in docs:
# TODO: use utils.serialize_user
        user = OSFUser.load(doc['id'])
        if user is None:
            logger.error('Could not load user {0}'.format(doc['id']))
            continue
        if current_user and current_user._id == user._id:
            n_projects_in_common = -1
        elif current_user:
            n_projects_in_common = current_user.n_projects_in_common(user)
        else:
            n_projects_in_common = 0
if user.is_active: # exclude merged, unregistered, etc.
current_employment = None
education = None
if user.jobs:
current_employment = user.jobs[0]['institution']
if user.schools:
education = user.schools[0]['institution']
users.append({
'fullname': doc['user'],
'id': doc['id'],
'employment': current_employment,
'education': education,
'social': user.social_links,
'n_projects_in_common': n_projects_in_common,
'profile_image_url': profile_image_url(settings.PROFILE_IMAGE_PROVIDER,
user,
use_ssl=True,
size=settings.PROFILE_IMAGE_MEDIUM),
'profile_url': user.profile_url,
'registered': user.is_registered,
'active': user.is_active
})
return {
'users': users,
'total': results['counts']['total'],
'pages': pages,
'page': page,
}
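# Hedged example of the query string built above: for the input
# 'jane doe' with one excluded user whose _id is 'abc12', the Lucene
# query becomes
#
#     jane*~ AND doe*~ NOT id:"abc12"
#
# i.e. fuzzy prefix matches on each token, minus the excluded ids.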
|
|
ROOT_Domain_List = { ".aaa" : "[.]aaa",
".aarp" : "[.]aarp",
".abb" : "[.]abb",
".abbott" : "[.]abbott",
".abbvie" : "[.]abbvie",
".able" : "[.]able",
".abogado" : "[.]abogado",
".abudhabi" : "[.]abudhabi",
".ac" : "[.]ac",
".academy" : "[.]academy",
".accenture" : "[.]accenture",
".accountant" : "[.]accountant",
".accountants" : "[.]accountants",
".aco" : "[.]aco",
".active" : "[.]active",
".actor" : "[.]actor",
".ad" : "[.]ad",
".adac" : "[.]adac",
".ads" : "[.]ads",
".adult" : "[.]adult",
".ae" : "[.]ae",
".aeg" : "[.]aeg",
".aero" : "[.]aero",
".aetna" : "[.]aetna",
".af" : "[.]af",
".afl" : "[.]afl",
".ag" : "[.]ag",
".agakhan" : "[.]agakhan",
".agency" : "[.]agency",
".ai" : "[.]ai",
".aig" : "[.]aig",
".airbus" : "[.]airbus",
".airforce" : "[.]airforce",
".airtel" : "[.]airtel",
".akdn" : "[.]akdn",
".al" : "[.]al",
".alibaba" : "[.]alibaba",
".alipay" : "[.]alipay",
".allfinanz" : "[.]allfinanz",
".allstate" : "[.]allstate",
".ally" : "[.]ally",
".alsace" : "[.]alsace",
".alstom" : "[.]alstom",
".am" : "[.]am",
".americanfamily" : "[.]americanfamily",
".amfam" : "[.]amfam",
".amica" : "[.]amica",
".amsterdam" : "[.]amsterdam",
".an" : "[.]an",
".analytics" : "[.]analytics",
".android" : "[.]android",
".anquan" : "[.]anquan",
".anz" : "[.]anz",
".ao" : "[.]ao",
".apartments" : "[.]apartments",
".app" : "[.]app",
".apple" : "[.]apple",
".aq" : "[.]aq",
".aquarelle" : "[.]aquarelle",
".ar" : "[.]ar",
".aramco" : "[.]aramco",
".archi" : "[.]archi",
".army" : "[.]army",
".arpa" : "[.]arpa",
".art" : "[.]art",
".arte" : "[.]arte",
".as" : "[.]as",
".asia" : "[.]asia",
".associates" : "[.]associates",
".at" : "[.]at",
".attorney" : "[.]attorney",
".au" : "[.]au",
".auction" : "[.]auction",
".audi" : "[.]audi",
".audible" : "[.]audible",
".audio" : "[.]audio",
".author" : "[.]author",
".auto" : "[.]auto",
".autos" : "[.]autos",
".avianca" : "[.]avianca",
".aw" : "[.]aw",
".aws" : "[.]aws",
".ax" : "[.]ax",
".axa" : "[.]axa",
".az" : "[.]az",
".azure" : "[.]azure",
".ba" : "[.]ba",
".baby" : "[.]baby",
".baidu" : "[.]baidu",
".band" : "[.]band",
".bank" : "[.]bank",
".bar" : "[.]bar",
".barcelona" : "[.]barcelona",
".barclaycard" : "[.]barclaycard",
".barclays" : "[.]barclays",
".barefoot" : "[.]barefoot",
".bargains" : "[.]bargains",
".bauhaus" : "[.]bauhaus",
".bayern" : "[.]bayern",
".bb" : "[.]bb",
".bbc" : "[.]bbc",
".bbt" : "[.]bbt",
".bbva" : "[.]bbva",
".bcg" : "[.]bcg",
".bcn" : "[.]bcn",
".bd" : "[.]bd",
".be" : "[.]be",
".beats" : "[.]beats",
".beauty" : "[.]beauty",
".beer" : "[.]beer",
".bentley" : "[.]bentley",
".berlin" : "[.]berlin",
".best" : "[.]best",
".bestbuy" : "[.]bestbuy",
".bet" : "[.]bet",
".bf" : "[.]bf",
".bg" : "[.]bg",
".bh" : "[.]bh",
".bharti" : "[.]bharti",
".bi" : "[.]bi",
".bible" : "[.]bible",
".bid" : "[.]bid",
".bike" : "[.]bike",
".bing" : "[.]bing",
".bingo" : "[.]bingo",
".bio" : "[.]bio",
".biz" : "[.]biz",
".bj" : "[.]bj",
".bl" : "[.]bl",
".black" : "[.]black",
".blackfriday" : "[.]blackfriday",
".blanco" : "[.]blanco",
".blog" : "[.]blog",
".bloomberg" : "[.]bloomberg",
".blue" : "[.]blue",
".bm" : "[.]bm",
".bms" : "[.]bms",
".bmw" : "[.]bmw",
".bn" : "[.]bn",
".bnl" : "[.]bnl",
".bnpparibas" : "[.]bnpparibas",
".bo" : "[.]bo",
".boats" : "[.]boats",
".boehringer" : "[.]boehringer",
".bom" : "[.]bom",
".bond" : "[.]bond",
".boo" : "[.]boo",
".book" : "[.]book",
".booking" : "[.]booking",
".boots" : "[.]boots",
".bosch" : "[.]bosch",
".bostik" : "[.]bostik",
".bot" : "[.]bot",
".boutique" : "[.]boutique",
".bq" : "[.]bq",
".br" : "[.]br",
".bradesco" : "[.]bradesco",
".bridgestone" : "[.]bridgestone",
".broadway" : "[.]broadway",
".broker" : "[.]broker",
".brother" : "[.]brother",
".brussels" : "[.]brussels",
".bs" : "[.]bs",
".bt" : "[.]bt",
".budapest" : "[.]budapest",
".bugatti" : "[.]bugatti",
".build" : "[.]build",
".builders" : "[.]builders",
".business" : "[.]business",
".buy" : "[.]buy",
".buzz" : "[.]buzz",
".bv" : "[.]bv",
".bw" : "[.]bw",
".by" : "[.]by",
".bz" : "[.]bz",
".bzh" : "[.]bzh",
".ca" : "[.]ca",
".cab" : "[.]cab",
".cafe" : "[.]cafe",
".cal" : "[.]cal",
".call" : "[.]call",
".cam" : "[.]cam",
".camera" : "[.]camera",
".camp" : "[.]camp",
".cancerresearch" : "[.]cancerresearch",
".canon" : "[.]canon",
".capetown" : "[.]capetown",
".capital" : "[.]capital",
".car" : "[.]car",
".caravan" : "[.]caravan",
".cards" : "[.]cards",
".care" : "[.]care",
".career" : "[.]career",
".careers" : "[.]careers",
".cars" : "[.]cars",
".cartier" : "[.]cartier",
".casa" : "[.]casa",
".cash" : "[.]cash",
".casino" : "[.]casino",
".cat" : "[.]cat",
".catering" : "[.]catering",
".cba" : "[.]cba",
".cbn" : "[.]cbn",
".cbre" : "[.]cbre",
".cc" : "[.]cc",
".cd" : "[.]cd",
".ceb" : "[.]ceb",
".center" : "[.]center",
".ceo" : "[.]ceo",
".cern" : "[.]cern",
".cf" : "[.]cf",
".cfa" : "[.]cfa",
".cfd" : "[.]cfd",
".cg" : "[.]cg",
".ch" : "[.]ch",
".chanel" : "[.]chanel",
".channel" : "[.]channel",
".chase" : "[.]chase",
".chat" : "[.]chat",
".cheap" : "[.]cheap",
".chintai" : "[.]chintai",
".chloe" : "[.]chloe",
".christmas" : "[.]christmas",
".chrome" : "[.]chrome",
".church" : "[.]church",
".ci" : "[.]ci",
".cipriani" : "[.]cipriani",
".circle" : "[.]circle",
".cisco" : "[.]cisco",
".citadel" : "[.]citadel",
".citic" : "[.]citic",
".city" : "[.]city",
".cityeats" : "[.]cityeats",
".ck" : "[.]ck",
".cl" : "[.]cl",
".claims" : "[.]claims",
".cleaning" : "[.]cleaning",
".click" : "[.]click",
".clinic" : "[.]clinic",
".clinique" : "[.]clinique",
".clothing" : "[.]clothing",
".cloud" : "[.]cloud",
".club" : "[.]club",
".clubmed" : "[.]clubmed",
".cm" : "[.]cm",
".cn" : "[.]cn",
".co" : "[.]co",
".coach" : "[.]coach",
".codes" : "[.]codes",
".coffee" : "[.]coffee",
".college" : "[.]college",
".cologne" : "[.]cologne",
".com" : "[.]com",
".comcast" : "[.]comcast",
".commbank" : "[.]commbank",
".community" : "[.]community",
".company" : "[.]company",
".compare" : "[.]compare",
".computer" : "[.]computer",
".comsec" : "[.]comsec",
".condos" : "[.]condos",
".construction" : "[.]construction",
".consulting" : "[.]consulting",
".contact" : "[.]contact",
".contractors" : "[.]contractors",
".cooking" : "[.]cooking",
".cookingchannel" : "[.]cookingchannel",
".cool" : "[.]cool",
".coop" : "[.]coop",
".corsica" : "[.]corsica",
".country" : "[.]country",
".coupon" : "[.]coupon",
".coupons" : "[.]coupons",
".courses" : "[.]courses",
".cr" : "[.]cr",
".credit" : "[.]credit",
".creditcard" : "[.]creditcard",
".creditunion" : "[.]creditunion",
".cricket" : "[.]cricket",
".crown" : "[.]crown",
".crs" : "[.]crs",
".cruises" : "[.]cruises",
".csc" : "[.]csc",
".cu" : "[.]cu",
".cuisinella" : "[.]cuisinella",
".cv" : "[.]cv",
".cw" : "[.]cw",
".cx" : "[.]cx",
".cy" : "[.]cy",
".cymru" : "[.]cymru",
".cyou" : "[.]cyou",
".cz" : "[.]cz",
".dabur" : "[.]dabur",
".dad" : "[.]dad",
".dance" : "[.]dance",
".date" : "[.]date",
".dating" : "[.]dating",
".datsun" : "[.]datsun",
".day" : "[.]day",
".dclk" : "[.]dclk",
".dds" : "[.]dds",
".de" : "[.]de",
".deal" : "[.]deal",
".dealer" : "[.]dealer",
".deals" : "[.]deals",
".degree" : "[.]degree",
".delivery" : "[.]delivery",
".dell" : "[.]dell",
".deloitte" : "[.]deloitte",
".delta" : "[.]delta",
".democrat" : "[.]democrat",
".dental" : "[.]dental",
".dentist" : "[.]dentist",
".desi" : "[.]desi",
".design" : "[.]design",
".dev" : "[.]dev",
".dhl" : "[.]dhl",
".diamonds" : "[.]diamonds",
".diet" : "[.]diet",
".digital" : "[.]digital",
".direct" : "[.]direct",
".directory" : "[.]directory",
".discount" : "[.]discount",
".dj" : "[.]dj",
".dk" : "[.]dk",
".dm" : "[.]dm",
".dnp" : "[.]dnp",
".do" : "[.]do",
".docs" : "[.]docs",
".doctor" : "[.]doctor",
".dog" : "[.]dog",
".doha" : "[.]doha",
".domains" : "[.]domains",
".doosan" : "[.]doosan",
".dot" : "[.]dot",
".download" : "[.]download",
".drive" : "[.]drive",
".dtv" : "[.]dtv",
".dubai" : "[.]dubai",
".duck" : "[.]duck",
".dunlop" : "[.]dunlop",
".duns" : "[.]duns",
".dupont" : "[.]dupont",
".durban" : "[.]durban",
".dvag" : "[.]dvag",
".dz" : "[.]dz",
".earth" : "[.]earth",
".eat" : "[.]eat",
".ec" : "[.]ec",
".edeka" : "[.]edeka",
".edu" : "[.]edu",
".education" : "[.]education",
".ee" : "[.]ee",
".eg" : "[.]eg",
".eh" : "[.]eh",
".email" : "[.]email",
".emerck" : "[.]emerck",
".energy" : "[.]energy",
".engineer" : "[.]engineer",
".engineering" : "[.]engineering",
".enterprises" : "[.]enterprises",
".epost" : "[.]epost",
".epson" : "[.]epson",
".equipment" : "[.]equipment",
".er" : "[.]er",
".ericsson" : "[.]ericsson",
".erni" : "[.]erni",
".es" : "[.]es",
".esq" : "[.]esq",
".estate" : "[.]estate",
".esurance" : "[.]esurance",
".et" : "[.]et",
".eu" : "[.]eu",
".eurovision" : "[.]eurovision",
".eus" : "[.]eus",
".events" : "[.]events",
".everbank" : "[.]everbank",
".exchange" : "[.]exchange",
".expert" : "[.]expert",
".exposed" : "[.]exposed",
".express" : "[.]express",
".extraspace" : "[.]extraspace",
".fage" : "[.]fage",
".fail" : "[.]fail",
".fairwinds" : "[.]fairwinds",
".faith" : "[.]faith",
".family" : "[.]family",
".fan" : "[.]fan",
".fans" : "[.]fans",
".farm" : "[.]farm",
".farmers" : "[.]farmers",
".fashion" : "[.]fashion",
".fast" : "[.]fast",
".fedex" : "[.]fedex",
".feedback" : "[.]feedback",
".ferrero" : "[.]ferrero",
".fi" : "[.]fi",
".film" : "[.]film",
".final" : "[.]final",
".finance" : "[.]finance",
".financial" : "[.]financial",
".fire" : "[.]fire",
".firestone" : "[.]firestone",
".firmdale" : "[.]firmdale",
".fish" : "[.]fish",
".fishing" : "[.]fishing",
".fit" : "[.]fit",
".fitness" : "[.]fitness",
".fj" : "[.]fj",
".fk" : "[.]fk",
".flickr" : "[.]flickr",
".flights" : "[.]flights",
".flir" : "[.]flir",
".florist" : "[.]florist",
".flowers" : "[.]flowers",
".flsmidth" : "[.]flsmidth",
".fly" : "[.]fly",
".fm" : "[.]fm",
".fo" : "[.]fo",
".foo" : "[.]foo",
".foodnetwork" : "[.]foodnetwork",
".football" : "[.]football",
".ford" : "[.]ford",
".forex" : "[.]forex",
".forsale" : "[.]forsale",
".forum" : "[.]forum",
".foundation" : "[.]foundation",
".fox" : "[.]fox",
".fr" : "[.]fr",
".fresenius" : "[.]fresenius",
".frl" : "[.]frl",
".frogans" : "[.]frogans",
".frontdoor" : "[.]frontdoor",
".frontier" : "[.]frontier",
".ftr" : "[.]ftr",
".fujitsu" : "[.]fujitsu",
".fujixerox" : "[.]fujixerox",
".fund" : "[.]fund",
".furniture" : "[.]furniture",
".futbol" : "[.]futbol",
".fyi" : "[.]fyi",
".ga" : "[.]ga",
".gal" : "[.]gal",
".gallery" : "[.]gallery",
".gallo" : "[.]gallo",
".gallup" : "[.]gallup",
".game" : "[.]game",
".games" : "[.]games",
".garden" : "[.]garden",
".gb" : "[.]gb",
".gbiz" : "[.]gbiz",
".gd" : "[.]gd",
".gdn" : "[.]gdn",
".ge" : "[.]ge",
".gea" : "[.]gea",
".gent" : "[.]gent",
".genting" : "[.]genting",
".gf" : "[.]gf",
".gg" : "[.]gg",
".ggee" : "[.]ggee",
".gh" : "[.]gh",
".gi" : "[.]gi",
".gift" : "[.]gift",
".gifts" : "[.]gifts",
".gives" : "[.]gives",
".giving" : "[.]giving",
".gl" : "[.]gl",
".glade" : "[.]glade",
".glass" : "[.]glass",
".gle" : "[.]gle",
".global" : "[.]global",
".globo" : "[.]globo",
".gm" : "[.]gm",
".gmail" : "[.]gmail",
".gmbh" : "[.]gmbh",
".gmo" : "[.]gmo",
".gmx" : "[.]gmx",
".gn" : "[.]gn",
".godaddy" : "[.]godaddy",
".gold" : "[.]gold",
".goldpoint" : "[.]goldpoint",
".golf" : "[.]golf",
".goo" : "[.]goo",
".goodhands" : "[.]goodhands",
".goodyear" : "[.]goodyear",
".goog" : "[.]goog",
".google" : "[.]google",
".gop" : "[.]gop",
".got" : "[.]got",
".gov" : "[.]gov",
".gp" : "[.]gp",
".gq" : "[.]gq",
".gr" : "[.]gr",
".grainger" : "[.]grainger",
".graphics" : "[.]graphics",
".gratis" : "[.]gratis",
".green" : "[.]green",
".gripe" : "[.]gripe",
".group" : "[.]group",
".gs" : "[.]gs",
".gt" : "[.]gt",
".gu" : "[.]gu",
".guardian" : "[.]guardian",
".gucci" : "[.]gucci",
".guge" : "[.]guge",
".guide" : "[.]guide",
".guitars" : "[.]guitars",
".guru" : "[.]guru",
".gw" : "[.]gw",
".gy" : "[.]gy",
".hamburg" : "[.]hamburg",
".hangout" : "[.]hangout",
".haus" : "[.]haus",
".hdfcbank" : "[.]hdfcbank",
".health" : "[.]health",
".healthcare" : "[.]healthcare",
".help" : "[.]help",
".helsinki" : "[.]helsinki",
".here" : "[.]here",
".hermes" : "[.]hermes",
".hgtv" : "[.]hgtv",
".hiphop" : "[.]hiphop",
".hisamitsu" : "[.]hisamitsu",
".hitachi" : "[.]hitachi",
".hiv" : "[.]hiv",
".hk" : "[.]hk",
".hkt" : "[.]hkt",
".hm" : "[.]hm",
".hn" : "[.]hn",
".hockey" : "[.]hockey",
".holdings" : "[.]holdings",
".holiday" : "[.]holiday",
".homedepot" : "[.]homedepot",
".homegoods" : "[.]homegoods",
".homes" : "[.]homes",
".homesense" : "[.]homesense",
".honda" : "[.]honda",
".honeywell" : "[.]honeywell",
".horse" : "[.]horse",
".host" : "[.]host",
".hosting" : "[.]hosting",
".hoteles" : "[.]hoteles",
".hotmail" : "[.]hotmail",
".house" : "[.]house",
".how" : "[.]how",
".hr" : "[.]hr",
".hsbc" : "[.]hsbc",
".ht" : "[.]ht",
".htc" : "[.]htc",
".hu" : "[.]hu",
".hyundai" : "[.]hyundai",
".ibm" : "[.]ibm",
".icbc" : "[.]icbc",
".ice" : "[.]ice",
".icu" : "[.]icu",
".id" : "[.]id",
".ie" : "[.]ie",
".ieee" : "[.]ieee",
".ifm" : "[.]ifm",
".iinet" : "[.]iinet",
".ikano" : "[.]ikano",
".il" : "[.]il",
".im" : "[.]im",
".imamat" : "[.]imamat",
".imdb" : "[.]imdb",
".immo" : "[.]immo",
".immobilien" : "[.]immobilien",
".in" : "[.]in",
".industries" : "[.]industries",
".infiniti" : "[.]infiniti",
".info" : "[.]info",
".ing" : "[.]ing",
".ink" : "[.]ink",
".institute" : "[.]institute",
".insurance" : "[.]insurance",
".insure" : "[.]insure",
".int" : "[.]int",
".international" : "[.]international",
".intuit" : "[.]intuit",
".investments" : "[.]investments",
".io" : "[.]io",
".ipiranga" : "[.]ipiranga",
".iq" : "[.]iq",
".ir" : "[.]ir",
".irish" : "[.]irish",
".is" : "[.]is",
".iselect" : "[.]iselect",
".ismaili" : "[.]ismaili",
".ist" : "[.]ist",
".istanbul" : "[.]istanbul",
".it" : "[.]it",
".itau" : "[.]itau",
".itv" : "[.]itv",
".iwc" : "[.]iwc",
".jaguar" : "[.]jaguar",
".java" : "[.]java",
".jcb" : "[.]jcb",
".jcp" : "[.]jcp",
".je" : "[.]je",
".jetzt" : "[.]jetzt",
".jewelry" : "[.]jewelry",
".jlc" : "[.]jlc",
".jll" : "[.]jll",
".jm" : "[.]jm",
".jmp" : "[.]jmp",
".jnj" : "[.]jnj",
".jo" : "[.]jo",
".jobs" : "[.]jobs",
".joburg" : "[.]joburg",
".jot" : "[.]jot",
".joy" : "[.]joy",
".jp" : "[.]jp",
".jpmorgan" : "[.]jpmorgan",
".jprs" : "[.]jprs",
".juegos" : "[.]juegos",
".kaufen" : "[.]kaufen",
".kddi" : "[.]kddi",
".ke" : "[.]ke",
".kerryhotels" : "[.]kerryhotels",
".kerrylogistics" : "[.]kerrylogistics",
".kerryproperties" : "[.]kerryproperties",
".kfh" : "[.]kfh",
".kg" : "[.]kg",
".kh" : "[.]kh",
".ki" : "[.]ki",
".kia" : "[.]kia",
".kim" : "[.]kim",
".kinder" : "[.]kinder",
".kindle" : "[.]kindle",
".kitchen" : "[.]kitchen",
".kiwi" : "[.]kiwi",
".km" : "[.]km",
".kn" : "[.]kn",
".koeln" : "[.]koeln",
".komatsu" : "[.]komatsu",
".kosher" : "[.]kosher",
".kp" : "[.]kp",
".kpmg" : "[.]kpmg",
".kpn" : "[.]kpn",
".kr" : "[.]kr",
".krd" : "[.]krd",
".kred" : "[.]kred",
".kuokgroup" : "[.]kuokgroup",
".kw" : "[.]kw",
".ky" : "[.]ky",
".kyoto" : "[.]kyoto",
".kz" : "[.]kz",
".la" : "[.]la",
".lacaixa" : "[.]lacaixa",
".lamborghini" : "[.]lamborghini",
".lamer" : "[.]lamer",
".lancaster" : "[.]lancaster",
".lancome" : "[.]lancome",
".land" : "[.]land",
".landrover" : "[.]landrover",
".lanxess" : "[.]lanxess",
".lasalle" : "[.]lasalle",
".lat" : "[.]lat",
".latrobe" : "[.]latrobe",
".law" : "[.]law",
".lawyer" : "[.]lawyer",
".lb" : "[.]lb",
".lc" : "[.]lc",
".lds" : "[.]lds",
".lease" : "[.]lease",
".leclerc" : "[.]leclerc",
".lefrak" : "[.]lefrak",
".legal" : "[.]legal",
".lego" : "[.]lego",
".lexus" : "[.]lexus",
".lgbt" : "[.]lgbt",
".li" : "[.]li",
".liaison" : "[.]liaison",
".lidl" : "[.]lidl",
".life" : "[.]life",
".lifeinsurance" : "[.]lifeinsurance",
".lifestyle" : "[.]lifestyle",
".lighting" : "[.]lighting",
".like" : "[.]like",
".limited" : "[.]limited",
".limo" : "[.]limo",
".lincoln" : "[.]lincoln",
".linde" : "[.]linde",
".link" : "[.]link",
".lipsy" : "[.]lipsy",
".live" : "[.]live",
".living" : "[.]living",
".lixil" : "[.]lixil",
".lk" : "[.]lk",
".loan" : "[.]loan",
".loans" : "[.]loans",
".locker" : "[.]locker",
".locus" : "[.]locus",
".lol" : "[.]lol",
".london" : "[.]london",
".lotte" : "[.]lotte",
".lotto" : "[.]lotto",
".love" : "[.]love",
".lpl" : "[.]lpl",
".lplfinancial" : "[.]lplfinancial",
".lr" : "[.]lr",
".ls" : "[.]ls",
".lt" : "[.]lt",
".ltd" : "[.]ltd",
".ltda" : "[.]ltda",
".lu" : "[.]lu",
".lundbeck" : "[.]lundbeck",
".lupin" : "[.]lupin",
".luxe" : "[.]luxe",
".luxury" : "[.]luxury",
".lv" : "[.]lv",
".ly" : "[.]ly",
".ma" : "[.]ma",
".macys" : "[.]macys",
".madrid" : "[.]madrid",
".maif" : "[.]maif",
".maison" : "[.]maison",
".makeup" : "[.]makeup",
".man" : "[.]man",
".management" : "[.]management",
".mango" : "[.]mango",
".market" : "[.]market",
".marketing" : "[.]marketing",
".markets" : "[.]markets",
".marriott" : "[.]marriott",
".marshalls" : "[.]marshalls",
".mattel" : "[.]mattel",
".mba" : "[.]mba",
".mc" : "[.]mc",
".md" : "[.]md",
".me" : "[.]me",
".med" : "[.]med",
".media" : "[.]media",
".meet" : "[.]meet",
".melbourne" : "[.]melbourne",
".meme" : "[.]meme",
".memorial" : "[.]memorial",
".men" : "[.]men",
".menu" : "[.]menu",
".meo" : "[.]meo",
".metlife" : "[.]metlife",
".mf" : "[.]mf",
".mg" : "[.]mg",
".mh" : "[.]mh",
".miami" : "[.]miami",
".microsoft" : "[.]microsoft",
".mil" : "[.]mil",
".mini" : "[.]mini",
".mint" : "[.]mint",
".mit" : "[.]mit",
".mitsubishi" : "[.]mitsubishi",
".mk" : "[.]mk",
".ml" : "[.]ml",
".mlb" : "[.]mlb",
".mls" : "[.]mls",
".mm" : "[.]mm",
".mma" : "[.]mma",
".mn" : "[.]mn",
".mo" : "[.]mo",
".mobi" : "[.]mobi",
".mobily" : "[.]mobily",
".moda" : "[.]moda",
".moe" : "[.]moe",
".moi" : "[.]moi",
".mom" : "[.]mom",
".monash" : "[.]monash",
".money" : "[.]money",
".montblanc" : "[.]montblanc",
".mormon" : "[.]mormon",
".mortgage" : "[.]mortgage",
".moscow" : "[.]moscow",
".motorcycles" : "[.]motorcycles",
".mov" : "[.]mov",
".movie" : "[.]movie",
".movistar" : "[.]movistar",
".mp" : "[.]mp",
".mq" : "[.]mq",
".mr" : "[.]mr",
".ms" : "[.]ms",
".msd" : "[.]msd",
".mt" : "[.]mt",
".mtn" : "[.]mtn",
".mtpc" : "[.]mtpc",
".mtr" : "[.]mtr",
".mu" : "[.]mu",
".museum" : "[.]museum",
".mutual" : "[.]mutual",
".mutuelle" : "[.]mutuelle",
".mv" : "[.]mv",
".mw" : "[.]mw",
".mx" : "[.]mx",
".my" : "[.]my",
".mz" : "[.]mz",
".na" : "[.]na",
".nadex" : "[.]nadex",
".nagoya" : "[.]nagoya",
".name" : "[.]name",
".nationwide" : "[.]nationwide",
".natura" : "[.]natura",
".navy" : "[.]navy",
".nc" : "[.]nc",
".ne" : "[.]ne",
".nec" : "[.]nec",
".net" : "[.]net",
".netbank" : "[.]netbank",
".netflix" : "[.]netflix",
".network" : "[.]network",
".neustar" : "[.]neustar",
".new" : "[.]new",
".news" : "[.]news",
".next" : "[.]next",
".nextdirect" : "[.]nextdirect",
".nexus" : "[.]nexus",
".nf" : "[.]nf",
".nfl" : "[.]nfl",
".ng" : "[.]ng",
".ngo" : "[.]ngo",
".nhk" : "[.]nhk",
".ni" : "[.]ni",
".nico" : "[.]nico",
".nike" : "[.]nike",
".nikon" : "[.]nikon",
".ninja" : "[.]ninja",
".nissan" : "[.]nissan",
".nissay" : "[.]nissay",
".nl" : "[.]nl",
".no" : "[.]no",
".nokia" : "[.]nokia",
".northwesternmutual" : "[.]northwesternmutual",
".norton" : "[.]norton",
".now" : "[.]now",
".nowruz" : "[.]nowruz",
".nowtv" : "[.]nowtv",
".np" : "[.]np",
".nr" : "[.]nr",
".nra" : "[.]nra",
".nrw" : "[.]nrw",
".ntt" : "[.]ntt",
".nu" : "[.]nu",
".nyc" : "[.]nyc",
".nz" : "[.]nz",
".obi" : "[.]obi",
".off" : "[.]off",
".office" : "[.]office",
".okinawa" : "[.]okinawa",
".olayan" : "[.]olayan",
".olayangroup" : "[.]olayangroup",
".ollo" : "[.]ollo",
".om" : "[.]om",
".omega" : "[.]omega",
".one" : "[.]one",
".ong" : "[.]ong",
".onl" : "[.]onl",
".online" : "[.]online",
".onyourside" : "[.]onyourside",
".ooo" : "[.]ooo",
".oracle" : "[.]oracle",
".orange" : "[.]orange",
".org" : "[.]org",
".organic" : "[.]organic",
".orientexpress" : "[.]orientexpress",
".origins" : "[.]origins",
".osaka" : "[.]osaka",
".otsuka" : "[.]otsuka",
".ott" : "[.]ott",
".ovh" : "[.]ovh",
".pa" : "[.]pa",
".page" : "[.]page",
".pamperedchef" : "[.]pamperedchef",
".panasonic" : "[.]panasonic",
".panerai" : "[.]panerai",
".paris" : "[.]paris",
".pars" : "[.]pars",
".partners" : "[.]partners",
".parts" : "[.]parts",
".party" : "[.]party",
".passagens" : "[.]passagens",
".pccw" : "[.]pccw",
".pe" : "[.]pe",
".pet" : "[.]pet",
".pf" : "[.]pf",
".pfizer" : "[.]pfizer",
".pg" : "[.]pg",
".ph" : "[.]ph",
".pharmacy" : "[.]pharmacy",
".philips" : "[.]philips",
".photo" : "[.]photo",
".photography" : "[.]photography",
".photos" : "[.]photos",
".physio" : "[.]physio",
".piaget" : "[.]piaget",
".pics" : "[.]pics",
".pictet" : "[.]pictet",
".pictures" : "[.]pictures",
".pid" : "[.]pid",
".pin" : "[.]pin",
".ping" : "[.]ping",
".pink" : "[.]pink",
".pioneer" : "[.]pioneer",
".pizza" : "[.]pizza",
".pk" : "[.]pk",
".pl" : "[.]pl",
".place" : "[.]place",
".play" : "[.]play",
".playstation" : "[.]playstation",
".plumbing" : "[.]plumbing",
".plus" : "[.]plus",
".pm" : "[.]pm",
".pn" : "[.]pn",
".pnc" : "[.]pnc",
".pohl" : "[.]pohl",
".poker" : "[.]poker",
".politie" : "[.]politie",
".porn" : "[.]porn",
".post" : "[.]post",
".pr" : "[.]pr",
".praxi" : "[.]praxi",
".press" : "[.]press",
".prime" : "[.]prime",
".pro" : "[.]pro",
".prod" : "[.]prod",
".productions" : "[.]productions",
".prof" : "[.]prof",
".progressive" : "[.]progressive",
".promo" : "[.]promo",
".properties" : "[.]properties",
".property" : "[.]property",
".protection" : "[.]protection",
".ps" : "[.]ps",
".pt" : "[.]pt",
".pub" : "[.]pub",
".pw" : "[.]pw",
".pwc" : "[.]pwc",
".py" : "[.]py",
".qa" : "[.]qa",
".qpon" : "[.]qpon",
".quebec" : "[.]quebec",
".quest" : "[.]quest",
".racing" : "[.]racing",
".raid" : "[.]raid",
".re" : "[.]re",
".read" : "[.]read",
".realestate" : "[.]realestate",
".realtor" : "[.]realtor",
".realty" : "[.]realty",
".recipes" : "[.]recipes",
".red" : "[.]red",
".redstone" : "[.]redstone",
".redumbrella" : "[.]redumbrella",
".rehab" : "[.]rehab",
".reise" : "[.]reise",
".reisen" : "[.]reisen",
".reit" : "[.]reit",
".ren" : "[.]ren",
".rent" : "[.]rent",
".rentals" : "[.]rentals",
".repair" : "[.]repair",
".report" : "[.]report",
".republican" : "[.]republican",
".rest" : "[.]rest",
".restaurant" : "[.]restaurant",
".review" : "[.]review",
".reviews" : "[.]reviews",
".rexroth" : "[.]rexroth",
".rich" : "[.]rich",
".richardli" : "[.]richardli",
".ricoh" : "[.]ricoh",
".rightathome" : "[.]rightathome",
".rio" : "[.]rio",
".rip" : "[.]rip",
".ro" : "[.]ro",
".rocher" : "[.]rocher",
".rocks" : "[.]rocks",
".rodeo" : "[.]rodeo",
".room" : "[.]room",
".rs" : "[.]rs",
".rsvp" : "[.]rsvp",
".ru" : "[.]ru",
".ruhr" : "[.]ruhr",
".run" : "[.]run",
".rw" : "[.]rw",
".rwe" : "[.]rwe",
".ryukyu" : "[.]ryukyu",
".sa" : "[.]sa",
".saarland" : "[.]saarland",
".safe" : "[.]safe",
".safety" : "[.]safety",
".sakura" : "[.]sakura",
".sale" : "[.]sale",
".salon" : "[.]salon",
".samsung" : "[.]samsung",
".sandvik" : "[.]sandvik",
".sandvikcoromant" : "[.]sandvikcoromant",
".sanofi" : "[.]sanofi",
".sap" : "[.]sap",
".sapo" : "[.]sapo",
".sarl" : "[.]sarl",
".sas" : "[.]sas",
".save" : "[.]save",
".saxo" : "[.]saxo",
".sb" : "[.]sb",
".sbi" : "[.]sbi",
".sbs" : "[.]sbs",
".sc" : "[.]sc",
".sca" : "[.]sca",
".scb" : "[.]scb",
".schaeffler" : "[.]schaeffler",
".schmidt" : "[.]schmidt",
".scholarships" : "[.]scholarships",
".school" : "[.]school",
".schule" : "[.]schule",
".schwarz" : "[.]schwarz",
".science" : "[.]science",
".scjohnson" : "[.]scjohnson",
".scor" : "[.]scor",
".scot" : "[.]scot",
".sd" : "[.]sd",
".se" : "[.]se",
".seat" : "[.]seat",
".security" : "[.]security",
".seek" : "[.]seek",
".select" : "[.]select",
".sener" : "[.]sener",
".services" : "[.]services",
".ses" : "[.]ses",
".seven" : "[.]seven",
".sew" : "[.]sew",
".sex" : "[.]sex",
".sexy" : "[.]sexy",
".sfr" : "[.]sfr",
".sg" : "[.]sg",
".sh" : "[.]sh",
".shangrila" : "[.]shangrila",
".sharp" : "[.]sharp",
".shaw" : "[.]shaw",
".shell" : "[.]shell",
".shia" : "[.]shia",
".shiksha" : "[.]shiksha",
".shoes" : "[.]shoes",
".shop" : "[.]shop",
".shopping" : "[.]shopping",
".shouji" : "[.]shouji",
".show" : "[.]show",
".shriram" : "[.]shriram",
".si" : "[.]si",
".silk" : "[.]silk",
".sina" : "[.]sina",
".singles" : "[.]singles",
".site" : "[.]site",
".sj" : "[.]sj",
".sk" : "[.]sk",
".ski" : "[.]ski",
".skin" : "[.]skin",
".sky" : "[.]sky",
".skype" : "[.]skype",
".sl" : "[.]sl",
".sm" : "[.]sm",
".smart" : "[.]smart",
".smile" : "[.]smile",
".sn" : "[.]sn",
".sncf" : "[.]sncf",
".so" : "[.]so",
".soccer" : "[.]soccer",
".social" : "[.]social",
".softbank" : "[.]softbank",
".software" : "[.]software",
".sohu" : "[.]sohu",
".solar" : "[.]solar",
".solutions" : "[.]solutions",
".song" : "[.]song",
".sony" : "[.]sony",
".soy" : "[.]soy",
".space" : "[.]space",
".spiegel" : "[.]spiegel",
".spot" : "[.]spot",
".spreadbetting" : "[.]spreadbetting",
".sr" : "[.]sr",
".srl" : "[.]srl",
".ss" : "[.]ss",
".st" : "[.]st",
".stada" : "[.]stada",
".staples" : "[.]staples",
".star" : "[.]star",
".starhub" : "[.]starhub",
".statebank" : "[.]statebank",
".statefarm" : "[.]statefarm",
".statoil" : "[.]statoil",
".stc" : "[.]stc",
".stcgroup" : "[.]stcgroup",
".stockholm" : "[.]stockholm",
".storage" : "[.]storage",
".store" : "[.]store",
".stream" : "[.]stream",
".studio" : "[.]studio",
".study" : "[.]study",
".style" : "[.]style",
".su" : "[.]su",
".sucks" : "[.]sucks",
".supplies" : "[.]supplies",
".supply" : "[.]supply",
".support" : "[.]support",
".surf" : "[.]surf",
".surgery" : "[.]surgery",
".suzuki" : "[.]suzuki",
".sv" : "[.]sv",
".swatch" : "[.]swatch",
".swiftcover" : "[.]swiftcover",
".swiss" : "[.]swiss",
".sx" : "[.]sx",
".sy" : "[.]sy",
".sydney" : "[.]sydney",
".symantec" : "[.]symantec",
".systems" : "[.]systems",
".sz" : "[.]sz",
".tab" : "[.]tab",
".taipei" : "[.]taipei",
".talk" : "[.]talk",
".taobao" : "[.]taobao",
".tatamotors" : "[.]tatamotors",
".tatar" : "[.]tatar",
".tattoo" : "[.]tattoo",
".tax" : "[.]tax",
".taxi" : "[.]taxi",
".tc" : "[.]tc",
".tci" : "[.]tci",
".td" : "[.]td",
".tdk" : "[.]tdk",
".team" : "[.]team",
".tech" : "[.]tech",
".technology" : "[.]technology",
".tel" : "[.]tel",
".telecity" : "[.]telecity",
".telefonica" : "[.]telefonica",
".temasek" : "[.]temasek",
".tennis" : "[.]tennis",
".teva" : "[.]teva",
".tf" : "[.]tf",
".tg" : "[.]tg",
".th" : "[.]th",
".thd" : "[.]thd",
".theater" : "[.]theater",
".theatre" : "[.]theatre",
".tiaa" : "[.]tiaa",
".tickets" : "[.]tickets",
".tienda" : "[.]tienda",
".tiffany" : "[.]tiffany",
".tips" : "[.]tips",
".tires" : "[.]tires",
".tirol" : "[.]tirol",
".tj" : "[.]tj",
".tjmaxx" : "[.]tjmaxx",
".tjx" : "[.]tjx",
".tk" : "[.]tk",
".tkmaxx" : "[.]tkmaxx",
".tl" : "[.]tl",
".tm" : "[.]tm",
".tmall" : "[.]tmall",
".tn" : "[.]tn",
".to" : "[.]to",
".today" : "[.]today",
".tokyo" : "[.]tokyo",
".tools" : "[.]tools",
".top" : "[.]top",
".toray" : "[.]toray",
".toshiba" : "[.]toshiba",
".total" : "[.]total",
".tours" : "[.]tours",
".town" : "[.]town",
".toyota" : "[.]toyota",
".toys" : "[.]toys",
".tp" : "[.]tp",
".tr" : "[.]tr",
".trade" : "[.]trade",
".trading" : "[.]trading",
".training" : "[.]training",
".travel" : "[.]travel",
".travelchannel" : "[.]travelchannel",
".travelers" : "[.]travelers",
".travelersinsurance" : "[.]travelersinsurance",
".trust" : "[.]trust",
".trv" : "[.]trv",
".tt" : "[.]tt",
".tube" : "[.]tube",
".tui" : "[.]tui",
".tunes" : "[.]tunes",
".tushu" : "[.]tushu",
".tv" : "[.]tv",
".tvs" : "[.]tvs",
".tw" : "[.]tw",
".tz" : "[.]tz",
".ua" : "[.]ua",
".ubs" : "[.]ubs",
".ug" : "[.]ug",
".uk" : "[.]uk",
".um" : "[.]um",
".unicom" : "[.]unicom",
".university" : "[.]university",
".uno" : "[.]uno",
".uol" : "[.]uol",
".ups" : "[.]ups",
".us" : "[.]us",
".uy" : "[.]uy",
".uz" : "[.]uz",
".va" : "[.]va",
".vacations" : "[.]vacations",
".vana" : "[.]vana",
".vc" : "[.]vc",
".ve" : "[.]ve",
".vegas" : "[.]vegas",
".ventures" : "[.]ventures",
".verisign" : "[.]verisign",
".versicherung" : "[.]versicherung",
".vet" : "[.]vet",
".vg" : "[.]vg",
".vi" : "[.]vi",
".viajes" : "[.]viajes",
".video" : "[.]video",
".vig" : "[.]vig",
".viking" : "[.]viking",
".villas" : "[.]villas",
".vin" : "[.]vin",
".vip" : "[.]vip",
".virgin" : "[.]virgin",
".visa" : "[.]visa",
".vision" : "[.]vision",
".vista" : "[.]vista",
".vistaprint" : "[.]vistaprint",
".viva" : "[.]viva",
".vivo" : "[.]vivo",
".vlaanderen" : "[.]vlaanderen",
".vn" : "[.]vn",
".vodka" : "[.]vodka",
".volkswagen" : "[.]volkswagen",
".vote" : "[.]vote",
".voting" : "[.]voting",
".voto" : "[.]voto",
".voyage" : "[.]voyage",
".vu" : "[.]vu",
".vuelos" : "[.]vuelos",
".wales" : "[.]wales",
".walter" : "[.]walter",
".wang" : "[.]wang",
".wanggou" : "[.]wanggou",
".warman" : "[.]warman",
".watch" : "[.]watch",
".watches" : "[.]watches",
".weather" : "[.]weather",
".weatherchannel" : "[.]weatherchannel",
".webcam" : "[.]webcam",
".weber" : "[.]weber",
".website" : "[.]website",
".wed" : "[.]wed",
".wedding" : "[.]wedding",
".weibo" : "[.]weibo",
".weir" : "[.]weir",
".wf" : "[.]wf",
".whoswho" : "[.]whoswho",
".wien" : "[.]wien",
".wiki" : "[.]wiki",
".williamhill" : "[.]williamhill",
".win" : "[.]win",
".windows" : "[.]windows",
".wine" : "[.]wine",
".winners" : "[.]winners",
".wme" : "[.]wme",
".wolterskluwer" : "[.]wolterskluwer",
".woodside" : "[.]woodside",
".work" : "[.]work",
".works" : "[.]works",
".world" : "[.]world",
".ws" : "[.]ws",
".wtc" : "[.]wtc",
".wtf" : "[.]wtf",
".xbox" : "[.]xbox",
".xerox" : "[.]xerox",
".xfinity" : "[.]xfinity",
".xihuan" : "[.]xihuan",
".xin" : "[.]xin",
".xperia" : "[.]xperia",
".xxx" : "[.]xxx",
".xyz" : "[.]xyz",
".yachts" : "[.]yachts",
".yahoo" : "[.]yahoo",
".yamaxun" : "[.]yamaxun",
".yandex" : "[.]yandex",
".ye" : "[.]ye",
".yodobashi" : "[.]yodobashi",
".yoga" : "[.]yoga",
".yokohama" : "[.]yokohama",
".you" : "[.]you",
".youtube" : "[.]youtube",
".yt" : "[.]yt",
".yun" : "[.]yun",
".za" : "[.]za",
".zappos" : "[.]zappos",
".zara" : "[.]zara",
".zero" : "[.]zero",
".zip" : "[.]zip",
".zippo" : "[.]zippo",
".zm" : "[.]zm",
".zone" : "[.]zone",
".zuerich" : "[.]zuerich",
".zw" : "[.]zw" }
|
|
from django.db.models import Exists
from django.db.models import OuterRef
from django.db.models import Q
from django.db import transaction
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.permissions import IsAuthenticated
from rest_framework.serializers import PrimaryKeyRelatedField
from rest_framework.serializers import ValidationError
from contentcuration.models import AssessmentItem
from contentcuration.models import Channel
from contentcuration.models import ContentNode
from contentcuration.models import File
from contentcuration.models import generate_storage_url
from contentcuration.models import User
from contentcuration.utils.files import duplicate_file
from contentcuration.viewsets.base import BulkListSerializer
from contentcuration.viewsets.base import BulkModelSerializer
from contentcuration.viewsets.base import ValuesViewset
from contentcuration.viewsets.base import BulkCreateMixin
from contentcuration.viewsets.base import BulkUpdateMixin
from contentcuration.viewsets.base import CopyMixin
from contentcuration.viewsets.base import RequiredFilterSet
from contentcuration.viewsets.common import UUIDInFilter
from contentcuration.viewsets.sync.constants import CREATED
from contentcuration.viewsets.sync.constants import DELETED
from contentcuration.viewsets.sync.constants import FILE
class FileFilter(RequiredFilterSet):
id__in = UUIDInFilter(name="id")
contentnode__in = UUIDInFilter(name="contentnode")
assessment_item__in = UUIDInFilter(name="assessment_item")
class Meta:
model = File
fields = (
"id__in",
"contentnode__in",
"assessment_item__in",
"id",
"contentnode",
"assessment_item",
)
class FileSerializer(BulkModelSerializer):
contentnode = PrimaryKeyRelatedField(queryset=ContentNode.objects.all())
uploaded_by = PrimaryKeyRelatedField(queryset=User.objects.all())
class Meta:
model = File
fields = (
"id",
"checksum",
"file_size",
"language",
"contentnode",
"assessment_item",
"file_format",
"preset",
"original_filename",
"uploaded_by",
)
list_serializer_class = BulkListSerializer
def retrieve_storage_url(item):
""" Get the file_on_disk url """
return generate_storage_url("{}.{}".format(item["checksum"], item["file_format"]))
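# Hedged example: for a row like {'checksum': 'abc123', 'file_format': 'mp4'}
# this returns generate_storage_url('abc123.mp4'), i.e. the URL of the
# content-addressed file on disk.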
channel_trees = (
"main_tree",
"chef_tree",
"trash_tree",
"staging_tree",
"previous_tree",
)
edit_filter = Q()
for tree_name in channel_trees:
edit_filter |= Q(
**{
"editable_channels__{}__tree_id".format(tree_name): OuterRef(
"contentnode__tree_id"
)
}
)
edit_filter |= Q(
**{
"editable_channels__{}__tree_id".format(tree_name): OuterRef(
"assessment_item__contentnode__tree_id"
)
}
)
view_filter = Q()
for tree_name in channel_trees:
view_filter |= Q(
**{
"view_only_channels__{}__tree_id".format(tree_name): OuterRef(
"contentnode__tree_id"
)
}
)
view_filter |= Q(
**{
"view_only_channels__{}__tree_id".format(tree_name): OuterRef(
"assessment_item__contentnode__tree_id"
)
}
)
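# Sketch of what the two loops above expand to, shown for a single tree
# name ('main_tree') on the edit side:
#
#     Q(editable_channels__main_tree__tree_id=OuterRef('contentnode__tree_id'))
#     | Q(editable_channels__main_tree__tree_id=OuterRef('assessment_item__contentnode__tree_id'))
#
# OR-ed across all five channel trees, so a file is editable (or viewable,
# for view_filter) if it or its assessment item sits in any tree of a
# channel the user can edit (or view).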
# Apply mixin first to override ValuesViewset
class FileViewSet(BulkCreateMixin, BulkUpdateMixin, CopyMixin, ValuesViewset):
queryset = File.objects.all()
serializer_class = FileSerializer
permission_classes = [IsAuthenticated]
filter_backends = (DjangoFilterBackend,)
filter_class = FileFilter
values = (
"id",
"checksum",
"file_size",
"language",
"file_format",
"contentnode_id",
"assessment_item_id",
"file_on_disk",
"preset_id",
"language_id",
"original_filename",
"uploaded_by",
)
field_map = {
"url": retrieve_storage_url,
"preset": "preset_id",
"language": "language_id",
"contentnode": "contentnode_id",
"assessment_item": "assessment_item_id",
}
def get_queryset(self):
user_id = not self.request.user.is_anonymous() and self.request.user.id
user_queryset = User.objects.filter(id=user_id)
queryset = File.objects.annotate(
edit=Exists(user_queryset.filter(edit_filter)),
view=Exists(user_queryset.filter(view_filter)),
public=Exists(
Channel.objects.filter(
public=True, main_tree__tree_id=OuterRef("contentnode__tree_id")
)
),
)
queryset = queryset.filter(Q(view=True) | Q(edit=True) | Q(public=True))
return queryset
def get_edit_queryset(self):
user_id = not self.request.user.is_anonymous() and self.request.user.id
user_queryset = User.objects.filter(id=user_id)
queryset = File.objects.annotate(
edit=Exists(user_queryset.filter(edit_filter)),
)
queryset = queryset.filter(edit=True)
return queryset
def copy(self, pk, from_key=None, **mods):
delete_response = [dict(key=pk, table=FILE, type=DELETED,)]
try:
file = File.objects.get(pk=from_key)
except File.DoesNotExist:
error = ValidationError("Copy file source does not exist")
return str(error), delete_response
if File.objects.filter(pk=pk).exists():
error = ValidationError("Copy pk already exists")
# if the PK already exists, this is not a scenario we can negotiate easily
# between client and server
return str(error), None
contentnode_id = mods.get("contentnode", None)
assessment_id = mods.get("assessment_item", None)
preset_id = mods.get("preset", None)
try:
contentnode = None
if contentnode_id is not None:
contentnode = ContentNode.objects.get(pk=contentnode_id)
except ContentNode.DoesNotExist as e:
return str(ValidationError(e)), delete_response
try:
assessment_item = None
if assessment_id is not None:
assessment_item = AssessmentItem.objects.get(
assessment_id=assessment_id
)
except AssessmentItem.DoesNotExist as e:
return str(ValidationError(e)), delete_response
with transaction.atomic():
file_copy = duplicate_file(
file,
save=False,
node=contentnode,
assessment_item=assessment_item,
preset_id=preset_id,
)
file_copy.pk = pk
file_copy.save()
return (
None,
dict(
key=pk,
table=FILE,
type=CREATED,
obj=FileSerializer(instance=file_copy).data,
),
)
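# Hedged note on the copy() contract above: it returns an (error, change)
# pair. On success the change is a CREATED event for the FILE table carrying
# the serialized copy; on a missing source it returns the error string plus
# a DELETED event so the client can roll back its optimistic copy.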
|
|
#! /usr/bin/env python
import unittest
import numpy as np
from pymt.grids import UniformRectilinear, UniformRectilinearPoints
class TestRasterGrid(unittest.TestCase):
def assert_point_count(self, grid, point_count):
self.assertEqual(point_count, grid.get_point_count())
def assert_cell_count(self, grid, cell_count):
self.assertEqual(cell_count, grid.get_cell_count())
def assert_shape(self, grid, shape):
self.assertListEqual(list(grid.get_shape()), list(shape))
def assert_spacing(self, grid, spacing):
self.assertListEqual(list(grid.get_spacing()), list(spacing))
def assert_origin(self, grid, origin):
self.assertListEqual(list(grid.get_origin()), list(origin))
def assert_x(self, grid, x):
self.assertListEqual(list(x), list(grid.get_x()))
def assert_y(self, grid, y):
self.assertListEqual(list(y), list(grid.get_y()))
def assert_z(self, grid, z):
self.assertListEqual(list(z), list(grid.get_z()))
def assert_offset(self, grid, offset):
self.assertListEqual(list(offset), list(grid.get_offset()))
def assert_connectivity(self, grid, connectivity):
self.assertListEqual(list(connectivity), list(grid.get_connectivity()))
@unittest.skip("xy indexing is deprecated")
def test_xy_indexing(self):
grid = UniformRectilinear((2, 3), (1, 2), (0.5, 0))
self.assert_point_count(grid, 6)
self.assert_cell_count(grid, 2)
self.assert_shape(grid, (3, 2))
self.assert_spacing(grid, (2.0, 1.0))
self.assert_origin(grid, (0.0, 0.5))
self.assert_x(grid, [0.5, 1.5, 0.5, 1.5, 0.5, 1.5])
self.assert_y(grid, [0.0, 0.0, 2.0, 2.0, 4.0, 4.0])
def test_ij_indexing(self):
grid = UniformRectilinear(
(2, 3), (1, 2), (0.5, 0), indexing="ij", units=("m", "km")
)
self.assert_point_count(grid, 6)
self.assert_cell_count(grid, 2)
self.assert_shape(grid, (2, 3))
self.assert_spacing(grid, (1.0, 2.0))
self.assert_origin(grid, (0.5, 0.0))
self.assert_x(grid, [0.0, 2.0, 4.0, 0.0, 2.0, 4.0])
self.assert_y(grid, [0.5, 0.5, 0.5, 1.5, 1.5, 1.5])
def test_grid_of_points(self):
grid = UniformRectilinearPoints(
(2, 3), (1, 2), (0.5, 0), indexing="ij", set_connectivity=True
)
self.assert_point_count(grid, 6)
self.assert_cell_count(grid, 0)
self.assert_shape(grid, (2, 3))
self.assert_spacing(grid, (1.0, 2.0))
self.assert_origin(grid, (0.5, 0.0))
self.assert_x(grid, [0.0, 2.0, 4.0, 0.0, 2.0, 4.0])
self.assert_y(grid, [0.5, 0.5, 0.5, 1.5, 1.5, 1.5])
self.assert_connectivity(grid, np.arange(grid.get_point_count()))
self.assert_offset(grid, np.arange(1, grid.get_point_count() + 1))
def test_1d_grid_of_points(self):
grid = UniformRectilinearPoints(
(5,), (1.0,), (0.5,), indexing="ij", set_connectivity=True
)
self.assert_point_count(grid, 5)
self.assert_cell_count(grid, 0)
self.assert_shape(grid, (5,))
self.assert_spacing(grid, (1.0,))
self.assert_origin(grid, (0.5,))
self.assert_x(grid, [0.5, 1.5, 2.5, 3.5, 4.5])
self.assert_connectivity(grid, np.arange(grid.get_point_count()))
self.assert_offset(grid, np.arange(1, grid.get_point_count() + 1))
@unittest.skip("xy indexing is deprecated")
def test_3d_grid_xy_indexing(self):
grid = UniformRectilinear((4, 3, 2), (1, 2, 3), (-1, 0, 1), indexing="xy")
self.assert_point_count(grid, 24)
self.assert_cell_count(grid, 6)
self.assert_shape(grid, (2, 3, 4))
self.assert_spacing(grid, (3.0, 2.0, 1.0))
self.assert_origin(grid, (1.0, 0.0, -1.0))
        self.assert_x(grid, [-1.0, 0.0, 1.0, 2.0] * 6)
        self.assert_y(grid, ([0.0] * 4 + [2.0] * 4 + [4.0] * 4) * 2)
        self.assert_z(grid, [1.0] * 12 + [4.0] * 12)
self.assert_offset(grid, 8.0 * np.arange(1, grid.get_cell_count() + 1))
def test_3d_grid_ij_indexing(self):
grid = UniformRectilinear((4, 3, 2), (1, 2, 3), (-1, 0, 1), indexing="ij")
self.assert_point_count(grid, 24)
self.assert_cell_count(grid, 6)
self.assert_shape(grid, (4, 3, 2))
self.assert_spacing(grid, (1.0, 2.0, 3.0))
self.assert_origin(grid, (-1.0, 0.0, 1.0))
        self.assert_x(grid, [1.0, 4.0] * 12)
        self.assert_y(grid, [0.0, 0.0, 2.0, 2.0, 4.0, 4.0] * 4)
        self.assert_z(grid, [-1.0] * 6 + [0.0] * 6 + [1.0] * 6 + [2.0] * 6)
self.assert_offset(grid, 8.0 * np.arange(1, grid.get_cell_count() + 1))
def suite():
suite = unittest.TestLoader().loadTestsFromTestCase(TestRasterGrid)
return suite
if __name__ == "__main__":
unittest.main()
|
|
#!/usr/bin/env python
"""Upload and download support for apitools."""
from __future__ import print_function
import email.generator as email_generator
import email.mime.multipart as mime_multipart
import email.mime.nonmultipart as mime_nonmultipart
import io
import json
import mimetypes
import os
import threading
import six
from six.moves import http_client
from apitools.base.py import buffered_stream
from apitools.base.py import exceptions
from apitools.base.py import http_wrapper
from apitools.base.py import stream_slice
from apitools.base.py import util
__all__ = [
'Download',
'Upload',
'RESUMABLE_UPLOAD',
'SIMPLE_UPLOAD',
'DownloadProgressPrinter',
'DownloadCompletePrinter',
'UploadProgressPrinter',
'UploadCompletePrinter',
]
_RESUMABLE_UPLOAD_THRESHOLD = 5 << 20
SIMPLE_UPLOAD = 'simple'
RESUMABLE_UPLOAD = 'resumable'
def DownloadProgressPrinter(response, unused_download):
"""Print download progress based on response."""
if 'content-range' in response.info:
print('Received %s' % response.info['content-range'])
else:
print('Received %d bytes' % response.length)
def DownloadCompletePrinter(unused_response, unused_download):
"""Print information about a completed download."""
print('Download complete')
def UploadProgressPrinter(response, unused_upload):
"""Print upload progress based on response."""
print('Sent %s' % response.info['range'])
def UploadCompletePrinter(unused_response, unused_upload):
"""Print information about a completed upload."""
print('Upload complete')
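# Hedged usage sketch: the printers above are intended as callbacks when
# constructing a transfer, e.g.
#
#     download = Download.FromFile(
#         '/tmp/out.bin', overwrite=True,
#         progress_callback=DownloadProgressPrinter,
#         finish_callback=DownloadCompletePrinter)
#
# Each received chunk then fires progress_callback(response, download) on a
# worker thread (see _Transfer._ExecuteCallback below).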
class _Transfer(object):
"""Generic bits common to Uploads and Downloads."""
def __init__(self, stream, close_stream=False, chunksize=None,
auto_transfer=True, http=None, num_retries=5):
self.__bytes_http = None
self.__close_stream = close_stream
self.__http = http
self.__stream = stream
self.__url = None
self.__num_retries = 5
# Let the @property do validation
self.num_retries = num_retries
self.retry_func = (
http_wrapper.HandleExceptionsAndRebuildHttpConnections)
self.auto_transfer = auto_transfer
self.chunksize = chunksize or 1048576
def __repr__(self):
return str(self)
@property
def close_stream(self):
return self.__close_stream
@property
def http(self):
return self.__http
@property
def bytes_http(self):
return self.__bytes_http or self.http
@bytes_http.setter
def bytes_http(self, value):
self.__bytes_http = value
@property
def num_retries(self):
return self.__num_retries
@num_retries.setter
def num_retries(self, value):
util.Typecheck(value, six.integer_types)
if value < 0:
raise exceptions.InvalidDataError(
'Cannot have negative value for num_retries')
self.__num_retries = value
@property
def stream(self):
return self.__stream
@property
def url(self):
return self.__url
def _Initialize(self, http, url):
"""Initialize this download by setting self.http and self.url.
We want the user to be able to override self.http by having set
the value in the constructor; in that case, we ignore the provided
http.
Args:
http: An httplib2.Http instance or None.
url: The url for this transfer.
Returns:
None. Initializes self.
"""
self.EnsureUninitialized()
if self.http is None:
self.__http = http or http_wrapper.GetHttp()
self.__url = url
@property
def initialized(self):
return self.url is not None and self.http is not None
@property
def _type_name(self):
return type(self).__name__
def EnsureInitialized(self):
if not self.initialized:
raise exceptions.TransferInvalidError(
'Cannot use uninitialized %s', self._type_name)
def EnsureUninitialized(self):
if self.initialized:
raise exceptions.TransferInvalidError(
'Cannot re-initialize %s', self._type_name)
def __del__(self):
if self.__close_stream:
self.__stream.close()
def _ExecuteCallback(self, callback, response):
# TODO(craigcitro): Push these into a queue.
if callback is not None:
threading.Thread(target=callback, args=(response, self)).start()
class Download(_Transfer):
"""Data for a single download.
Public attributes:
chunksize: default chunksize to use for transfers.
"""
_ACCEPTABLE_STATUSES = set((
http_client.OK,
http_client.NO_CONTENT,
http_client.PARTIAL_CONTENT,
http_client.REQUESTED_RANGE_NOT_SATISFIABLE,
))
_REQUIRED_SERIALIZATION_KEYS = set((
'auto_transfer', 'progress', 'total_size', 'url'))
def __init__(self, stream, progress_callback=None, finish_callback=None,
**kwds):
total_size = kwds.pop('total_size', None)
super(Download, self).__init__(stream, **kwds)
self.__initial_response = None
self.__progress = 0
self.__total_size = total_size
self.__encoding = None
self.progress_callback = progress_callback
self.finish_callback = finish_callback
@property
def progress(self):
return self.__progress
@property
def encoding(self):
return self.__encoding
@classmethod
def FromFile(cls, filename, overwrite=False, auto_transfer=True, **kwds):
"""Create a new download object from a filename."""
path = os.path.expanduser(filename)
if os.path.exists(path) and not overwrite:
raise exceptions.InvalidUserInputError(
'File %s exists and overwrite not specified' % path)
return cls(open(path, 'wb'), close_stream=True,
auto_transfer=auto_transfer, **kwds)
@classmethod
def FromStream(cls, stream, auto_transfer=True, total_size=None, **kwds):
"""Create a new Download object from a stream."""
return cls(stream, auto_transfer=auto_transfer, total_size=total_size,
**kwds)
@classmethod
def FromData(cls, stream, json_data, http=None, auto_transfer=None,
**kwds):
"""Create a new Download object from a stream and serialized data."""
info = json.loads(json_data)
missing_keys = cls._REQUIRED_SERIALIZATION_KEYS - set(info.keys())
if missing_keys:
raise exceptions.InvalidDataError(
'Invalid serialization data, missing keys: %s' % (
', '.join(missing_keys)))
download = cls.FromStream(stream, **kwds)
if auto_transfer is not None:
download.auto_transfer = auto_transfer
else:
download.auto_transfer = info['auto_transfer']
setattr(download, '_Download__progress', info['progress'])
setattr(download, '_Download__total_size', info['total_size'])
download._Initialize( # pylint: disable=protected-access
http, info['url'])
return download
@property
def serialization_data(self):
self.EnsureInitialized()
return {
'auto_transfer': self.auto_transfer,
'progress': self.progress,
'total_size': self.total_size,
'url': self.url,
}
@property
def total_size(self):
return self.__total_size
def __str__(self):
if not self.initialized:
return 'Download (uninitialized)'
else:
return 'Download with %d/%s bytes transferred from url %s' % (
self.progress, self.total_size, self.url)
def ConfigureRequest(self, http_request, url_builder):
url_builder.query_params['alt'] = 'media'
# TODO(craigcitro): We need to send range requests because by
        # default httplib2 stores entire responses in memory. Override
# httplib2's download method (as gsutil does) so that this is not
# necessary.
http_request.headers['Range'] = 'bytes=0-%d' % (self.chunksize - 1,)
def __SetTotal(self, info):
if 'content-range' in info:
_, _, total = info['content-range'].rpartition('/')
if total != '*':
self.__total_size = int(total)
# Note "total_size is None" means we don't know it; if no size
# info was returned on our initial range request, that means we
# have a 0-byte file. (That last statement has been verified
# empirically, but is not clearly documented anywhere.)
if self.total_size is None:
self.__total_size = 0
def InitializeDownload(self, http_request, http=None, client=None):
"""Initialize this download by making a request.
Args:
http_request: The HttpRequest to use to initialize this download.
http: The httplib2.Http instance for this request.
client: If provided, let this client process the final URL before
sending any additional requests. If client is provided and
http is not, client.http will be used instead.
"""
self.EnsureUninitialized()
if http is None and client is None:
raise exceptions.UserError('Must provide client or http.')
http = http or client.http
if client is not None:
http_request.url = client.FinalizeTransferUrl(http_request.url)
url = http_request.url
if self.auto_transfer:
end_byte = self.__ComputeEndByte(0)
self.__SetRangeHeader(http_request, 0, end_byte)
response = http_wrapper.MakeRequest(
self.bytes_http or http, http_request)
if response.status_code not in self._ACCEPTABLE_STATUSES:
raise exceptions.HttpError.FromResponse(response)
self.__initial_response = response
self.__SetTotal(response.info)
url = response.info.get('content-location', response.request_url)
if client is not None:
url = client.FinalizeTransferUrl(url)
self._Initialize(http, url)
# Unless the user has requested otherwise, we want to just
# go ahead and pump the bytes now.
if self.auto_transfer:
self.StreamInChunks()
def __NormalizeStartEnd(self, start, end=None):
if end is not None:
if start < 0:
raise exceptions.TransferInvalidError(
'Cannot have end index with negative start index')
elif start >= self.total_size:
raise exceptions.TransferInvalidError(
'Cannot have start index greater than total size')
end = min(end, self.total_size - 1)
if end < start:
raise exceptions.TransferInvalidError(
'Range requested with end[%s] < start[%s]' % (end, start))
return start, end
else:
if start < 0:
start = max(0, start + self.total_size)
return start, self.total_size - 1
def __SetRangeHeader(self, request, start, end=None):
if start < 0:
request.headers['range'] = 'bytes=%d' % start
elif end is None:
request.headers['range'] = 'bytes=%d-' % start
else:
request.headers['range'] = 'bytes=%d-%d' % (start, end)
def __ComputeEndByte(self, start, end=None, use_chunks=True):
"""Compute the last byte to fetch for this request.
This is all based on the HTTP spec for Range and
Content-Range.
Note that this is potentially confusing in several ways:
* the value for the last byte is 0-based, eg "fetch 10 bytes
from the beginning" would return 9 here.
* if we have no information about size, and don't want to
use the chunksize, we'll return None.
See the tests for more examples.
Args:
start: byte to start at.
end: (int or None, default: None) Suggested last byte.
use_chunks: (bool, default: True) If False, ignore self.chunksize.
Returns:
Last byte to use in a Range header, or None.
"""
end_byte = end
if start < 0 and not self.total_size:
return end_byte
if use_chunks:
alternate = start + self.chunksize - 1
if end_byte is not None:
end_byte = min(end_byte, alternate)
else:
end_byte = alternate
if self.total_size:
alternate = self.total_size - 1
if end_byte is not None:
end_byte = min(end_byte, alternate)
else:
end_byte = alternate
return end_byte
def __GetChunk(self, start, end, additional_headers=None):
"""Retrieve a chunk, and return the full response."""
self.EnsureInitialized()
request = http_wrapper.Request(url=self.url)
self.__SetRangeHeader(request, start, end=end)
if additional_headers is not None:
request.headers.update(additional_headers)
return http_wrapper.MakeRequest(
self.bytes_http, request, retry_func=self.retry_func,
retries=self.num_retries)
def __ProcessResponse(self, response):
"""Process response (by updating self and writing to self.stream)."""
if response.status_code not in self._ACCEPTABLE_STATUSES:
# We distinguish errors that mean we made a mistake in setting
# up the transfer versus something we should attempt again.
if response.status_code in (http_client.FORBIDDEN,
http_client.NOT_FOUND):
raise exceptions.HttpError.FromResponse(response)
else:
raise exceptions.TransferRetryError(response.content)
if response.status_code in (http_client.OK,
http_client.PARTIAL_CONTENT):
self.stream.write(response.content)
self.__progress += response.length
if response.info and 'content-encoding' in response.info:
# TODO(craigcitro): Handle the case where this changes over a
# download.
self.__encoding = response.info['content-encoding']
elif response.status_code == http_client.NO_CONTENT:
# It's important to write something to the stream for the case
# of a 0-byte download to a file, as otherwise python won't
# create the file.
self.stream.write('')
return response
def GetRange(self, start, end=None, additional_headers=None,
use_chunks=True):
"""Retrieve a given byte range from this download, inclusive.
Range must be of one of these three forms:
* 0 <= start, end = None: Fetch from start to the end of the file.
* 0 <= start <= end: Fetch the bytes from start to end.
* start < 0, end = None: Fetch the last -start bytes of the file.
(These variations correspond to those described in the HTTP 1.1
protocol for range headers in RFC 2616, sec. 14.35.1.)
Args:
start: (int) Where to start fetching bytes. (See above.)
end: (int, optional) Where to stop fetching bytes. (See above.)
          additional_headers: (dict, optional) Any additional headers to
pass with the request.
use_chunks: (bool, default: True) If False, ignore self.chunksize
and fetch this range in a single request.
Returns:
None. Streams bytes into self.stream.
"""
self.EnsureInitialized()
progress_end_normalized = False
if self.total_size is not None:
progress, end_byte = self.__NormalizeStartEnd(start, end)
progress_end_normalized = True
else:
progress = start
end_byte = end
while (not progress_end_normalized or end_byte is None or
progress <= end_byte):
end_byte = self.__ComputeEndByte(progress, end=end_byte,
use_chunks=use_chunks)
response = self.__GetChunk(progress, end_byte,
additional_headers=additional_headers)
if not progress_end_normalized:
self.__SetTotal(response.info)
progress, end_byte = self.__NormalizeStartEnd(start, end)
progress_end_normalized = True
response = self.__ProcessResponse(response)
progress += response.length
if response.length == 0:
raise exceptions.TransferRetryError(
'Zero bytes unexpectedly returned in download response')
def StreamInChunks(self, callback=None, finish_callback=None,
additional_headers=None):
"""Stream the entire download in chunks."""
self.StreamMedia(callback=callback, finish_callback=finish_callback,
additional_headers=additional_headers,
use_chunks=True)
def StreamMedia(self, callback=None, finish_callback=None,
additional_headers=None, use_chunks=True):
"""Stream the entire download.
Args:
callback: (default: None) Callback to call as each chunk is
completed.
finish_callback: (default: None) Callback to call when the
download is complete.
additional_headers: (default: None) Additional headers to
include in fetching bytes.
use_chunks: (bool, default: True) If False, ignore self.chunksize
and stream this download in a single request.
Returns:
None. Streams bytes into self.stream.
"""
callback = callback or self.progress_callback
finish_callback = finish_callback or self.finish_callback
self.EnsureInitialized()
while True:
if self.__initial_response is not None:
response = self.__initial_response
self.__initial_response = None
else:
end_byte = self.__ComputeEndByte(self.progress,
use_chunks=use_chunks)
response = self.__GetChunk(
self.progress, end_byte,
additional_headers=additional_headers)
if self.total_size is None:
self.__SetTotal(response.info)
response = self.__ProcessResponse(response)
self._ExecuteCallback(callback, response)
if (response.status_code == http_client.OK or
self.progress >= self.total_size):
break
self._ExecuteCallback(finish_callback, response)
class Upload(_Transfer):
"""Data for a single Upload.
Fields:
stream: The stream to upload.
mime_type: MIME type of the upload.
total_size: (optional) Total upload size for the stream.
close_stream: (default: False) Whether or not we should close the
stream when finished with the upload.
auto_transfer: (default: True) If True, stream all bytes as soon as
the upload is created.
"""
_REQUIRED_SERIALIZATION_KEYS = set((
'auto_transfer', 'mime_type', 'total_size', 'url'))
def __init__(self, stream, mime_type, total_size=None, http=None,
close_stream=False, chunksize=None, auto_transfer=True,
progress_callback=None, finish_callback=None,
**kwds):
super(Upload, self).__init__(
stream, close_stream=close_stream, chunksize=chunksize,
auto_transfer=auto_transfer, http=http, **kwds)
self.__complete = False
self.__final_response = None
self.__mime_type = mime_type
self.__progress = 0
self.__server_chunk_granularity = None
self.__strategy = None
self.__total_size = None
self.progress_callback = progress_callback
self.finish_callback = finish_callback
self.total_size = total_size
@property
def progress(self):
return self.__progress
@classmethod
def FromFile(cls, filename, mime_type=None, auto_transfer=True, **kwds):
"""Create a new Upload object from a filename."""
path = os.path.expanduser(filename)
if not os.path.exists(path):
raise exceptions.NotFoundError('Could not find file %s' % path)
if not mime_type:
mime_type, _ = mimetypes.guess_type(path)
if mime_type is None:
raise exceptions.InvalidUserInputError(
'Could not guess mime type for %s' % path)
size = os.stat(path).st_size
return cls(open(path, 'rb'), mime_type, total_size=size,
close_stream=True, auto_transfer=auto_transfer, **kwds)
@classmethod
def FromStream(cls, stream, mime_type, total_size=None, auto_transfer=True,
**kwds):
"""Create a new Upload object from a stream."""
if mime_type is None:
raise exceptions.InvalidUserInputError(
'No mime_type specified for stream')
return cls(stream, mime_type, total_size=total_size,
close_stream=False, auto_transfer=auto_transfer, **kwds)
@classmethod
def FromData(cls, stream, json_data, http, auto_transfer=None, **kwds):
"""Create a new Upload of stream from serialized json_data and http."""
info = json.loads(json_data)
missing_keys = cls._REQUIRED_SERIALIZATION_KEYS - set(info.keys())
if missing_keys:
raise exceptions.InvalidDataError(
'Invalid serialization data, missing keys: %s' % (
', '.join(missing_keys)))
if 'total_size' in kwds:
raise exceptions.InvalidUserInputError(
'Cannot override total_size on serialized Upload')
upload = cls.FromStream(stream, info['mime_type'],
total_size=info.get('total_size'), **kwds)
if isinstance(stream, io.IOBase) and not stream.seekable():
raise exceptions.InvalidUserInputError(
'Cannot restart resumable upload on non-seekable stream')
if auto_transfer is not None:
upload.auto_transfer = auto_transfer
else:
upload.auto_transfer = info['auto_transfer']
upload.strategy = RESUMABLE_UPLOAD
upload._Initialize( # pylint: disable=protected-access
http, info['url'])
upload.RefreshResumableUploadState()
upload.EnsureInitialized()
if upload.auto_transfer:
upload.StreamInChunks()
return upload
@property
def serialization_data(self):
self.EnsureInitialized()
if self.strategy != RESUMABLE_UPLOAD:
raise exceptions.InvalidDataError(
'Serialization only supported for resumable uploads')
return {
'auto_transfer': self.auto_transfer,
'mime_type': self.mime_type,
'total_size': self.total_size,
'url': self.url,
}
@property
def complete(self):
return self.__complete
@property
def mime_type(self):
return self.__mime_type
def __str__(self):
if not self.initialized:
return 'Upload (uninitialized)'
else:
return 'Upload with %d/%s bytes transferred for url %s' % (
self.progress, self.total_size or '???', self.url)
@property
def strategy(self):
return self.__strategy
@strategy.setter
def strategy(self, value):
if value not in (SIMPLE_UPLOAD, RESUMABLE_UPLOAD):
raise exceptions.UserError((
'Invalid value "%s" for upload strategy, must be one of '
'"simple" or "resumable".') % value)
self.__strategy = value
@property
def total_size(self):
return self.__total_size
@total_size.setter
def total_size(self, value):
self.EnsureUninitialized()
self.__total_size = value
def __SetDefaultUploadStrategy(self, upload_config, http_request):
"""Determine and set the default upload strategy for this upload.
We generally prefer simple or multipart, unless we're forced to
use resumable. This happens when any of (1) the upload is too
large, (2) the simple endpoint doesn't support multipart requests
and we have metadata, or (3) there is no simple upload endpoint.
Args:
upload_config: Configuration for the upload endpoint.
http_request: The associated http request.
Returns:
None.
"""
if upload_config.resumable_path is None:
self.strategy = SIMPLE_UPLOAD
if self.strategy is not None:
return
strategy = SIMPLE_UPLOAD
if (self.total_size is not None and
self.total_size > _RESUMABLE_UPLOAD_THRESHOLD):
strategy = RESUMABLE_UPLOAD
if http_request.body and not upload_config.simple_multipart:
strategy = RESUMABLE_UPLOAD
if not upload_config.simple_path:
strategy = RESUMABLE_UPLOAD
self.strategy = strategy
def ConfigureRequest(self, upload_config, http_request, url_builder):
"""Configure the request and url for this upload."""
# Validate total_size vs. max_size
if (self.total_size and upload_config.max_size and
self.total_size > upload_config.max_size):
raise exceptions.InvalidUserInputError(
'Upload too big: %s larger than max size %s' % (
self.total_size, upload_config.max_size))
# Validate mime type
if not util.AcceptableMimeType(upload_config.accept, self.mime_type):
raise exceptions.InvalidUserInputError(
'MIME type %s does not match any accepted MIME ranges %s' % (
self.mime_type, upload_config.accept))
self.__SetDefaultUploadStrategy(upload_config, http_request)
if self.strategy == SIMPLE_UPLOAD:
url_builder.relative_path = upload_config.simple_path
if http_request.body:
url_builder.query_params['uploadType'] = 'multipart'
self.__ConfigureMultipartRequest(http_request)
else:
url_builder.query_params['uploadType'] = 'media'
self.__ConfigureMediaRequest(http_request)
else:
url_builder.relative_path = upload_config.resumable_path
url_builder.query_params['uploadType'] = 'resumable'
self.__ConfigureResumableRequest(http_request)
def __ConfigureMediaRequest(self, http_request):
"""Configure http_request as a simple request for this upload."""
http_request.headers['content-type'] = self.mime_type
http_request.body = self.stream.read()
http_request.loggable_body = '<media body>'
def __ConfigureMultipartRequest(self, http_request):
"""Configure http_request as a multipart request for this upload."""
# This is a multipart/related upload.
msg_root = mime_multipart.MIMEMultipart('related')
# msg_root should not write out its own headers
setattr(msg_root, '_write_headers', lambda self: None)
# attach the body as one part
msg = mime_nonmultipart.MIMENonMultipart(
*http_request.headers['content-type'].split('/'))
msg.set_payload(http_request.body)
msg_root.attach(msg)
# attach the media as the second part
msg = mime_nonmultipart.MIMENonMultipart(*self.mime_type.split('/'))
msg['Content-Transfer-Encoding'] = 'binary'
msg.set_payload(self.stream.read())
msg_root.attach(msg)
# NOTE: We encode the body, but can't use
# `email.message.Message.as_string` because it prepends
# `> ` to `From ` lines.
# NOTE: We must use six.StringIO() instead of io.StringIO() since the
# `email` library uses cStringIO in Py2 and io.StringIO in Py3.
fp = six.StringIO()
g = email_generator.Generator(fp, mangle_from_=False)
g.flatten(msg_root, unixfrom=False)
http_request.body = fp.getvalue()
multipart_boundary = msg_root.get_boundary()
http_request.headers['content-type'] = (
'multipart/related; boundary=%r' % multipart_boundary)
body_components = http_request.body.split(multipart_boundary)
headers, _, _ = body_components[-2].partition('\n\n')
body_components[-2] = '\n\n'.join([headers, '<media body>\n\n--'])
http_request.loggable_body = multipart_boundary.join(body_components)
def __ConfigureResumableRequest(self, http_request):
http_request.headers['X-Upload-Content-Type'] = self.mime_type
if self.total_size is not None:
http_request.headers[
'X-Upload-Content-Length'] = str(self.total_size)
def RefreshResumableUploadState(self):
"""Talk to the server and refresh the state of this resumable upload.
Returns:
Response if the upload is complete.
"""
if self.strategy != RESUMABLE_UPLOAD:
return
self.EnsureInitialized()
refresh_request = http_wrapper.Request(
url=self.url, http_method='PUT',
headers={'Content-Range': 'bytes */*'})
refresh_response = http_wrapper.MakeRequest(
self.http, refresh_request, redirections=0,
retries=self.num_retries)
range_header = self._GetRangeHeaderFromResponse(refresh_response)
if refresh_response.status_code in (http_client.OK,
http_client.CREATED):
self.__complete = True
self.__progress = self.total_size
self.stream.seek(self.progress)
# If we're finished, the refresh response will contain the metadata
# originally requested. Cache it so it can be returned in
# StreamInChunks.
self.__final_response = refresh_response
elif refresh_response.status_code == http_wrapper.RESUME_INCOMPLETE:
if range_header is None:
self.__progress = 0
else:
self.__progress = self.__GetLastByte(range_header) + 1
self.stream.seek(self.progress)
else:
raise exceptions.HttpError.FromResponse(refresh_response)
def _GetRangeHeaderFromResponse(self, response):
return response.info.get('Range', response.info.get('range'))
def InitializeUpload(self, http_request, http=None, client=None):
"""Initialize this upload from the given http_request."""
if self.strategy is None:
raise exceptions.UserError(
'No upload strategy set; did you call ConfigureRequest?')
if http is None and client is None:
raise exceptions.UserError('Must provide client or http.')
if self.strategy != RESUMABLE_UPLOAD:
return
http = http or client.http
if client is not None:
http_request.url = client.FinalizeTransferUrl(http_request.url)
self.EnsureUninitialized()
http_response = http_wrapper.MakeRequest(http, http_request,
retries=self.num_retries)
if http_response.status_code != http_client.OK:
raise exceptions.HttpError.FromResponse(http_response)
self.__server_chunk_granularity = http_response.info.get(
'X-Goog-Upload-Chunk-Granularity')
url = http_response.info['location']
if client is not None:
url = client.FinalizeTransferUrl(url)
self._Initialize(http, url)
# Unless the user has requested otherwise, we want to just
# go ahead and pump the bytes now.
if self.auto_transfer:
return self.StreamInChunks()
else:
return http_response
def __GetLastByte(self, range_header):
_, _, end = range_header.partition('-')
# TODO(craigcitro): Validate start == 0?
return int(end)
def __ValidateChunksize(self, chunksize=None):
if self.__server_chunk_granularity is None:
return
chunksize = chunksize or self.chunksize
if chunksize % self.__server_chunk_granularity:
raise exceptions.ConfigurationValueError(
'Server requires chunksize to be a multiple of %d',
self.__server_chunk_granularity)
def __StreamMedia(self, callback=None, finish_callback=None,
additional_headers=None, use_chunks=True):
"""Helper function for StreamMedia / StreamInChunks."""
if self.strategy != RESUMABLE_UPLOAD:
raise exceptions.InvalidUserInputError(
'Cannot stream non-resumable upload')
callback = callback or self.progress_callback
finish_callback = finish_callback or self.finish_callback
# final_response is set if we resumed an already-completed upload.
response = self.__final_response
send_func = self.__SendChunk if use_chunks else self.__SendMediaBody
if use_chunks:
self.__ValidateChunksize(self.chunksize)
self.EnsureInitialized()
while not self.complete:
response = send_func(self.stream.tell(),
additional_headers=additional_headers)
if response.status_code in (http_client.OK, http_client.CREATED):
self.__complete = True
break
self.__progress = self.__GetLastByte(response.info['range'])
if self.progress + 1 != self.stream.tell():
# TODO(craigcitro): Add a better way to recover here.
raise exceptions.CommunicationError(
'Failed to transfer all bytes in chunk, upload paused at '
'byte %d' % self.progress)
self._ExecuteCallback(callback, response)
if self.__complete and hasattr(self.stream, 'seek'):
current_pos = self.stream.tell()
self.stream.seek(0, os.SEEK_END)
end_pos = self.stream.tell()
self.stream.seek(current_pos)
if current_pos != end_pos:
raise exceptions.TransferInvalidError(
'Upload complete with %s additional bytes left in stream' %
(int(end_pos) - int(current_pos)))
self._ExecuteCallback(finish_callback, response)
return response
def StreamMedia(self, callback=None, finish_callback=None,
additional_headers=None):
"""Send this resumable upload in a single request.
Args:
callback: Progress callback function with inputs
(http_wrapper.Response, transfer.Upload)
finish_callback: Final callback function with inputs
(http_wrapper.Response, transfer.Upload)
additional_headers: Dict of headers to include with the upload
http_wrapper.Request.
Returns:
http_wrapper.Response of final response.
"""
return self.__StreamMedia(
callback=callback, finish_callback=finish_callback,
additional_headers=additional_headers, use_chunks=False)
def StreamInChunks(self, callback=None, finish_callback=None,
additional_headers=None):
"""Send this (resumable) upload in chunks."""
return self.__StreamMedia(
callback=callback, finish_callback=finish_callback,
additional_headers=additional_headers)
def __SendMediaRequest(self, request, end):
"""Request helper function for SendMediaBody & SendChunk."""
response = http_wrapper.MakeRequest(
self.bytes_http, request, retry_func=self.retry_func,
retries=self.num_retries)
if response.status_code not in (http_client.OK, http_client.CREATED,
http_wrapper.RESUME_INCOMPLETE):
# We want to reset our state to wherever the server left us
# before this failed request, and then raise.
self.RefreshResumableUploadState()
raise exceptions.HttpError.FromResponse(response)
if response.status_code == http_wrapper.RESUME_INCOMPLETE:
last_byte = self.__GetLastByte(
self._GetRangeHeaderFromResponse(response))
if last_byte + 1 != end:
self.stream.seek(last_byte)
return response
def __SendMediaBody(self, start, additional_headers=None):
"""Send the entire media stream in a single request."""
self.EnsureInitialized()
if self.total_size is None:
raise exceptions.TransferInvalidError(
'Total size must be known for SendMediaBody')
body_stream = stream_slice.StreamSlice(
self.stream, self.total_size - start)
request = http_wrapper.Request(url=self.url, http_method='PUT',
body=body_stream)
request.headers['Content-Type'] = self.mime_type
if start == self.total_size:
# End of an upload with 0 bytes left to send; just finalize.
range_string = 'bytes */%s' % self.total_size
else:
range_string = 'bytes %s-%s/%s' % (start, self.total_size - 1,
self.total_size)
request.headers['Content-Range'] = range_string
if additional_headers:
request.headers.update(additional_headers)
return self.__SendMediaRequest(request, self.total_size)
def __SendChunk(self, start, additional_headers=None):
"""Send the specified chunk."""
self.EnsureInitialized()
no_log_body = self.total_size is None
if self.total_size is None:
# For the streaming resumable case, we need to detect when
# we're at the end of the stream.
body_stream = buffered_stream.BufferedStream(
self.stream, start, self.chunksize)
end = body_stream.stream_end_position
if body_stream.stream_exhausted:
self.__total_size = end
# TODO: Here, change body_stream from a stream to a string object,
# which means reading a chunk into memory. This works around
# https://code.google.com/p/httplib2/issues/detail?id=176 which can
# cause httplib2 to skip bytes on 401's for file objects.
# Rework this solution to be more general.
body_stream = body_stream.read(self.chunksize)
else:
end = min(start + self.chunksize, self.total_size)
body_stream = stream_slice.StreamSlice(self.stream, end - start)
# TODO(craigcitro): Think about clearer errors on "no data in
# stream".
request = http_wrapper.Request(url=self.url, http_method='PUT',
body=body_stream)
request.headers['Content-Type'] = self.mime_type
if no_log_body:
# Disable logging of streaming body.
# TODO: Remove no_log_body and rework as part of a larger logs
# refactor.
request.loggable_body = '<media body>'
if self.total_size is None:
# Streaming resumable upload case, unknown total size.
range_string = 'bytes %s-%s/*' % (start, end - 1)
elif end == start:
# End of an upload with 0 bytes left to send; just finalize.
range_string = 'bytes */%s' % self.total_size
else:
# Normal resumable upload case with known sizes.
range_string = 'bytes %s-%s/%s' % (start, end - 1, self.total_size)
request.headers['Content-Range'] = range_string
if additional_headers:
request.headers.update(additional_headers)
return self.__SendMediaRequest(request, end)
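# Hedged usage sketch (illustrative only). Assumes ``http_request`` was
# already configured for the resumable endpoint (e.g. via ConfigureRequest)
# and that ``client`` is an apitools service client; the file path and MIME
# type are placeholders. With auto_transfer=False, InitializeUpload only
# performs the initiation request, so the bytes are pumped explicitly below.
def _example_resumable_upload(http_request, client):
    upload = Upload.FromFile('/tmp/payload.bin',
                             mime_type='application/octet-stream',
                             auto_transfer=False)
    upload.strategy = RESUMABLE_UPLOAD
    upload.InitializeUpload(http_request, client=client)
    return upload.StreamInChunks(callback=UploadProgressPrinter,
                                 finish_callback=UploadCompletePrinter)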
|
|
"""Mean shift clustering algorithm.
Mean shift clustering aims to discover *blobs* in a smooth density of
samples. It is a centroid based algorithm, which works by updating candidates
for centroids to be the mean of the points within a given region. These
candidates are then filtered in a post-processing stage to eliminate
near-duplicates to form the final set of centroids.
Seeding is performed using a binning technique for scalability.
"""
# Authors: Conrad Lee <conradlee@gmail.com>
# Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Gael Varoquaux <gael.varoquaux@normalesup.org>
# Martino Sorbaro <martino.sorbaro@ed.ac.uk>
import numpy as np
import warnings
from collections import defaultdict
from ..externals import six
from ..utils.validation import check_is_fitted
from ..utils import extmath, check_random_state, gen_batches, check_array
from ..base import BaseEstimator, ClusterMixin
from ..neighbors import NearestNeighbors
from ..metrics.pairwise import pairwise_distances_argmin
from ..externals.joblib import Parallel
from ..externals.joblib import delayed
def estimate_bandwidth(X, quantile=0.3, n_samples=None, random_state=0,
n_jobs=1):
"""Estimate the bandwidth to use with the mean-shift algorithm.
    Note that this function takes time at least quadratic in n_samples. For
    large datasets, it's wise to set the n_samples parameter to a small value.
Parameters
----------
X : array-like, shape=[n_samples, n_features]
Input points.
quantile : float, default 0.3
should be between [0, 1]
0.5 means that the median of all pairwise distances is used.
n_samples : int, optional
The number of samples to use. If not given, all samples are used.
    random_state : int, RandomState instance or None, optional (default=0)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
n_jobs : int, optional (default = 1)
The number of parallel jobs to run for neighbors search.
If ``-1``, then the number of jobs is set to the number of CPU cores.
Returns
-------
bandwidth : float
The bandwidth parameter.
"""
random_state = check_random_state(random_state)
if n_samples is not None:
idx = random_state.permutation(X.shape[0])[:n_samples]
X = X[idx]
nbrs = NearestNeighbors(n_neighbors=int(X.shape[0] * quantile),
n_jobs=n_jobs)
nbrs.fit(X)
bandwidth = 0.
for batch in gen_batches(len(X), 500):
d, _ = nbrs.kneighbors(X[batch, :], return_distance=True)
bandwidth += np.max(d, axis=1).sum()
return bandwidth / X.shape[0]
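# Hedged usage sketch (not part of this module); the make_blobs import path
# is an assumption about the surrounding package.
def _example_estimate_bandwidth():
    from sklearn.datasets import make_blobs
    X, _ = make_blobs(n_samples=500, centers=3, random_state=0)
    # Subsampling keeps the (at least) quadratic neighbor search affordable.
    return estimate_bandwidth(X, quantile=0.2, n_samples=200)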
# separate function for each seed's iterative loop
def _mean_shift_single_seed(my_mean, X, nbrs, max_iter):
# For each seed, climb gradient until convergence or max_iter
bandwidth = nbrs.get_params()['radius']
stop_thresh = 1e-3 * bandwidth # when mean has converged
completed_iterations = 0
while True:
# Find mean of points within bandwidth
i_nbrs = nbrs.radius_neighbors([my_mean], bandwidth,
return_distance=False)[0]
points_within = X[i_nbrs]
if len(points_within) == 0:
break # Depending on seeding strategy this condition may occur
my_old_mean = my_mean # save the old mean
my_mean = np.mean(points_within, axis=0)
# If converged or at max_iter, adds the cluster
if (extmath.norm(my_mean - my_old_mean) < stop_thresh or
completed_iterations == max_iter):
return tuple(my_mean), len(points_within)
completed_iterations += 1
def mean_shift(X, bandwidth=None, seeds=None, bin_seeding=False,
min_bin_freq=1, cluster_all=True, max_iter=300,
n_jobs=1):
"""Perform mean shift clustering of data using a flat kernel.
Read more in the :ref:`User Guide <mean_shift>`.
Parameters
----------
X : array-like, shape=[n_samples, n_features]
Input data.
bandwidth : float, optional
Kernel bandwidth.
If bandwidth is not given, it is determined using a heuristic based on
the median of all pairwise distances. This will take quadratic time in
the number of samples. The sklearn.cluster.estimate_bandwidth function
can be used to do this more efficiently.
seeds : array-like, shape=[n_seeds, n_features] or None
        Points used as initial kernel locations. If None and bin_seeding=False,
each data point is used as a seed. If None and bin_seeding=True,
see bin_seeding.
bin_seeding : boolean, default=False
If true, initial kernel locations are not locations of all
points, but rather the location of the discretized version of
points, where points are binned onto a grid whose coarseness
corresponds to the bandwidth. Setting this option to True will speed
up the algorithm because fewer seeds will be initialized.
Ignored if seeds argument is not None.
min_bin_freq : int, default=1
To speed up the algorithm, accept only those bins with at least
min_bin_freq points as seeds.
cluster_all : boolean, default True
If true, then all points are clustered, even those orphans that are
not within any kernel. Orphans are assigned to the nearest kernel.
If false, then orphans are given cluster label -1.
max_iter : int, default 300
        Maximum number of iterations, per seed point, before the clustering
        operation terminates (for that seed point) if it has not converged yet.
n_jobs : int
The number of jobs to use for the computation. This works by computing
each of the n_init runs in parallel.
If -1 all CPUs are used. If 1 is given, no parallel computing code is
used at all, which is useful for debugging. For n_jobs below -1,
(n_cpus + 1 + n_jobs) are used. Thus for n_jobs = -2, all CPUs but one
are used.
.. versionadded:: 0.17
Parallel Execution using *n_jobs*.
Returns
-------
cluster_centers : array, shape=[n_clusters, n_features]
Coordinates of cluster centers.
labels : array, shape=[n_samples]
Cluster labels for each point.
Notes
-----
See examples/cluster/plot_mean_shift.py for an example.
"""
if bandwidth is None:
bandwidth = estimate_bandwidth(X, n_jobs=n_jobs)
elif bandwidth <= 0:
raise ValueError("bandwidth needs to be greater than zero or None,\
got %f" % bandwidth)
if seeds is None:
if bin_seeding:
seeds = get_bin_seeds(X, bandwidth, min_bin_freq)
else:
seeds = X
n_samples, n_features = X.shape
center_intensity_dict = {}
nbrs = NearestNeighbors(radius=bandwidth, n_jobs=n_jobs).fit(X)
# execute iterations on all seeds in parallel
all_res = Parallel(n_jobs=n_jobs)(
delayed(_mean_shift_single_seed)
(seed, X, nbrs, max_iter) for seed in seeds)
# copy results in a dictionary
for i in range(len(seeds)):
if all_res[i] is not None:
center_intensity_dict[all_res[i][0]] = all_res[i][1]
if not center_intensity_dict:
# nothing near seeds
raise ValueError("No point was within bandwidth=%f of any seed."
" Try a different seeding strategy \
or increase the bandwidth."
% bandwidth)
# POST PROCESSING: remove near duplicate points
# If the distance between two kernels is less than the bandwidth,
# then we have to remove one because it is a duplicate. Remove the
# one with fewer points.
sorted_by_intensity = sorted(center_intensity_dict.items(),
key=lambda tup: tup[1], reverse=True)
sorted_centers = np.array([tup[0] for tup in sorted_by_intensity])
unique = np.ones(len(sorted_centers), dtype=np.bool)
nbrs = NearestNeighbors(radius=bandwidth,
n_jobs=n_jobs).fit(sorted_centers)
for i, center in enumerate(sorted_centers):
if unique[i]:
neighbor_idxs = nbrs.radius_neighbors([center],
return_distance=False)[0]
unique[neighbor_idxs] = 0
unique[i] = 1 # leave the current point as unique
cluster_centers = sorted_centers[unique]
# ASSIGN LABELS: a point belongs to the cluster that it is closest to
nbrs = NearestNeighbors(n_neighbors=1, n_jobs=n_jobs).fit(cluster_centers)
labels = np.zeros(n_samples, dtype=np.int)
distances, idxs = nbrs.kneighbors(X)
if cluster_all:
labels = idxs.flatten()
else:
labels.fill(-1)
bool_selector = distances.flatten() <= bandwidth
labels[bool_selector] = idxs.flatten()[bool_selector]
return cluster_centers, labels
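# Hedged usage sketch: bin_seeding=True discretizes seed locations onto a
# bandwidth-sized grid, so far fewer seeds are iterated (the make_blobs
# import is an assumption, as above).
def _example_mean_shift():
    from sklearn.datasets import make_blobs
    X, _ = make_blobs(n_samples=300, centers=3, cluster_std=0.5, random_state=0)
    cluster_centers, labels = mean_shift(X, bandwidth=2.0, bin_seeding=True)
    return cluster_centers, labels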
def get_bin_seeds(X, bin_size, min_bin_freq=1):
"""Finds seeds for mean_shift.
Finds seeds by first binning data onto a grid whose lines are
spaced bin_size apart, and then choosing those bins with at least
min_bin_freq points.
Parameters
----------
X : array-like, shape=[n_samples, n_features]
Input points, the same points that will be used in mean_shift.
bin_size : float
Controls the coarseness of the binning. Smaller values lead
to more seeding (which is computationally more expensive). If you're
not sure how to set this, set it to the value of the bandwidth used
in clustering.mean_shift.
min_bin_freq : integer, optional
Only bins with at least min_bin_freq will be selected as seeds.
Raising this value decreases the number of seeds found, which
makes mean_shift computationally cheaper.
Returns
-------
bin_seeds : array-like, shape=[n_samples, n_features]
Points used as initial kernel positions in clustering.mean_shift.
"""
# Bin points
bin_sizes = defaultdict(int)
for point in X:
binned_point = np.round(point / bin_size)
bin_sizes[tuple(binned_point)] += 1
# Select only those bins as seeds which have enough members
bin_seeds = np.array([point for point, freq in six.iteritems(bin_sizes) if
freq >= min_bin_freq], dtype=np.float32)
if len(bin_seeds) == len(X):
warnings.warn("Binning data failed with provided bin_size=%f,"
" using data points as seeds." % bin_size)
return X
bin_seeds = bin_seeds * bin_size
return bin_seeds
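# Tiny worked example: with bin_size=1.0 the first two points round to the
# same grid cell (1, 1), so that is the only bin meeting min_bin_freq=2.
def _example_get_bin_seeds():
    X = np.array([[1.1, 1.1], [1.2, 0.9], [5.0, 5.0]])
    return get_bin_seeds(X, bin_size=1.0, min_bin_freq=2)  # -> [[1., 1.]]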
class MeanShift(BaseEstimator, ClusterMixin):
"""Mean shift clustering using a flat kernel.
Mean shift clustering aims to discover "blobs" in a smooth density of
samples. It is a centroid-based algorithm, which works by updating
candidates for centroids to be the mean of the points within a given
region. These candidates are then filtered in a post-processing stage to
eliminate near-duplicates to form the final set of centroids.
Seeding is performed using a binning technique for scalability.
Read more in the :ref:`User Guide <mean_shift>`.
Parameters
----------
bandwidth : float, optional
Bandwidth used in the RBF kernel.
If not given, the bandwidth is estimated using
sklearn.cluster.estimate_bandwidth; see the documentation for that
function for hints on scalability (see also the Notes, below).
seeds : array, shape=[n_samples, n_features], optional
Seeds used to initialize kernels. If not set,
the seeds are calculated by clustering.get_bin_seeds
with bandwidth as the grid size and default values for
other parameters.
bin_seeding : boolean, optional
If true, initial kernel locations are not locations of all
points, but rather the location of the discretized version of
points, where points are binned onto a grid whose coarseness
corresponds to the bandwidth. Setting this option to True will speed
up the algorithm because fewer seeds will be initialized.
default value: False
Ignored if seeds argument is not None.
min_bin_freq : int, optional
To speed up the algorithm, accept only those bins with at least
min_bin_freq points as seeds. If not defined, set to 1.
cluster_all : boolean, default True
If true, then all points are clustered, even those orphans that are
not within any kernel. Orphans are assigned to the nearest kernel.
If false, then orphans are given cluster label -1.
n_jobs : int
The number of jobs to use for the computation. This works by computing
each of the n_init runs in parallel.
If -1 all CPUs are used. If 1 is given, no parallel computing code is
used at all, which is useful for debugging. For n_jobs below -1,
(n_cpus + 1 + n_jobs) are used. Thus for n_jobs = -2, all CPUs but one
are used.
Attributes
----------
cluster_centers_ : array, [n_clusters, n_features]
Coordinates of cluster centers.
labels_ :
Labels of each point.
Notes
-----
Scalability:
Because this implementation uses a flat kernel and
a Ball Tree to look up members of each kernel, the complexity will tend
towards O(T*n*log(n)) in lower dimensions, with n the number of samples
and T the number of points. In higher dimensions the complexity will
tend towards O(T*n^2).
Scalability can be boosted by using fewer seeds, for example by using
a higher value of min_bin_freq in the get_bin_seeds function.
Note that the estimate_bandwidth function is much less scalable than the
mean shift algorithm and will be the bottleneck if it is used.
References
----------
Dorin Comaniciu and Peter Meer, "Mean Shift: A robust approach toward
feature space analysis". IEEE Transactions on Pattern Analysis and
Machine Intelligence. 2002. pp. 603-619.
"""
def __init__(self, bandwidth=None, seeds=None, bin_seeding=False,
min_bin_freq=1, cluster_all=True, n_jobs=1):
self.bandwidth = bandwidth
self.seeds = seeds
self.bin_seeding = bin_seeding
self.cluster_all = cluster_all
self.min_bin_freq = min_bin_freq
self.n_jobs = n_jobs
def fit(self, X, y=None):
"""Perform clustering.
Parameters
-----------
X : array-like, shape=[n_samples, n_features]
Samples to cluster.
"""
X = check_array(X)
self.cluster_centers_, self.labels_ = \
mean_shift(X, bandwidth=self.bandwidth, seeds=self.seeds,
min_bin_freq=self.min_bin_freq,
bin_seeding=self.bin_seeding,
cluster_all=self.cluster_all, n_jobs=self.n_jobs)
return self
def predict(self, X):
"""Predict the closest cluster each sample in X belongs to.
Parameters
----------
X : {array-like, sparse matrix}, shape=[n_samples, n_features]
New data to predict.
Returns
-------
labels : array, shape [n_samples,]
Index of the cluster each sample belongs to.
"""
check_is_fitted(self, "cluster_centers_")
return pairwise_distances_argmin(X, self.cluster_centers_)
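# Hedged end-to-end sketch of the estimator API (make_blobs import assumed,
# as in the examples above); bandwidth is estimated automatically here.
def _example_mean_shift_estimator():
    from sklearn.datasets import make_blobs
    X, _ = make_blobs(n_samples=300, centers=3, cluster_std=0.6, random_state=0)
    ms = MeanShift(bin_seeding=True).fit(X)
    return ms.cluster_centers_, ms.predict(X[:5])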
|
|
"""
## Hypothesis Testing Stuff
### Standard Stuff
#### Standard Headers
"""
from __future__ import division
import sys, random, math
from pdb import set_trace
sys.dont_write_bytecode = True
"""
#### Standard Utils
"""
class o():
"Anonymous container"
def __init__(i, **fields):
i.override(fields)
def override(i, d): i.__dict__.update(d); return i
def __repr__(i):
d = i.__dict__
name = i.__class__.__name__
return name + '{' + ' '.join([':%s %s' % (k, pretty(d[k]))
for k in i.show()]) + '}'
def show(i):
return [k for k in sorted(i.__dict__.keys())
if not "_" in k]
"""
Misc functions:
"""
rand = random.random
any = random.choice
seed = random.seed
exp = lambda n: math.e ** n
ln = lambda n: math.log(n, math.e)
g = lambda n: round(n, 2)
def pretty(x):
    "Two-decimal formatting for floats; o.__repr__ above needs this helper."
    return '%.2f' % x if isinstance(x, float) else '%s' % x
def median(lst, ordered=False):
if not ordered: lst = sorted(lst)
n = len(lst)
p = n // 2
if n % 2: return lst[p]
q = p - 1
q = max(0, min(q, n))
return (lst[p] + lst[q]) / 2
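# Quick check, by hand: median([3, 1, 2]) -> 2 and median([4, 1, 3, 2]) -> 2.5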
def msecs(f):
import time
t1 = time.time()
f()
return (time.time() - t1) * 1000
def pairs(lst):
"Return all pairs of items i,i+1 from a list."
last = lst[0]
for i in lst[1:]:
yield last, i
last = i
def xtile(lst, lo=0, hi=100, width=50,
chops=[0.3, 0.5, 0.7],
marks=["-", "-", "-"],
bar="", star="*", show=" %3.0f"):
"""The function _xtile_ takes a list of (possibly)
unsorted numbers and presents them as a horizontal
xtile chart (in ascii format). The default is a
    contracted view that shows the
    30,50,70 breaks in the data (but this can be
    changed - see the optional flags of the function).
"""
def pos(p):
return ordered[int(len(lst) * p)]
def place(x):
return int(width * float((x - lo)) / (hi - lo + 0.00001))
def pretty(lst):
return ', '.join([show % x for x in lst])
ordered = sorted(lst)
lo = min(lo, ordered[0])
hi = max(hi, ordered[-1])
what = [pos(p) for p in chops]
where = [place(n) for n in what]
out = [" "] * width
for one, two in pairs(where):
for i in range(one, two):
out[i] = marks[0]
marks = marks[1:]
out[int(width / 2)] = bar
out[place(pos(0.5))] = star
return '|' + ''.join(out) + "|," + pretty(what)
def _tileX():
import random
random.seed(1)
nums = [random.random() ** 2 for _ in range(100)]
print xtile(nums, lo=0, hi=1.0, width=25, show=" %5.2f")
"""
### Standard Accumulator for Numbers
Note the _lt_ method: this accumulator can be sorted by median values.
Warning: this accumulator keeps _all_ numbers. Might be better to use
a bounded cache.
"""
class Num:
"An Accumulator for numbers"
def __init__(i, name, inits=[]):
i.n = i.m2 = i.mu = 0.0
i.all = []
i._median = None
i.name = name
i.rank = 0
for x in inits: i.add(x)
def s(i):
return (i.m2 / (i.n - 1)) ** 0.5
def add(i, x):
i._median = None
i.n += 1
i.all += [x]
delta = x - i.mu
i.mu += delta * 1.0 / i.n
i.m2 += delta * (x - i.mu)
def __add__(i, j):
return Num(i.name + j.name, i.all + j.all)
def quartiles(i):
def p(x): return g(xs[x])
i.median()
xs = i.all
n = int(len(xs) * 0.25)
return p(n), p(2 * n), p(3 * n)
def median(i):
if not i._median:
i.all = sorted(i.all)
i._median = median(i.all)
return i._median
def __lt__(i, j):
return i.median() < j.median()
def spread(i):
i.all = sorted(i.all)
n1 = i.n * 0.25
n2 = i.n * 0.75
if len(i.all) <= 1:
return 0
if len(i.all) == 2:
return i.all[1] - i.all[0]
else:
return i.all[int(n2)] - i.all[int(n1)]
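"""
A tiny worked example of the accumulator (illustrative only):
"""
def _numDemo():
    n = Num("demo", [1, 2, 3, 4, 100])
    print n.mu, n.median(), n.quartiles()  # -> 22.0 3 (2.0, 3.0, 4.0)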
"""
### The A12 Effect Size Test
"""
def a12slow(lst1, lst2):
"how often is x in lst1 more than y in lst2?"
more = same = 0.0
for x in lst1:
for y in lst2:
if x == y:
same += 1
elif x > y:
more += 1
x = (more + 0.5 * same) / (len(lst1) * len(lst2))
return x
def a12(lst1, lst2):
"how often is x in lst1 more than y in lst2?"
def loop(t, t1, t2):
while t1.j < t1.n and t2.j < t2.n:
h1 = t1.l[t1.j]
h2 = t2.l[t2.j]
h3 = t2.l[t2.j + 1] if t2.j + 1 < t2.n else None
if h1 > h2:
t1.j += 1;
t1.gt += t2.n - t2.j
elif h1 == h2:
if h3 and h1 > h3:
t1.gt += t2.n - t2.j - 1
t1.j += 1;
t1.eq += 1;
t2.eq += 1
else:
t2, t1 = t1, t2
return t.gt * 1.0, t.eq * 1.0
# --------------------------
lst1 = sorted(lst1, reverse=True)
lst2 = sorted(lst2, reverse=True)
n1 = len(lst1)
n2 = len(lst2)
t1 = o(l=lst1, j=0, eq=0, gt=0, n=n1)
t2 = o(l=lst2, j=0, eq=0, gt=0, n=n2)
gt, eq = loop(t1, t1, t2)
return gt / (n1 * n2) + eq / 2 / (n1 * n2)
def _a12():
def f1(): return a12slow(l1, l2)
def f2(): return a12(l1, l2)
for n in [100, 200, 400, 800, 1600, 3200, 6400]:
l1 = [rand() for _ in range(n)]
l2 = [rand() for _ in range(n)]
t1 = msecs(f1)
t2 = msecs(f2)
print n, g(f1()), g(f2()), int((t1 / t2))
"""Output:
````
 n     a12(slow)       a12(fast)     tslow / tfast
--- --------------- -------------- --------------
100 0.53 0.53 4
200 0.48 0.48 6
400 0.49 0.49 28
800 0.5 0.5 26
1600 0.51 0.51 72
3200 0.49 0.49 109
6400 0.5 0.5 244
````
## Non-Parametric Hypothesis Testing
The following _bootstrap_ method was introduced in
1979 by Bradley Efron at Stanford University. It
was inspired by earlier work on the
jackknife.
Improved estimates of the variance were [developed later][efron01].
[efron01]: http://goo.gl/14n8Wf "Bradley Efron and R.J. Tibshirani. An Introduction to the Bootstrap (Chapman & Hall/CRC Monographs on Statistics & Applied Probability), 1993"
To check if two populations _(y0,z0)_
are different, sample with replacement
from both, many times, to generate _(y1,z1), (y2,z2), (y3,z3)_.. etc.
"""
def sampleWithReplacement(lst):
"returns a list same size as list"
def any(n): return random.uniform(0, n)
def one(lst): return lst[int(any(len(lst)))]
return [one(lst) for _ in lst]
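"""
A tiny demo of resampling (the output varies from run to run with the seed):
"""
def _sampleDemo():
    seed(1)
    print sampleWithReplacement([10, 20, 30])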
"""
Then, for all those samples,
check if some *testStatistic* of the original pair
holds for all the other pairs. If it does more than (say) 99%
of the time, then we are 99% confident that the
populations are the same.
In such a _bootstrap_ hypothesis test, the *some property*
is the difference between the two populations, tempered by the
joint standard deviation of the populations.
"""
def testStatistic(y, z):
"""Checks if two means are different, tempered
by the sample size of 'y' and 'z'"""
tmp1 = tmp2 = 0
for y1 in y.all: tmp1 += (y1 - y.mu) ** 2
for z1 in z.all: tmp2 += (z1 - z.mu) ** 2
s1 = (float(tmp1) / (y.n - 1)) ** 0.5
s2 = (float(tmp2) / (z.n - 1)) ** 0.5
delta = z.mu - y.mu
if s1 + s2:
delta = delta / ((s1 / y.n + s2 / z.n) ** 0.5)
return delta
"""
The rest is just details:
+ Efron advises
to make the mean of the populations the same (see
the _yhat,zhat_ stuff shown below).
+ The class _total_ is just a quick and dirty accumulation class.
+ For more details see [the Efron text][efron01].
"""
def bootstrap(y0, z0, conf=0.01, b=1000):
"""The bootstrap hypothesis test from
p220 to 223 of Efron's book 'An
    Introduction to the Bootstrap'."""
class total():
"quick and dirty data.dat collector"
def __init__(i, some=[]):
i.sum = i.n = i.mu = 0;
i.all = []
for one in some: i.put(one)
def put(i, x):
i.all.append(x);
i.sum += x;
i.n += 1;
i.mu = float(i.sum) / i.n
def __add__(i1, i2): return total(i1.all + i2.all)
y, z = total(y0), total(z0)
x = y + z
tobs = testStatistic(y, z)
yhat = [y1 - y.mu + x.mu for y1 in y.all]
zhat = [z1 - z.mu + x.mu for z1 in z.all]
bigger = 0.0
for i in range(b):
if testStatistic(total(sampleWithReplacement(yhat)),
total(sampleWithReplacement(zhat))) > tobs:
bigger += 1
return bigger / b < conf
"""
#### Examples
"""
def _bootstraped():
def worker(n=1000,
mu1=10, sigma1=1,
mu2=10.2, sigma2=1):
def g(mu, sigma): return random.gauss(mu, sigma)
x = [g(mu1, sigma1) for i in range(n)]
y = [g(mu2, sigma2) for i in range(n)]
return n, mu1, sigma1, mu2, sigma2, \
'different' if bootstrap(x, y) else 'same'
# very different means, same std
print worker(mu1=10, sigma1=10,
mu2=100, sigma2=10)
# similar means and std
print worker(mu1=10.1, sigma1=1,
mu2=10.2, sigma2=1)
# slightly different means, same std
print worker(mu1=10.1, sigma1=1,
mu2=10.8, sigma2=1)
    # difference in mu eaten by a large std
print worker(mu1=10.1, sigma1=10,
mu2=10.8, sigma2=1)
"""
Output:
````
_bootstraped()
(1000, 10, 10, 100, 10, 'different')
(1000, 10.1, 1, 10.2, 1, 'same')
(1000, 10.1, 1, 10.8, 1, 'different')
(1000, 10.1, 10, 10.8, 1, 'same')
````
Warning- the above took 8 seconds to generate since we used 1000 bootstraps.
As to how many bootstraps are enough, that depends on the data. There are
results saying 200 to 400 are enough but, since I am a suspicious man, I run it for 1000.
This means that the runtimes associated with bootstrapping are a significant issue.
To reduce that runtime, I avoid things like an all-pairs comparison of all treatments
(see below: Scott-Knott). Also, BEFORE I do the bootstrap, I first run
the effect size test (and only go to bootstrapping if the effect size test passes):
"""
def different(l1, l2):
# return bootstrap(l1,l2) and a12(l2,l1)
return a12(l2, l1) and bootstrap(l1, l2)
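# Note the ordering above: 'and' short-circuits, so the cheap A12 effect-size
# check runs first and the costly bootstrap only runs when a12() returns a
# truthy (non-zero) value. E.g. different([2]*10, [1]*10) stops after
# a12([1]*10, [2]*10) returns 0.0, skipping the bootstrap entirely.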
"""
## Saner Hypothesis Testing
The following code, which you should use verbatim, does the following:
+ All treatments are clustered into _ranks_. In practice, dozens
of treatments end up generating just a handful of ranks.
+ The numbers of calls to the hypothesis tests are minimized:
    + Treatments are sorted by their median value.
    + Treatments are divided into two groups such that the
      expected value of the mean values _after_ the split is minimized;
    + Hypothesis tests are called to test if the two groups are truly different.
+ All hypothesis tests are non-parametric and include (1) effect size tests
and (2) tests for statistically significant numbers;
+ Slow bootstraps are executed only if the faster _A12_ tests are passed;
In practice, this means that the hypothesis tests (with confidence of say, 95%)
are called only a logarithmic number of times. So...
+ With this method, 16 treatments can be studied using less than _&sum;<sub>1,2,4,8,16</sub>log<sub>2</sub>i =15_ hypothesis tests and confidence _0.99<sup>15</sup>=0.86_.
+ But if we did this with the 120 all-pairs comparisons of the 16 treatments, we would have total confidence _0.99<sup>120</sup>=0.30_.
For examples on using this code, see _test_scottknott_ (below).
"""
def scottknott(data, cohen=0.3, small=3, useA12=False, epsilon=0.01):
"""Recursively split data.dat, maximizing delta of
the expected value of the mean before and
after the splits.
Reject splits with under 3 items"""
all = reduce(lambda x, y: x + y, data)
same = lambda l, r: abs(l.median() - r.median()) <= all.s() * cohen
if useA12:
same = lambda l, r: not different(l.all, r.all)
big = lambda n: n > small
return rdiv(data, all, minMu, big, same, epsilon)
def rdiv(data, # a list of class Nums
         all,  # all the data combined into one num
div, # function: find the best split
big, # function: rejects small splits
same, # function: rejects similar splits
epsilon): # small enough to split two parts
"""Looks for ways to split sorted data.dat,
Recurses into each split. Assigns a 'rank' number
to all the leaf splits found in this way.
"""
def recurse(parts, all, rank=0):
"Split, then recurse on each part."
cut, left, right = maybeIgnore(div(parts, all, big, epsilon),
same, parts)
if cut:
# if cut, rank "right" higher than "left"
rank = recurse(parts[:cut], left, rank) + 1
rank = recurse(parts[cut:], right, rank)
else:
# if no cut, then all get same rank
for part in parts:
part.rank = rank
return rank
recurse(sorted(data), all)
return data
def maybeIgnore((cut, left, right), same, parts):
if cut:
if same(sum(parts[:cut], Num('upto')),
sum(parts[cut:], Num('above'))):
cut = left = right = None
return cut, left, right
def minMu(parts, all, big, epsilon):
"""Find a cut in the parts that maximizes
the expected value of the difference in
the mean before and after the cut.
Reject splits that are insignificantly
different or that generate very small subsets.
"""
cut, left, right = None, None, None
before, mu = 0, all.mu
for i, l, r in leftRight(parts, epsilon):
if big(l.n) and big(r.n):
n = all.n * 1.0
now = l.n / n * (mu - l.mu) ** 2 + r.n / n * (mu - r.mu) ** 2
if now > before:
before, cut, left, right = now, i, l, r
return cut, left, right
def leftRight(parts, epsilon=0.01):
"""Iterator. For all items in 'parts',
return everything to the left and everything
from here to the end. For reasons of
    efficiency, take a first pass over the data
to pre-compute and cache right-hand-sides
"""
rights = {}
n = j = len(parts) - 1
while j > 0:
rights[j] = parts[j]
if j < n: rights[j] += rights[j + 1]
j -= 1
left = parts[0]
for i, one in enumerate(parts):
if i > 0:
if parts[i]._median - parts[i - 1]._median > epsilon:
yield i, left, rights[i]
left += one
"""
## Putting it All Together
Driver for the demos:
"""
def sk_chart(data):
def z(x):
return int((x - lo) / (hi - lo + 1e-32))
data = map(lambda lst: Num(lst[0], lst[1:]),
data)
print ""
    ranks = []
for x in scottknott(data, useA12=True):
ranks += [(x.rank, x.median(), x)]
all = []
for _, __, x in sorted(ranks): all += x.all
all = sorted(all)
lo, hi = all[0], all[-1]
line = "----------------------------------------------------"
last = None
print ('%4s , %12s , %s , %4s ' % \
('rank', 'name', 'med', 'iqr')) + "\n" + line
for _, __, x in sorted(ranks):
q1, q2, q3 = x.quartiles()
print ('%4s , %12s , %4s , %4s ' % \
(x.rank + 1, x.name, q2, q3 - q1)) + \
xtile(x.all, lo=lo, hi=hi, width=30, show="%5.2f")
last = x.rank
def sk_latex(data):
def z(x):
return int(80 * (x - lo) / (hi - lo + 0.00001))
data = map(lambda lst: Num(lst[0], lst[1:]),
data)
print ""
ranks = []
for x in scottknott(data, useA12=True):
ranks += [(x.rank, x.median(), x)]
all = []
for _, __, x in sorted(ranks):
all += x.quartiles()
all = sorted(all)
lo, hi = all[0], all[-1]
print r'{\scriptsize \begin{tabular}{l@{~~~}l@{~~~}r@{~~~}r@{~~~}c}'
print r'\arrayrulecolor{lightgray}'
print r'\textbf{Rank} & \textbf{Treatment} & \textbf{Median} & \textbf{IQR} & \\\hline'
last = None
for _, __, x in sorted(ranks):
q1, q2, q3 = x.quartiles()
pre = ""
if not last is None and not last == x.rank:
pre = "\\hline"
print pre, r'%2s & %12s & %s & %s & \quart{%s}{%s}{%s}{%s} \\' % \
(x.rank + 1,
x.name,
float(q2 / 100),
float((q3 - q1) / 100),
z(q1),
z(q3) - z(q1),
z(q2),
z(100))
last = x.rank
print r"\hline \end{tabular}}"
return ranks
def _test_scottknott():
a = ["a", 1, 2, 3, 4, 5, 4, 3, 2, 4, 2, 1, 2]
b = ["b", 3, 4, 5, 4, 3, 4, 3, 2, 6, 7, 5, 6]
c = [a, b]
sk_chart(c)
if __name__ == "__main__":
_test_scottknott()
|
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import sys
import eventlet
eventlet.monkey_patch()
import netaddr
from oslo.config import cfg
from neutron.agent.common import config
from neutron.agent.linux import dhcp
from neutron.agent.linux import external_process
from neutron.agent.linux import interface
from neutron.agent.linux import ovs_lib # noqa
from neutron.agent import rpc as agent_rpc
from neutron.api.rpc.agentnotifiers import helo_rpc_agent_api
from neutron.common import config as common_config
from neutron.common import constants
from neutron.common import exceptions
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron.common import utils
from neutron import context
from neutron import manager
from neutron.openstack.common import importutils
from neutron.openstack.common import log as logging
from neutron.openstack.common import loopingcall
from neutron.openstack.common import service
from neutron import service as neutron_service
LOG = logging.getLogger(__name__)
class DhcpAgent(manager.Manager, helo_rpc_agent_api.HeloRpcCallbackMixin):
OPTS = [
cfg.IntOpt('resync_interval', default=5,
help=_("Interval to resync.")),
cfg.StrOpt('dhcp_driver',
default='neutron.agent.linux.dhcp.Dnsmasq',
help=_("The driver used to manage the DHCP server.")),
cfg.BoolOpt('enable_isolated_metadata', default=False,
help=_("Support Metadata requests on isolated networks.")),
cfg.BoolOpt('enable_metadata_network', default=False,
help=_("Allows for serving metadata requests from a "
"dedicated network. Requires "
"enable_isolated_metadata = True")),
cfg.IntOpt('num_sync_threads', default=4,
help=_('Number of threads to use during sync process.')),
cfg.StrOpt('metadata_proxy_socket',
default='$state_path/metadata_proxy',
help=_('Location of Metadata Proxy UNIX domain '
'socket')),
]
def __init__(self, host=None):
super(DhcpAgent, self).__init__(host=host)
self.needs_resync_reasons = []
self.conf = cfg.CONF
self.cache = NetworkCache()
self.root_helper = config.get_root_helper(self.conf)
self.dhcp_driver_cls = importutils.import_class(self.conf.dhcp_driver)
ctx = context.get_admin_context_without_session()
self.plugin_rpc = DhcpPluginApi(topics.PLUGIN,
ctx, self.conf.use_namespaces)
# create dhcp dir to store dhcp info
dhcp_dir = os.path.dirname("/%s/dhcp/" % self.conf.state_path)
if not os.path.isdir(dhcp_dir):
os.makedirs(dhcp_dir, 0o755)
self.dhcp_version = self.dhcp_driver_cls.check_version()
self._populate_networks_cache()
def _populate_networks_cache(self):
"""Populate the networks cache when the DHCP-agent starts."""
try:
existing_networks = self.dhcp_driver_cls.existing_dhcp_networks(
self.conf,
self.root_helper
)
for net_id in existing_networks:
net = dhcp.NetModel(self.conf.use_namespaces,
{"id": net_id,
"subnets": [],
"ports": []})
self.cache.put(net)
except NotImplementedError:
# just go ahead with an empty networks cache
LOG.debug(
_("The '%s' DHCP-driver does not support retrieving of a "
"list of existing networks"),
self.conf.dhcp_driver
)
def after_start(self):
LOG.info(_("DHCP agent started"))
if cfg.CONF.kill_dhcp_process:
self.run(kill_flag=True)
else:
self.run(kill_flag=False)
def run(self, kill_flag=True):
"""Activate the DHCP agent."""
self.sync_state(kill_flag)
self.periodic_resync()
def call_driver(self, action, network, **action_kwargs):
"""Invoke an action on a DHCP driver instance."""
LOG.debug(_('Calling driver for network: %(net)s action: %(action)s'),
{'net': network.id, 'action': action})
try:
# the Driver expects something that is duck typed similar to
# the base models.
driver = self.dhcp_driver_cls(self.conf,
network,
self.root_helper,
self.dhcp_version,
self.plugin_rpc)
getattr(driver, action)(**action_kwargs)
return True
except exceptions.Conflict:
# No need to resync here, the agent will receive the event related
# to a status update for the network
LOG.warning(_('Unable to %(action)s dhcp for %(net_id)s: there is '
'a conflict with its current state; please check '
'that the network and/or its subnet(s) still exist.')
% {'net_id': network.id, 'action': action})
except Exception as e:
self.schedule_resync(e)
if (isinstance(e, n_rpc.RemoteError)
and e.exc_type == 'NetworkNotFound'
or isinstance(e, exceptions.NetworkNotFound)):
LOG.warning(_("Network %s has been deleted."), network.id)
else:
LOG.exception(_('Unable to %(action)s dhcp for %(net_id)s.')
% {'net_id': network.id, 'action': action})
def schedule_resync(self, reason):
"""Schedule a resync for a given reason."""
self.needs_resync_reasons.append(reason)
@utils.synchronized('dhcp-agent')
def sync_state(self, kill_flag=True):
"""Sync the local DHCP state with Neutron."""
LOG.info(_('Synchronizing state'))
pool = eventlet.GreenPool(cfg.CONF.num_sync_threads)
known_network_ids = set(self.cache.get_network_ids())
try:
active_networks = self.plugin_rpc.get_active_networks_info()
active_network_ids = set(network.id for network in active_networks)
for deleted_id in known_network_ids - active_network_ids:
try:
self.disable_dhcp_helper(deleted_id)
except Exception as e:
self.schedule_resync(e)
LOG.exception(_('Unable to sync network state on deleted '
'network %s'), deleted_id)
for network in active_networks:
pool.spawn(self.safe_configure_dhcp_for_network, network, kill_flag)
pool.waitall()
LOG.info(_('Synchronizing state complete'))
except Exception as e:
self.schedule_resync(e)
LOG.exception(_('Unable to sync network state.'))
@utils.exception_logger()
def _periodic_resync_helper(self):
"""Resync the dhcp state at the configured interval."""
while True:
eventlet.sleep(self.conf.resync_interval)
if self.needs_resync_reasons:
# be careful to avoid a race with additions to list
# from other threads
reasons = self.needs_resync_reasons
self.needs_resync_reasons = []
for r in reasons:
LOG.debug(_("resync: %(reason)s"),
{"reason": r})
self.sync_state()
def periodic_resync(self):
"""Spawn a thread to periodically resync the dhcp state."""
eventlet.spawn(self._periodic_resync_helper)
def safe_get_network_info(self, network_id):
try:
network = self.plugin_rpc.get_network_info(network_id)
if not network:
LOG.warn(_('Network %s has been deleted.'), network_id)
return network
except Exception as e:
self.schedule_resync(e)
LOG.exception(_('Network %s info call failed.'), network_id)
def enable_dhcp_helper(self, network_id):
"""Enable DHCP for a network that meets enabling criteria."""
network = self.safe_get_network_info(network_id)
if network:
self.configure_dhcp_for_network(network)
@utils.exception_logger()
def safe_configure_dhcp_for_network(self, network, kill_flag=True):
try:
self.configure_dhcp_for_network(network, kill_flag)
except (exceptions.NetworkNotFound, RuntimeError):
LOG.warn(_('Network %s may have been deleted and its resources '
'may have already been disposed.'), network.id)
def configure_dhcp_for_network(self, network, kill_flag=True):
if not network.admin_state_up:
return
for subnet in network.subnets:
if subnet.enable_dhcp:
                if self.call_driver('enable', network, kill_flag=kill_flag):
if (self.conf.use_namespaces and
self.conf.enable_isolated_metadata):
self.enable_isolated_metadata_proxy(network)
self.cache.put(network)
break
def disable_dhcp_helper(self, network_id):
"""Disable DHCP for a network known to the agent."""
network = self.cache.get_network_by_id(network_id)
if network:
if (self.conf.use_namespaces and
self.conf.enable_isolated_metadata):
self.disable_isolated_metadata_proxy(network)
if self.call_driver('disable', network):
self.cache.remove(network)
def refresh_dhcp_helper(self, network_id):
"""Refresh or disable DHCP for a network depending on the current state
of the network.
"""
old_network = self.cache.get_network_by_id(network_id)
if not old_network:
            # DHCP is not currently running for this network.
return self.enable_dhcp_helper(network_id)
network = self.safe_get_network_info(network_id)
if not network:
return
old_cidrs = set(s.cidr for s in old_network.subnets if s.enable_dhcp)
new_cidrs = set(s.cidr for s in network.subnets if s.enable_dhcp)
if new_cidrs and old_cidrs == new_cidrs:
self.call_driver('reload_allocations', network)
self.cache.put(network)
elif new_cidrs:
if self.call_driver('restart', network):
self.cache.put(network)
else:
self.disable_dhcp_helper(network.id)
@utils.synchronized('dhcp-agent')
def network_create_end(self, context, payload):
"""Handle the network.create.end notification event."""
network_id = payload['network']['id']
self.enable_dhcp_helper(network_id)
@utils.synchronized('dhcp-agent')
def network_update_end(self, context, payload):
"""Handle the network.update.end notification event."""
network_id = payload['network']['id']
if payload['network']['admin_state_up']:
self.enable_dhcp_helper(network_id)
else:
self.disable_dhcp_helper(network_id)
@utils.synchronized('dhcp-agent')
def network_delete_end(self, context, payload):
"""Handle the network.delete.end notification event."""
self.disable_dhcp_helper(payload['network_id'])
@utils.synchronized('dhcp-agent')
def subnet_update_end(self, context, payload):
"""Handle the subnet.update.end notification event."""
network_id = payload['subnet']['network_id']
self.refresh_dhcp_helper(network_id)
# Use the update handler for the subnet create event.
subnet_create_end = subnet_update_end
@utils.synchronized('dhcp-agent')
def subnet_delete_end(self, context, payload):
"""Handle the subnet.delete.end notification event."""
subnet_id = payload['subnet_id']
network = self.cache.get_network_by_subnet_id(subnet_id)
if network:
self.refresh_dhcp_helper(network.id)
@utils.synchronized('dhcp-agent')
def port_update_end(self, context, payload):
"""Handle the port.update.end notification event."""
updated_port = dhcp.DictModel(payload['port'])
network = self.cache.get_network_by_id(updated_port.network_id)
if network:
self.cache.put_port(updated_port)
self.call_driver('reload_allocations', network)
# Use the update handler for the port create event.
port_create_end = port_update_end
@utils.synchronized('dhcp-agent')
def port_delete_end(self, context, payload):
"""Handle the port.delete.end notification event."""
port = self.cache.get_port_by_id(payload['port_id'])
if port:
network = self.cache.get_network_by_id(port.network_id)
self.cache.remove_port(port)
self.call_driver('reload_allocations', network)
def enable_isolated_metadata_proxy(self, network):
# The proxy might work for either a single network
# or all the networks connected via a router
# to the one passed as a parameter
neutron_lookup_param = '--network_id=%s' % network.id
meta_cidr = netaddr.IPNetwork(dhcp.METADATA_DEFAULT_CIDR)
has_metadata_subnet = any(netaddr.IPNetwork(s.cidr) in meta_cidr
for s in network.subnets)
if (self.conf.enable_metadata_network and has_metadata_subnet):
router_ports = [port for port in network.ports
if (port.device_owner ==
constants.DEVICE_OWNER_ROUTER_INTF)]
if router_ports:
# Multiple router ports should not be allowed
if len(router_ports) > 1:
LOG.warning(_("%(port_num)d router ports found on the "
"metadata access network. Only the port "
"%(port_id)s, for router %(router_id)s "
"will be considered"),
{'port_num': len(router_ports),
'port_id': router_ports[0].id,
'router_id': router_ports[0].device_id})
neutron_lookup_param = ('--router_id=%s' %
router_ports[0].device_id)
def callback(pid_file):
metadata_proxy_socket = cfg.CONF.metadata_proxy_socket
proxy_cmd = ['neutron-ns-metadata-proxy',
'--pid_file=%s' % pid_file,
'--metadata_proxy_socket=%s' % metadata_proxy_socket,
neutron_lookup_param,
'--state_path=%s' % self.conf.state_path,
'--metadata_port=%d' % dhcp.METADATA_PORT]
proxy_cmd.extend(config.get_log_args(
cfg.CONF, 'neutron-ns-metadata-proxy-%s.log' % network.id))
return proxy_cmd
pm = external_process.ProcessManager(
self.conf,
network.id,
self.root_helper,
network.namespace)
pm.enable(callback)
def disable_isolated_metadata_proxy(self, network):
pm = external_process.ProcessManager(
self.conf,
network.id,
self.root_helper,
network.namespace)
pm.disable()
class DhcpPluginApi(n_rpc.RpcProxy):
"""Agent side of the dhcp rpc API.
API version history:
1.0 - Initial version.
1.1 - Added get_active_networks_info, create_dhcp_port,
and update_dhcp_port methods.
"""
BASE_RPC_API_VERSION = '1.1'
def __init__(self, topic, context, use_namespaces):
super(DhcpPluginApi, self).__init__(
topic=topic, default_version=self.BASE_RPC_API_VERSION)
self.context = context
self.host = cfg.CONF.host
self.use_namespaces = use_namespaces
def get_active_networks_info(self):
"""Make a remote process call to retrieve all network info."""
networks = self.call(self.context,
self.make_msg('get_active_networks_info',
host=self.host))
return [dhcp.NetModel(self.use_namespaces, n) for n in networks]
def get_network_info(self, network_id):
"""Make a remote process call to retrieve network info."""
network = self.call(self.context,
self.make_msg('get_network_info',
network_id=network_id,
host=self.host))
if network:
return dhcp.NetModel(self.use_namespaces, network)
def get_dhcp_port(self, network_id, device_id):
"""Make a remote process call to get the dhcp port."""
port = self.call(self.context,
self.make_msg('get_dhcp_port',
network_id=network_id,
device_id=device_id,
host=self.host))
if port:
return dhcp.DictModel(port)
def create_dhcp_port(self, port):
"""Make a remote process call to create the dhcp port."""
port = self.call(self.context,
self.make_msg('create_dhcp_port',
port=port,
host=self.host))
if port:
return dhcp.DictModel(port)
def update_dhcp_port(self, port_id, port):
"""Make a remote process call to update the dhcp port."""
port = self.call(self.context,
self.make_msg('update_dhcp_port',
port_id=port_id,
port=port,
host=self.host))
if port:
return dhcp.DictModel(port)
def release_dhcp_port(self, network_id, device_id):
"""Make a remote process call to release the dhcp port."""
return self.call(self.context,
self.make_msg('release_dhcp_port',
network_id=network_id,
device_id=device_id,
host=self.host))
def release_port_fixed_ip(self, network_id, device_id, subnet_id):
"""Make a remote process call to release a fixed_ip on the port."""
return self.call(self.context,
self.make_msg('release_port_fixed_ip',
network_id=network_id,
subnet_id=subnet_id,
device_id=device_id,
host=self.host))
class NetworkCache(object):
"""Agent cache of the current network state."""
def __init__(self):
self.cache = {}
self.subnet_lookup = {}
self.port_lookup = {}
def get_network_ids(self):
return self.cache.keys()
def get_network_by_id(self, network_id):
return self.cache.get(network_id)
def get_network_by_subnet_id(self, subnet_id):
return self.cache.get(self.subnet_lookup.get(subnet_id))
def get_network_by_port_id(self, port_id):
return self.cache.get(self.port_lookup.get(port_id))
def put(self, network):
if network.id in self.cache:
self.remove(self.cache[network.id])
self.cache[network.id] = network
for subnet in network.subnets:
self.subnet_lookup[subnet.id] = network.id
for port in network.ports:
self.port_lookup[port.id] = network.id
def remove(self, network):
del self.cache[network.id]
for subnet in network.subnets:
del self.subnet_lookup[subnet.id]
for port in network.ports:
del self.port_lookup[port.id]
def put_port(self, port):
network = self.get_network_by_id(port.network_id)
for index in range(len(network.ports)):
if network.ports[index].id == port.id:
network.ports[index] = port
break
else:
network.ports.append(port)
self.port_lookup[port.id] = network.id
def remove_port(self, port):
network = self.get_network_by_port_id(port.id)
for index in range(len(network.ports)):
if network.ports[index] == port:
del network.ports[index]
del self.port_lookup[port.id]
break
def get_port_by_id(self, port_id):
network = self.get_network_by_port_id(port_id)
if network:
for port in network.ports:
if port.id == port_id:
return port
def get_state(self):
net_ids = self.get_network_ids()
num_nets = len(net_ids)
num_subnets = 0
num_ports = 0
for net_id in net_ids:
network = self.get_network_by_id(net_id)
num_subnets += len(network.subnets)
num_ports += len(network.ports)
return {'networks': num_nets,
'subnets': num_subnets,
'ports': num_ports}
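# Usage sketch for NetworkCache (illustrative, not part of the original
# module): put() indexes a network's subnets and ports so later lookups can
# resolve either id back to the owning network:
#
#   cache = NetworkCache()
#   cache.put(net)  # net exposes .id, .subnets and .ports
#   assert cache.get_network_by_subnet_id(net.subnets[0].id) is net
#   cache.remove(net)  # drops the network and both reverse indexes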
class DhcpAgentWithStateReport(DhcpAgent):
def __init__(self, host=None):
super(DhcpAgentWithStateReport, self).__init__(host=host)
self.state_rpc = agent_rpc.PluginReportStateAPI(topics.PLUGIN)
self.agent_state = {
'binary': 'neutron-dhcp-agent',
'host': host,
'topic': topics.DHCP_AGENT,
'configurations': {
'dhcp_driver': cfg.CONF.dhcp_driver,
'use_namespaces': cfg.CONF.use_namespaces,
'dhcp_lease_duration': cfg.CONF.dhcp_lease_duration},
'start_flag': True,
'agent_type': constants.AGENT_TYPE_DHCP}
report_interval = cfg.CONF.AGENT.report_interval
self.use_call = True
if report_interval:
self.heartbeat = loopingcall.FixedIntervalLoopingCall(
self._report_state)
self.heartbeat.start(interval=report_interval)
def _report_state(self):
try:
self.agent_state.get('configurations').update(
self.cache.get_state())
ctx = context.get_admin_context_without_session()
self.state_rpc.report_state(ctx, self.agent_state, self.use_call)
self.use_call = False
except AttributeError:
# This means the server does not support report_state
LOG.warn(_("Neutron server does not support state report."
" State report for this agent will be disabled."))
self.heartbeat.stop()
self.run()
return
except Exception:
LOG.exception(_("Failed reporting state!"))
return
if self.agent_state.pop('start_flag', None):
if cfg.CONF.kill_dhcp_process:
self.run(kill_flag=True)
else:
self.run(kill_flag=False)
def agent_updated(self, context, payload):
"""Handle the agent_updated notification event."""
self.schedule_resync(_("Agent updated: %(payload)s") %
{"payload": payload})
LOG.info(_("agent_updated by server side %s!"), payload)
def after_start(self):
LOG.info(_("DHCP agent started"))
def register_options():
cfg.CONF.register_opts(DhcpAgent.OPTS)
config.register_interface_driver_opts_helper(cfg.CONF)
config.register_use_namespaces_opts_helper(cfg.CONF)
config.register_agent_state_opts_helper(cfg.CONF)
config.register_root_helper(cfg.CONF)
cfg.CONF.register_opts(dhcp.OPTS)
cfg.CONF.register_opts(interface.OPTS)
def main():
register_options()
common_config.init(sys.argv[1:])
config.setup_logging(cfg.CONF)
server = neutron_service.Service.create(
binary='neutron-dhcp-agent',
topic=topics.DHCP_AGENT,
report_interval=cfg.CONF.AGENT.report_interval,
manager='neutron.agent.dhcp_agent.DhcpAgentWithStateReport')
service.launch(server).wait()
|
|
"""
Porter Stemmer
This is the Porter stemming algorithm. It follows the algorithm
presented in
Porter, M. "An algorithm for suffix stripping." Program 14.3 (1980): 130-137.
with some optional deviations that can be turned on or off with the
`mode` argument to the constructor.
Martin Porter, the algorithm's inventor, maintains a web page about the
algorithm at
http://www.tartarus.org/~martin/PorterStemmer/
which includes another Python implementation and other implementations
in many languages.
"""
from __future__ import print_function, unicode_literals
__docformat__ = 'plaintext'
import re
from nltk.stem.api import StemmerI
from nltk.compat import python_2_unicode_compatible
@python_2_unicode_compatible
class PorterStemmer(StemmerI):
"""
A word stemmer based on the Porter stemming algorithm.
Porter, M. "An algorithm for suffix stripping."
Program 14.3 (1980): 130-137.
See http://www.tartarus.org/~martin/PorterStemmer/ for the homepage
of the algorithm.
Martin Porter has endorsed several modifications to the Porter
algorithm since writing his original paper, and those extensions are
included in the implementations on his website. Additionally, others
have proposed further improvements to the algorithm, including NLTK
contributors. There are thus three modes that can be selected by
passing the appropriate constant to the class constructor's `mode`
attribute:
PorterStemmer.ORIGINAL_ALGORITHM
- Implementation that is faithful to the original paper.
Note that Martin Porter has deprecated this version of the
algorithm. Martin distributes implementations of the Porter
Stemmer in many languages, hosted at:
http://www.tartarus.org/~martin/PorterStemmer/
and all of these implementations include his extensions. He
strongly recommends against using the original, published
version of the algorithm; only use this mode if you clearly
understand why you are choosing to do so.
PorterStemmer.MARTIN_EXTENSIONS
- Implementation that only uses the modifications to the
algorithm that are included in the implementations on Martin
Porter's website. He has declared Porter frozen, so the
behaviour of those implementations should never change.
PorterStemmer.NLTK_EXTENSIONS (default)
- Implementation that includes further improvements devised by
NLTK contributors or taken from other modified implementations
found on the web.
For the best stemming, you should use the default NLTK_EXTENSIONS
version. However, if you need to get the same results as either the
original algorithm or one of Martin Porter's hosted versions for
    compatibility with an existing implementation or dataset, you can use
one of the other modes instead.
"""
# Modes the Stemmer can be instantiated in
NLTK_EXTENSIONS = 'NLTK_EXTENSIONS'
MARTIN_EXTENSIONS = 'MARTIN_EXTENSIONS'
ORIGINAL_ALGORITHM = 'ORIGINAL_ALGORITHM'
def __init__(self, mode=NLTK_EXTENSIONS):
if mode not in (
self.NLTK_EXTENSIONS,
self.MARTIN_EXTENSIONS,
self.ORIGINAL_ALGORITHM
):
raise ValueError(
"Mode must be one of PorterStemmer.NLTK_EXTENSIONS, "
"PorterStemmer.MARTIN_EXTENSIONS, or "
"PorterStemmer.ORIGINAL_ALGORITHM"
)
self.mode = mode
if self.mode == self.NLTK_EXTENSIONS:
# This is a table of irregular forms. It is quite short,
# but still reflects the errors actually drawn to Martin
# Porter's attention over a 20 year period!
irregular_forms = {
"sky" : ["sky", "skies"],
"die" : ["dying"],
"lie" : ["lying"],
"tie" : ["tying"],
"news" : ["news"],
"inning" : ["innings", "inning"],
"outing" : ["outings", "outing"],
"canning" : ["cannings", "canning"],
"howe" : ["howe"],
"proceed" : ["proceed"],
"exceed" : ["exceed"],
"succeed" : ["succeed"],
}
self.pool = {}
for key in irregular_forms:
for val in irregular_forms[key]:
self.pool[val] = key
self.vowels = frozenset(['a', 'e', 'i', 'o', 'u'])
def _is_consonant(self, word, i):
"""Returns True if word[i] is a consonant, False otherwise
A consonant is defined in the paper as follows:
A consonant in a word is a letter other than A, E, I, O or
U, and other than Y preceded by a consonant. (The fact that
the term `consonant' is defined to some extent in terms of
itself does not make it ambiguous.) So in TOY the consonants
are T and Y, and in SYZYGY they are S, Z and G. If a letter
is not a consonant it is a vowel.
"""
if word[i] in self.vowels:
return False
if word[i] == 'y':
if i == 0:
return True
else:
return (not self._is_consonant(word, i - 1))
return True
def _measure(self, stem):
"""Returns the 'measure' of stem, per definition in the paper
From the paper:
A consonant will be denoted by c, a vowel by v. A list
ccc... of length greater than 0 will be denoted by C, and a
list vvv... of length greater than 0 will be denoted by V.
Any word, or part of a word, therefore has one of the four
forms:
CVCV ... C
CVCV ... V
VCVC ... C
VCVC ... V
These may all be represented by the single form
[C]VCVC ... [V]
where the square brackets denote arbitrary presence of their
contents. Using (VC){m} to denote VC repeated m times, this
may again be written as
[C](VC){m}[V].
        m will be called the 'measure' of any word or word part when
represented in this form. The case m = 0 covers the null
word. Here are some examples:
m=0 TR, EE, TREE, Y, BY.
m=1 TROUBLE, OATS, TREES, IVY.
m=2 TROUBLES, PRIVATE, OATEN, ORRERY.
"""
cv_sequence = ''
# Construct a string of 'c's and 'v's representing whether each
# character in `stem` is a consonant or a vowel.
# e.g. 'falafel' becomes 'cvcvcvc',
# 'architecture' becomes 'vcccvcvccvcv'
for i in range(len(stem)):
if self._is_consonant(stem, i):
cv_sequence += 'c'
else:
cv_sequence += 'v'
        # Count the number of 'vc' occurrences, which is equivalent to
# the number of 'VC' occurrences in Porter's reduced form in the
# docstring above, which is in turn equivalent to `m`
return cv_sequence.count('vc')
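    # Worked example (illustrative, not in the original source): 'troubles'
    # maps to the c/v string 'ccvvccvc', which contains two 'vc' substrings,
    # so its measure is 2, matching the paper's m=2 TROUBLES example.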
def _has_positive_measure(self, stem):
return self._measure(stem) > 0
def _contains_vowel(self, stem):
"""Returns True if stem contains a vowel, else False"""
for i in range(len(stem)):
if not self._is_consonant(stem, i):
return True
return False
def _ends_double_consonant(self, word):
"""Implements condition *d from the paper
Returns True if word ends with a double consonant
"""
return (
len(word) >= 2 and
word[-1] == word[-2] and
self._is_consonant(word, len(word)-1)
)
def _ends_cvc(self, word):
"""Implements condition *o from the paper
From the paper:
*o - the stem ends cvc, where the second c is not W, X or Y
(e.g. -WIL, -HOP).
"""
return (
len(word) >= 3 and
self._is_consonant(word, len(word) - 3) and
not self._is_consonant(word, len(word) - 2) and
self._is_consonant(word, len(word) - 1) and
word[-1] not in ('w', 'x', 'y')
) or (
self.mode == self.NLTK_EXTENSIONS and
len(word) == 2 and
not self._is_consonant(word, 0) and
self._is_consonant(word, 1)
)
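    # Illustrative checks (not in the original source): 'hop' is c-v-c with a
    # final 'p', so _ends_cvc('hop') is True; 'tray' also ends c-v-c, but its
    # final consonant is 'y', one of the excluded w/x/y letters, so it
    # returns False.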
def _replace_suffix(self, word, suffix, replacement):
"""Replaces `suffix` of `word` with `replacement"""
assert word.endswith(suffix), "Given word doesn't end with given suffix"
if suffix == '':
return word + replacement
else:
return word[:-len(suffix)] + replacement
def _apply_rule_list(self, word, rules):
"""Applies the first applicable suffix-removal rule to the word
Takes a word and a list of suffix-removal rules represented as
3-tuples, with the first element being the suffix to remove,
the second element being the string to replace it with, and the
final element being the condition for the rule to be applicable,
or None if the rule is unconditional.
"""
for rule in rules:
suffix, replacement, condition = rule
if suffix == '*d' and self._ends_double_consonant(word):
stem = word[:-2]
if condition is None or condition(stem):
return stem + replacement
else:
# Don't try any further rules
return word
if word.endswith(suffix):
stem = self._replace_suffix(word, suffix, '')
if condition is None or condition(stem):
return stem + replacement
else:
# Don't try any further rules
return word
return word
def _step1a(self, word):
"""Implements Step 1a from "An algorithm for suffix stripping"
From the paper:
SSES -> SS caresses -> caress
IES -> I ponies -> poni
ties -> ti
SS -> SS caress -> caress
S -> cats -> cat
"""
# this NLTK-only rule extends the original algorithm, so
# that 'flies'->'fli' but 'dies'->'die' etc
if self.mode == self.NLTK_EXTENSIONS:
if word.endswith('ies') and len(word) == 4:
return self._replace_suffix(word, 'ies', 'ie')
return self._apply_rule_list(word, [
('sses', 'ss', None), # SSES -> SS
('ies', 'i', None), # IES -> I
('ss', 'ss', None), # SS -> SS
('s', '', None), # S ->
])
def _step1b(self, word):
"""Implements Step 1b from "An algorithm for suffix stripping"
From the paper:
(m>0) EED -> EE feed -> feed
agreed -> agree
(*v*) ED -> plastered -> plaster
bled -> bled
(*v*) ING -> motoring -> motor
sing -> sing
If the second or third of the rules in Step 1b is successful,
the following is done:
AT -> ATE conflat(ed) -> conflate
BL -> BLE troubl(ed) -> trouble
IZ -> IZE siz(ed) -> size
(*d and not (*L or *S or *Z))
-> single letter
hopp(ing) -> hop
tann(ed) -> tan
fall(ing) -> fall
hiss(ing) -> hiss
fizz(ed) -> fizz
(m=1 and *o) -> E fail(ing) -> fail
fil(ing) -> file
The rule to map to a single letter causes the removal of one of
the double letter pair. The -E is put back on -AT, -BL and -IZ,
so that the suffixes -ATE, -BLE and -IZE can be recognised
later. This E may be removed in step 4.
"""
# this NLTK-only block extends the original algorithm, so that
# 'spied'->'spi' but 'died'->'die' etc
if self.mode == self.NLTK_EXTENSIONS:
if word.endswith('ied'):
if len(word) == 4:
return self._replace_suffix(word, 'ied', 'ie')
else:
return self._replace_suffix(word, 'ied', 'i')
# (m>0) EED -> EE
if word.endswith('eed'):
stem = self._replace_suffix(word, 'eed', '')
if self._measure(stem) > 0:
return stem + 'ee'
else:
return word
rule_2_or_3_succeeded = False
for suffix in ['ed', 'ing']:
if word.endswith(suffix):
intermediate_stem = self._replace_suffix(word, suffix, '')
if self._contains_vowel(intermediate_stem):
rule_2_or_3_succeeded = True
break
if not rule_2_or_3_succeeded:
return word
return self._apply_rule_list(intermediate_stem, [
('at', 'ate', None), # AT -> ATE
('bl', 'ble', None), # BL -> BLE
('iz', 'ize', None), # IZ -> IZE
# (*d and not (*L or *S or *Z))
# -> single letter
(
'*d',
intermediate_stem[-1],
lambda stem: intermediate_stem[-1] not in ('l', 's', 'z')
),
# (m=1 and *o) -> E
(
'',
'e',
lambda stem: (self._measure(stem) == 1 and
self._ends_cvc(stem))
),
])
def _step1c(self, word):
"""Implements Step 1c from "An algorithm for suffix stripping"
From the paper:
Step 1c
(*v*) Y -> I happy -> happi
sky -> sky
"""
def nltk_condition(stem):
"""
This has been modified from the original Porter algorithm so
that y->i is only done when y is preceded by a consonant,
but not if the stem is only a single consonant, i.e.
(*c and not c) Y -> I
So 'happy' -> 'happi', but
'enjoy' -> 'enjoy' etc
This is a much better rule. Formerly 'enjoy'->'enjoi' and
'enjoyment'->'enjoy'. Step 1c is perhaps done too soon; but
with this modification that no longer really matters.
Also, the removal of the contains_vowel(z) condition means
that 'spy', 'fly', 'try' ... stem to 'spi', 'fli', 'tri' and
conflate with 'spied', 'tried', 'flies' ...
"""
return len(stem) > 1 and self._is_consonant(stem, len(stem) - 1)
def original_condition(stem):
return self._contains_vowel(stem)
return self._apply_rule_list(word, [
(
'y',
'i',
nltk_condition if self.mode == self.NLTK_EXTENSIONS
else original_condition
)
])
def _step2(self, word):
"""Implements Step 2 from "An algorithm for suffix stripping"
From the paper:
Step 2
(m>0) ATIONAL -> ATE relational -> relate
(m>0) TIONAL -> TION conditional -> condition
rational -> rational
(m>0) ENCI -> ENCE valenci -> valence
(m>0) ANCI -> ANCE hesitanci -> hesitance
(m>0) IZER -> IZE digitizer -> digitize
(m>0) ABLI -> ABLE conformabli -> conformable
(m>0) ALLI -> AL radicalli -> radical
(m>0) ENTLI -> ENT differentli -> different
(m>0) ELI -> E vileli - > vile
(m>0) OUSLI -> OUS analogousli -> analogous
(m>0) IZATION -> IZE vietnamization -> vietnamize
(m>0) ATION -> ATE predication -> predicate
(m>0) ATOR -> ATE operator -> operate
(m>0) ALISM -> AL feudalism -> feudal
(m>0) IVENESS -> IVE decisiveness -> decisive
(m>0) FULNESS -> FUL hopefulness -> hopeful
(m>0) OUSNESS -> OUS callousness -> callous
(m>0) ALITI -> AL formaliti -> formal
(m>0) IVITI -> IVE sensitiviti -> sensitive
(m>0) BILITI -> BLE sensibiliti -> sensible
"""
if self.mode == self.NLTK_EXTENSIONS:
# Instead of applying the ALLI -> AL rule after '(a)bli' per
# the published algorithm, instead we apply it first, and,
# if it succeeds, run the result through step2 again.
if (
word.endswith('alli') and
self._has_positive_measure(
self._replace_suffix(word, 'alli', '')
)
):
return self._step2(
self._replace_suffix(word, 'alli', 'al')
)
bli_rule = ('bli', 'ble', self._has_positive_measure)
abli_rule = ('abli', 'able', self._has_positive_measure)
rules = [
('ational', 'ate', self._has_positive_measure),
('tional', 'tion', self._has_positive_measure),
('enci', 'ence', self._has_positive_measure),
('anci', 'ance', self._has_positive_measure),
('izer', 'ize', self._has_positive_measure),
abli_rule if self.mode == self.ORIGINAL_ALGORITHM else bli_rule,
('alli', 'al', self._has_positive_measure),
('entli', 'ent', self._has_positive_measure),
('eli', 'e', self._has_positive_measure),
('ousli', 'ous', self._has_positive_measure),
('ization', 'ize', self._has_positive_measure),
('ation', 'ate', self._has_positive_measure),
('ator', 'ate', self._has_positive_measure),
('alism', 'al', self._has_positive_measure),
('iveness', 'ive', self._has_positive_measure),
('fulness', 'ful', self._has_positive_measure),
('ousness', 'ous', self._has_positive_measure),
('aliti', 'al', self._has_positive_measure),
('iviti', 'ive', self._has_positive_measure),
('biliti', 'ble', self._has_positive_measure),
]
if self.mode == self.NLTK_EXTENSIONS:
rules.append(
('fulli', 'ful', self._has_positive_measure)
)
# The 'l' of the 'logi' -> 'log' rule is put with the stem,
# so that short stems like 'geo' 'theo' etc work like
# 'archaeo' 'philo' etc.
rules.append((
"logi",
"log",
lambda stem: self._has_positive_measure(word[:-3])
))
if self.mode == self.MARTIN_EXTENSIONS:
rules.append(
("logi", "log", self._has_positive_measure)
)
return self._apply_rule_list(word, rules)
def _step3(self, word):
"""Implements Step 3 from "An algorithm for suffix stripping"
From the paper:
Step 3
(m>0) ICATE -> IC triplicate -> triplic
(m>0) ATIVE -> formative -> form
(m>0) ALIZE -> AL formalize -> formal
(m>0) ICITI -> IC electriciti -> electric
(m>0) ICAL -> IC electrical -> electric
(m>0) FUL -> hopeful -> hope
(m>0) NESS -> goodness -> good
"""
return self._apply_rule_list(word, [
('icate', 'ic', self._has_positive_measure),
('ative', '', self._has_positive_measure),
('alize', 'al', self._has_positive_measure),
('iciti', 'ic', self._has_positive_measure),
('ical', 'ic', self._has_positive_measure),
('ful', '', self._has_positive_measure),
('ness', '', self._has_positive_measure),
])
def _step4(self, word):
"""Implements Step 4 from "An algorithm for suffix stripping"
Step 4
(m>1) AL -> revival -> reviv
(m>1) ANCE -> allowance -> allow
(m>1) ENCE -> inference -> infer
(m>1) ER -> airliner -> airlin
(m>1) IC -> gyroscopic -> gyroscop
(m>1) ABLE -> adjustable -> adjust
(m>1) IBLE -> defensible -> defens
(m>1) ANT -> irritant -> irrit
(m>1) EMENT -> replacement -> replac
(m>1) MENT -> adjustment -> adjust
(m>1) ENT -> dependent -> depend
(m>1 and (*S or *T)) ION -> adoption -> adopt
(m>1) OU -> homologou -> homolog
(m>1) ISM -> communism -> commun
(m>1) ATE -> activate -> activ
(m>1) ITI -> angulariti -> angular
(m>1) OUS -> homologous -> homolog
(m>1) IVE -> effective -> effect
(m>1) IZE -> bowdlerize -> bowdler
The suffixes are now removed. All that remains is a little
tidying up.
"""
measure_gt_1 = lambda stem: self._measure(stem) > 1
return self._apply_rule_list(word, [
('al', '', measure_gt_1),
('ance', '', measure_gt_1),
('ence', '', measure_gt_1),
('er', '', measure_gt_1),
('ic', '', measure_gt_1),
('able', '', measure_gt_1),
('ible', '', measure_gt_1),
('ant', '', measure_gt_1),
('ement', '', measure_gt_1),
('ment', '', measure_gt_1),
('ent', '', measure_gt_1),
# (m>1 and (*S or *T)) ION ->
(
'ion',
'',
lambda stem: self._measure(stem) > 1 and stem[-1] in ('s', 't')
),
('ou', '', measure_gt_1),
('ism', '', measure_gt_1),
('ate', '', measure_gt_1),
('iti', '', measure_gt_1),
('ous', '', measure_gt_1),
('ive', '', measure_gt_1),
('ize', '', measure_gt_1),
])
def _step5a(self, word):
"""Implements Step 5a from "An algorithm for suffix stripping"
From the paper:
Step 5a
(m>1) E -> probate -> probat
rate -> rate
(m=1 and not *o) E -> cease -> ceas
"""
# Note that Martin's test vocabulary and reference
# implementations are inconsistent in how they handle the case
# where two rules both refer to a suffix that matches the word
# to be stemmed, but only the condition of the second one is
# true.
# Earlier in step2b we had the rules:
# (m>0) EED -> EE
# (*v*) ED ->
# but the examples in the paper included "feed"->"feed", even
# though (*v*) is true for "fe" and therefore the second rule
# alone would map "feed"->"fe".
# However, in THIS case, we need to handle the consecutive rules
# differently and try both conditions (obviously; the second
# rule here would be redundant otherwise). Martin's paper makes
# no explicit mention of the inconsistency; you have to infer it
# from the examples.
# For this reason, we can't use _apply_rule_list here.
if word.endswith('e'):
stem = self._replace_suffix(word, 'e', '')
if self._measure(stem) > 1:
return stem
if self._measure(stem) == 1 and not self._ends_cvc(stem):
return stem
return word
def _step5b(self, word):
"""Implements Step 5a from "An algorithm for suffix stripping"
From the paper:
Step 5b
(m > 1 and *d and *L) -> single letter
controll -> control
roll -> roll
"""
return self._apply_rule_list(word, [
('ll', 'l', lambda stem: self._measure(word[:-1]) > 1)
])
def stem(self, word):
stem = word.lower()
if self.mode == self.NLTK_EXTENSIONS and word in self.pool:
return self.pool[word]
if self.mode != self.ORIGINAL_ALGORITHM and len(word) <= 2:
# With this line, strings of length 1 or 2 don't go through
# the stemming process, although no mention is made of this
# in the published algorithm.
return word
stem = self._step1a(stem)
stem = self._step1b(stem)
stem = self._step1c(stem)
stem = self._step2(stem)
stem = self._step3(stem)
stem = self._step4(stem)
stem = self._step5a(stem)
stem = self._step5b(stem)
return stem
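    # Usage sketch (illustrative, not part of the original source):
    #   PorterStemmer().stem('caresses') -> 'caress'  (step 1a, SSES -> SS)
    #   PorterStemmer().stem('dying')    -> 'die'     (irregular-form pool,
    #                                                  NLTK_EXTENSIONS mode)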
def __repr__(self):
return '<PorterStemmer>'
def demo():
"""
A demonstration of the porter stemmer on a sample from
the Penn Treebank corpus.
"""
from nltk.corpus import treebank
from nltk import stem
stemmer = stem.PorterStemmer()
orig = []
stemmed = []
for item in treebank.fileids()[:3]:
for (word, tag) in treebank.tagged_words(item):
orig.append(word)
stemmed.append(stemmer.stem(word))
# Convert the results to a string, and word-wrap them.
results = ' '.join(stemmed)
results = re.sub(r"(.{,70})\s", r'\1\n', results+' ').rstrip()
# Convert the original to a string, and word wrap it.
original = ' '.join(orig)
original = re.sub(r"(.{,70})\s", r'\1\n', original+' ').rstrip()
# Print the results.
print('-Original-'.center(70).replace(' ', '*').replace('-', ' '))
print(original)
print('-Results-'.center(70).replace(' ', '*').replace('-', ' '))
print(results)
print('*'*70)
|
|
import os
import pandas as pd
from sqlalchemy.sql import column
from igf_data.igfdb.baseadaptor import BaseAdaptor
from igf_data.igfdb.igfTables import File, File_attribute
class FileAdaptor(BaseAdaptor):
'''
An adaptor class for File tables
'''
def store_file_and_attribute_data(self,data,autosave=True):
'''
A method for dividing and storing data to file and attribute table
:param data: A list of dictionary or a Pandas DataFrame
    :param autosave: A toggle for automatically saving changes to db, default True
:returns: None
'''
(file_data, file_attr_data)=\
self.divide_data_to_table_and_attribute(data=data)
try:
self.store_file_data(data=file_data)
if len(file_attr_data.index)>0: # check if any attribute exists
self.store_file_attributes(data=file_attr_data)
if autosave:
self.commit_session()
except Exception as e:
if autosave:
self.rollback_session()
raise ValueError(
'Failed to store file and attributes, error: {0}'.format(e))
def divide_data_to_table_and_attribute(
self,data,required_column='file_path',table_columns=None,
attribute_name_column='attribute_name',attribute_value_column='attribute_value'):
'''
A method for separating data for File and File_attribute tables
:param data: A list of dictionary or a Pandas DataFrame
:param table_columns: List of table column names, default None
:param required_column: A column name to add to the attribute data
:param attribute_name_column: A label for attribute name column
:param attribute_value_column: A label for attribute value column
:returns: Two pandas dataframes, one for File and another for File_attribute table
'''
try:
if not isinstance(data, pd.DataFrame):
data = pd.DataFrame(data)
file_columns = \
self.get_table_columns(
table_name=File,
excluded_columns=['file_id']) # get required columns for file table
(file_df, file_attr_df) = \
BaseAdaptor.\
divide_data_to_table_and_attribute(
self,
data=data,
required_column=required_column,
table_columns=file_columns,
attribute_name_column=attribute_name_column,
attribute_value_column=attribute_value_column) # divide dataframe
return (file_df, file_attr_df)
except Exception as e:
raise ValueError(
'Failed to divide file data, error: {0}'.format(e))
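  # Illustrative sketch (not part of the original adaptor): a record such as
  #   {'file_path': '/data/a.bam', 'md5': 'abc123', 'species': 'HG38'}
  # keeps the columns that exist on the File table (file_path, md5, ...) in
  # file_df, while the leftover key/value pairs are melted into
  # (file_path, attribute_name, attribute_value) rows for file_attr_df.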
def store_file_data(self,data,autosave=False):
'''
Load data to file table
:param data: A list of dictionary or a Pandas DataFrame
    :param autosave: A toggle for automatically saving changes to db, default False
:returns: None
'''
if not isinstance(data, pd.DataFrame):
data = pd.DataFrame(data)
try:
self.store_records(
table=File,
data=data) # store data without autocommit
if autosave:
self.commit_session()
except Exception as e:
if autosave:
self.rollback_session()
raise ValueError('Failed to store file data, error: {0}'.format(e))
def store_file_attributes(self,data,file_id='',autosave=False):
'''
A method for storing data to File_attribute table
:param data: A list of dictionary or a Pandas DataFrame
:param file_id: A file_id for updating the attribute table, default empty string
    :param autosave: A toggle for automatically saving changes to db, default False
:returns: None
'''
try:
if not isinstance(data, pd.DataFrame):
data = pd.DataFrame(data)
if 'file_path' in data.columns:
map_function = \
lambda x: \
self.map_foreign_table_and_store_attribute(
data=x,
lookup_table=File,
lookup_column_name='file_path',
target_column_name='file_id') # prepare the map function for File id
data['file_id'] = ''
data = \
data.apply(
map_function,
axis=1,
result_type=None) # map file id
data.drop(
'file_path',
axis=1,
inplace=True)
self.store_attributes(
attribute_table=File_attribute,
linked_column='file_id',
db_id=file_id,
data=data) # store data without autocommit
if autosave:
self.commit_session()
except Exception as e:
if autosave:
self.rollback_session()
raise ValueError(
'Failed to store file attributes, error: {0}'.format(e))
def fetch_file_records_file_path(self,file_path):
'''
A method for fetching data for file table
:param file_path: an absolute file path
:returns: A file object
'''
try:
file_obj = \
self.fetch_records_by_column(
table=File,
column_name=File.file_path,
column_id=file_path,
output_mode='one')
return file_obj
except Exception as e:
raise ValueError(
'Failed to fetch file record, error: {0}'.format(e))
def check_file_records_file_path(self,file_path):
'''
A method for checking file information in database
    :param file_path: An absolute file path
    :returns: True if the file is present in the db, False if it is not
'''
try:
file_check = False
file_obj = \
self.fetch_records_by_column(
table=File,
column_name=File.file_path,
column_id=file_path,
output_mode='one_or_none')
if file_obj:
file_check = True
return file_check
except Exception as e:
raise ValueError(
'Failed to check file records, error: {0}'.format(e))
def remove_file_data_for_file_path(
self,file_path,remove_file=False,autosave=True):
'''
    A method for removing the entry for a specific file.
:param file_path: A complete file_path for checking database
:param remove_file: A toggle for removing filepath, default False
:param autosave: A toggle for automatically saving changes to database, default True
:returns: None
'''
try:
file_exists = \
self.check_file_records_file_path(file_path=file_path)
if not file_exists:
raise ValueError(
'File {0} not found in database'.format(file_path))
self.session.\
query(File).\
filter(File.file_path==file_path).\
delete(synchronize_session=False) # remove record from db
if remove_file:
os.remove(path=file_path) # removing file from disk
if autosave:
self.commit_session() # save changes to database
except Exception as e:
raise ValueError(
'Failed to remove file, error: {0}'.format(e))
def update_file_table_for_file_path(self,file_path,tag,value,autosave=False):
'''
A method for updating file table
:param file_path: A file_path for database look up
:param tag: A keyword for file column name
:param value: A new value for the file column
:param autosave: Toggle autosave, default off
:returns: None
'''
try:
file_columns = \
self.get_table_columns(
table_name=File,
excluded_columns=['file_id'])
if tag not in file_columns:
raise ValueError(
'column name {0} not allowed for table File'.\
format(tag))
_ = \
self.session.\
query(File).\
filter(File.file_path==file_path).\
update({tag:value},synchronize_session=False)
if autosave:
self.commit_session()
except Exception as e:
raise ValueError(
'Failed to update file entry, error: {0}'.format(e))
|
|
# -*- coding: utf-8 -*-
# Django settings for social pinax project.
import os.path
import posixpath
import pinax
PINAX_ROOT = os.path.abspath(os.path.dirname(pinax.__file__))
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
# tells Pinax to use the default theme
PINAX_THEME = "default"
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# tells Pinax to serve media through the staticfiles app.
SERVE_MEDIA = DEBUG
INTERNAL_IPS = [
"127.0.0.1",
]
ADMINS = [
# ("Your Name", "your_email@domain.com"),
]
MANAGERS = ADMINS
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3", # Add "postgresql_psycopg2", "postgresql", "mysql", "sqlite3" or "oracle".
"NAME": "dev.db", # Or path to database file if using sqlite3.
"USER": "", # Not used with sqlite3.
"PASSWORD": "", # Not used with sqlite3.
"HOST": "", # Set to empty string for localhost. Not used with sqlite3.
"PORT": "", # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = "US/Eastern"
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = "en-us"
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = os.path.join(PROJECT_ROOT, "site_media", "media")
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = "/site_media/media/"
# Absolute path to the directory that holds static files like app media.
# Example: "/home/media/media.lawrence.com/apps/"
STATIC_ROOT = os.path.join(PROJECT_ROOT, "site_media", "static")
# URL that handles the static files like app media.
# Example: "http://media.lawrence.com"
STATIC_URL = "/site_media/static/"
# Additional directories which hold static files
STATICFILES_DIRS = [
os.path.join(PROJECT_ROOT, "media"),
os.path.join(PINAX_ROOT, "media", PINAX_THEME),
]
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = posixpath.join(STATIC_URL, "admin/")
# Make this unique, and don't share it with anybody.
SECRET_KEY = ""
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = [
"django.template.loaders.filesystem.load_template_source",
"django.template.loaders.app_directories.load_template_source",
]
MIDDLEWARE_CLASSES = [
"django.middleware.common.CommonMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django_openid.consumer.SessionConsumer",
"django.contrib.messages.middleware.MessageMiddleware",
"groups.middleware.GroupAwareMiddleware",
"pinax.apps.account.middleware.LocaleMiddleware",
"django.middleware.doc.XViewMiddleware",
"pagination.middleware.PaginationMiddleware",
"django_sorting.middleware.SortingMiddleware",
"pinax.middleware.security.HideSensistiveFieldsMiddleware",
"debug_toolbar.middleware.DebugToolbarMiddleware",
]
ROOT_URLCONF = "social_project.urls"
TEMPLATE_DIRS = [
os.path.join(PROJECT_ROOT, "templates"),
os.path.join(PINAX_ROOT, "templates", PINAX_THEME),
]
TEMPLATE_CONTEXT_PROCESSORS = [
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.request",
"django.contrib.messages.context_processors.messages",
"staticfiles.context_processors.static_url",
"pinax.core.context_processors.pinax_settings",
"pinax.apps.account.context_processors.account",
"notification.context_processors.notification",
"announcements.context_processors.site_wide_announcements",
"messages.context_processors.inbox",
"friends_app.context_processors.invitations",
"social_project.context_processors.combined_inbox_count",
]
COMBINED_INBOX_COUNT_SOURCES = [
"messages.context_processors.inbox",
"friends_app.context_processors.invitations",
"notification.context_processors.notification",
]
INSTALLED_APPS = [
# Django
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.sites",
"django.contrib.messages",
"django.contrib.humanize",
"django.contrib.markup",
"pinax.templatetags",
# external
"notification", # must be first
"staticfiles",
"debug_toolbar",
"mailer",
"uni_form",
"django_openid",
"ajax_validation",
"timezones",
"emailconfirmation",
"announcements",
"pagination",
"friends",
"messages",
"oembed",
"groups",
"threadedcomments",
"wakawaka",
"swaps",
"voting",
"tagging",
"bookmarks",
"photologue",
"avatar",
"flag",
"microblogging",
"locations",
"django_sorting",
"django_markup",
"tagging_ext",
# Pinax
"pinax.apps.account",
"pinax.apps.signup_codes",
"pinax.apps.analytics",
"pinax.apps.profiles",
"pinax.apps.blog",
"pinax.apps.tribes",
"pinax.apps.photos",
"pinax.apps.topics",
"pinax.apps.threadedcomments_extras",
"pinax.apps.voting_extras",
# project
"about",
]
FIXTURE_DIRS = [
os.path.join(PROJECT_ROOT, "fixtures"),
]
MESSAGE_STORAGE = "django.contrib.messages.storage.session.SessionStorage"
EMAIL_BACKEND = "mailer.backend.DbBackend"
ABSOLUTE_URL_OVERRIDES = {
"auth.user": lambda o: "/profiles/profile/%s/" % o.username,
}
MARKUP_FILTER_FALLBACK = "none"
MARKUP_CHOICES = [
("restructuredtext", u"reStructuredText"),
("textile", u"Textile"),
("markdown", u"Markdown"),
("creole", u"Creole"),
]
AUTH_PROFILE_MODULE = "profiles.Profile"
NOTIFICATION_LANGUAGE_MODULE = "account.Account"
ACCOUNT_OPEN_SIGNUP = True
ACCOUNT_REQUIRED_EMAIL = False
ACCOUNT_EMAIL_VERIFICATION = False
ACCOUNT_EMAIL_AUTHENTICATION = False
ACCOUNT_UNIQUE_EMAIL = EMAIL_CONFIRMATION_UNIQUE_EMAIL = False
AUTHENTICATION_BACKENDS = [
"pinax.apps.account.auth_backends.AuthenticationBackend",
]
LOGIN_URL = "/account/login/" # @@@ any way this can be a url name?
LOGIN_REDIRECT_URLNAME = "what_next"
EMAIL_CONFIRMATION_DAYS = 2
EMAIL_DEBUG = DEBUG
ugettext = lambda s: s
LANGUAGES = [
("en", u"English"),
]
# URCHIN_ID = "ua-..."
YAHOO_MAPS_API_KEY = "..."
class NullStream(object):
def write(*args, **kwargs):
pass
writeline = write
writelines = write
RESTRUCTUREDTEXT_FILTER_SETTINGS = {
"cloak_email_addresses": True,
"file_insertion_enabled": False,
"raw_enabled": False,
"warning_stream": NullStream(),
"strip_comments": True,
}
# if Django is running behind a proxy, we need to do things like use
# HTTP_X_FORWARDED_FOR instead of REMOTE_ADDR. This setting is used
# to inform apps of this fact
BEHIND_PROXY = False
FORCE_LOWERCASE_TAGS = True
# Uncomment this line after signing up for a Yahoo Maps API key at the
# following URL: https://developer.yahoo.com/wsregapp/
# YAHOO_MAPS_API_KEY = ""
DEBUG_TOOLBAR_CONFIG = {
"INTERCEPT_REDIRECTS": False,
}
# local_settings.py can be used to override environment-specific settings
# like database and email that differ between development and production.
try:
from local_settings import *
except ImportError:
pass
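# A minimal local_settings.py sketch (hypothetical values, for illustration
# only):
#
#   DEBUG = False
#   SECRET_KEY = "set-a-long-random-string-here"
#   DATABASES = {
#       "default": {
#           "ENGINE": "django.db.backends.postgresql_psycopg2",
#           "NAME": "social",
#           "USER": "social",
#           "PASSWORD": "change-me",
#           "HOST": "localhost",
#           "PORT": "",
#       }
#   }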
|
|
"""
Manage vms running on the OpenBSD VMM hypervisor using vmctl(8).
.. versionadded:: 2019.2.0
:codeauthor: ``Jasper Lievisse Adriaanse <jasper@openbsd.org>``
.. note::
This module requires the `vmd` service to be running on the OpenBSD
target machine.
"""
import logging
import re
import salt.utils.path
from salt.exceptions import CommandExecutionError, SaltInvocationError
log = logging.getLogger(__name__)
def __virtual__():
"""
Only works on OpenBSD with vmctl(8) present.
"""
if __grains__["os"] == "OpenBSD" and salt.utils.path.which("vmctl"):
return True
return (
False,
"The vmm execution module cannot be loaded: either the system is not OpenBSD or"
" the vmctl binary was not found",
)
def _id_to_name(id):
"""
Lookup the name associated with a VM id.
"""
vm = status(id=id)
if vm == {}:
return None
else:
return vm["name"]
def create_disk(name, size):
"""
Create a VMM disk with the specified `name` and `size`.
size:
Size in megabytes, or use a specifier such as M, G, T.
CLI Example:
.. code-block:: bash
salt '*' vmctl.create_disk /path/to/disk.img size=10G
"""
ret = False
cmd = "vmctl create {} -s {}".format(name, size)
result = __salt__["cmd.run_all"](cmd, output_loglevel="trace", python_shell=False)
if result["retcode"] == 0:
ret = True
else:
raise CommandExecutionError(
"Problem encountered creating disk image",
info={"errors": [result["stderr"]], "changes": ret},
)
return ret
def load(path):
"""
Load additional configuration from the specified file.
path
Path to the configuration file.
CLI Example:
.. code-block:: bash
salt '*' vmctl.load path=/etc/vm.switches.conf
"""
ret = False
cmd = "vmctl load {}".format(path)
result = __salt__["cmd.run_all"](cmd, output_loglevel="trace", python_shell=False)
if result["retcode"] == 0:
ret = True
else:
raise CommandExecutionError(
"Problem encountered running vmctl",
info={"errors": [result["stderr"]], "changes": ret},
)
return ret
def reload():
"""
Remove all stopped VMs and reload configuration from the default configuration file.
CLI Example:
.. code-block:: bash
salt '*' vmctl.reload
"""
ret = False
cmd = "vmctl reload"
result = __salt__["cmd.run_all"](cmd, output_loglevel="trace", python_shell=False)
if result["retcode"] == 0:
ret = True
else:
raise CommandExecutionError(
"Problem encountered running vmctl",
info={"errors": [result["stderr"]], "changes": ret},
)
return ret
def reset(all=False, vms=False, switches=False):
"""
Reset the running state of VMM or a subsystem.
all:
        Reset the running state of VMM and all of its subsystems.
switches:
Reset the configured switches.
vms:
Reset and terminate all VMs.
CLI Example:
.. code-block:: bash
salt '*' vmctl.reset all=True
"""
ret = False
cmd = ["vmctl", "reset"]
if all:
cmd.append("all")
elif vms:
cmd.append("vms")
elif switches:
cmd.append("switches")
result = __salt__["cmd.run_all"](cmd, output_loglevel="trace", python_shell=False)
if result["retcode"] == 0:
ret = True
else:
raise CommandExecutionError(
"Problem encountered running vmctl",
info={"errors": [result["stderr"]], "changes": ret},
)
return ret
def start(
name=None,
id=None,
bootpath=None,
disk=None,
disks=None,
local_iface=False,
memory=None,
nics=0,
switch=None,
):
"""
Starts a VM defined by the specified parameters.
When both a name and id are provided, the id is ignored.
name:
Name of the defined VM.
id:
VM id.
bootpath:
Path to a kernel or BIOS image to load.
disk:
Path to a single disk to use.
disks:
List of multiple disks to use.
local_iface:
Whether to add a local network interface. See "LOCAL INTERFACES"
in the vmctl(8) manual page for more information.
memory:
Memory size of the VM specified in megabytes.
switch:
Add a network interface that is attached to the specified
virtual switch on the host.
CLI Example:
.. code-block:: bash
salt '*' vmctl.start 2 # start VM with id 2
salt '*' vmctl.start name=web1 bootpath='/bsd.rd' nics=2 memory=512M disk='/disk.img'
"""
ret = {"changes": False, "console": None}
cmd = ["vmctl", "start"]
if not (name or id):
raise SaltInvocationError('Must provide either "name" or "id"')
elif name:
cmd.append(name)
else:
cmd.append(id)
name = _id_to_name(id)
if nics > 0:
cmd.append("-i {}".format(nics))
# Paths cannot be appended as otherwise the inserted whitespace is treated by
# vmctl as being part of the path.
if bootpath:
cmd.extend(["-b", bootpath])
if memory:
cmd.append("-m {}".format(memory))
if switch:
cmd.append("-n {}".format(switch))
if local_iface:
cmd.append("-L")
if disk and (disks and len(disks) > 0):
raise SaltInvocationError('Must provide either "disks" or "disk"')
if disk:
cmd.extend(["-d", disk])
    if disks and len(disks) > 0:
        # extend() with a generator of lists would nest each ["-d", x] pair
        # inside cmd as a list object, so flatten the pairs instead.
        for x in disks:
            cmd.extend(["-d", x])
# Before attempting to define a new VM, make sure it doesn't already exist.
# Otherwise return to indicate nothing was changed.
if len(cmd) > 3:
vmstate = status(name)
if vmstate:
ret["comment"] = "VM already exists and cannot be redefined"
return ret
result = __salt__["cmd.run_all"](cmd, output_loglevel="trace", python_shell=False)
if result["retcode"] == 0:
ret["changes"] = True
m = re.match(r".*successfully, tty (\/dev.*)", result["stderr"])
if m:
ret["console"] = m.groups()[0]
else:
m = re.match(r".*Operation already in progress$", result["stderr"])
if m:
ret["changes"] = False
else:
raise CommandExecutionError(
"Problem encountered running vmctl",
info={"errors": [result["stderr"]], "changes": ret},
)
return ret
def status(name=None, id=None):
"""
List VMs running on the host, or only the VM specified by ``id``. When
both a name and id are provided, the id is ignored.
name:
Name of the defined VM.
id:
VM id.
CLI Example:
.. code-block:: bash
salt '*' vmctl.status # to list all VMs
salt '*' vmctl.status name=web1 # to get a single VM
"""
ret = {}
cmd = ["vmctl", "status"]
result = __salt__["cmd.run_all"](cmd, output_loglevel="trace", python_shell=False)
if result["retcode"] != 0:
raise CommandExecutionError(
"Problem encountered running vmctl",
info={"error": [result["stderr"]], "changes": ret},
)
# Grab the header and save it with the lowercase names.
header = result["stdout"].splitlines()[0].split()
header = [x.lower() for x in header]
# A VM can be in one of the following states (from vmm.c:vcpu_state_decode())
# - stopped
# - running
# - requesting termination
# - terminated
# - unknown
for line in result["stdout"].splitlines()[1:]:
data = line.split()
vm = dict(list(zip(header, data)))
vmname = vm.pop("name")
if vm["pid"] == "-":
# If the VM has no PID it's not running.
vm["state"] = "stopped"
elif vmname and data[-2] == "-":
# When a VM does have a PID and the second to last field is a '-', it's
# transitioning to another state. A VM name itself cannot contain a
# '-' so it's safe to split on '-'.
vm["state"] = data[-1]
else:
vm["state"] = "running"
# When the status is requested of a single VM (by name) which is stopping,
# vmctl doesn't print the status line. So we'll parse the full list and
# return when we've found the requested VM.
if id and int(vm["id"]) == id:
return {vmname: vm}
elif name and vmname == name:
return {vmname: vm}
else:
ret[vmname] = vm
    # Reaching this point even though an id or name was provided means the
    # requested VM does not exist.
if id or name:
return {}
return ret
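# A minimal, self-contained sketch of the header/zip parsing used by status()
# above; the sample output is illustrative, not captured from a real host.
def _example_parse_status_output():
    sample = (
        "   ID   PID VCPUS  MAXMEM  CURMEM     TTY        OWNER NAME\n"
        "    1 12345     1    512M    128M   ttyp0         root web1"
    )
    lines = sample.splitlines()
    header = [x.lower() for x in lines[0].split()]
    vm = dict(zip(header, lines[1].split()))
    return vm["name"], vm["pid"]  # -> ('web1', '12345')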
def stop(name=None, id=None):
"""
Stop (terminate) the VM identified by the given id or name.
When both a name and id are provided, the id is ignored.
name:
Name of the defined VM.
id:
VM id.
CLI Example:
.. code-block:: bash
salt '*' vmctl.stop name=alpine
"""
ret = {}
cmd = ["vmctl", "stop"]
if not (name or id):
raise SaltInvocationError('Must provide either "name" or "id"')
elif name:
cmd.append(name)
else:
cmd.append(id)
result = __salt__["cmd.run_all"](cmd, output_loglevel="trace", python_shell=False)
if result["retcode"] == 0:
if re.match("^vmctl: sent request to terminate vm.*", result["stderr"]):
ret["changes"] = True
else:
ret["changes"] = False
else:
raise CommandExecutionError(
"Problem encountered running vmctl",
info={"errors": [result["stderr"]], "changes": ret},
)
return ret
|
|
# -*- coding: utf-8 -*-
"""
lantz.feat
~~~~~~~~~~
Implements Feat and DictFeat property-like classes with data handling,
logging, timing, cache and notification.
:copyright: 2015 by Lantz Authors, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import time
import copy
from weakref import WeakKeyDictionary
from . import Q_
from .processors import (Processor, ToQuantityProcessor, FromQuantityProcessor,
MapProcessor, ReverseMapProcessor, RangeProcessor)
class _NamedObject(object):
"""A class to construct named sentinels.
"""
def __init__(self, name):
self.name = name
def __repr__(self):
return self.name
def __str__(self):
return self.name
def __hash__(self):
return id(self)
def __deepcopy__(self, memo):
return self
MISSING = _NamedObject('MISSING')
def _dget(adict, instance=MISSING, key=MISSING):
try:
adict = adict[instance]
except KeyError:
adict = adict[MISSING]
try:
return adict[key]
except KeyError:
return adict[MISSING]
def _dset(adict, value, instance=MISSING, key=MISSING):
if instance not in adict:
adict[instance] = copy.deepcopy(adict[MISSING])
if key not in adict[instance]:
adict[instance][key] = copy.deepcopy(adict[instance][MISSING])
if isinstance(adict[instance][key], dict):
adict[instance][key].update(value)
else:
adict[instance][key] = value
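def _example_dget_dset():
    # Sketch of the layered-default storage implemented above: the MISSING
    # slots act as class-level defaults that _dset copies lazily per
    # instance/key before applying an override. Plain string keys are used
    # here for illustration; Feat stores weakly referenced instances.
    store = {MISSING: {MISSING: {'units': None}}}
    _dset(store, {'units': 'V'}, instance='inst', key='ch1')
    assert _dget(store, 'inst', 'ch1')['units'] == 'V'
    assert _dget(store, 'other', 'ch9')['units'] is None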
class Feat(object):
"""Pimped Python property for interfacing with instruments. Can be used as
a decorator.
    Processors can be registered for each argument to modify its value before
    it is passed to the body of the method. Two standard processors are
    defined: `values` and `units`; others can be given as callables in the
    `procs` parameter.
    If a method takes multiple arguments, use a tuple. None can be used to
    mean `do not change`.
:param fget: getter function.
:param fset: setter function.
:param doc: docstring, if missing fget or fset docstring will be used.
    :param values: A dictionary mapping keys to values, or a set restricting
                   the allowed values.
                   If a list/tuple is given instead of a dict, the value is
                   not changed but only tested to belong to the container.
:param units: `Quantity` or string that can be interpreted as units.
:param procs: Other callables to be applied to input arguments.
"""
__original_doc__ = ''
def __init__(self, fget=MISSING, fset=None, doc=None, *,
values=None, units=None, limits=None, procs=None,
read_once=False):
self.fget = fget
self.fset = fset
self.__doc__ = doc
self.name = '?'
#: instance: value
self.value = WeakKeyDictionary()
#: instance: key: value
self.modifiers = WeakKeyDictionary()
self.get_processors = WeakKeyDictionary()
self.set_processors = WeakKeyDictionary()
# Take documentation from fget or fset
# if not provided explicitly.
if self.__doc__ is None:
if fget is not MISSING and fget.__doc__:
self.__doc__ = fget.__doc__
elif fset and fset.__doc__:
self.__doc__ = fset.__doc__
self.modifiers[MISSING] = {MISSING: {'values': values,
'units': units,
'limits': limits,
'processors': procs}}
self.get_processors[MISSING] = {MISSING: ()}
self.set_processors[MISSING] = {MISSING: ()}
self.read_once = read_once
self.rebuild(build_doc=True, store=True)
def rebuild(self, instance=MISSING, key=MISSING, build_doc=False, modifiers=None, store=False):
if not modifiers:
modifiers = _dget(self.modifiers, instance, key)
values = modifiers['values']
units = modifiers['units']
limits = modifiers['limits']
processors = modifiers['processors']
get_processors = []
set_processors = []
if units:
get_processors.append(ToQuantityProcessor(units))
set_processors.append(FromQuantityProcessor(units))
if values:
get_processors.append(ReverseMapProcessor(values))
set_processors.append(MapProcessor(values))
if limits:
if isinstance(limits[0], (list, tuple)):
set_processors.append(RangeProcessor(limits))
else:
set_processors.append(RangeProcessor((limits, )))
if processors:
for getp, setp in processors:
if getp is not None:
get_processors.append(Processor(getp))
if setp is not None:
set_processors.append(Processor(setp))
if build_doc:
_dochelper(self)
if store:
_dset(self.get_processors, get_processors, instance, key)
_dset(self.set_processors, set_processors, instance, key)
return get_processors, set_processors
def __call__(self, func):
if self.fget is MISSING:
return self.getter(func)
return self.setter(func)
def getter(self, func):
if func.__doc__ and not self.__original_doc__:
self.__original_doc__ = func.__doc__
_dochelper(self)
self.fget = func
return self
def setter(self, func):
if func.__doc__ and not self.__original_doc__:
self.__original_doc__ = func.__doc__
_dochelper(self)
self.fset = func
return self
def post_getter(self, func):
self.post_get = func
return self
def post_setter(self, func):
self.pre_set = func
return self
def post_get(self, value, instance=None, key=MISSING):
for processor in reversed(_dget(self.get_processors, instance, key)):
value = processor(value)
return value
def pre_set(self, value, instance=None, key=MISSING):
for processor in _dget(self.set_processors, instance, key):
value = processor(value)
return value
def get(self, instance, owner=None, key=MISSING):
if instance is None:
return self
name = self.name + ('' if key is MISSING else '[{!r}]'.format(key))
if self.fget is None or self.fget is MISSING:
raise AttributeError('{} is a write-only feature'.format(name))
current = self.get_cache(instance, key)
if self.read_once and current is not MISSING:
return current
        # This part calls the underlying get function, wrapping it with
        # timing, caching, logging and error handling.
with instance._lock:
instance.log_info('Getting {}', name)
try:
tic = time.time()
if key is MISSING:
value = self.fget(instance)
else:
value = self.fget(instance, key)
except Exception as e:
instance.log_error('While getting {}: {}', name, e)
raise e
instance.timing.add('get_' + name, time.time() - tic)
instance.log_debug('(raw) Got {} for {}', value, name)
try:
value = self.post_get(value, instance, key)
except Exception as e:
instance.log_error('While post-processing {} for {}: {}', value, name, e)
raise e
instance.log_info('Got {} for {}', value, name, lantz_feat=(name, str(value)))
self.set_cache(instance, value, key)
return value
def set(self, instance, value, force=False, key=MISSING):
name = self.name + ('' if key is MISSING else '[{!r}]'.format(key))
if self.fset is None:
raise AttributeError('{} is a read-only feature'.format(name))
        # This part calls the underlying set function, wrapping it with
        # timing, caching, logging and error handling.
with instance._lock:
current_value = self.get_cache(instance, key)
if not force and value == current_value:
instance.log_info('No need to set {} = {} (current={}, force={})', name, value, current_value, force)
return
instance.log_info('Setting {} = {} (current={}, force={})', name, value, current_value, force)
try:
t_value = self.pre_set(value, instance, key)
except Exception as e:
instance.log_error('While pre-processing {} for {}: {}', value, name, e)
raise e
instance.log_debug('(raw) Setting {} = {}', name, t_value)
try:
tic = time.time()
if key is MISSING:
self.fset(instance, t_value)
else:
self.fset(instance, key, t_value)
except Exception as e:
instance.log_error('While setting {} to {}. {}', name, value, e)
raise e
instance.timing.add('set_' + name, time.time() - tic)
instance.log_info('{} was set to {}', name, value, lantz_feat=(name, str(value)))
self.set_cache(instance, value, key)
def __get__(self, instance, owner=None):
return self.get(instance)
def __set__(self, instance, value):
self.set(instance, value)
def __delete__(self, instance):
raise AttributeError('{} is a permanent feat of {}'.format(self.name, instance.__class__.__name__))
def get_cache(self, instance, key=MISSING):
try:
return self.value[instance]
except KeyError:
return MISSING
def set_cache(self, instance, value, key=MISSING):
old_value = self.get_cache(instance, key)
if value == old_value:
return
if isinstance(value, Q_):
value = copy.copy(value)
self.value[instance] = value
getattr(instance, self.name + '_changed').emit(value, old_value)
class DictFeat(Feat):
"""Pimped Python property with getitem access for interfacing with
instruments. Can be used as a decorator.
Takes the same parameters as `Feat`, plus:
:param keys: List/tuple restricts the keys to the specified ones.
"""
def __init__(self, fget=MISSING, fset=None, doc=None, *,
keys=None, **kwargs):
super().__init__(fget, fset, doc, **kwargs)
self.modifiers[MISSING][MISSING]['keys'] = keys
def getitem(self, instance, key):
keys = _dget(self.modifiers, instance, key)['keys']
        if keys and key not in keys:
            raise KeyError('{} is not a valid key for {} {}'.format(
                key, self.name, keys))
if isinstance(keys, dict):
key = keys[key]
return self.get(instance, instance.__class__, key)
def setitem(self, instance, key, value, force=False):
keys = _dget(self.modifiers, instance, key)['keys']
        if keys and key not in keys:
            raise KeyError('{} is not a valid key for {} {}'.format(
                key, self.name, keys))
if isinstance(keys, dict):
key = keys[key]
self.set(instance, value, force, key)
def __get__(self, instance, owner=None):
if not instance:
return self
return _DictFeatAccesor(instance, self)
def __set__(self, instance, value):
if not isinstance(value, dict):
            raise AttributeError('This is a DictFeat and cannot be set in this way. '
                                 'You probably want to do something like: '
                                 'obj.prop[index] = value or obj.prop = dict')
        for key, item in value.items():
            self.setitem(instance, key, item)
def __delete__(self, instance):
        raise AttributeError('{} is a permanent attribute of {}'.format(
            self.name, instance.__class__.__name__))
def get_cache(self, instance, key=MISSING):
keys = _dget(self.modifiers, instance, key)['keys']
if instance not in self.value:
self.value[instance] = dict()
if isinstance(keys, dict):
keys = keys.values()
if keys and key not in keys:
            raise KeyError('{} is not a valid key for {} {}'.format(
                key, self.name, keys))
if key is MISSING:
return self.value[instance]
else:
return self.value[instance].get(key, MISSING)
def set_cache(self, instance, value, key=MISSING):
old_value = self.get_cache(instance, key)
if value == old_value:
return
if key is MISSING:
assert isinstance(value, dict)
self.value[instance] = value
else:
self.value[instance][key] = value
getattr(instance, self.name + '_changed').emit(value, old_value, {'key': key})
def _dochelper(feat):
if not hasattr(feat, '__original_doc__'):
feat.__original_doc__ = feat.__doc__ or ''
doc = ''
predoc = ''
modifiers = feat.modifiers[MISSING][MISSING]
if isinstance(feat, DictFeat):
predoc = ':keys: {}\n\n'.format(modifiers.get('keys', None) or 'ANY')
if modifiers['values']:
doc += ':values: {}\n'.format(modifiers['values'])
if modifiers['units']:
doc += ':units: {}\n'.format(modifiers['units'])
if modifiers['limits']:
doc += ':limits: {}\n'.format(modifiers['limits'])
if modifiers['processors']:
docpg = []
docps = []
for getp, setp in modifiers['processors']:
if getp is not None:
docpg.insert(0, ' - {}'.format(getp))
if setp is not None:
docps.append(' - {}'.format(setp))
if docpg:
doc += ':get procs: {}'.format('\n'.join(docpg))
if docps:
doc += ':set procs: {}'.format('\n'.join(docps))
if predoc:
predoc = '\n\n{}'.format(predoc)
if doc:
doc = '\n\n{}'.format(doc)
feat.__doc__ = predoc + feat.__original_doc__ + doc
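# A minimal sketch of the decorator protocol implemented above. The class is
# hypothetical and illustrative only: real drivers subclass lantz's Driver,
# which supplies the _lock, logging and timing hooks that get()/set() rely
# on, so only the declaration (not instantiation) is exercised here.
class _ExampleDriver(object):
    @Feat(units='V', limits=(0, 10))
    def amplitude(self):
        # Getter body: a real driver would query the instrument here.
        return 1.0
    @amplitude.setter
    def amplitude(self, value):
        # Setter body: a real driver would write to the instrument here.
        pass
    @DictFeat(keys=(1, 2, 3, 4))
    def channel_enabled(self, key):
        # Indexed getter, accessed as instance.channel_enabled[key].
        return True
    @channel_enabled.setter
    def channel_enabled(self, key, value):
        pass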
class FeatProxy(object):
"""Proxy object for Feat that allows to
store instance specific modifiers.
"""
def __init__(self, instance, feat, key=MISSING):
super().__setattr__('instance', instance)
super().__setattr__('feat', feat)
super().__setattr__('key', key)
def __getattr__(self, item):
modifiers = _dget(self.feat.modifiers, self.instance, self.key)
if item not in modifiers:
return getattr(self.feat, item)
return modifiers[item]
def __setattr__(self, item, value):
_modifiers = _dget(self.feat.modifiers, MISSING, MISSING)
if item not in _modifiers:
raise AttributeError()
_dset(self.feat.modifiers, {item: value}, self.instance, self.key)
self.feat.rebuild(self.instance, self.key, build_doc=False, store=True)
def __getitem__(self, key):
if not isinstance(self.feat, DictFeat):
raise TypeError
return self.__class__(self.instance, self.feat, key)
class _DictFeatAccesor(object):
"""Helper class to provide indexed access to DictFeat.
"""
def __init__(self, instance, dictfeat):
self.df = dictfeat
self.instance = instance
def __getitem__(self, key):
return DictFeat.getitem(self.df, self.instance, key)
def __setitem__(self, key, value):
DictFeat.setitem(self.df, self.instance, key, value)
def __repr__(self):
return repr(self.df.value[self.instance])
|
|
"""
Copyright (c) 2004-Present Pivotal Software, Inc.
This program and the accompanying materials are made available under
the terms of the under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import tinctest
from tinctest.lib import local_path
from tinctest.runner import TINCTestRunner
from mpp.models import SQLTestCase, SQLTestCaseException
import unittest2 as unittest
import shutil
from contextlib import closing
from datetime import datetime
from StringIO import StringIO
from unittest2.runner import _WritelnDecorator
# we're testing SQLTestCase as it pertains to tinc.py (and only tinc.py)
# as such, any attempts by raw unit2 to discover and load MockSQLTestCase must be averted
@unittest.skip('mock')
class MockSQLTestCase(SQLTestCase):
"""
@description test case with metadata
@created 2012-07-05 12:00:00
@modified 2012-07-05 12:00:02
@tags orca hashagg
"""
db_name=os.getenv('USER')
def test_explicit_test_method(self):
pass
@unittest.skip('mock')
class MockSQLTestCaseGenerateAns(SQLTestCase):
"""
@description test case with metadata
@created 2012-07-05 12:00:00
@modified 2012-07-05 12:00:02
@tags orca hashagg
"""
sql_dir = 'sql_no_ans/'
generate_ans = 'yes'
def test_explicit_test_method(self):
pass
@unittest.skip('mock')
class MockSQLTestCaseForceGenerateAns(SQLTestCase):
"""
@description test case with metadata
@created 2012-07-05 12:00:00
@modified 2012-07-05 12:00:02
@tags orca hashagg
"""
sql_dir = 'sql_no_ans/'
generate_ans = 'force'
def test_explicit_test_method(self):
pass
@unittest.skip('mock')
class MockSQLTestCaseIncorrectGenerateAns(SQLTestCase):
"""
@description test case with metadata
@created 2012-07-05 12:00:00
@modified 2012-07-05 12:00:02
@tags orca hashagg
"""
# Misspelled generate_ans. Invalid value.
generate_ans = 'yess'
def test_explicit_test_method(self):
pass
@unittest.skip('mock')
class MockSQLTestCaseGpdiffNoAnsFile(SQLTestCase):
"""
@description test case with metadata
@created 2012-07-05 12:00:00
@modified 2012-07-05 12:00:02
@tags orca hashagg
"""
sql_dir = 'sql_no_ans/'
def test_explicit_test_method(self):
pass
@unittest.skip('mock')
class MockSQLTestCaseNoGpdiffNoAnsFile(SQLTestCase):
"""
@description test case with metadata
@created 2012-07-05 12:00:00
@modified 2012-07-05 12:00:02
@tags orca hashagg
@gpdiff False
"""
sql_dir = 'sql_no_ans/'
def test_explicit_test_method(self):
pass
@unittest.skip('mock')
class MockSQLTestCaseWithOptimizerOn(SQLTestCase):
"""
@description test case with metadata
@created 2012-07-05 12:00:00
@modified 2012-07-05 12:00:02
@tags orca hashagg
@optimizer_mode on
"""
db_name=os.getenv('USER')
@unittest.skip('mock')
class MockSQLTestCaseWithOptimizerOff(SQLTestCase):
"""
@description test case with metadata
@created 2012-07-05 12:00:00
@modified 2012-07-05 12:00:02
@tags orca hashagg
@optimizer_mode off
"""
db_name=os.getenv('USER')
@unittest.skip('mock')
class MockSQLTestCaseWithOptimizerBoth(SQLTestCase):
"""
@description test case with metadata
@created 2012-07-05 12:00:00
@modified 2012-07-05 12:00:02
@tags orca hashagg
@optimizer_mode both
"""
db_name=os.getenv('USER')
class SQLTestCaseTests(unittest.TestCase):
def test_run_sql_test_failure(self):
test_loader = tinctest.TINCTestLoader()
test_suite = test_loader.loadTestsFromTestCase(MockSQLTestCase)
# Find our desired test case in test_suite.
# This code is a consequence of us only having implemented
# loadTestsFromTestCase. An implementation of loadTestsFromNames
        # would likely have allowed us to isolate test_query02 directly.
test_case = None
for temp in test_suite._tests:
if temp.name == "MockSQLTestCase.test_query02":
test_case = temp
self.assertIsNotNone(test_case)
# As explained above, we want MockSQLTestCase to run if and only if
# it's being invoked by our unit tests. So, it's skipped if discovered
# directly by unit2. Here, bearing in mind that SQLTestCaseTests is itself
# triggered by unit2, we override MockSQLTestCase's skip decorator to allow
# this explicit construction of MockSQLTestCase to proceed.
test_case.__class__.__unittest_skip__ = False
test_result = unittest.TestResult()
test_case.run(test_result)
self.assertEqual(test_result.testsRun, 1)
self.assertEqual(len(test_result.errors), 0)
self.assertEqual(len(test_result.skipped), 0)
self.assertEqual(len(test_result.failures), 1)
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query02.diff')))
shutil.rmtree(test_case.get_out_dir())
def test_run_sql_test_success(self):
test_loader = tinctest.TINCTestLoader()
test_suite = test_loader.loadTestsFromTestCase(MockSQLTestCase)
# Find our desired test case in test_suite.
# This code is a consequence of us only having implemented
# loadTestsFromTestCase. An implementation of loadTestsFromNames
        # would likely have allowed us to isolate test_query02 directly.
test_case = None
for temp in test_suite._tests:
if temp.name == "MockSQLTestCase.test_query03":
test_case = temp
self.assertIsNotNone(test_case)
# As explained above, we want MockSQLTestCase to run if and only if
# it's being invoked by our unit tests. So, it's skipped if discovered
# directly by unit2. Here, bearing in mind that SQLTestCaseTests is itself
# triggered by unit2, we override MockSQLTestCase's skip decorator to allow
# this explicit construction of MockSQLTestCase to proceed.
test_case.__class__.__unittest_skip__ = False
test_result = unittest.TestResult()
test_case.run(test_result)
self.assertEqual(test_result.testsRun, 1)
self.assertEqual(len(test_result.errors), 0)
self.assertEqual(len(test_result.skipped), 0)
self.assertEqual(len(test_result.failures), 0)
shutil.rmtree(test_case.get_out_dir())
def test_run_entire_sql_test_case(self):
test_loader = tinctest.TINCTestLoader()
test_suite = test_loader.loadTestsFromTestCase(MockSQLTestCase)
# As explained above, we want MockSQLTestCase to run if and only if
# it's being invoked by our unit tests. So, it's skipped if discovered
# directly by unit2. Here, bearing in mind that SQLTestCaseTests is itself
# triggered by unit2, we override MockSQLTestCase's skip decorator to allow
# this explicit construction of MockSQLTestCase to proceed.
test_case = None
for test_case in test_suite._tests:
test_case.__class__.__unittest_skip__ = False
test_result = unittest.TestResult()
test_suite.run(test_result)
# 3 sql files with ans files and 1 explicit method
self.assertEqual(test_result.testsRun, 4)
self.assertEqual(len(test_result.errors), 0)
self.assertEqual(len(test_result.skipped), 0)
self.assertEqual(len(test_result.failures), 1)
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query02.diff')))
shutil.rmtree(test_case.get_out_dir())
def test_verify_setup_teardown(self):
test_loader = tinctest.TINCTestLoader()
test_suite = test_loader.loadTestsFromTestCase(MockSQLTestCase)
# As explained above, we want MockSQLTestCase to run if and only if
# it's being invoked by our unit tests. So, it's skipped if discovered
# directly by unit2. Here, bearing in mind that SQLTestCaseTests is itself
# triggered by unit2, we override MockSQLTestCase's skip decorator to allow
# this explicit construction of MockSQLTestCase to proceed.
for test_case in test_suite._tests:
test_case.__class__.__unittest_skip__ = False
if os.path.exists(local_path("output/")):
shutil.rmtree(local_path("output/"))
test_result = unittest.TestResult()
test_suite.run(test_result)
self.assertEqual(test_result.testsRun, 4)
self.assertEqual(len(test_result.errors), 0)
self.assertEqual(len(test_result.skipped), 0)
self.assertEqual(len(test_result.failures), 1)
# Verify if setup and teardown sqls were executed
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'setup.out')))
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'setup', 'setup1.out')))
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'teardown.out')))
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'teardown', 'teardown1.out')))
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query03_setup.out')))
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query03_teardown.out')))
def test_run_explicit_test_method(self):
test_loader = tinctest.TINCTestLoader()
test_suite = test_loader.loadTestsFromTestCase(MockSQLTestCase)
# Find our desired test case in test_suite.
# This code is a consequence of us only having implemented
# loadTestsFromTestCase. An implementation of loadTestsFromNames
        # would likely have allowed us to isolate test_query02 directly.
test_case = None
for temp in test_suite._tests:
if temp.name == "MockSQLTestCase.test_explicit_test_method":
test_case = temp
self.assertIsNotNone(test_case)
# As explained above, we want MockSQLTestCase to run if and only if
# it's being invoked by our unit tests. So, it's skipped if discovered
# directly by unit2. Here, bearing in mind that SQLTestCaseTests is itself
# triggered by unit2, we override MockSQLTestCase's skip decorator to allow
# this explicit construction of MockSQLTestCase to proceed.
test_case.__class__.__unittest_skip__ = False
test_result = unittest.TestResult()
test_case.run(test_result)
self.assertEqual(test_result.testsRun, 1)
self.assertEqual(len(test_result.errors), 0)
self.assertEqual(len(test_result.skipped), 0)
self.assertEqual(len(test_result.failures), 0)
def test_with_local_init_file(self):
test_loader = tinctest.TINCTestLoader()
test_suite = test_loader.loadTestsFromTestCase(MockSQLTestCase)
# Find our desired test case in test_suite.
# This code is a consequence of us only having implemented
# loadTestsFromTestCase. An implementation of loadTestsFromNames
        # would likely have allowed us to isolate test_query02 directly.
test_case = None
for temp in test_suite._tests:
if temp.name == "MockSQLTestCase.test_query04":
test_case = temp
self.assertIsNotNone(test_case)
# As explained above, we want MockSQLTestCase to run if and only if
# it's being invoked by our unit tests. So, it's skipped if discovered
# directly by unit2. Here, bearing in mind that SQLTestCaseTests is itself
# triggered by unit2, we override MockSQLTestCase's skip decorator to allow
# this explicit construction of MockSQLTestCase to proceed.
test_case.__class__.__unittest_skip__ = False
test_result = unittest.TestResult()
test_case.run(test_result)
self.assertEqual(test_result.testsRun, 1)
self.assertEqual(len(test_result.errors), 0)
self.assertEqual(len(test_result.skipped), 0)
self.assertEqual(len(test_result.failures), 0)
def test_run_no_ans_file(self):
test_loader = tinctest.TINCTestLoader()
test_suite = test_loader.loadTestsFromTestCase(MockSQLTestCase)
# Store all test names in a list
test_case_list = []
for temp in test_suite._tests:
test_case_list.append(temp.name)
        # Verify that the other sql files with ans files and the explicit method are in the list
self.assertTrue('MockSQLTestCase.test_explicit_test_method' in test_case_list)
self.assertTrue('MockSQLTestCase.test_query02' in test_case_list)
# Verify that test_query_no_ans_file is not there, even though the sql file is there without the ans file
self.assertTrue('MockSQLTestCase.test_query_no_ans_file' not in test_case_list)
# Verify the default value of generate_ans is no
self.assertTrue(MockSQLTestCase.generate_ans == 'no')
def test_gpdiff_no_ans_file(self):
"""
        Test whether we throw an exception when there is no ans file for a sql file and gpdiff is set to True
"""
test_loader = tinctest.TINCTestLoader()
test_suite = test_loader.loadTestsFromTestCase(MockSQLTestCaseGpdiffNoAnsFile)
# Find our desired test case in test_suite.
# This code is a consequence of us only having implemented
# loadTestsFromTestCase. An implementation of loadTestsFromNames
        # would likely have allowed us to isolate test_query02 directly.
test_case = None
for temp in test_suite._tests:
if temp.name == "MockSQLTestCaseGpdiffNoAnsFile.test_query_no_ans_file":
test_case = temp
self.assertIsNotNone(test_case)
# As explained above, we want MockSQLTestCase to run if and only if
# it's being invoked by our unit tests. So, it's skipped if discovered
# directly by unit2. Here, bearing in mind that SQLTestCaseTests is itself
# triggered by unit2, we override MockSQLTestCase's skip decorator to allow
# this explicit construction of MockSQLTestCase to proceed.
test_case.__class__.__unittest_skip__ = False
test_result = unittest.TestResult()
test_case.run(test_result)
self.assertEqual(test_result.testsRun, 1)
self.assertEqual(len(test_result.errors), 1)
self.assertEqual(len(test_result.skipped), 0)
self.assertEqual(len(test_result.failures), 0)
def test_no_gpdiff_no_ans_file(self):
"""
Test whether we construct a test for sqls with no ans files when gpdiff is turned off
"""
test_loader = tinctest.TINCTestLoader()
test_suite = test_loader.loadTestsFromTestCase(MockSQLTestCaseNoGpdiffNoAnsFile)
# Store all test names in a list
test_case_list = []
for temp in test_suite._tests:
test_case_list.append(temp.name)
        # Verify that the other sql files with ans files and the explicit method are in the list
self.assertTrue('MockSQLTestCaseNoGpdiffNoAnsFile.test_explicit_test_method' in test_case_list)
self.assertTrue('MockSQLTestCaseNoGpdiffNoAnsFile.test_query02' in test_case_list)
# Verify that test_query_no_ans_file is there, even though the sql file is there without the ans file
self.assertTrue('MockSQLTestCaseNoGpdiffNoAnsFile.test_query_no_ans_file' in test_case_list)
def test_run_generate_ans_file_class_variable(self):
test_loader = tinctest.TINCTestLoader()
test_suite = test_loader.loadTestsFromTestCase(MockSQLTestCaseGenerateAns)
# Store all test names in a list
test_case_list = []
for temp in test_suite._tests:
test_case_list.append(temp.name)
        # Verify that the other sql files with ans files and the explicit method are in the list
self.assertTrue('MockSQLTestCaseGenerateAns.test_explicit_test_method' in test_case_list)
self.assertTrue('MockSQLTestCaseGenerateAns.test_query02' in test_case_list)
# Verify that test_query_no_ans_file is also there, even though its ans file is not there
self.assertTrue('MockSQLTestCaseGenerateAns.test_query_no_ans_file' in test_case_list)
def test_run_incorrect_generate_ans_file_class_variable(self):
test_loader = tinctest.TINCTestLoader()
test_suite = test_loader.loadTestsFromTestCase(MockSQLTestCaseIncorrectGenerateAns)
count = 0
for test in test_suite._tests:
if 'TINCTestCaseLoadFailure' in str(test):
count += 1
        self.assertEqual(count, 1)
def test_run_sql_generate_ans(self):
test_loader = tinctest.TINCTestLoader()
test_suite = test_loader.loadTestsFromTestCase(MockSQLTestCaseGenerateAns)
# Ans file that will be generated
ans_file = local_path("query_no_ans_file.ans")
# If ans file is there for some reason, remove it (not testing force here)
if os.path.exists(ans_file):
os.remove(ans_file)
# Find our desired test case in test_suite.
# This code is a consequence of us only having implemented
# loadTestsFromTestCase. An implementation of loadTestsFromNames
        # would likely have allowed us to isolate test_query02 directly.
test_case = None
for temp in test_suite._tests:
if temp.name == "MockSQLTestCaseGenerateAns.test_query_no_ans_file":
test_case = temp
self.assertIsNotNone(test_case)
# As explained above, we want MockSQLTestCase to run if and only if
# it's being invoked by our unit tests. So, it's skipped if discovered
# directly by unit2. Here, bearing in mind that SQLTestCaseTests is itself
# triggered by unit2, we override MockSQLTestCase's skip decorator to allow
# this explicit construction of MockSQLTestCase to proceed.
test_case.__class__.__unittest_skip__ = False
test_result = unittest.TestResult()
test_case.run(test_result)
self.assertEqual(test_result.testsRun, 1)
self.assertEqual(len(test_result.errors), 0)
self.assertEqual(len(test_result.skipped), 0)
self.assertEqual(len(test_result.failures), 0)
# Verify that ans file is generated
self.assertTrue(os.path.exists(local_path("setup.ans")))
self.assertTrue(os.path.exists(ans_file))
self.assertTrue(os.path.exists(local_path("teardown.ans")))
# Cleanup
os.remove(local_path("setup.ans"))
os.remove(ans_file)
os.remove(local_path("teardown.ans"))
def test_run_sql_force_generate_ans(self):
test_loader = tinctest.TINCTestLoader()
test_suite = test_loader.loadTestsFromTestCase(MockSQLTestCaseForceGenerateAns)
# Ans file that will be generated
ans_file = local_path("query_no_ans_file.ans")
# Create the empty ans file to allow force to overwrite
open(ans_file, 'w').close()
self.assertTrue(os.path.getsize(ans_file) == 0)
# Find our desired test case in test_suite.
# This code is a consequence of us only having implemented
# loadTestsFromTestCase. An implementation of loadTestsFromNames
        # would likely have allowed us to isolate test_query02 directly.
test_case = None
for temp in test_suite._tests:
if temp.name == "MockSQLTestCaseForceGenerateAns.test_query_no_ans_file":
test_case = temp
self.assertIsNotNone(test_case)
# As explained above, we want MockSQLTestCase to run if and only if
# it's being invoked by our unit tests. So, it's skipped if discovered
# directly by unit2. Here, bearing in mind that SQLTestCaseTests is itself
# triggered by unit2, we override MockSQLTestCase's skip decorator to allow
# this explicit construction of MockSQLTestCase to proceed.
test_case.__class__.__unittest_skip__ = False
test_result = unittest.TestResult()
test_case.run(test_result)
self.assertEqual(test_result.testsRun, 1)
self.assertEqual(len(test_result.errors), 0)
self.assertEqual(len(test_result.skipped), 0)
self.assertEqual(len(test_result.failures), 0)
# Verify that ans file is there
self.assertTrue(os.path.exists(local_path("setup.ans")))
self.assertTrue(os.path.exists(ans_file))
self.assertTrue(os.path.exists(local_path("teardown.ans")))
# Verify that ans file size is greater than 0
self.assertTrue(os.path.getsize(ans_file) > 0)
# Cleanup
os.remove(local_path("setup.ans"))
os.remove(ans_file)
os.remove(local_path("teardown.ans"))
def test_run_sql_force_generate_ans_permission_denied(self):
test_loader = tinctest.TINCTestLoader()
test_suite = test_loader.loadTestsFromTestCase(MockSQLTestCaseForceGenerateAns)
# Find our desired test case in test_suite.
# This code is a consequence of us only having implemented
# loadTestsFromTestCase. An implementation of loadTestsFromNames
        # would likely have allowed us to isolate test_query04 directly.
test_case = None
for temp in test_suite._tests:
if temp.name == "MockSQLTestCaseForceGenerateAns.test_query04":
                # query04.ans isn't checked out from Perforce, so it is not writable
test_case = temp
self.assertIsNotNone(test_case)
# As explained above, we want MockSQLTestCase to run if and only if
# it's being invoked by our unit tests. So, it's skipped if discovered
# directly by unit2. Here, bearing in mind that SQLTestCaseTests is itself
# triggered by unit2, we override MockSQLTestCase's skip decorator to allow
# this explicit construction of MockSQLTestCase to proceed.
test_case.__class__.__unittest_skip__ = False
test_result = unittest.TestResult()
test_case.run(test_result)
self.assertEqual(test_result.testsRun, 1)
self.assertEqual(len(test_result.errors), 1)
self.assertEqual(len(test_result.skipped), 0)
self.assertEqual(len(test_result.failures), 0)
def test_run_sql_file(self):
test_case = MockSQLTestCase('test_query03')
if os.path.exists(test_case.get_out_dir()):
shutil.rmtree(test_case.get_out_dir())
# Default mode
test_case.run_sql_file(local_path('query03.sql'))
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query03.sql')))
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query03.out')))
self.assertFalse(self._check_str_in_file('SET optimizer',
os.path.join(test_case.get_out_dir(), 'query03.sql')))
# Optimizer on mode
test_case.run_sql_file(local_path('query03.sql'), optimizer=True)
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query03_orca.sql')))
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query03_orca.out')))
self.assertTrue(self._check_str_in_file('SET optimizer=on;',
os.path.join(test_case.get_out_dir(), 'query03_orca.sql')))
# Optimizer off mode
test_case.run_sql_file(local_path('query03.sql'), optimizer=False)
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query03_planner.sql')))
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query03_planner.out')))
self.assertTrue(self._check_str_in_file('SET optimizer=off;',
os.path.join(test_case.get_out_dir(), 'query03_planner.sql')))
def test_run_sql_test_optimizer_on(self):
test_loader = tinctest.TINCTestLoader()
test_suite = test_loader.loadTestsFromTestCase(MockSQLTestCaseWithOptimizerOn)
# Find our desired test case in test_suite.
# This code is a consequence of us only having implemented
# loadTestsFromTestCase. An implementation of loadTestsFromNames
        # would likely have allowed us to isolate test_query02 directly.
test_case = None
for temp in test_suite._tests:
if temp.name == "MockSQLTestCaseWithOptimizerOn.test_query03":
test_case = temp
self.assertIsNotNone(test_case)
# As explained above, we want MockSQLTestCase to run if and only if
# it's being invoked by our unit tests. So, it's skipped if discovered
# directly by unit2. Here, bearing in mind that SQLTestCaseTests is itself
# triggered by unit2, we override MockSQLTestCase's skip decorator to allow
# this explicit construction of MockSQLTestCase to proceed.
test_case.__class__.__unittest_skip__ = False
test_result = unittest.TestResult()
test_case.run(test_result)
self.assertEqual(test_result.testsRun, 1)
self.assertEqual(len(test_result.errors), 0)
self.assertEqual(len(test_result.skipped), 0)
self.assertEqual(len(test_result.failures), 0)
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query03_orca.sql')))
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query03_orca.out')))
self.assertTrue(self._check_str_in_file("SET optimizer=on;",
os.path.join(test_case.get_out_dir(), 'query03_orca.sql')))
self.assertTrue(self._check_str_in_file("SET optimizer=on;",
os.path.join(test_case.get_out_dir(), 'query03_orca.out')))
shutil.rmtree(test_case.get_out_dir())
def test_run_sql_test_optimizer_off(self):
test_loader = tinctest.TINCTestLoader()
test_suite = test_loader.loadTestsFromTestCase(MockSQLTestCaseWithOptimizerOff)
# Find our desired test case in test_suite.
# This code is a consequence of us only having implemented
# loadTestsFromTestCase. An implementation of loadTestsFromNames
        # would likely have allowed us to isolate test_query02 directly.
test_case = None
for temp in test_suite._tests:
if temp.name == "MockSQLTestCaseWithOptimizerOff.test_query03":
test_case = temp
self.assertIsNotNone(test_case)
# As explained above, we want MockSQLTestCase to run if and only if
# it's being invoked by our unit tests. So, it's skipped if discovered
# directly by unit2. Here, bearing in mind that SQLTestCaseTests is itself
# triggered by unit2, we override MockSQLTestCase's skip decorator to allow
# this explicit construction of MockSQLTestCase to proceed.
test_case.__class__.__unittest_skip__ = False
test_result = unittest.TestResult()
test_case.run(test_result)
self.assertEqual(test_result.testsRun, 1)
self.assertEqual(len(test_result.errors), 0)
self.assertEqual(len(test_result.skipped), 0)
self.assertEqual(len(test_result.failures), 0)
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query03_planner.sql')))
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query03_planner.out')))
self.assertTrue(self._check_str_in_file("SET optimizer=off;",
os.path.join(test_case.get_out_dir(), 'query03_planner.sql')))
self.assertTrue(self._check_str_in_file("SET optimizer=off;",
os.path.join(test_case.get_out_dir(), 'query03_planner.out')))
shutil.rmtree(test_case.get_out_dir())
def test_run_sql_test_optimizer_both(self):
test_loader = tinctest.TINCTestLoader()
# For data provider test cases, we have to use loadTestsFromName, since loadTestsFromTestCase won't filter and expand
test_suite = test_loader.loadTestsFromName("mpp.models.regress.sql_related.regress_sql_test_case.regress_sql_test_case.MockSQLTestCaseWithOptimizerBoth")
# Find our desired test case in test_suite.
test_case = None
new_test_suite = tinctest.TINCTestSuite()
for temp in test_suite._tests:
if "MockSQLTestCaseWithOptimizerBoth.test_query03" in temp.name:
new_test_suite.addTest(temp)
temp.__class__.__unittest_skip__ = False
test_case = temp
self.assertIsNotNone(new_test_suite)
        self.assertEqual(new_test_suite.countTestCases(), 2)
test_result = unittest.TestResult()
new_test_suite.run(test_result)
self.assertEqual(test_result.testsRun, 2)
self.assertEqual(len(test_result.errors), 0)
self.assertEqual(len(test_result.skipped), 0)
self.assertEqual(len(test_result.failures), 0)
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query03_planner.sql')))
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query03_planner.out')))
self.assertTrue(self._check_str_in_file("SET optimizer=off;",
                                                os.path.join(test_case.get_out_dir(), 'query03_planner.sql')))
self.assertTrue(self._check_str_in_file("SET optimizer=off;",
os.path.join(test_case.get_out_dir(), 'query03_planner.out')))
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query03_orca.sql')))
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query03_orca.out')))
self.assertTrue(self._check_str_in_file("SET optimizer=on;",
os.path.join(test_case.get_out_dir(), 'query03_orca.sql')))
self.assertTrue(self._check_str_in_file("SET optimizer=on;",
os.path.join(test_case.get_out_dir(), 'query03_orca.out')))
shutil.rmtree(test_case.get_out_dir())
def _check_str_in_file(self, check_string, file_path):
with open(file_path, 'r') as f:
for line in f:
if check_string in line:
return True
return False
def test_run_sql_test_optimizer_minidump_on_failure(self):
"""
        Test whether we gather minidumps on failures when the test is executed with optimizer on.
"""
test_loader = tinctest.TINCTestLoader()
test_suite = test_loader.loadTestsFromName('mpp.models.regress.sql_related.regress_sql_test_case.' + \
'regress_sql_test_case.' + \
'MockSQLTestCaseWithOptimizerOn.test_query02')
self.assertIsNotNone(test_suite)
        self.assertEqual(len(test_suite._tests), 1)
test_result = None
test_case = None
for test in test_suite._tests:
test.__class__.__unittest_skip__ = False
test_case = test
if os.path.exists(test_case.get_out_dir()):
shutil.rmtree(test_case.get_out_dir())
with closing(_WritelnDecorator(StringIO())) as buffer:
tinc_test_runner = TINCTestRunner(stream = buffer, descriptions = True, verbosity = 1)
test_result = tinc_test_runner.run(test_suite)
self.assertEqual(test_result.testsRun, 1)
self.assertEqual(len(test_result.errors), 0)
self.assertEqual(len(test_result.skipped), 0)
self.assertEqual(len(test_result.failures), 1)
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query02_orca.sql')))
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query02_orca.out')))
self.assertTrue(self._check_str_in_file("SET optimizer=on;",
os.path.join(test_case.get_out_dir(), 'query02_orca.sql')))
self.assertTrue(self._check_str_in_file("SET optimizer=on;",
os.path.join(test_case.get_out_dir(), 'query02_orca.out')))
# Verify that we collect minidump on failure for optimizer execution mode
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query02_minidump.mdp')))
@unittest.skip("QAINF-999")
def test_run_sql_test_optimizer_minidump_on_failure2(self):
"""
        Test whether we gather minidumps on failures when the test is executed with optimizer_mode both.
"""
test_loader = tinctest.TINCTestLoader()
test_suite = test_loader.loadTestsFromName('mpp.models.regress.sql_related.regress_sql_test_case.' + \
'regress_sql_test_case.' + \
'MockSQLTestCaseWithOptimizerBoth.test_query02')
self.assertIsNotNone(test_suite)
new_test_suite = tinctest.TINCTestSuite()
        self.assertEqual(test_suite.countTestCases(), 2)
test_result = None
test_case = None
for test in test_suite._tests:
if 'test_query02_orca' in test.name:
test.__class__.__unittest_skip__ = False
test_case = test
new_test_suite.addTest(test)
self.assertIsNotNone(test_case)
if os.path.exists(test_case.get_out_dir()):
shutil.rmtree(test_case.get_out_dir())
with closing(_WritelnDecorator(StringIO())) as buffer:
tinc_test_runner = TINCTestRunner(stream = buffer, descriptions = True, verbosity = 1)
test_result = tinc_test_runner.run(new_test_suite)
self.assertEqual(test_result.testsRun, 1)
self.assertEqual(len(test_result.errors), 0)
self.assertEqual(len(test_result.skipped), 0)
self.assertEqual(len(test_result.failures), 1)
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query02_orca.sql')))
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query02_orca.out')))
self.assertTrue(self._check_str_in_file("SET optimizer=on;",
os.path.join(test_case.get_out_dir(), 'query02_orca.sql')))
self.assertTrue(self._check_str_in_file("SET optimizer=on;",
os.path.join(test_case.get_out_dir(), 'query02_orca.out')))
# Verify that we collect minidump on failure for optimizer execution mode
self.assertTrue(os.path.exists(os.path.join(test_case.get_out_dir(), 'query02_minidump.mdp')))
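# The "find the test case in the suite, then override its skip flag" dance
# above repeats in most tests; a minimal helper sketch capturing the pattern
# (hypothetical, not part of the original suite):
def _load_unskipped_test(test_class, full_name):
    suite = tinctest.TINCTestLoader().loadTestsFromTestCase(test_class)
    for candidate in suite._tests:
        if candidate.name == full_name:
            candidate.__class__.__unittest_skip__ = False
            return candidate
    return None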
|
|
# Rationale for this module comes from the dependency flow:
# main -> bunch of stuff -> core -> space, continuations
#from rpython.rlib.objectmodel import we_are_translated, keepalive_until_here
from rpython.rtyper.lltypesystem import rffi, lltype, llmemory
from rpython.rlib.rthread import ThreadLocalReference
from rpython.rlib.rgc import FinalizerQueue
from rpython.rlib import jit
from continuations import Continuation
import rlibuv as uv
import space
import uv_util
handle_stash = uv_util.stashFor("handle")
# Remember that logging starts only after the
# execution context has been initialized.
class ExecutionContext(object):
#_immutable_fields_ = ['debug_hook?']
def __init__(self, config, lever_path, uv_loop, uv_idler):
self.config = config
self.lever_path = lever_path
self.sthread = None # Stacklets
self.uv_loop = uv_loop
self.uv_idler = uv_idler
self.uv_closing = {} # Handles about to close.
self.uv_sleepers = {} # Holds the sleeping greenlets.
self.uv_readers = {} # Reading streams.
self.uv_writers = {}
self.queue = [] # Event queue.
self.current = Greenlet(self, None, [])#, None)
self.eventloop = self.current
self.exit_status = 0
# The newer and better handlers.
# TODO: drop most of the old handlers to favor these.
self.uv__read = {}
self.uv__write = {}
self.uv__connect = {}
self.uv__udp_recv = {}
self.uv__udp_send = {}
self.uv__shutdown = {}
self.uv__connection = {}
self.uv__close = {}
self.uv__poll = {}
self.uv__timer = {}
#self.uv__async = {} # Dropped the ones
#self.uv__prepare = {} # I possibly won't need.
#self.uv__check = {}
#self.uv__idle = {}
#self.uv__exit = {}
#self.uv__walk = {}
self.uv__fs = {}
self.uv__fs_event = {}
#self.uv__work = {}
self.uv__after_work = {}
self.uv__getaddrinfo = {}
self.uv__getnameinfo = {}
#self.debug_hook = None
self.handles = handle_stash(self.uv_loop)
#from rpython.rlib.rweakref import RWeakKeyDictionary
#self.must_finalize_on_quit = RWeakKeyDictionary(space.Object, space.Object)
self.on_exit = []
self.finalizer_cycle = False
def enqueue(self, task):
if len(self.queue) == 0 and not uv.is_active(rffi.cast(uv.handle_ptr, self.uv_idler)):
uv.idle_start(self.uv_idler, run_queued_tasks)
self.queue.append(task)
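# run_queued_tasks is the uv idle callback armed by enqueue() above: it
# drains the queue, switching into each scheduled greenlet, and disarms the
# idler once no new work appeared while draining.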
def run_queued_tasks(handle):
ec = get_ec()
queue, ec.queue = ec.queue, []
for item in queue:
root_switch(ec, [item])
    if len(ec.queue) == 0: # Nothing new was enqueued while draining; stop idling until enqueue() rearms it.
uv.idle_stop(ec.uv_idler)
def enqueue_for_exit(ec):
# Ensures that handles that were active during the exit won't keep
# the event loop rolling.
# This is a bit of a hack, because every waiting greenlet should throw an
# 'Exit' or 'Discard' exception instead of this.
uv.walk(ec.uv_loop, unref_active_handle, lltype.nullptr(rffi.VOIDP.TO))
# Also.. How is it even possible that handles are active after an exit?
# Well I got it to happen with Ctrl+C.
on_exit, ec.on_exit = ec.on_exit, []
for argv in on_exit:
schedule(argv)
active = len(on_exit) > 0
#while len(ec.must_finalize_on_quit) > 0:
# try:
# ded = ec.must_finalize_on_quit.popitem()[0]
# schedule([ded.getattr(u"+finalize")])
# active = True
# except space.Unwinder as unwinder:
# root_unwind(unwinder)
return active
def unref_active_handle(handle, arg):
if uv.is_active(handle) != 0:
uv.unref(handle)
class RootFinalizerQueue(FinalizerQueue):
Class = space.Object
def finalizer_trigger(self):
g.finalizer_ec.finalizer_cycle = True
class GlobalState(object):
ec = ThreadLocalReference(ExecutionContext, loop_invariant=True)
log = None
work_pool = None # Creates work pool on demand.
# It's in base.py
finalizer_queue = RootFinalizerQueue()
finalizer_ec = None
def init_executioncontext(*args):
ec = ExecutionContext(*args)
g.ec.set(ec)
return ec
#global_state = ThreadLocalReference(GlobalState)
g = GlobalState()
def get_ec():
ec = g.ec.get()
if isinstance(ec, ExecutionContext):
return ec
import os
os.write(2, "threads don't support get_ec now.\n")
assert False, "failure"
def root_switch(ec, argv):
try:
switch(argv)
except space.Unwinder as unwinder:
g.log.exception(unwinder.exception)
if ec.finalizer_cycle:
run_finalizers(ec)
@jit.dont_look_inside
def run_finalizers(ec):
ded = g.finalizer_queue.next_dead()
while ded:
try:
schedule([ded.getattr(u"+finalize")])
except space.Unwinder as unwinder:
root_unwind(unwinder)
ded = g.finalizer_queue.next_dead()
ec.finalizer_cycle = False
def root_unwind(unwinder):
g.log.exception(unwinder.exception)
def schedule(argv):
c = to_greenlet(argv)
c.ec.enqueue(c)
return c
def to_greenlet(argv):
ec = get_ec()
if len(argv) > 0:
c = argv.pop(0)
if isinstance(c, Greenlet):
c.argv += argv
else:
c = Greenlet(ec, ec.eventloop, [c] + argv)
else:
c = Greenlet(ec, ec.eventloop, argv)
if c.is_exhausted():
raise space.OldError(u"attempting to put exhausted greenlet into queue")
return c
class Greenlet(space.Object):
def __init__(self, ec, parent, argv):#, debug_hook):
self.ec = ec
self.parent = parent
self.handle = None
self.argv = argv
self.unwinder = None
#self.debug_hook = debug_hook
def getattr(self, name):
if name == u'parent':
return self.parent or space.null
return space.Object.getattr(self, name)
def repr(self):
return u"<greenlet>"
def is_exhausted(self):
return self.handle is not None and self.handle.is_empty()
@Greenlet.instantiator
def greenlet(argv):
ec = get_ec()
return Greenlet(ec, ec.current, argv)#, ec.debug_hook)
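# new_greenlet is the continuation entry point of a freshly started greenlet:
# it runs the scheduled callable, then unwinds to the nearest live parent,
# handing over the resulting argv (or unwinder) and swapping continuation
# handles so that the parent resumes.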
@Continuation.wrapped_callback
def new_greenlet(cont):
ec = get_ec()
self = ec.current
argv, self.argv = self.argv, [] # XXX: Throw into empty greenlet won't happen.
try:
if len(argv) == 0:
raise space.OldError(u"greenlet with no arguments")
func = argv.pop(0)
argv = argv_expand(func.call(argv))
unwinder = None
except space.Unwinder as unwinder:
argv = []
except Exception as exc:
msg = "SystemError: greenlet raised " + str(exc)
argv = []
unwinder = space.unwind(space.LError(msg.decode('utf-8')))
assert self == ec.current
parent = self.parent
while parent and parent.handle.is_empty():
# note that non-initiated or non-activated parent is invalid.
parent = parent.parent
assert parent is not None
parent.argv.extend(argv)
parent.unwinder = unwinder
ec.current = parent
self.handle, parent.handle = parent.handle, self.handle
return self.handle # XXX: note that the handle must not be null for this to work.
def switch(argv):
ec = get_ec()
target = argv.pop(0)
self = ec.current
#self.debug_hook = ec.debug_hook
if not isinstance(target, Greenlet):
raise space.unwind(space.LError(
u"first argument to 'switch' not a greenlet"))
if target.ec != ec:
raise space.unwind(space.LError(
u"this greenlet belongs for a different thread"))
if ec.current == target:
        pending, self.argv = self.argv, []
        pending.extend(argv)
        return argv_compact(pending)
if target.handle is not None and target.handle.is_empty():
raise space.OldError(u"empty greenlet")
target.argv.extend(argv)
ec.current = target
if target.handle:
self.handle, target.handle = target.handle, self.handle
self.handle.switch()
else:
self.handle = Continuation()
self.handle.init(new_greenlet)
#ec.debug_hook = self.debug_hook
argv, self.argv = self.argv, []
if self.unwinder:
unwinder, self.unwinder = self.unwinder, None
raise unwinder
return argv_compact(argv)
Greenlet.interface.methods[u'switch'] = space.Builtin(switch)
def argv_compact(argv):
if len(argv) == 0:
return space.null
if len(argv) == 1:
return argv[0]
return space.List(argv)
def argv_expand(obj):
if obj is space.null:
return []
if not isinstance(obj, space.List):
return [obj]
return obj.contents
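def _example_argv_roundtrip():
    # Sketch of the calling convention used throughout this module: no
    # values travel as null, a single value travels bare, and multiple
    # values travel as a List.
    assert argv_compact([]) is space.null
    assert len(argv_expand(space.null)) == 0
    assert isinstance(argv_compact([space.null, space.null]), space.List)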
|
|
# Copyright 2015 Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""
======
Scales
======
.. currentmodule:: bqplot.scales
.. autosummary::
:toctree: _generate/
Scale
LinearScale
LogScale
DateScale
OrdinalScale
ColorScale
DateColorScale
OrdinalColorScale
GeoScale
Mercator
AlbersUSA
Gnomonic
Stereographic
"""
from ipywidgets import Widget, Color
from traitlets import Unicode, List, Enum, Float, Bool, Type, Tuple
import numpy as np
from .traits import Date
from ._version import __frontend_version__
def register_scale(key=None):
"""Returns a decorator to register a scale type in the scale type
registry.
    If no key is provided, the scale's module and class name are used as the
    key. A key is provided for each core bqplot scale type so that the
    frontend can use this key regardless of the kernel language.
"""
def wrap(scale):
label = key if key is not None else scale.__module__ + scale.__name__
Scale.scale_types[label] = scale
return scale
return wrap
class Scale(Widget):
"""The base scale class.
Scale objects represent a mapping between data (the domain) and a visual
    quantity (the range).
Attributes
----------
scale_types: dict (class-level attribute)
A registry of existing scale types.
domain_class: type (default: Float)
        traitlet type used to validate values in the domain of the scale.
reverse: bool (default: False)
whether the scale should be reversed.
allow_padding: bool (default: True)
indicates whether figures are allowed to add data padding to this scale
or not.
precedence: int (class-level attribute)
attribute used to determine which scale takes precedence in cases when
two or more scales have the same rtype and dtype.
"""
scale_types = {}
precedence = 1
domain_class = Type(Float)
reverse = Bool().tag(sync=True)
allow_padding = Bool(True).tag(sync=True)
_view_name = Unicode('Scale').tag(sync=True)
_model_name = Unicode('ScaleModel').tag(sync=True)
_view_module = Unicode('bqplot').tag(sync=True)
_model_module = Unicode('bqplot').tag(sync=True)
_view_module_version = Unicode(__frontend_version__).tag(sync=True)
_model_module_version = Unicode(__frontend_version__).tag(sync=True)
_ipython_display_ = None # We cannot display a scale outside of a figure
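# Hedged sketch of the registry decorator defined above; the subclass and the
# key are hypothetical, and nothing is registered unless this is called.
def _example_register_scale():
    @register_scale('bqplot.ExampleScale')
    class ExampleScale(Scale):
        _view_name = Unicode('ExampleScale').tag(sync=True)
        _model_name = Unicode('ExampleScaleModel').tag(sync=True)
    return Scale.scale_types['bqplot.ExampleScale'] is ExampleScale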
class GeoScale(Scale):
"""The base projection scale class for Map marks.
The GeoScale represents a mapping between topographic data and a
2d visual representation.
"""
_view_name = Unicode('GeoScale').tag(sync=True)
_model_name = Unicode('GeoScaleModel').tag(sync=True)
@register_scale('bqplot.Mercator')
class Mercator(GeoScale):
"""A geographical projection scale commonly used for world maps.
The Mercator projection is a cylindrical map projection which ensures that
any course of constant bearing is a straight line.
Attributes
----------
scale_factor: float (default: 190)
Specifies the scale value for the projection
center: tuple (default: (0, 60))
Specifies the longitude and latitude where the map is centered.
rotate: tuple (default: (0, 0))
Degree of rotation in each axis.
rtype: (Number, Number) (class-level attribute)
        This attribute should not be modified. The range type of a geo
scale is a tuple.
dtype: type (class-level attribute)
the associated data type / domain type
"""
scale_factor = Float(190).tag(sync=True)
center = Tuple((0, 60)).tag(sync=True)
rotate = Tuple((0, 0)).tag(sync=True)
rtype = '(Number, Number)'
dtype = np.number
_view_name = Unicode('Mercator').tag(sync=True)
_model_name = Unicode('MercatorModel').tag(sync=True)
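# Usage sketch (assumes a live Jupyter kernel for the Widget machinery);
# the keyword names match the traits declared above:
def _example_mercator():
    return Mercator(scale_factor=350, center=(0, 25))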
@register_scale('bqplot.Albers')
class Albers(GeoScale):
"""A geographical scale which is an alias for a conic equal area projection.
The Albers projection is a conic equal area map. It does not preserve scale
    or shape, though it is recommended for choropleths since it preserves the
relative areas of geographic features. Default values are US-centric.
Attributes
----------
scale_factor: float (default: 250)
Specifies the scale value for the projection
rotate: tuple (default: (96, 0))
Degree of rotation in each axis.
parallels: tuple (default: (29.5, 45.5))
Sets the two parallels for the conic projection.
center: tuple (default: (0, 60))
Specifies the longitude and latitude where the map is centered.
precision: float (default: 0.1)
        Specifies the threshold for the projection's adaptive resampling to the
specified value in pixels.
rtype: (Number, Number) (class-level attribute)
        This attribute should not be modified. The range type of a geo
scale is a tuple.
dtype: type (class-level attribute)
the associated data type / domain type
"""
scale_factor = Float(250).tag(sync=True)
rotate = Tuple((96, 0)).tag(sync=True)
center = Tuple((0, 60)).tag(sync=True)
parallels = Tuple((29.5, 45.5)).tag(sync=True)
precision = Float(0.1).tag(sync=True)
rtype = '(Number, Number)'
dtype = np.number
_view_name = Unicode('Albers').tag(sync=True)
_model_name = Unicode('AlbersModel').tag(sync=True)
@register_scale('bqplot.AlbersUSA')
class AlbersUSA(GeoScale):
"""A composite projection of four Albers projections meant specifically for
the United States.
Attributes
----------
scale_factor: float (default: 1200)
Specifies the scale value for the projection
    translate: tuple (default: (600, 490))
        The translation offset of the projection, in pixels.
rtype: (Number, Number) (class-level attribute)
        This attribute should not be modified. The range type of a geo
scale is a tuple.
dtype: type (class-level attribute)
the associated data type / domain type
"""
scale_factor = Float(1200).tag(sync=True)
translate = Tuple((600, 490)).tag(sync=True)
rtype = '(Number, Number)'
dtype = np.number
_view_name = Unicode('AlbersUSA').tag(sync=True)
_model_name = Unicode('AlbersUSAModel').tag(sync=True)
@register_scale('bqplot.EquiRectangular')
class EquiRectangular(GeoScale):
"""An elementary projection that uses the identity function.
The projection is neither equal-area nor conformal.
Attributes
----------
scale_factor: float (default: 145)
Specifies the scale value for the projection
center: tuple (default: (0, 60))
Specifies the longitude and latitude where the map is centered.
"""
scale_factor = Float(145.0).tag(sync=True)
center = Tuple((0, 60)).tag(sync=True)
rtype = '(Number, Number)'
dtype = np.number
_view_name = Unicode('EquiRectangular').tag(sync=True)
_model_name = Unicode('EquiRectangularModel').tag(sync=True)
@register_scale('bqplot.Orthographic')
class Orthographic(GeoScale):
"""A perspective projection that depicts a hemisphere as it appears from
outer space.
The projection is neither equal-area nor conformal.
Attributes
----------
scale_factor: float (default: 145)
Specifies the scale value for the projection
center: tuple (default: (0, 60))
Specifies the longitude and latitude where the map is centered.
    rotate: tuple (default: (0, 0))
Degree of rotation in each axis.
clip_angle: float (default: 90.)
Specifies the clipping circle radius to the specified angle in degrees.
precision: float (default: 0.1)
        Specifies the threshold for the projection's adaptive resampling to the
specified value in pixels.
"""
scale_factor = Float(145.0).tag(sync=True)
center = Tuple((0, 60)).tag(sync=True)
rotate = Tuple((0, 0)).tag(sync=True)
clip_angle = Float(90.0, min=0.0, max=360.0).tag(sync=True)
precision = Float(0.1).tag(sync=True)
rtype = '(Number, Number)'
dtype = np.number
_view_name = Unicode('Orthographic').tag(sync=True)
_model_name = Unicode('OrthographicModel').tag(sync=True)
@register_scale('bqplot.Gnomonic')
class Gnomonic(GeoScale):
"""A perspective projection which displays great circles as straight lines.
The projection is neither equal-area nor conformal.
Attributes
----------
scale_factor: float (default: 145)
Specifies the scale value for the projection
center: tuple (default: (0, 60))
Specifies the longitude and latitude where the map is centered.
precision: float (default: 0.1)
        Specifies the threshold for the projection's adaptive resampling to the
specified value in pixels.
clip_angle: float (default: 89.999)
Specifies the clipping circle radius to the specified angle in degrees.
"""
scale_factor = Float(145.0).tag(sync=True)
center = Tuple((0, 60)).tag(sync=True)
precision = Float(0.1).tag(sync=True)
clip_angle = Float(89.999, min=0.0, max=360.0).tag(sync=True)
rtype = '(Number, Number)'
dtype = np.number
_view_name = Unicode('Gnomonic').tag(sync=True)
_model_name = Unicode('GnomonicModel').tag(sync=True)
@register_scale('bqplot.Stereographic')
class Stereographic(GeoScale):
"""A perspective projection that uses a bijective and smooth map at every
point except the projection point.
The projection is not an equal-area projection but it is conformal.
Attributes
----------
    scale_factor: float (default: 145)
Specifies the scale value for the projection
rotate: tuple (default: (96, 0))
Degree of rotation in each axis.
center: tuple (default: (0, 60))
Specifies the longitude and latitude where the map is centered.
precision: float (default: 0.1)
        Specifies the threshold for the projection's adaptive resampling to the
specified value in pixels.
    clip_angle: float (default: 179.9999)
Specifies the clipping circle radius to the specified angle in degrees.
"""
scale_factor = Float(145.0).tag(sync=True)
center = Tuple((0, 60)).tag(sync=True)
precision = Float(0.1).tag(sync=True)
rotate = Tuple((96, 0)).tag(sync=True)
clip_angle = Float(179.9999, min=0.0, max=360.0).tag(sync=True)
rtype = '(Number, Number)'
dtype = np.number
_view_name = Unicode('Stereographic').tag(sync=True)
_model_name = Unicode('StereographicModel').tag(sync=True)
@register_scale('bqplot.LinearScale')
class LinearScale(Scale):
"""A linear scale.
An affine mapping from a numerical domain to a numerical range.
Attributes
----------
min: float or None (default: None)
if not None, min is the minimal value of the domain
max: float or None (default: None)
if not None, max is the maximal value of the domain
rtype: string (class-level attribute)
        This attribute should not be modified. The range type of a linear
scale is numerical.
dtype: type (class-level attribute)
the associated data type / domain type
precedence: int (class-level attribute, default_value=2)
attribute used to determine which scale takes precedence in cases when
two or more scales have the same rtype and dtype.
default_value is 2 because for the same range and domain types,
LinearScale should take precedence.
stabilized: bool (default: False)
if set to False, the domain of the scale is tied to the data range
if set to True, the domain of the scale is updated only when
the data range is beyond certain thresholds, given by the attributes
mid_range and min_range.
mid_range: float (default: 0.8)
Proportion of the range that is spanned initially.
Used only if stabilized is True.
min_range: float (default: 0.6)
Minimum proportion of the range that should be spanned by the data.
If the data span falls beneath that level, the scale is reset.
min_range must be <= mid_range.
Used only if stabilized is True.
"""
rtype = 'Number'
dtype = np.number
precedence = 2
min = Float(None, allow_none=True).tag(sync=True)
max = Float(None, allow_none=True).tag(sync=True)
stabilized = Bool(False).tag(sync=True)
min_range = Float(0.6, min=0.0, max=1.0).tag(sync=True)
mid_range = Float(0.8, min=0.1, max=1.0).tag(sync=True)
_view_name = Unicode('LinearScale').tag(sync=True)
_model_name = Unicode('LinearScaleModel').tag(sync=True)
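# Usage sketch (illustrative, not part of the library): pinning min/max
# overrides the automatic data-driven domain; leaving either as None keeps
# that end of the domain automatic.
#
#     x_sc = LinearScale(min=0.0, max=1.0)  # fully pinned domain
#     y_sc = LinearScale(stabilized=True)   # domain resets only when the data
#                                           # span crosses min_range/mid_range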
@register_scale('bqplot.LogScale')
class LogScale(Scale):
"""A log scale.
A logarithmic mapping from a numerical domain to a numerical range.
Attributes
----------
min: float or None (default: None)
if not None, min is the minimal value of the domain
max: float or None (default: None)
if not None, max is the maximal value of the domain
rtype: string (class-level attribute)
        This attribute should not be modified by the user.
        The range type of a log scale is numerical.
dtype: type (class-level attribute)
the associated data type / domain type
"""
rtype = 'Number'
dtype = np.number
min = Float(None, allow_none=True).tag(sync=True)
max = Float(None, allow_none=True).tag(sync=True)
_view_name = Unicode('LogScale').tag(sync=True)
_model_name = Unicode('LogScaleModel').tag(sync=True)
@register_scale('bqplot.DateScale')
class DateScale(Scale):
"""A date scale, with customizable formatting.
An affine mapping from dates to a numerical range.
Attributes
----------
min: Date or None (default: None)
if not None, min is the minimal value of the domain
max: Date (default: None)
if not None, max is the maximal value of the domain
domain_class: type (default: Date)
        traitlet type used to validate values in the domain of the scale.
rtype: string (class-level attribute)
        This attribute should not be modified by the user.
        The range type of a date scale is numerical.
dtype: type (class-level attribute)
the associated data type / domain type
"""
rtype = 'Number'
dtype = np.datetime64
domain_class = Type(Date)
min = Date(default_value=None, allow_none=True).tag(sync=True)
max = Date(default_value=None, allow_none=True).tag(sync=True)
_view_name = Unicode('DateScale').tag(sync=True)
_model_name = Unicode('DateScaleModel').tag(sync=True)
@register_scale('bqplot.OrdinalScale')
class OrdinalScale(Scale):
"""An ordinal scale.
A mapping from a discrete set of values to a numerical range.
Attributes
----------
domain: list (default: [])
The discrete values mapped by the ordinal scale
rtype: string (class-level attribute)
        This attribute should not be modified by the user.
        The range type of an ordinal scale is numerical.
dtype: type (class-level attribute)
the associated data type / domain type
"""
rtype = 'Number'
    dtype = np.str_  # np.str (an alias of str) was removed in NumPy 1.24
domain = List().tag(sync=True)
_view_name = Unicode('OrdinalScale').tag(sync=True)
_model_name = Unicode('OrdinalScaleModel').tag(sync=True)
@register_scale('bqplot.ColorScale')
class ColorScale(Scale):
"""A color scale.
A mapping from numbers to colors. The relation is affine by part.
Attributes
----------
scale_type: {'linear'}
        Type of color scale; only 'linear' is currently supported.
colors: list of colors (default: [])
list of colors
min: float or None (default: None)
if not None, min is the minimal value of the domain
max: float or None (default: None)
if not None, max is the maximal value of the domain
mid: float or None (default: None)
if not None, mid is the value corresponding to the mid color.
scheme: string (default: 'RdYlGn')
Colorbrewer color scheme of the color scale.
rtype: string (class-level attribute)
        The range type of a color scale is 'Color'. This should not be modified.
dtype: type (class-level attribute)
the associated data type / domain type
"""
rtype = 'Color'
dtype = np.number
scale_type = Enum(['linear'], default_value='linear').tag(sync=True)
colors = List(trait=Color(default_value=None, allow_none=True))\
.tag(sync=True)
min = Float(None, allow_none=True).tag(sync=True)
max = Float(None, allow_none=True).tag(sync=True)
mid = Float(None, allow_none=True).tag(sync=True)
scheme = Unicode('RdYlGn').tag(sync=True)
_view_name = Unicode('ColorScale').tag(sync=True)
_model_name = Unicode('ColorScaleModel').tag(sync=True)
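# Usage sketch (illustrative): a diverging color map anchored at zero by
# pinning 'mid', so values below zero shade toward one end of the scheme
# and values above it toward the other.
#
#     sc = ColorScale(scheme='RdYlGn', min=-1.0, mid=0.0, max=1.0)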
@register_scale('bqplot.DateColorScale')
class DateColorScale(ColorScale):
"""A date color scale.
    A mapping from dates to colors.
Attributes
----------
min: Date or None (default: None)
if not None, min is the minimal value of the domain
max: Date or None (default: None)
if not None, max is the maximal value of the domain
mid: Date or None (default: None)
if not None, mid is the value corresponding to the mid color.
rtype: string (class-level attribute)
        This attribute should not be modified by the user.
The range type of a color scale is 'Color'.
dtype: type (class-level attribute)
the associated data type / domain type
"""
dtype = np.datetime64
domain_class = Type(Date)
min = Date(default_value=None, allow_none=True).tag(sync=True)
mid = Date(default_value=None, allow_none=True).tag(sync=True)
max = Date(default_value=None, allow_none=True).tag(sync=True)
_view_name = Unicode('DateColorScale').tag(sync=True)
_model_name = Unicode('DateColorScaleModel').tag(sync=True)
@register_scale('bqplot.OrdinalColorScale')
class OrdinalColorScale(ColorScale):
"""An ordinal color scale.
A mapping from a discrete set of values to colors.
Attributes
----------
domain: list (default: [])
The discrete values mapped by the ordinal scales.
rtype: string (class-level attribute)
        This attribute should not be modified by the user.
        The range type of a color scale is 'Color'.
dtype: type (class-level attribute)
the associated data type / domain type
"""
rtype = 'Color'
    dtype = np.str_
domain = List().tag(sync=True)
_view_name = Unicode('OrdinalColorScale').tag(sync=True)
_model_name = Unicode('OrdinalScaleModel').tag(sync=True)
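if __name__ == '__main__':
    # Minimal smoke sketch, not part of the library: scales are plain
    # widgets, so they can be constructed without a running frontend, and
    # the registry is populated by the @register_scale decorators above.
    x_sc = LinearScale(min=0.0, max=1.0)
    c_sc = OrdinalColorScale(domain=['a', 'b', 'c'])
    assert Scale.scale_types['bqplot.LinearScale'] is LinearScale
    print(sorted(Scale.scale_types))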
|
|
#!/usr/bin/env python
"""Unit tests run as PYTHONPATH=../../.. python3 ./test_valve.py."""
# Copyright (C) 2015 Research and Innovation Advanced Network New Zealand Ltd.
# Copyright (C) 2015--2019 The Contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import unittest
from ryu.lib import mac
from ryu.lib.packet import slow
from ryu.ofproto import ether
from ryu.ofproto import ofproto_v1_3 as ofp
from ryu.ofproto import ofproto_v1_3_parser as parser
from faucet import valve_of
from faucet import valve_packet
from clib.valve_test_lib import (
CONFIG, DP1_CONFIG, FAUCET_MAC, GROUP_DP1_CONFIG, IDLE_DP1_CONFIG,
ValveTestBases)
from clib.fakeoftable import CONTROLLER_PORT
class ValveTestCase(ValveTestBases.ValveTestBig):
"""Run complete set of basic tests."""
class ValveFuzzTestCase(ValveTestBases.ValveTestNetwork):
"""Test unknown ports/VLANs."""
CONFIG = """
dps:
s1:
%s
interfaces:
p1:
number: 1
native_vlan: 0x100
""" % DP1_CONFIG
def setUp(self):
self.setup_valves(self.CONFIG)
def test_fuzz_vlan(self):
"""Test unknown VIDs/ports."""
for _ in range(0, 3):
for i in range(0, 64):
self.rcv_packet(1, i, {
'eth_src': self.P1_V100_MAC,
'eth_dst': self.P2_V200_MAC,
'ipv4_src': '10.0.0.2',
'ipv4_dst': '10.0.0.3',
'vid': i})
for i in range(0, 64):
self.rcv_packet(i, 0x100, {
'eth_src': self.P1_V100_MAC,
'eth_dst': self.P2_V200_MAC,
'ipv4_src': '10.0.0.2',
'ipv4_dst': '10.0.0.3',
'vid': 0x100})
# pylint: disable=no-member
# pylint: disable=no-value-for-parameter
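        # parse_packet_in_pkt is memoized (it exposes lru_cache's
        # cache_info()), so replaying the identical packets above should
        # produce mostly cache hits.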
cache_info = valve_packet.parse_packet_in_pkt.cache_info()
self.assertGreater(cache_info.hits, cache_info.misses, msg=cache_info)
class ValveCoprocessorTestCase(ValveTestBases.ValveTestNetwork):
"""Test direct packet output using coprocessor."""
CONFIG = """
dps:
s1:
%s
interfaces:
p1:
number: 1
coprocessor: {strategy: vlan_vid, vlan_vid_base: 0x200}
p2:
number: 2
native_vlan: testvlan
p3:
number: 3
native_vlan: testvlan
vlans:
testvlan:
vid: 0x100
acls_in: [bypassedbycoprocessor]
acls:
bypassedbycoprocessor:
- rule:
ipv4_src: 10.0.0.99
dl_type: 0x0800
actions:
allow: 0
- rule:
actions:
allow: 1
""" % DP1_CONFIG
def setUp(self):
self.setup_valves(self.CONFIG)
def test_output(self):
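        # With coprocessor strategy 'vlan_vid' and vlan_vid_base 0x200, the
        # coprocessor selects an output port by tagging the packet with VID
        # (vlan_vid_base + port number). OFPVID_PRESENT is OR'ed in because
        # OpenFlow 1.3 VLAN matches require the present bit to be set.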
# VID for direct output to port 2
copro_vid_out = (0x200 + 2) | ofp.OFPVID_PRESENT
direct_match = {
'in_port': 1, 'vlan_vid': copro_vid_out, 'eth_type': ether.ETH_TYPE_IP,
'eth_src': self.P1_V100_MAC, 'eth_dst': mac.BROADCAST_STR}
table = self.network.tables[self.DP_ID]
self.assertTrue(table.is_output(direct_match, port=2))
p2_host_match = {
'eth_src': self.P1_V100_MAC, 'eth_dst': self.P2_V200_MAC,
'ipv4_src': '10.0.0.2', 'ipv4_dst': '10.0.0.3',
'eth_type': ether.ETH_TYPE_IP}
p2_host_receive = copy.deepcopy(p2_host_match)
p2_host_receive.update({'in_port': 2})
# learn P2 host
self.rcv_packet(2, 0x100, p2_host_receive)
# copro can send to P2 via regular pipeline and is not subject to VLAN ACL.
p2_copro_host_receive = copy.deepcopy(p2_host_match)
p2_copro_host_receive.update(
{'in_port': 1,
'ipv4_src': '10.0.0.99', 'ipv4_dst': '10.0.0.3',
'eth_src': p2_host_match['eth_dst'],
'eth_dst': p2_host_match['eth_src']})
p2_copro_host_receive['vlan_vid'] = 0x100 | ofp.OFPVID_PRESENT
self.assertTrue(table.is_output(p2_copro_host_receive, port=2, vid=0x100))
# copro send to P2 was not flooded
self.assertFalse(table.is_output(p2_copro_host_receive, port=3, vid=0x100))
class ValveRestBcastTestCase(ValveTestBases.ValveTestNetwork):
CONFIG = """
dps:
s1:
%s
interfaces:
p1:
number: 1
native_vlan: 0x100
restricted_bcast_arpnd: true
p2:
number: 2
native_vlan: 0x100
p3:
number: 3
native_vlan: 0x100
restricted_bcast_arpnd: true
""" % DP1_CONFIG
def setUp(self):
self.setup_valves(self.CONFIG)
def test_rest_bcast(self):
match = {
'in_port': 1, 'vlan_vid': 0, 'eth_type': ether.ETH_TYPE_IP,
'eth_src': self.P1_V100_MAC, 'eth_dst': mac.BROADCAST_STR}
table = self.network.tables[self.DP_ID]
self.assertTrue(table.is_output(match, port=2))
self.assertFalse(table.is_output(match, port=3))
match = {
'in_port': 2, 'vlan_vid': 0, 'eth_type': ether.ETH_TYPE_IP,
'eth_src': self.P1_V100_MAC, 'eth_dst': mac.BROADCAST_STR}
self.assertTrue(table.is_output(match, port=1))
self.assertTrue(table.is_output(match, port=3))
class ValveOFErrorTestCase(ValveTestBases.ValveTestNetwork):
"""Test decoding of OFErrors."""
def setUp(self):
self.setup_valves(CONFIG)
def test_oferror_parser(self):
"""Test OF error parser works"""
for type_code, error_tuple in valve_of.OFERROR_TYPE_CODE.items():
self.assertTrue(isinstance(type_code, int))
type_str, error_codes = error_tuple
self.assertTrue(isinstance(type_str, str))
for error_code, error_str in error_codes.items():
self.assertTrue(isinstance(error_code, int))
self.assertTrue(isinstance(error_str, str))
test_err = parser.OFPErrorMsg(
datapath=None, type_=ofp.OFPET_FLOW_MOD_FAILED, code=ofp.OFPFMFC_UNKNOWN)
valve = self.valves_manager.valves[self.DP_ID]
valve.oferror(test_err)
test_unknown_type_err = parser.OFPErrorMsg(
datapath=None, type_=666, code=ofp.OFPFMFC_UNKNOWN)
valve.oferror(test_unknown_type_err)
test_unknown_code_err = parser.OFPErrorMsg(
datapath=None, type_=ofp.OFPET_FLOW_MOD_FAILED, code=666)
valve.oferror(test_unknown_code_err)
class ValveGroupTestCase(ValveTestBases.ValveTestNetwork):
"""Tests for datapath with group support."""
CONFIG = """
dps:
s1:
%s
interfaces:
p1:
number: 1
native_vlan: v100
p2:
number: 2
native_vlan: v200
tagged_vlans: [v100]
p3:
number: 3
tagged_vlans: [v100, v200]
p4:
number: 4
tagged_vlans: [v200]
vlans:
v100:
vid: 0x100
v200:
vid: 0x200
""" % GROUP_DP1_CONFIG
def setUp(self):
self.setup_valves(self.CONFIG)
def test_unknown_eth_dst_rule(self):
"""Test that packets with unkown eth dst addrs get flooded correctly.
They must be output to each port on the associated vlan, with the
correct vlan tagging. And they must not be forwarded to a port not
on the associated vlan
"""
self.learn_hosts()
matches = [
{
'in_port': 3,
'vlan_vid': self.V100,
},
{
'in_port': 2,
'vlan_vid': 0,
'eth_dst': self.P1_V100_MAC
},
{
'in_port': 1,
'vlan_vid': 0,
'eth_src': self.P1_V100_MAC
},
{
'in_port': 3,
'vlan_vid': self.V200,
'eth_src': self.P2_V200_MAC,
}
]
self.verify_flooding(matches)
class ValveIdleLearnTestCase(ValveTestBases.ValveTestNetwork):
"""Smoke test for idle-flow based learning. This feature is not currently reliable."""
CONFIG = """
dps:
s1:
%s
interfaces:
p1:
number: 1
native_vlan: v100
p2:
number: 2
native_vlan: v200
tagged_vlans: [v100]
p3:
number: 3
tagged_vlans: [v100, v200]
p4:
number: 4
tagged_vlans: [v200]
p5:
number: 5
output_only: True
mirror: 4
vlans:
v100:
vid: 0x100
v200:
vid: 0x200
""" % IDLE_DP1_CONFIG
def setUp(self):
self.setup_valves(self.CONFIG)
def test_known_eth_src_rule(self):
"""Test removal flow handlers."""
self.learn_hosts()
valve = self.valves_manager.valves[self.DP_ID]
self.assertTrue(
valve.flow_timeout(
self.mock_time(),
valve.dp.tables['eth_dst'].table_id,
{'vlan_vid': self.V100, 'eth_dst': self.P1_V100_MAC}))
self.assertFalse(
valve.flow_timeout(
self.mock_time(),
valve.dp.tables['eth_src'].table_id,
{'vlan_vid': self.V100, 'in_port': 1, 'eth_src': self.P1_V100_MAC}))
def test_host_learn_coldstart(self):
"""Test flow learning, including cold-start cache invalidation"""
valve = self.valves_manager.valves[self.DP_ID]
match = {
'in_port': 3, 'vlan_vid': self.V100, 'eth_type': ether.ETH_TYPE_IP,
'eth_src': self.P3_V100_MAC, 'eth_dst': self.P1_V100_MAC}
table = self.network.tables[self.DP_ID]
self.assertTrue(table.is_output(match, port=1))
self.assertTrue(table.is_output(match, port=2))
self.assertTrue(table.is_output(match, port=CONTROLLER_PORT))
self.learn_hosts()
self.assertTrue(table.is_output(match, port=1))
self.assertFalse(table.is_output(match, port=2))
self.assertFalse(table.is_output(match, port=CONTROLLER_PORT))
self.cold_start()
self.assertTrue(table.is_output(match, port=1))
self.assertTrue(table.is_output(match, port=2))
self.assertTrue(table.is_output(match, port=CONTROLLER_PORT))
self.mock_time(valve.dp.timeout // 4 * 3)
self.learn_hosts()
self.assertTrue(table.is_output(match, port=1))
self.assertFalse(table.is_output(match, port=2))
self.assertFalse(table.is_output(match, port=CONTROLLER_PORT))
class ValveLACPTestCase(ValveTestBases.ValveTestNetwork):
"""Test LACP."""
CONFIG = """
dps:
s1:
%s
lacp_timeout: 5
interfaces:
p1:
number: 1
native_vlan: v100
lacp: 1
p2:
number: 2
native_vlan: v200
tagged_vlans: [v100]
p3:
number: 3
tagged_vlans: [v100, v200]
p4:
number: 4
tagged_vlans: [v200]
p5:
number: 5
tagged_vlans: [v300]
vlans:
v100:
vid: 0x100
v200:
vid: 0x200
v300:
vid: 0x300
""" % DP1_CONFIG
def setUp(self):
self.setup_valves(self.CONFIG)
self.activate_all_ports()
def test_lacp(self):
"""Test LACP comes up."""
test_port = 1
labels = self.port_labels(test_port)
valve = self.valves_manager.valves[self.DP_ID]
self.assertEqual(
1, int(self.get_prom('port_lacp_state', labels=labels)))
self.assertFalse(
valve.dp.ports[1].non_stack_forwarding())
self.rcv_packet(test_port, 0, {
'actor_system': '0e:00:00:00:00:02',
'partner_system': FAUCET_MAC,
'eth_dst': slow.SLOW_PROTOCOL_MULTICAST,
'eth_src': '0e:00:00:00:00:02',
'actor_state_synchronization': 1})
self.assertEqual(
3, int(self.get_prom('port_lacp_state', labels=labels)))
self.assertTrue(
valve.dp.ports[1].non_stack_forwarding())
self.learn_hosts()
self.verify_expiry()
def test_lacp_flap(self):
"""Test LACP handles state 0->1->0."""
valve = self.valves_manager.valves[self.DP_ID]
test_port = 1
labels = self.port_labels(test_port)
self.assertEqual(
1, int(self.get_prom('port_lacp_state', labels=labels)))
self.assertFalse(
valve.dp.ports[1].non_stack_forwarding())
self.rcv_packet(test_port, 0, {
'actor_system': '0e:00:00:00:00:02',
'partner_system': FAUCET_MAC,
'eth_dst': slow.SLOW_PROTOCOL_MULTICAST,
'eth_src': '0e:00:00:00:00:02',
'actor_state_synchronization': 1})
self.assertEqual(
3, int(self.get_prom('port_lacp_state', labels=labels)))
self.assertTrue(
valve.dp.ports[1].non_stack_forwarding())
self.learn_hosts()
self.verify_expiry()
self.rcv_packet(test_port, 0, {
'actor_system': '0e:00:00:00:00:02',
'partner_system': FAUCET_MAC,
'eth_dst': slow.SLOW_PROTOCOL_MULTICAST,
'eth_src': '0e:00:00:00:00:02',
'actor_state_synchronization': 0})
self.assertEqual(
5, int(self.get_prom('port_lacp_state', labels=labels)))
self.assertFalse(
valve.dp.ports[1].non_stack_forwarding())
def test_lacp_timeout(self):
"""Test LACP comes up and then times out."""
valve = self.valves_manager.valves[self.DP_ID]
test_port = 1
labels = self.port_labels(test_port)
self.assertEqual(
1, int(self.get_prom('port_lacp_state', labels=labels)))
self.assertFalse(
valve.dp.ports[1].non_stack_forwarding())
self.rcv_packet(test_port, 0, {
'actor_system': '0e:00:00:00:00:02',
'partner_system': FAUCET_MAC,
'eth_dst': slow.SLOW_PROTOCOL_MULTICAST,
'eth_src': '0e:00:00:00:00:02',
'actor_state_synchronization': 1})
self.assertEqual(
3, int(self.get_prom('port_lacp_state', labels=labels)))
self.assertTrue(
valve.dp.ports[1].non_stack_forwarding())
future_now = self.mock_time(10)
expire_ofmsgs = valve.state_expire(future_now, None)
self.assertTrue(expire_ofmsgs)
self.assertEqual(
1, int(self.get_prom('port_lacp_state', labels=labels)))
self.assertFalse(
valve.dp.ports[1].non_stack_forwarding())
def test_dp_disconnect(self):
"""Test LACP state when disconnects."""
test_port = 1
labels = self.port_labels(test_port)
self.assertEqual(
1, int(self.get_prom('port_lacp_state', labels=labels)))
self.rcv_packet(test_port, 0, {
'actor_system': '0e:00:00:00:00:02',
'partner_system': FAUCET_MAC,
'eth_dst': slow.SLOW_PROTOCOL_MULTICAST,
'eth_src': '0e:00:00:00:00:02',
'actor_state_synchronization': 1})
self.assertEqual(
3, int(self.get_prom('port_lacp_state', labels=labels)))
self.disconnect_dp()
self.assertEqual(
0, int(self.get_prom('port_lacp_state', labels=labels)))
class ValveTFMSizeOverride(ValveTestBases.ValveTestNetwork):
"""Test TFM size override."""
CONFIG = """
dps:
s1:
%s
table_sizes:
eth_src: 999
interfaces:
p1:
number: 1
native_vlan: v100
vlans:
v100:
vid: 0x100
""" % DP1_CONFIG
def setUp(self):
self.setup_valves(self.CONFIG)
def test_size(self):
table = self.network.tables[self.DP_ID]
tfm_by_name = {body.name: body for body in table.tfm.values()}
eth_src_table = tfm_by_name.get(b'eth_src', None)
self.assertTrue(eth_src_table)
if eth_src_table is not None:
self.assertEqual(999, eth_src_table.max_entries)
class ValveTFMSize(ValveTestBases.ValveTestNetwork):
"""Test TFM sizer."""
NUM_PORTS = 128
CONFIG = """
dps:
s1:
%s
lacp_timeout: 5
interfaces:
p1:
number: 1
native_vlan: v100
lacp: 1
lacp_active: True
p2:
number: 2
native_vlan: v200
tagged_vlans: [v100]
p3:
number: 3
tagged_vlans: [v100, v200]
p4:
number: 4
tagged_vlans: [v200]
p5:
number: 5
tagged_vlans: [v300]
interface_ranges:
6-128:
native_vlan: v100
vlans:
v100:
vid: 0x100
v200:
vid: 0x200
v300:
vid: 0x300
""" % DP1_CONFIG
def setUp(self):
self.setup_valves(self.CONFIG)
def test_size(self):
table = self.network.tables[self.DP_ID]
tfm_by_name = {body.name: body for body in table.tfm.values()}
flood_table = tfm_by_name.get(b'flood', None)
self.assertTrue(flood_table)
if flood_table is not None:
self.assertGreater(flood_table.max_entries, self.NUM_PORTS * 2)
class ValveActiveLACPTestCase(ValveTestBases.ValveTestNetwork):
"""Test LACP."""
CONFIG = """
dps:
s1:
%s
lacp_timeout: 5
interfaces:
p1:
number: 1
native_vlan: v100
lacp: 1
lacp_active: True
p2:
number: 2
native_vlan: v200
tagged_vlans: [v100]
p3:
number: 3
tagged_vlans: [v100, v200]
p4:
number: 4
tagged_vlans: [v200]
p5:
number: 5
tagged_vlans: [v300]
vlans:
v100:
vid: 0x100
v200:
vid: 0x200
v300:
vid: 0x300
""" % DP1_CONFIG
def setUp(self):
self.setup_valves(self.CONFIG)
self.activate_all_ports()
def test_lacp(self):
"""Test LACP comes up."""
test_port = 1
labels = self.port_labels(test_port)
self.assertEqual(
1, int(self.get_prom('port_lacp_state', labels=labels)))
# Ensure LACP packet sent.
valve = self.valves_manager.valves[self.DP_ID]
ofmsgs = valve.fast_advertise(self.mock_time(), None)[valve]
self.assertTrue(ValveTestBases.packet_outs_from_flows(ofmsgs))
self.rcv_packet(test_port, 0, {
'actor_system': '0e:00:00:00:00:02',
'partner_system': FAUCET_MAC,
'eth_dst': slow.SLOW_PROTOCOL_MULTICAST,
'eth_src': '0e:00:00:00:00:02',
'actor_state_synchronization': 1})
self.assertEqual(
3, int(self.get_prom('port_lacp_state', labels=labels)))
self.learn_hosts()
self.verify_expiry()
class ValveL2LearnTestCase(ValveTestBases.ValveTestNetwork):
"""Test L2 Learning"""
def setUp(self):
self.setup_valves(CONFIG)
def test_expiry(self):
learn_labels = {
'vid': str(0x200),
'eth_src': self.P2_V200_MAC
}
self.assertEqual(
0, self.get_prom('learned_l2_port', labels=learn_labels))
self.learn_hosts()
self.assertEqual(
2.0, self.get_prom('learned_l2_port', labels=learn_labels))
self.verify_expiry()
self.assertEqual(
0, self.get_prom('learned_l2_port', labels=learn_labels))
class ValveMirrorTestCase(ValveTestBases.ValveTestBig):
"""Test ACL and interface mirroring."""
# TODO: check mirror packets are present/correct
CONFIG = """
acls:
mirror_ospf:
- rule:
nw_dst: '224.0.0.5'
dl_type: 0x800
actions:
mirror: p5
allow: 1
- rule:
dl_type: 0x800
actions:
allow: 0
- rule:
actions:
allow: 1
dps:
s1:
%s
interfaces:
p1:
number: 1
native_vlan: v100
lldp_beacon:
enable: True
system_name: "faucet"
port_descr: "first_port"
acls_in: [mirror_ospf]
p2:
number: 2
native_vlan: v200
tagged_vlans: [v100]
p3:
number: 3
tagged_vlans: [v100, v200]
p4:
number: 4
tagged_vlans: [v200]
p5:
number: 5
output_only: True
mirror: 4
vlans:
v100:
vid: 0x100
faucet_vips: ['10.0.0.254/24']
routes:
- route:
ip_dst: 10.99.99.0/24
ip_gw: 10.0.0.1
- route:
ip_dst: 10.99.98.0/24
ip_gw: 10.0.0.99
v200:
vid: 0x200
faucet_vips: ['fc00::1:254/112', 'fe80::1:254/64']
routes:
- route:
ip_dst: 'fc00::10:0/112'
ip_gw: 'fc00::1:1'
- route:
ip_dst: 'fc00::20:0/112'
ip_gw: 'fc00::1:99'
routers:
router1:
bgp:
as: 1
connect_mode: 'passive'
neighbor_as: 2
port: 9179
routerid: '1.1.1.1'
server_addresses: ['127.0.0.1']
neighbor_addresses: ['127.0.0.1']
vlan: v100
""" % DP1_CONFIG
def setUp(self):
self.setup_valves(self.CONFIG)
if __name__ == "__main__":
unittest.main() # pytype: disable=module-attr
|
|
# -*- coding: utf-8 -*-
"""
tablib.core
~~~~~~~~~~~
This module implements the central Tablib objects.
:copyright: (c) 2016 by Kenneth Reitz. 2019 Jazzband.
:license: MIT, see LICENSE for more details.
"""
from collections import OrderedDict
from copy import copy
from operator import itemgetter
from tablib import formats
from tablib.compat import unicode
__title__ = 'tablib'
__author__ = 'Kenneth Reitz'
__license__ = 'MIT'
__copyright__ = 'Copyright 2017 Kenneth Reitz. 2019 Jazzband.'
__docformat__ = 'restructuredtext'
class Row(object):
"""Internal Row object. Mainly used for filtering."""
__slots__ = ['_row', 'tags']
def __init__(self, row=list(), tags=list()):
self._row = list(row)
self.tags = list(tags)
def __iter__(self):
return (col for col in self._row)
def __len__(self):
return len(self._row)
def __repr__(self):
return repr(self._row)
def __getslice__(self, i, j):
return self._row[i:j]
def __getitem__(self, i):
return self._row[i]
def __setitem__(self, i, value):
self._row[i] = value
def __delitem__(self, i):
del self._row[i]
def __getstate__(self):
slots = dict()
for slot in self.__slots__:
attribute = getattr(self, slot)
slots[slot] = attribute
return slots
def __setstate__(self, state):
for (k, v) in list(state.items()): setattr(self, k, v)
    def rpush(self, value):
        self.insert(len(self._row), value)
    def lpush(self, value):
        self.insert(0, value)
def append(self, value):
self.rpush(value)
def insert(self, index, value):
self._row.insert(index, value)
def __contains__(self, item):
return (item in self._row)
@property
def tuple(self):
"""Tuple representation of :class:`Row`."""
return tuple(self._row)
@property
def list(self):
"""List representation of :class:`Row`."""
return list(self._row)
def has_tag(self, tag):
"""Returns true if current row contains tag."""
        if tag is None:
return False
elif isinstance(tag, str):
return (tag in self.tags)
else:
return bool(len(set(tag) & set(self.tags)))
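# Usage sketch (illustrative): tags attached to a Row drive Dataset.filter
# below; has_tag matches a single tag, or any overlap with a list of tags.
#
#     row = Row(('John', 'Adams'), tags=['president'])
#     row.has_tag('president')                 # True
#     row.has_tag(['senator', 'president'])    # True -- any overlap matches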
class Dataset(object):
"""The :class:`Dataset` object is the heart of Tablib. It provides all core
functionality.
Usually you create a :class:`Dataset` instance in your main module, and append
rows as you collect data. ::
data = tablib.Dataset()
data.headers = ('name', 'age')
for (name, age) in some_collector():
data.append((name, age))
Setting columns is similar. The column data length must equal the
current height of the data and headers must be set ::
data = tablib.Dataset()
data.headers = ('first_name', 'last_name')
data.append(('John', 'Adams'))
data.append(('George', 'Washington'))
data.append_col((90, 67), header='age')
You can also set rows and headers upon instantiation. This is useful if
dealing with dozens or hundreds of :class:`Dataset` objects. ::
headers = ('first_name', 'last_name')
data = [('John', 'Adams'), ('George', 'Washington')]
data = tablib.Dataset(*data, headers=headers)
:param \\*args: (optional) list of rows to populate Dataset
    :param headers: (optional) list of strings for the Dataset header row
:param title: (optional) string to use as title of the Dataset
.. admonition:: Format Attributes Definition
If you look at the code, the various output/import formats are not
defined within the :class:`Dataset` object. To add support for a new format, see
:ref:`Adding New Formats <newformats>`.
"""
_formats = {}
def __init__(self, *args, **kwargs):
self._data = list(Row(arg) for arg in args)
self.__headers = None
# ('title', index) tuples
self._separators = []
# (column, callback) tuples
self._formatters = []
self.headers = kwargs.get('headers')
self.title = kwargs.get('title')
self._register_formats()
def __len__(self):
return self.height
def __getitem__(self, key):
if isinstance(key, (str, unicode)):
if key in self.headers:
pos = self.headers.index(key) # get 'key' index from each data
return [row[pos] for row in self._data]
else:
raise KeyError
else:
_results = self._data[key]
if isinstance(_results, Row):
return _results.tuple
else:
return [result.tuple for result in _results]
def __setitem__(self, key, value):
self._validate(value)
self._data[key] = Row(value)
def __delitem__(self, key):
if isinstance(key, (str, unicode)):
if key in self.headers:
pos = self.headers.index(key)
del self.headers[pos]
for i, row in enumerate(self._data):
del row[pos]
self._data[i] = row
else:
raise KeyError
else:
del self._data[key]
def __repr__(self):
try:
return '<%s dataset>' % (self.title.lower())
except AttributeError:
return '<dataset object>'
def __unicode__(self):
result = []
# Add unicode representation of headers.
if self.__headers:
result.append([unicode(h) for h in self.__headers])
# Add unicode representation of rows.
result.extend(list(map(unicode, row)) for row in self._data)
lens = [list(map(len, row)) for row in result]
field_lens = list(map(max, zip(*lens)))
# delimiter between header and data
if self.__headers:
result.insert(1, ['-' * length for length in field_lens])
format_string = '|'.join('{%s:%s}' % item for item in enumerate(field_lens))
return '\n'.join(format_string.format(*row) for row in result)
def __str__(self):
return self.__unicode__()
# ---------
# Internals
# ---------
@classmethod
def _register_formats(cls):
"""Adds format properties."""
for fmt in formats.available:
try:
try:
setattr(cls, fmt.title, property(fmt.export_set, fmt.import_set))
setattr(cls, 'get_%s' % fmt.title, fmt.export_set)
setattr(cls, 'set_%s' % fmt.title, fmt.import_set)
cls._formats[fmt.title] = (fmt.export_set, fmt.import_set)
except AttributeError:
setattr(cls, fmt.title, property(fmt.export_set))
setattr(cls, 'get_%s' % fmt.title, fmt.export_set)
cls._formats[fmt.title] = (fmt.export_set, None)
except AttributeError:
cls._formats[fmt.title] = (None, None)
def _validate(self, row=None, col=None, safety=False):
"""Assures size of every row in dataset is of proper proportions."""
if row:
is_valid = (len(row) == self.width) if self.width else True
elif col:
if len(col) < 1:
is_valid = True
else:
is_valid = (len(col) == self.height) if self.height else True
else:
is_valid = all((len(x) == self.width for x in self._data))
if is_valid:
return True
else:
if not safety:
raise InvalidDimensions
return False
def _package(self, dicts=True, ordered=True):
"""Packages Dataset into lists of dictionaries for transmission."""
# TODO: Dicts default to false?
_data = list(self._data)
if ordered:
dict_pack = OrderedDict
else:
dict_pack = dict
# Execute formatters
if self._formatters:
for row_i, row in enumerate(_data):
for col, callback in self._formatters:
try:
if col is None:
for j, c in enumerate(row):
_data[row_i][j] = callback(c)
else:
_data[row_i][col] = callback(row[col])
except IndexError:
raise InvalidDatasetIndex
if self.headers:
if dicts:
data = [dict_pack(list(zip(self.headers, data_row))) for data_row in _data]
else:
data = [list(self.headers)] + list(_data)
else:
data = [list(row) for row in _data]
return data
def _get_headers(self):
"""An *optional* list of strings to be used for header rows and attribute names.
This must be set manually. The given list length must equal :class:`Dataset.width`.
"""
return self.__headers
def _set_headers(self, collection):
"""Validating headers setter."""
self._validate(collection)
if collection:
try:
self.__headers = list(collection)
except TypeError:
raise TypeError
else:
self.__headers = None
headers = property(_get_headers, _set_headers)
def _get_dict(self):
"""A native Python representation of the :class:`Dataset` object. If headers have
been set, a list of Python dictionaries will be returned. If no headers have been set,
a list of tuples (rows) will be returned instead.
A dataset object can also be imported by setting the `Dataset.dict` attribute: ::
data = tablib.Dataset()
data.dict = [{'age': 90, 'first_name': 'Kenneth', 'last_name': 'Reitz'}]
"""
return self._package()
def _set_dict(self, pickle):
"""A native Python representation of the Dataset object. If headers have been
set, a list of Python dictionaries will be returned. If no headers have been
set, a list of tuples (rows) will be returned instead.
A dataset object can also be imported by setting the :class:`Dataset.dict` attribute. ::
data = tablib.Dataset()
data.dict = [{'age': 90, 'first_name': 'Kenneth', 'last_name': 'Reitz'}]
"""
if not len(pickle):
return
# if list of rows
if isinstance(pickle[0], list):
self.wipe()
for row in pickle:
self.append(Row(row))
# if list of objects
elif isinstance(pickle[0], dict):
self.wipe()
self.headers = list(pickle[0].keys())
for row in pickle:
self.append(Row(list(row.values())))
else:
raise UnsupportedFormat
dict = property(_get_dict, _set_dict)
def _clean_col(self, col):
"""Prepares the given column for insert/append."""
col = list(col)
if self.headers:
header = [col.pop(0)]
else:
header = []
if len(col) == 1 and hasattr(col[0], '__call__'):
col = list(map(col[0], self._data))
col = tuple(header + col)
return col
@property
def height(self):
"""The number of rows currently in the :class:`Dataset`.
Cannot be directly modified.
"""
return len(self._data)
@property
def width(self):
"""The number of columns currently in the :class:`Dataset`.
Cannot be directly modified.
"""
try:
return len(self._data[0])
except IndexError:
try:
return len(self.headers)
except TypeError:
return 0
def load(self, in_stream, format=None, **kwargs):
"""
Import `in_stream` to the :class:`Dataset` object using the `format`.
:param \\*\\*kwargs: (optional) custom configuration to the format `import_set`.
"""
if not format:
format = detect_format(in_stream)
export_set, import_set = self._formats.get(format, (None, None))
if not import_set:
raise UnsupportedFormat('Format {0} cannot be imported.'.format(format))
import_set(self, in_stream, **kwargs)
return self
def export(self, format, **kwargs):
"""
Export :class:`Dataset` object to `format`.
:param \\*\\*kwargs: (optional) custom configuration to the format `export_set`.
"""
export_set, import_set = self._formats.get(format, (None, None))
if not export_set:
raise UnsupportedFormat('Format {0} cannot be exported.'.format(format))
return export_set(self, **kwargs)
# -------
# Formats
# -------
@property
def xls():
"""A Legacy Excel Spreadsheet representation of the :class:`Dataset` object, with :ref:`separators`. Cannot be set.
.. note::
XLS files are limited to a maximum of 65,000 rows. Use :class:`Dataset.xlsx` to avoid this limitation.
.. admonition:: Binary Warning
:class:`Dataset.xls` contains binary data, so make sure to write in binary mode::
with open('output.xls', 'wb') as f:
f.write(data.xls)
"""
pass
@property
def xlsx():
"""An Excel '07+ Spreadsheet representation of the :class:`Dataset` object, with :ref:`separators`. Cannot be set.
.. admonition:: Binary Warning
:class:`Dataset.xlsx` contains binary data, so make sure to write in binary mode::
with open('output.xlsx', 'wb') as f:
f.write(data.xlsx)
"""
pass
@property
def ods():
"""An OpenDocument Spreadsheet representation of the :class:`Dataset` object, with :ref:`separators`. Cannot be set.
.. admonition:: Binary Warning
:class:`Dataset.ods` contains binary data, so make sure to write in binary mode::
with open('output.ods', 'wb') as f:
f.write(data.ods)
"""
pass
@property
def csv():
"""A CSV representation of the :class:`Dataset` object. The top row will contain
headers, if they have been set. Otherwise, the top row will contain
the first row of the dataset.
A dataset object can also be imported by setting the :class:`Dataset.csv` attribute. ::
data = tablib.Dataset()
data.csv = 'age, first_name, last_name\\n90, John, Adams'
Import assumes (for now) that headers exist.
.. admonition:: Binary Warning for Python 2
:class:`Dataset.csv` uses \\r\\n line endings by default so, in Python 2, make
sure to write in binary mode::
with open('output.csv', 'wb') as f:
f.write(data.csv)
If you do not do this, and you export the file on Windows, your
CSV file will open in Excel with a blank line between each row.
.. admonition:: Line endings for Python 3
:class:`Dataset.csv` uses \\r\\n line endings by default so, in Python 3, make
sure to include newline='' otherwise you will get a blank line between each row
when you open the file in Excel::
with open('output.csv', 'w', newline='') as f:
f.write(data.csv)
If you do not do this, and you export the file on Windows, your
CSV file will open in Excel with a blank line between each row.
"""
pass
@property
def tsv():
"""A TSV representation of the :class:`Dataset` object. The top row will contain
headers, if they have been set. Otherwise, the top row will contain
the first row of the dataset.
A dataset object can also be imported by setting the :class:`Dataset.tsv` attribute. ::
data = tablib.Dataset()
data.tsv = 'age\tfirst_name\tlast_name\\n90\tJohn\tAdams'
Import assumes (for now) that headers exist.
"""
pass
@property
def yaml():
"""A YAML representation of the :class:`Dataset` object. If headers have been
set, a YAML list of objects will be returned. If no headers have
been set, a YAML list of lists (rows) will be returned instead.
A dataset object can also be imported by setting the :class:`Dataset.yaml` attribute: ::
data = tablib.Dataset()
data.yaml = '- {age: 90, first_name: John, last_name: Adams}'
Import assumes (for now) that headers exist.
"""
pass
@property
def df():
"""A DataFrame representation of the :class:`Dataset` object.
A dataset object can also be imported by setting the :class:`Dataset.df` attribute: ::
data = tablib.Dataset()
data.df = DataFrame(np.random.randn(6,4))
Import assumes (for now) that headers exist.
"""
pass
@property
def json():
"""A JSON representation of the :class:`Dataset` object. If headers have been
set, a JSON list of objects will be returned. If no headers have
been set, a JSON list of lists (rows) will be returned instead.
A dataset object can also be imported by setting the :class:`Dataset.json` attribute: ::
data = tablib.Dataset()
data.json = '[{"age": 90, "first_name": "John", "last_name": "Adams"}]'
Import assumes (for now) that headers exist.
"""
pass
@property
def html():
"""A HTML table representation of the :class:`Dataset` object. If
headers have been set, they will be used as table headers.
..notice:: This method can be used for export only.
"""
pass
@property
def dbf():
"""A dBASE representation of the :class:`Dataset` object.
A dataset object can also be imported by setting the
:class:`Dataset.dbf` attribute. ::
# To import data from an existing DBF file:
data = tablib.Dataset()
data.dbf = open('existing_table.dbf', mode='rb').read()
# to import data from an ASCII-encoded bytestring:
data = tablib.Dataset()
data.dbf = '<bytestring of tabular data>'
.. admonition:: Binary Warning
:class:`Dataset.dbf` contains binary data, so make sure to write in binary mode::
with open('output.dbf', 'wb') as f:
f.write(data.dbf)
"""
pass
@property
def latex():
"""A LaTeX booktabs representation of the :class:`Dataset` object. If a
title has been set, it will be exported as the table caption.
.. note:: This method can be used for export only.
"""
pass
@property
def jira():
"""A Jira table representation of the :class:`Dataset` object.
.. note:: This method can be used for export only.
"""
pass
# ----
# Rows
# ----
def insert(self, index, row, tags=list()):
"""Inserts a row to the :class:`Dataset` at the given index.
        Rows inserted must be the correct width.
The default behaviour is to insert the given row to the :class:`Dataset`
object at the given index.
"""
self._validate(row)
self._data.insert(index, Row(row, tags=tags))
def rpush(self, row, tags=list()):
"""Adds a row to the end of the :class:`Dataset`.
See :class:`Dataset.insert` for additional documentation.
"""
self.insert(self.height, row=row, tags=tags)
def lpush(self, row, tags=list()):
"""Adds a row to the top of the :class:`Dataset`.
See :class:`Dataset.insert` for additional documentation.
"""
self.insert(0, row=row, tags=tags)
def append(self, row, tags=list()):
"""Adds a row to the :class:`Dataset`.
See :class:`Dataset.insert` for additional documentation.
"""
self.rpush(row, tags)
def extend(self, rows, tags=list()):
"""Adds a list of rows to the :class:`Dataset` using
:class:`Dataset.append`
"""
for row in rows:
self.append(row, tags)
def lpop(self):
"""Removes and returns the first row of the :class:`Dataset`."""
cache = self[0]
del self[0]
return cache
def rpop(self):
"""Removes and returns the last row of the :class:`Dataset`."""
cache = self[-1]
del self[-1]
return cache
def pop(self):
"""Removes and returns the last row of the :class:`Dataset`."""
return self.rpop()
# -------
# Columns
# -------
def insert_col(self, index, col=None, header=None):
"""Inserts a column to the :class:`Dataset` at the given index.
Columns inserted must be the correct height.
You can also insert a column of a single callable object, which will
add a new column with the return values of the callable each as an
item in the column. ::
data.append_col(col=random.randint)
If inserting a column, and :class:`Dataset.headers` is set, the
header attribute must be set, and will be considered the header for
that row.
See :ref:`dyncols` for an in-depth example.
.. versionchanged:: 0.9.0
If inserting a column, and :class:`Dataset.headers` is set, the
header attribute must be set, and will be considered the header for
that row.
.. versionadded:: 0.9.0
If inserting a row, you can add :ref:`tags <tags>` to the row you are inserting.
This gives you the ability to :class:`filter <Dataset.filter>` your
:class:`Dataset` later.
"""
if col is None:
col = []
# Callable Columns...
if hasattr(col, '__call__'):
col = list(map(col, self._data))
col = self._clean_col(col)
self._validate(col=col)
if self.headers:
# pop the first item off, add to headers
if not header:
raise HeadersNeeded()
# corner case - if header is set without data
elif header and self.height == 0 and len(col):
raise InvalidDimensions
self.headers.insert(index, header)
if self.height and self.width:
for i, row in enumerate(self._data):
row.insert(index, col[i])
self._data[i] = row
else:
self._data = [Row([row]) for row in col]
def rpush_col(self, col, header=None):
"""Adds a column to the end of the :class:`Dataset`.
See :class:`Dataset.insert` for additional documentation.
"""
self.insert_col(self.width, col, header=header)
def lpush_col(self, col, header=None):
"""Adds a column to the top of the :class:`Dataset`.
See :class:`Dataset.insert` for additional documentation.
"""
self.insert_col(0, col, header=header)
def insert_separator(self, index, text='-'):
"""Adds a separator to :class:`Dataset` at given index."""
sep = (index, text)
self._separators.append(sep)
def append_separator(self, text='-'):
"""Adds a :ref:`separator <separators>` to the :class:`Dataset`."""
# change offsets if headers are or aren't defined
if not self.headers:
index = self.height if self.height else 0
else:
index = (self.height + 1) if self.height else 1
self.insert_separator(index, text)
def append_col(self, col, header=None):
"""Adds a column to the :class:`Dataset`.
See :class:`Dataset.insert_col` for additional documentation.
"""
self.rpush_col(col, header)
def get_col(self, index):
"""Returns the column from the :class:`Dataset` at the given index."""
return [row[index] for row in self._data]
# ----
# Misc
# ----
def add_formatter(self, col, handler):
"""Adds a formatter to the :class:`Dataset`.
.. versionadded:: 0.9.5
        :param col: column the formatter is applied to. Accepts index int or header str.
:param handler: reference to callback function to execute against
each cell value.
"""
if isinstance(col, unicode):
if col in self.headers:
col = self.headers.index(col) # get 'key' index from each data
else:
raise KeyError
        if col is None or col <= self.width:
self._formatters.append((col, handler))
else:
raise InvalidDatasetIndex
return True
def filter(self, tag):
"""Returns a new instance of the :class:`Dataset`, excluding any rows
that do not contain the given :ref:`tags <tags>`.
"""
_dset = copy(self)
_dset._data = [row for row in _dset._data if row.has_tag(tag)]
return _dset
def sort(self, col, reverse=False):
"""Sort a :class:`Dataset` by a specific column, given string (for
header) or integer (for column index). The order can be reversed by
setting ``reverse`` to ``True``.
Returns a new :class:`Dataset` instance where columns have been
sorted.
"""
if isinstance(col, (str, unicode)):
if not self.headers:
raise HeadersNeeded
_sorted = sorted(self.dict, key=itemgetter(col), reverse=reverse)
_dset = Dataset(headers=self.headers, title=self.title)
for item in _sorted:
row = [item[key] for key in self.headers]
_dset.append(row=row)
else:
if self.headers:
col = self.headers[col]
_sorted = sorted(self.dict, key=itemgetter(col), reverse=reverse)
_dset = Dataset(headers=self.headers, title=self.title)
for item in _sorted:
if self.headers:
row = [item[key] for key in self.headers]
else:
row = item
_dset.append(row=row)
return _dset
def transpose(self):
"""Transpose a :class:`Dataset`, turning rows into columns and vice
versa, returning a new ``Dataset`` instance. The first row of the
original instance becomes the new header row."""
# Don't transpose if there is no data
if not self:
return
_dset = Dataset()
# The first element of the headers stays in the headers,
# it is our "hinge" on which we rotate the data
new_headers = [self.headers[0]] + self[self.headers[0]]
_dset.headers = new_headers
for index, column in enumerate(self.headers):
if column == self.headers[0]:
# It's in the headers, so skip it
continue
# Adding the column name as now they're a regular column
# Use `get_col(index)` in case there are repeated values
row_data = [column] + self.get_col(index)
row_data = Row(row_data)
_dset.append(row=row_data)
return _dset
def stack(self, other):
"""Stack two :class:`Dataset` instances together by
joining at the row level, and return new combined
``Dataset`` instance."""
if not isinstance(other, Dataset):
return
if self.width != other.width:
raise InvalidDimensions
# Copy the source data
_dset = copy(self)
rows_to_stack = [row for row in _dset._data]
other_rows = [row for row in other._data]
rows_to_stack.extend(other_rows)
_dset._data = rows_to_stack
return _dset
def stack_cols(self, other):
"""Stack two :class:`Dataset` instances together by
joining at the column level, and return a new
combined ``Dataset`` instance. If either ``Dataset``
        has headers set, then the other must as well."""
if not isinstance(other, Dataset):
return
if self.headers or other.headers:
if not self.headers or not other.headers:
raise HeadersNeeded
if self.height != other.height:
raise InvalidDimensions
try:
new_headers = self.headers + other.headers
except TypeError:
new_headers = None
_dset = Dataset()
for column in self.headers:
_dset.append_col(col=self[column])
for column in other.headers:
_dset.append_col(col=other[column])
_dset.headers = new_headers
return _dset
def remove_duplicates(self):
"""Removes all duplicate rows from the :class:`Dataset` object
while maintaining the original order."""
seen = set()
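        # set.add() returns None (falsy), so the 'or' clause below both
        # records each row and keeps the filter True only for first
        # occurrences, preserving the original order.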
self._data[:] = [row for row in self._data if not (tuple(row) in seen or seen.add(tuple(row)))]
def wipe(self):
"""Removes all content and headers from the :class:`Dataset` object."""
self._data = list()
self.__headers = None
def subset(self, rows=None, cols=None):
"""Returns a new instance of the :class:`Dataset`,
including only specified rows and columns.
"""
# Don't return if no data
if not self:
return
if rows is None:
rows = list(range(self.height))
if cols is None:
cols = list(self.headers)
#filter out impossible rows and columns
rows = [row for row in rows if row in range(self.height)]
cols = [header for header in cols if header in self.headers]
_dset = Dataset()
#filtering rows and columns
_dset.headers = list(cols)
_dset._data = []
for row_no, row in enumerate(self._data):
data_row = []
for key in _dset.headers:
if key in self.headers:
pos = self.headers.index(key)
data_row.append(row[pos])
else:
raise KeyError
if row_no in rows:
_dset.append(row=Row(data_row))
return _dset
class Databook(object):
"""A book of :class:`Dataset` objects.
"""
_formats = {}
def __init__(self, sets=None):
if sets is None:
self._datasets = list()
else:
self._datasets = sets
self._register_formats()
def __repr__(self):
try:
return '<%s databook>' % (self.title.lower())
except AttributeError:
return '<databook object>'
def wipe(self):
"""Removes all :class:`Dataset` objects from the :class:`Databook`."""
self._datasets = []
@classmethod
def _register_formats(cls):
"""Adds format properties."""
for fmt in formats.available:
try:
try:
setattr(cls, fmt.title, property(fmt.export_book, fmt.import_book))
cls._formats[fmt.title] = (fmt.export_book, fmt.import_book)
except AttributeError:
setattr(cls, fmt.title, property(fmt.export_book))
cls._formats[fmt.title] = (fmt.export_book, None)
except AttributeError:
cls._formats[fmt.title] = (None, None)
def sheets(self):
return self._datasets
def add_sheet(self, dataset):
"""Adds given :class:`Dataset` to the :class:`Databook`."""
if isinstance(dataset, Dataset):
self._datasets.append(dataset)
else:
raise InvalidDatasetType
def _package(self, ordered=True):
"""Packages :class:`Databook` for delivery."""
collector = []
if ordered:
dict_pack = OrderedDict
else:
dict_pack = dict
for dset in self._datasets:
collector.append(dict_pack(
                title=dset.title,
                data=dset._package(ordered=ordered)
))
return collector
@property
def size(self):
"""The number of the :class:`Dataset` objects within :class:`Databook`."""
return len(self._datasets)
def load(self, in_stream, format, **kwargs):
"""
Import `in_stream` to the :class:`Databook` object using the `format`.
:param \\*\\*kwargs: (optional) custom configuration to the format `import_book`.
"""
if not format:
format = detect_format(in_stream)
export_book, import_book = self._formats.get(format, (None, None))
if not import_book:
raise UnsupportedFormat('Format {0} cannot be loaded.'.format(format))
import_book(self, in_stream, **kwargs)
return self
def export(self, format, **kwargs):
"""
Export :class:`Databook` object to `format`.
:param \\*\\*kwargs: (optional) custom configuration to the format `export_book`.
"""
export_book, import_book = self._formats.get(format, (None, None))
if not export_book:
raise UnsupportedFormat('Format {0} cannot be exported.'.format(format))
return export_book(self, **kwargs)
def detect_format(stream):
"""Return format name of given stream."""
for fmt in formats.available:
try:
if fmt.detect(stream):
return fmt.title
except AttributeError:
pass
def import_set(stream, format=None, **kwargs):
"""Return dataset of given stream."""
return Dataset().load(stream, format, **kwargs)
def import_book(stream, format=None, **kwargs):
"""Return dataset of given stream."""
return Databook().load(stream, format, **kwargs)
class InvalidDatasetType(Exception):
"Only Datasets can be added to a DataBook"
class InvalidDimensions(Exception):
"Invalid size"
class InvalidDatasetIndex(Exception):
"Outside of Dataset size"
class HeadersNeeded(Exception):
"Header parameter must be given when appending a column in this Dataset."
class UnsupportedFormat(NotImplementedError):
"Format is not supported"
|
|
import unittest
import sys
verbose = False
have_unicode = False
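# Stand-ins for the test-support globals used by the original Python 2 test:
# 'verbose' gates debug output and 'have_unicode' gates the unicode checks,
# which are disabled in this adaptation.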
# Adapted from Python 2.7.x test_format.py
# testformat(formatstr, *args, **kwargs)
class FormatTest(unittest.TestCase):
def doboth(self, formatstr, *args, **kwargs):
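        # args[0] is the value (or tuple of values) fed to the % operator and
        # args[1] is the expected rendering; extra kwargs such as overflowok
        # are accepted for source-compatibility but ignored here.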
if verbose:
print "Testing ", formatstr, " on ", args[1]
self.assertEqual(formatstr%args[0], args[1])
if have_unicode:
self.assertEqual(unicode(formatstr)%args[0], args[1])
def test_format(self):
# self.doboth("%.1d", (1,), "1")
# self.doboth("%.*d", (sys.maxint,1), overflowok=True) # expect overflow
self.doboth("%.100d", (1,), '00000000000000000000000000000000000000'
'000000000000000000000000000000000000000000000000000000'
'00000001', overflowok=True)
self.doboth("%#.117x", (1,), '0x00000000000000000000000000000000000'
'000000000000000000000000000000000000000000000000000000'
'0000000000000000000000000001',
overflowok=True)
self.doboth("%#.118x", (1,), '0x00000000000000000000000000000000000'
'000000000000000000000000000000000000000000000000000000'
'00000000000000000000000000001',
overflowok=True)
# self.doboth("%f", (1.0,), "1.000000")
# these are trying to test the limits of the internal magic-number-length
# formatting buffer, if that number changes then these tests are less
# effective
# self.doboth("%#.*g", (109, -1.e+49/3.))
# self.doboth("%#.*g", (110, -1.e+49/3.))
# self.doboth("%#.*g", (110, -1.e+100/3.))
#
# test some ridiculously large precision, expect overflow
# self.doboth('%12.*f', (123456, 1.0))
# check for internal overflow validation on length of precision
# these tests should no longer cause overflow in Python
# 2.7/3.1 and later.
# self.doboth("%#.*g", (110, -1.e+100/3.))
# self.doboth("%#.*G", (110, -1.e+100/3.))
# self.doboth("%#.*f", (110, -1.e+100/3.))
# self.doboth("%#.*F", (110, -1.e+100/3.))
# Formatting of long integers. Overflow is not ok
self.doboth("%x", 10L, "a")
self.doboth("%x", 100000000000L, "174876e800")
self.doboth("%o", 10L, "12")
self.doboth("%o", 100000000000L, "1351035564000")
self.doboth("%d", 10L, "10")
self.doboth("%d", 100000000000L, "100000000000")
big = 123456789012345678901234567890L
self.doboth("%d", big, "123456789012345678901234567890")
self.doboth("%d", -big, "-123456789012345678901234567890")
self.doboth("%5d", -big, "-123456789012345678901234567890")
self.doboth("%31d", -big, "-123456789012345678901234567890")
self.doboth("%32d", -big, " -123456789012345678901234567890")
self.doboth("%-32d", -big, "-123456789012345678901234567890 ")
self.doboth("%032d", -big, "-0123456789012345678901234567890")
self.doboth("%-032d", -big, "-123456789012345678901234567890 ")
self.doboth("%034d", -big, "-000123456789012345678901234567890")
self.doboth("%034d", big, "0000123456789012345678901234567890")
self.doboth("%0+34d", big, "+000123456789012345678901234567890")
self.doboth("%+34d", big, " +123456789012345678901234567890")
self.doboth("%34d", big, " 123456789012345678901234567890")
self.doboth("%.2d", big, "123456789012345678901234567890")
self.doboth("%.30d", big, "123456789012345678901234567890")
self.doboth("%.31d", big, "0123456789012345678901234567890")
self.doboth("%32.31d", big, " 0123456789012345678901234567890")
# self.doboth("%d", float(big), "123456________________________", 6)
big = 0x1234567890abcdef12345L # 21 hex digits
self.doboth("%x", big, "1234567890abcdef12345")
self.doboth("%x", -big, "-1234567890abcdef12345")
self.doboth("%5x", -big, "-1234567890abcdef12345")
self.doboth("%22x", -big, "-1234567890abcdef12345")
self.doboth("%23x", -big, " -1234567890abcdef12345")
self.doboth("%-23x", -big, "-1234567890abcdef12345 ")
self.doboth("%023x", -big, "-01234567890abcdef12345")
self.doboth("%-023x", -big, "-1234567890abcdef12345 ")
self.doboth("%025x", -big, "-0001234567890abcdef12345")
self.doboth("%025x", big, "00001234567890abcdef12345")
self.doboth("%0+25x", big, "+0001234567890abcdef12345")
self.doboth("%+25x", big, " +1234567890abcdef12345")
self.doboth("%25x", big, " 1234567890abcdef12345")
self.doboth("%.2x", big, "1234567890abcdef12345")
self.doboth("%.21x", big, "1234567890abcdef12345")
self.doboth("%.22x", big, "01234567890abcdef12345")
self.doboth("%23.22x", big, " 01234567890abcdef12345")
self.doboth("%-23.22x", big, "01234567890abcdef12345 ")
self.doboth("%X", big, "1234567890ABCDEF12345")
self.doboth("%#X", big, "0X1234567890ABCDEF12345")
self.doboth("%#x", big, "0x1234567890abcdef12345")
self.doboth("%#x", -big, "-0x1234567890abcdef12345")
self.doboth("%#.23x", -big, "-0x001234567890abcdef12345")
self.doboth("%#+.23x", big, "+0x001234567890abcdef12345")
self.doboth("%# .23x", big, " 0x001234567890abcdef12345")
self.doboth("%#+.23X", big, "+0X001234567890ABCDEF12345")
self.doboth("%#-+.23X", big, "+0X001234567890ABCDEF12345")
self.doboth("%#-+26.23X", big, "+0X001234567890ABCDEF12345")
self.doboth("%#-+27.23X", big, "+0X001234567890ABCDEF12345 ")
self.doboth("%#+27.23X", big, " +0X001234567890ABCDEF12345")
# next one gets two leading zeroes from precision, and another from the
# 0 flag and the width
self.doboth("%#+027.23X", big, "+0X0001234567890ABCDEF12345")
# same, except no 0 flag
self.doboth("%#+27.23X", big, " +0X001234567890ABCDEF12345")
# self.doboth("%x", float(big), "123456_______________", 6)
big = 012345670123456701234567012345670L # 32 octal digits
self.doboth("%o", big, "12345670123456701234567012345670")
self.doboth("%o", -big, "-12345670123456701234567012345670")
self.doboth("%5o", -big, "-12345670123456701234567012345670")
self.doboth("%33o", -big, "-12345670123456701234567012345670")
self.doboth("%34o", -big, " -12345670123456701234567012345670")
self.doboth("%-34o", -big, "-12345670123456701234567012345670 ")
self.doboth("%034o", -big, "-012345670123456701234567012345670")
self.doboth("%-034o", -big, "-12345670123456701234567012345670 ")
self.doboth("%036o", -big, "-00012345670123456701234567012345670")
self.doboth("%036o", big, "000012345670123456701234567012345670")
self.doboth("%0+36o", big, "+00012345670123456701234567012345670")
self.doboth("%+36o", big, " +12345670123456701234567012345670")
self.doboth("%36o", big, " 12345670123456701234567012345670")
self.doboth("%.2o", big, "12345670123456701234567012345670")
self.doboth("%.32o", big, "12345670123456701234567012345670")
self.doboth("%.33o", big, "012345670123456701234567012345670")
self.doboth("%34.33o", big, " 012345670123456701234567012345670")
self.doboth("%-34.33o", big, "012345670123456701234567012345670 ")
self.doboth("%o", big, "12345670123456701234567012345670")
self.doboth("%#o", big, "012345670123456701234567012345670")
self.doboth("%#o", -big, "-012345670123456701234567012345670")
self.doboth("%#.34o", -big, "-0012345670123456701234567012345670")
self.doboth("%#+.34o", big, "+0012345670123456701234567012345670")
self.doboth("%# .34o", big, " 0012345670123456701234567012345670")
self.doboth("%#+.34o", big, "+0012345670123456701234567012345670")
self.doboth("%#-+.34o", big, "+0012345670123456701234567012345670")
self.doboth("%#-+37.34o", big, "+0012345670123456701234567012345670 ")
self.doboth("%#+37.34o", big, " +0012345670123456701234567012345670")
# next one gets one leading zero from precision
self.doboth("%.33o", big, "012345670123456701234567012345670")
# base marker shouldn't change that, since "0" is redundant
self.doboth("%#.33o", big, "012345670123456701234567012345670")
# but reduce precision, and base marker should add a zero
self.doboth("%#.32o", big, "012345670123456701234567012345670")
# one leading zero from precision, and another from "0" flag & width
self.doboth("%034.33o", big, "0012345670123456701234567012345670")
# base marker shouldn't change that
self.doboth("%0#34.33o", big, "0012345670123456701234567012345670")
# self.doboth("%o", float(big), "123456__________________________", 6)
# Some small ints, in both Python int and long flavors).
self.doboth("%d", 42, "42")
self.doboth("%d", -42, "-42")
self.doboth("%d", 42L, "42")
self.doboth("%d", -42L, "-42")
self.doboth("%d", 42.0, "42")
self.doboth("%#x", 1, "0x1")
self.doboth("%#x", 1L, "0x1")
self.doboth("%#X", 1, "0X1")
self.doboth("%#X", 1L, "0X1")
self.doboth("%#x", 1.0, "0x1")
self.doboth("%#o", 1, "01")
self.doboth("%#o", 1L, "01")
self.doboth("%#o", 0, "0")
self.doboth("%#o", 0L, "0")
self.doboth("%o", 0, "0")
self.doboth("%o", 0L, "0")
self.doboth("%d", 0, "0")
self.doboth("%d", 0L, "0")
self.doboth("%#x", 0, "0x0")
self.doboth("%#x", 0L, "0x0")
self.doboth("%#X", 0, "0X0")
self.doboth("%#X", 0L, "0X0")
self.doboth("%x", 0x42, "42")
self.doboth("%x", -0x42, "-42")
self.doboth("%x", 0x42L, "42")
self.doboth("%x", -0x42L, "-42")
self.doboth("%x", float(0x42), "42")
self.doboth("%o", 042, "42")
self.doboth("%o", -042, "-42")
self.doboth("%o", 042L, "42")
self.doboth("%o", -042L, "-42")
self.doboth("%o", float(042), "42")
self.doboth("%4s%4s%4s", ("dez", "okt","hex"), " dez okt hex")
self.doboth("%(thing)s", {'thing': 'abc'}, "abc")
self.doboth("%(thing)4s", {'thing': 'abc'}, " abc")
self.doboth("%s", True, "True")
self.doboth("%s",{'a':1}, "{'a': 1}")
self.doboth("%s",[], "[]")
# alternate float formatting
# testformat('%g', 1.1, '1.1')
# testformat('%#g', 1.1, '1.10000')
if __name__ == "__main__":
print "hello"
unittest.main()
|
|
"""
mbed CMSIS-DAP debugger
Copyright (c) 2006-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import argparse, os, sys
from time import sleep
from random import randrange
import math
parentdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, parentdir)
import pyOCD
from pyOCD.board import MbedBoard
from pyOCD.target.cortex_m import float2int
addr = 0
size = 0
f = None
binary_file = "l1_"
interface = None
board = None
import logging
logging.basicConfig(level=logging.INFO)
parser = argparse.ArgumentParser(description='A CMSIS-DAP python debugger')
parser.add_argument('-f', help='binary file', dest = "file")
args = parser.parse_args()
try:
board = MbedBoard.chooseBoard()
target_type = board.getTargetType()
if args.file is None:
binary_file += target_type + ".bin"
binary_file = os.path.join(parentdir, 'binaries', binary_file)
else:
binary_file = args.file
print "binary file: %s" % binary_file
if target_type == "lpc1768":
addr = 0x10000001
size = 0x1102
elif target_type == "lpc11u24":
addr = 0x10000001
size = 0x502
elif target_type == "kl25z":
addr = 0x20000001
size = 0x502
elif target_type == "k64f":
addr = 0x20000001
size = 0x502
elif target_type == "k22f":
addr = 0x20000001
size = 0x502
elif target_type == "k20d50m":
addr = 0x20000001
size = 0x502
elif target_type == "kl46z":
addr = 0x20000001
size = 0x502
elif target_type == "lpc800":
addr = 0x10000001
size = 0x502
elif target_type == "nrf51822":
addr = 0x20000001
size = 0x502
else:
raise Exception("A board is not supported by this test script.")
target = board.target
transport = board.transport
flash = board.flash
interface = board.interface
print "\r\n\r\n------ GET Unique ID ------"
print "Unique ID: %s" % board.getUniqueID()
print "\r\n\r\n------ TEST READ / WRITE CORE REGISTER ------"
pc = target.readCoreRegister('pc')
print "initial pc: 0x%X" % target.readCoreRegister('pc')
# write in pc dummy value
target.writeCoreRegister('pc', 0x3D82)
print "now pc: 0x%X" % target.readCoreRegister('pc')
# write initial pc value
target.writeCoreRegister('pc', pc)
print "initial pc value rewritten: 0x%X" % target.readCoreRegister('pc')
msp = target.readCoreRegister('msp')
psp = target.readCoreRegister('psp')
print "MSP = 0x%08x; PSP = 0x%08x" % (msp, psp)
control = target.readCoreRegister('control')
faultmask = target.readCoreRegister('faultmask')
basepri = target.readCoreRegister('basepri')
primask = target.readCoreRegister('primask')
print "CONTROL = 0x%02x; FAULTMASK = 0x%02x; BASEPRI = 0x%02x; PRIMASK = 0x%02x" % (control, faultmask, basepri, primask)
target.writeCoreRegister('primask', 1)
newPrimask = target.readCoreRegister('primask')
print "New PRIMASK = 0x%02x" % newPrimask
target.writeCoreRegister('primask', primask)
newPrimask = target.readCoreRegister('primask')
print "Restored PRIMASK = 0x%02x" % newPrimask
if target.has_fpu:
s0 = target.readCoreRegister('s0')
print "S0 = %g (0x%08x)" % (s0,float2int(s0))
target.writeCoreRegister('s0', math.pi)
newS0 = target.readCoreRegister('s0')
print "New S0 = %g (0x%08x)" % (newS0, float2int(newS0))
target.writeCoreRegister('s0', s0)
newS0 = target.readCoreRegister('s0')
print "Restored S0 = %g (0x%08x)" % (newS0, float2int(newS0))
print "\r\n\r\n------ TEST HALT / RESUME ------"
print "resume"
target.resume()
sleep(0.2)
print "halt"
target.halt()
print "HALT: pc: 0x%X" % target.readCoreRegister('pc')
sleep(0.2)
print "\r\n\r\n------ TEST READ / WRITE MEMORY ------"
target.halt()
print "READ32/WRITE32"
val = randrange(0, 0xffffffff)
print "write32 0x%X at 0x%X" % (val, addr)
target.writeMemory(addr, val)
res = target.readMemory(addr)
print "read32 at 0x%X: 0x%X" % (addr, res)
if res != val:
print "ERROR in READ/WRITE 32"
print "\r\nREAD16/WRITE16"
val = randrange(0, 0xffff)
print "write16 0x%X at 0x%X" % (val, addr + 2)
target.writeMemory(addr + 2, val, 16)
res = target.readMemory(addr + 2, 16)
print "read16 at 0x%X: 0x%X" % (addr + 2, res)
if res != val:
print "ERROR in READ/WRITE 16"
print "\r\nREAD8/WRITE8"
val = randrange(0, 0xff)
print "write8 0x%X at 0x%X" % (val, addr + 1)
target.writeMemory(addr + 1, val, 8)
res = target.readMemory(addr + 1, 8)
print "read8 at 0x%X: 0x%X" % (addr + 1, res)
if res != val:
print "ERROR in READ/WRITE 8"
print "\r\n\r\n------ TEST READ / WRITE MEMORY BLOCK ------"
data = [randrange(1, 50) for x in range(size)]
target.writeBlockMemoryUnaligned8(addr, data)
block = target.readBlockMemoryUnaligned8(addr, size)
error = False
for i in range(len(block)):
if (block[i] != data[i]):
error = True
print "ERROR: 0x%X, 0x%X, 0x%X!!!" % ((addr + i), block[i], data[i])
if error:
print "TEST FAILED"
else:
print "TEST PASSED"
print "\r\n\r\n------ TEST RESET ------"
target.reset()
sleep(0.1)
target.halt()
for i in range(5):
target.step()
print "pc: 0x%X" % target.readCoreRegister('pc')
print "\r\n\r\n----- FLASH NEW BINARY -----"
flash.flashBinary(binary_file)
target.reset()
finally:
    if board is not None:
board.uninit()
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import logging
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.debug import sensitive_variables # noqa
from oslo.utils import strutils
import six
from horizon import exceptions
from horizon import forms
from horizon import messages
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
def create_upload_form_attributes(prefix, input_type, name):
"""Creates attribute dicts for the switchable upload form
:type prefix: str
:param prefix: prefix (environment, template) of field
:type input_type: str
:param input_type: field type (file, raw, url)
:type name: str
:param name: translated text label to display to user
:rtype: dict
:return: an attribute set to pass to form build
"""
attributes = {'class': 'switched', 'data-switch-on': prefix + 'source'}
attributes['data-' + prefix + 'source-' + input_type] = name
return attributes
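# Illustrative example (not part of the module): for the 'template' prefix and
# 'file' input type, the function above returns (the name is a lazy
# translation proxy that renders as its text):
#   {'class': 'switched',
#    'data-switch-on': 'templatesource',
#    'data-templatesource-file': 'Template File'}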
class TemplateForm(forms.SelfHandlingForm):
class Meta:
name = _('Select Template')
help_text = _('Select a template to launch a stack.')
# TODO(jomara) - update URL choice for template & environment files
# w/ client side download when applicable
base_choices = [('file', _('File')),
('raw', _('Direct Input'))]
url_choice = [('url', _('URL'))]
attributes = {'class': 'switchable', 'data-slug': 'templatesource'}
template_source = forms.ChoiceField(label=_('Template Source'),
choices=base_choices + url_choice,
widget=forms.Select(attrs=attributes))
attributes = create_upload_form_attributes(
'template',
'file',
_('Template File'))
template_upload = forms.FileField(
label=_('Template File'),
help_text=_('A local template to upload.'),
widget=forms.FileInput(attrs=attributes),
required=False)
attributes = create_upload_form_attributes(
'template',
'url',
_('Template URL'))
template_url = forms.URLField(
label=_('Template URL'),
help_text=_('An external (HTTP) URL to load the template from.'),
widget=forms.TextInput(attrs=attributes),
required=False)
attributes = create_upload_form_attributes(
'template',
'raw',
_('Template Data'))
template_data = forms.CharField(
label=_('Template Data'),
help_text=_('The raw contents of the template.'),
widget=forms.widgets.Textarea(attrs=attributes),
required=False)
attributes = {'data-slug': 'envsource', 'class': 'switchable'}
environment_source = forms.ChoiceField(
label=_('Environment Source'),
choices=base_choices,
widget=forms.Select(attrs=attributes),
required=False)
attributes = create_upload_form_attributes(
'env',
'file',
_('Environment File'))
environment_upload = forms.FileField(
label=_('Environment File'),
help_text=_('A local environment to upload.'),
widget=forms.FileInput(attrs=attributes),
required=False)
attributes = create_upload_form_attributes(
'env',
'raw',
_('Environment Data'))
environment_data = forms.CharField(
label=_('Environment Data'),
help_text=_('The raw contents of the environment file.'),
widget=forms.widgets.Textarea(attrs=attributes),
required=False)
def __init__(self, *args, **kwargs):
self.next_view = kwargs.pop('next_view')
super(TemplateForm, self).__init__(*args, **kwargs)
def clean(self):
cleaned = super(TemplateForm, self).clean()
files = self.request.FILES
self.clean_uploaded_files('template', _('template'), cleaned, files)
self.clean_uploaded_files('environment',
_('environment'),
cleaned,
files)
# Validate the template and get back the params.
kwargs = {}
if cleaned['template_data']:
kwargs['template'] = cleaned['template_data']
else:
kwargs['template_url'] = cleaned['template_url']
if cleaned['environment_data']:
kwargs['environment'] = cleaned['environment_data']
try:
validated = api.heat.template_validate(self.request, **kwargs)
cleaned['template_validate'] = validated
except Exception as e:
raise forms.ValidationError(unicode(e))
return cleaned
def clean_uploaded_files(self, prefix, field_label, cleaned, files):
"""Cleans Template & Environment data from form upload.
Does some of the crunchy bits for processing uploads vs raw
data depending on what the user specified. Identical process
for environment data & template data.
:type prefix: str
:param prefix: prefix (environment, template) of field
:type field_label: str
:param field_label: translated prefix str for messages
        :type cleaned: dict
        :param cleaned: existing cleaned fields from form
        :type files: dict
        :param files: files uploaded via the form (request.FILES)
:rtype: dict
:return: cleaned dict including environment & template data
"""
upload_str = prefix + "_upload"
data_str = prefix + "_data"
url = cleaned.get(prefix + '_url')
data = cleaned.get(prefix + '_data')
has_upload = upload_str in files
# Uploaded file handler
if has_upload and not url:
log_template_name = files[upload_str].name
LOG.info('got upload %s' % log_template_name)
tpl = files[upload_str].read()
if tpl.startswith('{'):
try:
json.loads(tpl)
except Exception as e:
msg = _('There was a problem parsing the'
' %(prefix)s: %(error)s')
msg = msg % {'prefix': prefix, 'error': e}
raise forms.ValidationError(msg)
cleaned[data_str] = tpl
# URL handler
elif url and (has_upload or data):
msg = _('Please specify a %s using only one source method.')
msg = msg % field_label
raise forms.ValidationError(msg)
elif prefix == 'template':
# Check for raw template input - blank environment allowed
if not url and not data:
msg = _('You must specify a template via one of the '
'available sources.')
raise forms.ValidationError(msg)
def create_kwargs(self, data):
kwargs = {'parameters': data['template_validate'],
'environment_data': data['environment_data'],
'template_data': data['template_data'],
'template_url': data['template_url']}
if data.get('stack_id'):
kwargs['stack_id'] = data['stack_id']
return kwargs
def handle(self, request, data):
kwargs = self.create_kwargs(data)
# NOTE (gabriel): This is a bit of a hack, essentially rewriting this
# request so that we can chain it as an input to the next view...
# but hey, it totally works.
request.method = 'GET'
return self.next_view.as_view()(request, **kwargs)
class ChangeTemplateForm(TemplateForm):
class Meta:
name = _('Edit Template')
help_text = _('Select a new template to re-launch a stack.')
stack_id = forms.CharField(
label=_('Stack ID'),
widget=forms.widgets.HiddenInput)
stack_name = forms.CharField(
label=_('Stack Name'),
widget=forms.TextInput(attrs={'readonly': 'readonly'}))
class CreateStackForm(forms.SelfHandlingForm):
param_prefix = '__param_'
class Meta:
name = _('Create Stack')
template_data = forms.CharField(
widget=forms.widgets.HiddenInput,
required=False)
template_url = forms.CharField(
widget=forms.widgets.HiddenInput,
required=False)
environment_data = forms.CharField(
widget=forms.widgets.HiddenInput,
required=False)
parameters = forms.CharField(
widget=forms.widgets.HiddenInput)
stack_name = forms.RegexField(
max_length=255,
label=_('Stack Name'),
help_text=_('Name of the stack to create.'),
regex=r"^[a-zA-Z][a-zA-Z0-9_.-]*$",
error_messages={'invalid':
_('Name must start with a letter and may '
'only contain letters, numbers, underscores, '
'periods and hyphens.')})
timeout_mins = forms.IntegerField(
initial=60,
label=_('Creation Timeout (minutes)'),
help_text=_('Stack creation timeout in minutes.'))
enable_rollback = forms.BooleanField(
label=_('Rollback On Failure'),
help_text=_('Enable rollback on create/update failure.'),
required=False)
def __init__(self, *args, **kwargs):
parameters = kwargs.pop('parameters')
# special case: load template data from API, not passed in params
        if kwargs.get('validate_me'):
parameters = kwargs.pop('validate_me')
super(CreateStackForm, self).__init__(*args, **kwargs)
self._build_parameter_fields(parameters)
def _build_parameter_fields(self, template_validate):
self.fields['password'] = forms.CharField(
label=_('Password for user "%s"') % self.request.user.username,
help_text=_('This is required for operations to be performed '
'throughout the lifecycle of the stack'),
widget=forms.PasswordInput())
self.help_text = template_validate['Description']
params = template_validate.get('Parameters', {})
if template_validate.get('ParameterGroups'):
params_in_order = []
for group in template_validate['ParameterGroups']:
for param in group.get('parameters', []):
if param in params:
params_in_order.append((param, params[param]))
else:
# no parameter groups, so no way to determine order
params_in_order = params.items()
for param_key, param in params_in_order:
field = None
field_key = self.param_prefix + param_key
field_args = {
'initial': param.get('Default', None),
'label': param.get('Label', param_key),
'help_text': param.get('Description', ''),
'required': param.get('Default', None) is None
}
param_type = param.get('Type', None)
hidden = strutils.bool_from_string(param.get('NoEcho', 'false'))
if 'AllowedValues' in param:
choices = map(lambda x: (x, x), param['AllowedValues'])
field_args['choices'] = choices
field = forms.ChoiceField(**field_args)
elif param_type in ('CommaDelimitedList', 'String', 'Json'):
if 'MinLength' in param:
field_args['min_length'] = int(param['MinLength'])
field_args['required'] = param.get('MinLength', 0) > 0
if 'MaxLength' in param:
field_args['max_length'] = int(param['MaxLength'])
if hidden:
field_args['widget'] = forms.PasswordInput()
field = forms.CharField(**field_args)
elif param_type == 'Number':
if 'MinValue' in param:
field_args['min_value'] = int(param['MinValue'])
if 'MaxValue' in param:
field_args['max_value'] = int(param['MaxValue'])
field = forms.IntegerField(**field_args)
# heat-api currently returns the boolean type in lowercase
# (see https://bugs.launchpad.net/heat/+bug/1361448)
# so for better compatibility both are checked here
elif param_type in ('Boolean', 'boolean'):
field = forms.BooleanField(**field_args)
if field:
self.fields[field_key] = field
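    # Illustrative mapping for _build_parameter_fields above (assumed template
    # snippet, not from the source): a validated parameter such as
    #   {'InstanceType': {'Type': 'String', 'Default': 'm1.small',
    #                     'AllowedValues': ['m1.small', 'm1.large']}}
    # becomes a ChoiceField stored under '__param_InstanceType' with initial
    # value 'm1.small' and required=False (since a Default is present).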
@sensitive_variables('password')
def handle(self, request, data):
prefix_length = len(self.param_prefix)
params_list = [(k[prefix_length:], v) for (k, v) in six.iteritems(data)
if k.startswith(self.param_prefix)]
fields = {
'stack_name': data.get('stack_name'),
'timeout_mins': data.get('timeout_mins'),
            'disable_rollback': not data.get('enable_rollback'),
'parameters': dict(params_list),
'password': data.get('password')
}
if data.get('template_data'):
fields['template'] = data.get('template_data')
else:
fields['template_url'] = data.get('template_url')
if data.get('environment_data'):
fields['environment'] = data.get('environment_data')
try:
api.heat.stack_create(self.request, **fields)
messages.success(request, _("Stack creation started."))
return True
except Exception:
exceptions.handle(request)
class EditStackForm(CreateStackForm):
class Meta:
name = _('Update Stack Parameters')
stack_id = forms.CharField(
label=_('Stack ID'),
widget=forms.widgets.HiddenInput)
stack_name = forms.CharField(
label=_('Stack Name'),
widget=forms.TextInput(attrs={'readonly': 'readonly'}))
@sensitive_variables('password')
def handle(self, request, data):
prefix_length = len(self.param_prefix)
params_list = [(k[prefix_length:], v) for (k, v) in six.iteritems(data)
if k.startswith(self.param_prefix)]
stack_id = data.get('stack_id')
fields = {
'stack_name': data.get('stack_name'),
'timeout_mins': data.get('timeout_mins'),
            'disable_rollback': not data.get('enable_rollback'),
'parameters': dict(params_list),
'password': data.get('password')
}
# if the user went directly to this form, resubmit the existing
# template data. otherwise, submit what they had from the first form
if data.get('template_data'):
fields['template'] = data.get('template_data')
elif data.get('template_url'):
fields['template_url'] = data.get('template_url')
elif data.get('parameters'):
fields['template'] = data.get('parameters')
try:
api.heat.stack_update(self.request, stack_id=stack_id, **fields)
messages.success(request, _("Stack update started."))
return True
except Exception:
exceptions.handle(request)
|
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core.client_options import ClientOptions
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.api_core import operation # type: ignore
from google.api_core import operation_async # type: ignore
from google.cloud.shell_v1.types import cloudshell
from .transports.base import CloudShellServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import CloudShellServiceGrpcAsyncIOTransport
from .client import CloudShellServiceClient
class CloudShellServiceAsyncClient:
"""API for interacting with Google Cloud Shell. Each user of
Cloud Shell has at least one environment, which has the ID
"default". Environment consists of a Docker image defining what
is installed on the environment and a home directory containing
the user's data that will remain across sessions. Clients use
this API to start and fetch information about their environment,
which can then be used to connect to that environment via a
separate SSH client.
"""
_client: CloudShellServiceClient
DEFAULT_ENDPOINT = CloudShellServiceClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = CloudShellServiceClient.DEFAULT_MTLS_ENDPOINT
environment_path = staticmethod(CloudShellServiceClient.environment_path)
parse_environment_path = staticmethod(
CloudShellServiceClient.parse_environment_path
)
common_billing_account_path = staticmethod(
CloudShellServiceClient.common_billing_account_path
)
parse_common_billing_account_path = staticmethod(
CloudShellServiceClient.parse_common_billing_account_path
)
common_folder_path = staticmethod(CloudShellServiceClient.common_folder_path)
parse_common_folder_path = staticmethod(
CloudShellServiceClient.parse_common_folder_path
)
common_organization_path = staticmethod(
CloudShellServiceClient.common_organization_path
)
parse_common_organization_path = staticmethod(
CloudShellServiceClient.parse_common_organization_path
)
common_project_path = staticmethod(CloudShellServiceClient.common_project_path)
parse_common_project_path = staticmethod(
CloudShellServiceClient.parse_common_project_path
)
common_location_path = staticmethod(CloudShellServiceClient.common_location_path)
parse_common_location_path = staticmethod(
CloudShellServiceClient.parse_common_location_path
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
CloudShellServiceAsyncClient: The constructed client.
"""
return CloudShellServiceClient.from_service_account_info.__func__(CloudShellServiceAsyncClient, info, *args, **kwargs) # type: ignore
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
CloudShellServiceAsyncClient: The constructed client.
"""
return CloudShellServiceClient.from_service_account_file.__func__(CloudShellServiceAsyncClient, filename, *args, **kwargs) # type: ignore
from_service_account_json = from_service_account_file
@classmethod
def get_mtls_endpoint_and_cert_source(
cls, client_options: Optional[ClientOptions] = None
):
"""Return the API endpoint and client cert source for mutual TLS.
The client cert source is determined in the following order:
(1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
client cert source is None.
(2) if `client_options.client_cert_source` is provided, use the provided one; if the
default client cert source exists, use the default one; otherwise the client cert
source is None.
The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
        use the default API endpoint.
More details can be found at https://google.aip.dev/auth/4114.
Args:
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. Only the `api_endpoint` and `client_cert_source` properties may be used
in this method.
Returns:
Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
client cert source to use.
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
return CloudShellServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
@property
def transport(self) -> CloudShellServiceTransport:
"""Returns the transport used by the client instance.
Returns:
CloudShellServiceTransport: The transport used by the client instance.
"""
return self._client.transport
get_transport_class = functools.partial(
type(CloudShellServiceClient).get_transport_class, type(CloudShellServiceClient)
)
def __init__(
self,
*,
credentials: ga_credentials.Credentials = None,
transport: Union[str, CloudShellServiceTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the cloud shell service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.CloudShellServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._client = CloudShellServiceClient(
credentials=credentials,
transport=transport,
client_options=client_options,
client_info=client_info,
)
async def get_environment(
self,
request: Union[cloudshell.GetEnvironmentRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> cloudshell.Environment:
r"""Gets an environment. Returns NOT_FOUND if the environment does
not exist.
.. code-block:: python
from google.cloud import shell_v1
def sample_get_environment():
# Create a client
client = shell_v1.CloudShellServiceClient()
# Initialize request argument(s)
request = shell_v1.GetEnvironmentRequest(
name="name_value",
)
# Make the request
response = client.get_environment(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.shell_v1.types.GetEnvironmentRequest, dict]):
The request object. Request message for
[GetEnvironment][google.cloud.shell.v1.CloudShellService.GetEnvironment].
name (:class:`str`):
Required. Name of the requested resource, for example
``users/me/environments/default`` or
``users/someone@example.com/environments/default``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.shell_v1.types.Environment:
A Cloud Shell environment, which is
defined as the combination of a Docker
image specifying what is installed on
the environment and a home directory
containing the user's data that will
remain across sessions. Each user has at
least an environment with the ID
"default".
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = cloudshell.GetEnvironmentRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_environment,
default_retry=retries.Retry(
initial=1.0,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.ServiceUnavailable, core_exceptions.Unknown,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def start_environment(
self,
request: Union[cloudshell.StartEnvironmentRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Starts an existing environment, allowing clients to
connect to it. The returned operation will contain an
instance of StartEnvironmentMetadata in its metadata
field. Users can wait for the environment to start by
polling this operation via GetOperation. Once the
environment has finished starting and is ready to accept
connections, the operation will contain a
StartEnvironmentResponse in its response field.
.. code-block:: python
from google.cloud import shell_v1
def sample_start_environment():
# Create a client
client = shell_v1.CloudShellServiceClient()
# Initialize request argument(s)
request = shell_v1.StartEnvironmentRequest(
)
# Make the request
operation = client.start_environment(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.shell_v1.types.StartEnvironmentRequest, dict]):
The request object. Request message for
[StartEnvironment][google.cloud.shell.v1.CloudShellService.StartEnvironment].
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.shell_v1.types.StartEnvironmentResponse` Message included in the response field of operations returned from
[StartEnvironment][google.cloud.shell.v1.CloudShellService.StartEnvironment]
once the operation is complete.
"""
# Create or coerce a protobuf request object.
request = cloudshell.StartEnvironmentRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.start_environment,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
cloudshell.StartEnvironmentResponse,
metadata_type=cloudshell.StartEnvironmentMetadata,
)
# Done; return the response.
return response
async def authorize_environment(
self,
request: Union[cloudshell.AuthorizeEnvironmentRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Sends OAuth credentials to a running environment on
behalf of a user. When this completes, the environment
will be authorized to run various Google Cloud command
line tools without requiring the user to manually
authenticate.
.. code-block:: python
from google.cloud import shell_v1
def sample_authorize_environment():
# Create a client
client = shell_v1.CloudShellServiceClient()
# Initialize request argument(s)
request = shell_v1.AuthorizeEnvironmentRequest(
)
# Make the request
operation = client.authorize_environment(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.shell_v1.types.AuthorizeEnvironmentRequest, dict]):
The request object. Request message for
[AuthorizeEnvironment][google.cloud.shell.v1.CloudShellService.AuthorizeEnvironment].
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.shell_v1.types.AuthorizeEnvironmentResponse` Response message for
[AuthorizeEnvironment][google.cloud.shell.v1.CloudShellService.AuthorizeEnvironment].
"""
# Create or coerce a protobuf request object.
request = cloudshell.AuthorizeEnvironmentRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.authorize_environment,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
cloudshell.AuthorizeEnvironmentResponse,
metadata_type=cloudshell.AuthorizeEnvironmentMetadata,
)
# Done; return the response.
return response
async def add_public_key(
self,
request: Union[cloudshell.AddPublicKeyRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Adds a public SSH key to an environment, allowing clients with
the corresponding private key to connect to that environment via
SSH. If a key with the same content already exists, this will
error with ALREADY_EXISTS.
.. code-block:: python
from google.cloud import shell_v1
def sample_add_public_key():
# Create a client
client = shell_v1.CloudShellServiceClient()
# Initialize request argument(s)
request = shell_v1.AddPublicKeyRequest(
)
# Make the request
operation = client.add_public_key(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.shell_v1.types.AddPublicKeyRequest, dict]):
The request object. Request message for
[AddPublicKey][google.cloud.shell.v1.CloudShellService.AddPublicKey].
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.shell_v1.types.AddPublicKeyResponse` Response message for
[AddPublicKey][google.cloud.shell.v1.CloudShellService.AddPublicKey].
"""
# Create or coerce a protobuf request object.
request = cloudshell.AddPublicKeyRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.add_public_key,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("environment", request.environment),)
),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
cloudshell.AddPublicKeyResponse,
metadata_type=cloudshell.AddPublicKeyMetadata,
)
# Done; return the response.
return response
async def remove_public_key(
self,
request: Union[cloudshell.RemovePublicKeyRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Removes a public SSH key from an environment. Clients will no
longer be able to connect to the environment using the
corresponding private key. If a key with the same content is not
present, this will error with NOT_FOUND.
.. code-block:: python
from google.cloud import shell_v1
def sample_remove_public_key():
# Create a client
client = shell_v1.CloudShellServiceClient()
# Initialize request argument(s)
request = shell_v1.RemovePublicKeyRequest(
)
# Make the request
operation = client.remove_public_key(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.shell_v1.types.RemovePublicKeyRequest, dict]):
The request object. Request message for
[RemovePublicKey][google.cloud.shell.v1.CloudShellService.RemovePublicKey].
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.shell_v1.types.RemovePublicKeyResponse` Response message for
[RemovePublicKey][google.cloud.shell.v1.CloudShellService.RemovePublicKey].
"""
# Create or coerce a protobuf request object.
request = cloudshell.RemovePublicKeyRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.remove_public_key,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("environment", request.environment),)
),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
cloudshell.RemovePublicKeyResponse,
metadata_type=cloudshell.RemovePublicKeyMetadata,
)
# Done; return the response.
return response
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc, tb):
await self.transport.close()
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution("google-cloud-shell",).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("CloudShellServiceAsyncClient",)
|
|
from pykechain.utils import __dict__inherited__
class Enum:
"""Custom enumeration class to support class attributes as options.
Example
-------
>>> class Toppings(Enum):
... CHEESE = "Cheese"
... SALAMI = "Salami"
>>> topping_choice = Toppings.CHEESE
"""
@classmethod
def options(cls):
"""Provide a sorted list of options."""
return sorted(
(value, name) for (name, value) in __dict__inherited__(cls=cls, stop=Enum).items()
)
@classmethod
def values(cls):
"""Provide a (sorted) list of values."""
return [value for (value, name) in cls.options()]
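# Illustrative behaviour of the helpers above, using the Toppings example from
# the class docstring:
#   Toppings.options() -> [("Cheese", "CHEESE"), ("Salami", "SALAMI")]
#   Toppings.values()  -> ["Cheese", "Salami"]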
class Multiplicity(Enum):
"""The various multiplicities that are accepted by KE-chain.
For more information on the representation in KE-chain, please consult the KE-chain `Part documentation`_.
:cvar ZERO_ONE: Multiplicity 0 to 1
:cvar ONE: Multiplicity 1
:cvar ZERO_MANY: Multiplicity 0 to infinity
:cvar ONE_MANY: Multiplicity 1 to infinity
"""
ZERO_ONE = "ZERO_ONE"
ONE = "ONE"
ZERO_MANY = "ZERO_MANY"
ONE_MANY = "ONE_MANY"
# M_N = "M_N" # not implemented
class Category(Enum):
"""The various categories of Parts that are accepted by KE-chain.
For more information on the representation in KE-chain, please consult the KE-chain `Part documentation`_.
:cvar INSTANCE: Category of Instance
:cvar MODEL: Category of Model
"""
INSTANCE = "INSTANCE"
MODEL = "MODEL"
class Classification(Enum):
"""The various classification of Parts that are accepted by KE-chain.
For more information on the representation in KE-chain, please consult the KE-chain `Part documentation`_.
:cvar PRODUCT: Classification of the part object is Product
:cvar CATALOG: Classification of the part object is a CATALOG
.. _Part documentation: https://support.ke-chain.com/confluence/dosearchsite.action?queryString=concept+part
"""
PRODUCT = "PRODUCT"
CATALOG = "CATALOG"
class PropertyType(Enum):
"""The various property types that are accepted by KE-chain.
For more information on the representation in KE-chain, please consult the KE-chain `Property documentation`_.
:cvar CHAR_VALUE: a charfield property (single line text)
:cvar TEXT_VALUE: text property (long text, may span multiple lines)
:cvar BOOLEAN_VALUE: a boolean value property (True/False)
:cvar INT_VALUE: integer property (whole number)
:cvar FLOAT_VALUE: floating point number property (with digits)
    :cvar DATETIME_VALUE: a datetime value property
    :cvar DATE_VALUE: a date value property
    :cvar TIME_VALUE: a time value property
:cvar ATTACHMENT_VALUE: an attachment property
:cvar LINK_VALUE: url property
:cvar REFERENCE_VALUE: a reference property, a UUID value referring to other part model
.. versionadded:: 1.14
    :cvar SINGLE_SELECT_VALUE: single select list property (choose from a list)
    :cvar MULTI_SELECT_VALUE: multi select list property (choose multiple from a list)
:cvar REFERENCES_VALUE: a multi reference property, a list of UUID values referring to other part models
.. versionadded:: 3.6
:cvar ACTIVITY_REFERENCES_VALUE: Activity References Property
:cvar SCOPE_REFERENCES_VALUE: Scope References Property
    :cvar SERVICE_REFERENCES_VALUE: Service References Property
:cvar TEAM_REFERENCES_VALUE: Team References Property
:cvar USER_REFERENCES_VALUE: User References Property
:cvar JSON_VALUE: Generic JSON storage Property
:cvar GEOJSON_VALUE: GEOJSON property to store map data
:cvar WEATHER_VALUE: Weather JSON property compatible with the response of weatherapi.com
.. _Property documentation: https://support.ke-chain.com/confluence/dosearchsite.action?queryString=concept+property
"""
CHAR_VALUE = "CHAR_VALUE"
TEXT_VALUE = "TEXT_VALUE"
BOOLEAN_VALUE = "BOOLEAN_VALUE"
INT_VALUE = "INT_VALUE"
FLOAT_VALUE = "FLOAT_VALUE"
DATETIME_VALUE = "DATETIME_VALUE"
DATE_VALUE = "DATE_VALUE"
TIME_VALUE = "TIME_VALUE"
ATTACHMENT_VALUE = "ATTACHMENT_VALUE"
LINK_VALUE = "LINK_VALUE"
SINGLE_SELECT_VALUE = "SINGLE_SELECT_VALUE"
MULTI_SELECT_VALUE = "MULTI_SELECT_VALUE"
REFERENCE_VALUE = "REFERENCE_VALUE"
REFERENCES_VALUE = "REFERENCES_VALUE"
ACTIVITY_REFERENCES_VALUE = "ACTIVITY_REFERENCES_VALUE"
SCOPE_REFERENCES_VALUE = "SCOPE_REFERENCES_VALUE"
SERVICE_REFERENCES_VALUE = "SERVICE_REFERENCES_VALUE"
TEAM_REFERENCES_VALUE = "TEAM_REFERENCES_VALUE"
USER_REFERENCES_VALUE = "USER_REFERENCES_VALUE"
JSON_VALUE = "JSON_VALUE"
GEOJSON_VALUE = "GEOJSON_VALUE"
WEATHER_VALUE = "WEATHER_VALUE"
class ActivityType(Enum):
"""The various Activity types that are accepted by KE-chain.
.. versionadded:: 2.0
:cvar TASK: a normal task
:cvar PROCESS: a subprocess (container) containing other tasks
"""
PROCESS = "PROCESS"
TASK = "TASK"
class ActivityClassification(Enum):
"""The classification of Activities that are accepted by KE-chain.
.. versionadded:: 2.0
.. versionchanged:: 3.2
Add 'APP' environment for KE-chain versions > 3.1
.. versionchanged:: 3.14
    Add 'FORM' environment for KE-chain versions > v2021.10
:cvar WORKFLOW: Classification of the activity is WORKFLOW
:cvar CATALOG: Classification of the activity is CATALOG
:cvar APP: Classification of the activity is APP
:cvar FORM: Classification of the activity is FORM
"""
WORKFLOW = "WORKFLOW"
CATALOG = "CATALOG"
APP = "APP"
FORM = "FORM"
class ActivityRootNames(Enum):
"""The classification of Activities that are accepted by KE-chain.
.. versionadded:: 2.0
.. versionchanged:: 3.2
Add 'APP' environment for KE-chain versions > 3.1
.. versionchanged:: 3.14
Add 'FORM' environment for KE-chain versions >= v2021.10
:cvar WORKFLOW_ROOT: Root of the activity is WORKFLOW_ROOT
:cvar CATALOG_ROOT: Root of the activity is CATALOG_ROOT (below are CATALOG tasks)
    :cvar APP_ROOT: Root of the activity is APP_ROOT (below are APP 'tasks', i.e. 'screens')
    :cvar FORM_ROOT: Root of the activity is FORM_ROOT (below are FORM activities)
"""
WORKFLOW_ROOT = "WORKFLOW_ROOT"
CATALOG_ROOT = "CATALOG_ROOT"
APP_ROOT = "APP_ROOT"
FORM_ROOT = "FORM_ROOT"
activity_root_name_by_classification = {
ActivityClassification.WORKFLOW: ActivityRootNames.WORKFLOW_ROOT,
ActivityClassification.CATALOG: ActivityRootNames.CATALOG_ROOT,
ActivityClassification.APP: ActivityRootNames.APP_ROOT,
ActivityClassification.FORM: ActivityRootNames.FORM_ROOT,
}
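# Example lookup (illustrative): resolve the root activity name for a given
# classification via the mapping above.
#   activity_root_name_by_classification[ActivityClassification.WORKFLOW]
#   # -> ActivityRootNames.WORKFLOW_ROOT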
class WidgetNames(Enum):
"""The various Names of the Widget that can be configured.
.. versionchanged:: 3.14
Added FORMMETAPANEL for KE-chain versions >= v2021.10
:cvar SUPERGRIDWIDGET: superGridWidget
:cvar PROPERTYGRIDWIDGET: propertyGridWidget
:cvar HTMLWIDGET: htmlWidget
:cvar FILTEREDGRIDWIDGET: filteredGridWidget
:cvar SERVICEWIDGET: serviceWidget
:cvar NOTEBOOKWIDGET: notebookWidget
:cvar ATTACHMENTVIEWERWIDGET: attachmentViewerWidget
:cvar TASKNAVIGATIONBARWIDGET: taskNavigationBarWidget
:cvar JSONWIDGET: jsonWidget
# KE-chain 3 only
:cvar SIGNATUREWIDGET: signatureWidget
:cvar CARDWIDGET: cardWidget
:cvar METAPANELWIDGET: metaPanelWidget
:cvar FORMMETAPANEL: formMetaPanelWidget
:cvar MULTICOLUMNWIDGET: multiColumnWidget
:cvar PROGRESSWIDGET: progressWidget
:cvar TASKSWIDGET: tasksWidget
:cvar SERVICECARDWIDGET: serviceCardWidget
    :cvar DASHBOARDWIDGET: dashboardWidget
    :cvar SCOPEMEMBERS: scopeMembersWidget
"""
SUPERGRIDWIDGET = "superGridWidget"
PROPERTYGRIDWIDGET = "propertyGridWidget"
HTMLWIDGET = "htmlWidget"
FILTEREDGRIDWIDGET = "filteredGridWidget"
SERVICEWIDGET = "serviceWidget"
NOTEBOOKWIDGET = "notebookWidget"
ATTACHMENTVIEWERWIDGET = "attachmentViewerWidget"
TASKNAVIGATIONBARWIDGET = "taskNavigationBarWidget"
JSONWIDGET = "jsonWidget"
METAPANELWIDGET = "metaPanelWidget"
FORMMETAPANELWIDGET = "formMetaPanelWidget"
MULTICOLUMNWIDGET = "multiColumnWidget"
SIGNATUREWIDGET = "signatureWidget"
CARDWIDGET = "cardWidget"
PROGRESSWIDGET = "progressWidget"
TASKSWIDGET = "taskWidget"
SERVICECARDWIDGET = "serviceCardWidget"
DASHBOARDWIDGET = "dashboardWidget"
SCOPEMEMBERS = "scopeMembersWidget"
class WidgetTypes(Enum):
"""The various widget types for the widget definitions available to the widget api.
.. versionchanged:: 3.14
Added FORMMETAPANEL for KE-chain versions >= v2021.10
:cvar UNDEFINED: Undefined Widget
:cvar PROPERTYGRID: Propertygrid widget
:cvar SUPERGRID: Supergrid widget
:cvar HTML: Html widget
:cvar FILTEREDGRID: Filteredgrid widget
:cvar SERVICE: Service widget
:cvar NOTEBOOK: Notebook widget
:cvar ATTACHMENTVIEWER: Attachmentviewer widget
:cvar TASKNAVIGATIONBAR: Tasknavigationbar widget
:cvar JSON: Json widget
:cvar METAPANEL: Metapanel widget
:cvar FORMMETAPANEL: The FormMetapanel widget
:cvar MULTICOLUMN: Multicolumn widget
:cvar SCOPE: Scope widget
:cvar THIRDPARTY: Thirdparty widget
:cvar PROGRESS: Progress widget
:cvar SIGNATURE: Signature widget
:cvar CARD: Card widget
:cvar TASKS: Tasks widget
:cvar WEATHER: Weather widget
:cvar SERVICECARD: Servicecard widget
:cvar DASHBOARD: Dashboard widget
:cvar SCOPEMEMBERS: Scopemembers widget
"""
UNDEFINED = "UNDEFINED"
PROPERTYGRID = "PROPERTYGRID"
SUPERGRID = "SUPERGRID"
HTML = "HTML"
FILTEREDGRID = "FILTEREDGRID"
SERVICE = "SERVICE"
NOTEBOOK = "NOTEBOOK"
ATTACHMENTVIEWER = "ATTACHMENTVIEWER"
TASKNAVIGATIONBAR = "TASKNAVIGATIONBAR"
JSON = "JSON"
METAPANEL = "METAPANEL"
FORMMETAPANEL = "FORMMETAPANEL"
MULTICOLUMN = "MULTICOLUMN"
SCOPE = "SCOPE"
THIRDPARTY = "THIRDPARTY"
PROGRESS = "PROGRESS"
SIGNATURE = "SIGNATURE"
CARD = "CARD"
TASKS = "TASKS"
WEATHER = "WEATHER"
SERVICECARD = "SERVICECARD"
DASHBOARD = "DASHBOARD"
SCOPEMEMBERS = "SCOPEMEMBERS"
WidgetCompatibleTypes = {
WidgetNames.SUPERGRIDWIDGET: WidgetTypes.SUPERGRID,
WidgetNames.PROPERTYGRIDWIDGET: WidgetTypes.PROPERTYGRID,
WidgetNames.HTMLWIDGET: WidgetTypes.HTML,
WidgetNames.FILTEREDGRIDWIDGET: WidgetTypes.FILTEREDGRID,
WidgetNames.SERVICEWIDGET: WidgetTypes.SERVICE,
WidgetNames.NOTEBOOKWIDGET: WidgetTypes.NOTEBOOK,
WidgetNames.ATTACHMENTVIEWERWIDGET: WidgetTypes.ATTACHMENTVIEWER,
WidgetNames.TASKNAVIGATIONBARWIDGET: WidgetTypes.TASKNAVIGATIONBAR,
WidgetNames.JSONWIDGET: WidgetTypes.JSON,
WidgetNames.METAPANELWIDGET: WidgetTypes.METAPANEL,
WidgetNames.FORMMETAPANELWIDGET: WidgetTypes.FORMMETAPANEL,
WidgetNames.MULTICOLUMNWIDGET: WidgetTypes.MULTICOLUMN,
WidgetNames.PROGRESSWIDGET: WidgetTypes.PROGRESS,
WidgetNames.SIGNATUREWIDGET: WidgetTypes.SIGNATURE,
WidgetNames.CARDWIDGET: WidgetTypes.CARD,
WidgetNames.TASKSWIDGET: WidgetTypes.TASKS,
WidgetNames.SERVICECARDWIDGET: WidgetTypes.SERVICECARD,
WidgetNames.DASHBOARDWIDGET: WidgetTypes.DASHBOARD,
WidgetNames.SCOPEMEMBERS: WidgetTypes.SCOPEMEMBERS,
}
default_metapanel_widget = dict(
name=WidgetNames.METAPANELWIDGET,
config=dict(),
meta=dict(
showAll=True,
),
)
class ActivityStatus(Enum):
"""The various Activity statuses that are accepted by KE-chain.
:cvar OPEN: status of activity is open
:cvar COMPLETED: status of activity is completed
"""
OPEN = "OPEN"
COMPLETED = "COMPLETED"
class ScopeStatus(Enum):
"""The various status of a scope.
.. versionchanged:: 3.0
The `TEMPLATE` ScopeStatus is deprecated in KE-chain 3
:cvar ACTIVE: Status of a scope is active (default)
:cvar CLOSED: Status of a scope is closed
:cvar TEMPLATE: Status of a scope is a template (not actively used)(deprecated in KE-chain 3.0)
:cvar DELETING: Status of a scope when the scope is being deleted
"""
ACTIVE = "ACTIVE"
CLOSED = "CLOSED"
TEMPLATE = "TEMPLATE"
DELETING = "DELETING"
class ScopeCategory(Enum):
"""The various categories of a scope.
.. versionadded:: 3.0
:cvar LIBRARY_SCOPE: The scope is a library scope
:cvar USER_SCOPE: The scope is a normal user scope
:cvar TEMPLATE_SCOPE: The scope is a template scope
"""
LIBRARY_SCOPE = "LIBRARY_SCOPE"
USER_SCOPE = "USER_SCOPE"
TEMPLATE_SCOPE = "TEMPLATE_SCOPE"
class ServiceType(Enum):
"""The file types of sim script.
:cvar PYTHON_SCRIPT: service is a python script
:cvar NOTEBOOK: service is a jupyter notebook
"""
PYTHON_SCRIPT = "PYTHON SCRIPT"
NOTEBOOK = "NOTEBOOK"
class ServiceEnvironmentVersion(Enum):
"""The acceptable versions of python where services run on.
:cvar PYTHON_3_6: Service execution environment is a python 3.6 container
:cvar PYTHON_3_7: Service execution environment is a python 3.7 container
:cvar PYTHON_3_8: Service execution environment is a python 3.8 container
:cvar PYTHON_3_9: Service execution environment is a python 3.9 container
:cvar PYTHON_3_6_NOTEBOOKS: execution environment is a python 3.6 container with jupyter notebook preinstalled
:cvar PYTHON_3_8_NOTEBOOKS: execution environment is a python 3.8 container with jupyter notebook preinstalled
:cvar PYTHON_3_9_NOTEBOOKS: execution environment is a python 3.9 container with jupyter notebook preinstalled
"""
PYTHON_3_6 = "3.6"
PYTHON_3_7 = "3.7"
PYTHON_3_8 = "3.8"
PYTHON_3_9 = "3.9"
PYTHON_3_6_NOTEBOOKS = "3.6_notebook"
PYTHON_3_8_NOTEBOOKS = "3.8_notebook"
PYTHON_3_9_NOTEBOOKS = "3.9_notebook"
class ServiceScriptUser(Enum):
"""The acceptable usertypes under which a (trusted) service is run.
:cvar KENODE_USER: Run as "kenode" user. Equivalent to a manager in a scope.
:cvar TEAMMANAGER_USER: Run as "kenode_team". Equivalent to a manager in a team. (disabled until available)
:cvar CONFIGURATOR_USER: Run as "kenode_configurator". Equivalent to GG:Configurator.
"""
KENODE_USER = "kenode"
# TEAMMANAGER_USER = "kenode_team"
CONFIGURATOR_USER = "kenode_configurator"
class ServiceExecutionStatus(Enum):
"""The acceptable states of a running service.
:cvar LOADING: Execution is in LOADING state (next RUNNING, FAILED)
:cvar RUNNING: Execution is in RUNNING state (next COMPLETED, FAILED, TERMINATING)
:cvar COMPLETED: Execution is in COMPLETED state
:cvar FAILED: Execution is in FAILED state
:cvar TERMINATING: Execution is in TERMINATING state (next TERMINATED)
:cvar TERMINATED: Execution is in TERMINATED state
"""
LOADING = "LOADING"
RUNNING = "RUNNING"
COMPLETED = "COMPLETED"
FAILED = "FAILED"
TERMINATING = "TERMINATING"
TERMINATED = "TERMINATED"
class TeamRoles(Enum):
"""Roles that exist for a team member.
:cvar MEMBER: A normal team member
:cvar MANAGER: A team member that may manage the team (add or remove members, change team)
:cvar OWNER: The owner of a team
"""
MEMBER = "MEMBER"
MANAGER = "MANAGER"
OWNER = "OWNER"
class ScopeRoles(Enum):
"""
Roles that exist for a member of a scope.
:cvar MANAGER: owner of the scope, has full rights
:cvar SUPERVISOR: supervisor member of a scope, has the same rights as a leadmember plus the rights to manage catalog tasks.
:cvar LEADMEMBER: elevated member, has assignment rights, but no rights on App tasks or Catalog tasks.
:cvar MEMBER: normal member, only has viewing rights
"""
MANAGER = "manager"
SUPERVISOR = "supervisor"
LEADMEMBER = "leadmember"
MEMBER = "member"
class ScopeMemberActions(Enum):
"""
Actions to be performed on the members of a scope.
:cvar ADD: add a member to the scope
:cvar REMOVE: delete a member from the scope
"""
ADD = "add"
REMOVE = "remove"
class ContextType(Enum):
"""Types of Contexts.
:cvar STATIC_LOCATION: Geolocation / Featurecollection context with a geolocation.
:cvar TIME_PERIOD: Time Period Context with start_date and due_date
:cvar TEXT_LABEL: generic textual label
"""
STATIC_LOCATION = "STATIC_LOCATION"
TIME_PERIOD = "TIME_PERIOD"
TEXT_LABEL = "TEXT_LABEL"
class ContextGroup(Enum):
"""
Context may have a context_group.
.. versionadded:: 3.11
This is for context API versions 1.2.0 or later.
:cvar UNDEFINED: UNDEFINED
:cvar DISCIPLINE: Discipline, in nl: Discipline
:cvar ASSET: Asset, in nl: Object, Kunstwerk
:cvar DEPARTMENT: Department, in nl: Onderdeel, Afdeling
:cvar PERIOD: Workperiod, in nl: Werkperiode
:cvar LOCATION: Location, in nl: Locatie
:cvar PHASE: Phase, in nl: Fase
:cvar REQUIREMENT: Requirement, in nl: Eis
:cvar EXTERNALID: External identifier, to be used to provide a generic link to an external application
:cvar WORKPACKAGE: Workpackage, in nl: Werkpakket
"""
UNDEFINED = "UNDEFINED"
DISCIPLINE = "DISCIPLINE" # nl: Discipline
ASSET = "ASSET" # nl: Object, Kunstwerk
DEPARTMENT = "DEPARTMENT" # nl: Onderdeel, Afdeling
PERIOD = "WORKPERIOD" # nl: Werkperiode
LOCATION = "LOCATION" # nl: Locatie
PHASE = "PHASE" # nl: Fase
REQUIREMENT = "REQUIREMENT" # nl: Eis
EXTERNALID = "EXTERNALID"
WORKPACKAGE = "WORKPACKAGE" # nl: Werkpakket
class KechainEnv(Enum):
"""Environment variables that can be set for pykechain.
:cvar KECHAIN_URL: full url of KE-chain where to connect to eg: 'https://<some>.ke-chain.com'
:cvar KECHAIN_TOKEN: authentication token for the KE-chain user provided from KE-chain user account control
:cvar KECHAIN_USERNAME: the username for the credentials
:cvar KECHAIN_PASSWORD: the password for the credentials
:cvar KECHAIN_SCOPE: the name of the project / scope. Should be unique, otherwise use scope_id
:cvar KECHAIN_SCOPE_ID: the UUID of the project / scope.
:cvar KECHAIN_FORCE_ENV_USE: set to 'true', '1', 'ok', or 'yes' to always use the environment variables.
:cvar KECHAIN_SCOPE_STATUS: the status of the Scope to retrieve, defaults to None to retrieve all scopes
:cvar KECHAIN_CHECK_CERTIFICATES: if the certificates of the URL should be checked.
"""
KECHAIN_FORCE_ENV_USE = "KECHAIN_FORCE_ENV_USE"
KECHAIN_URL = "KECHAIN_URL"
KECHAIN_TOKEN = "KECHAIN_TOKEN"
KECHAIN_USERNAME = "KECHAIN_USERNAME"
KECHAIN_PASSWORD = "KECHAIN_PASSWORD"
KECHAIN_SCOPE = "KECHAIN_SCOPE"
KECHAIN_SCOPE_ID = "KECHAIN_SCOPE_ID"
KECHAIN_SCOPE_STATUS = "KECHAIN_SCOPE_STATUS"
KECHAIN_CHECK_CERTIFICATES = "KECHAIN_CHECK_CERTIFICATES"
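# Minimal sketch, not part of the original module: how these variable names
# are typically consumed. The fallback URL is a placeholder; getattr() keeps
# this working whether members are plain strings or enum.Enum members.
def _example_read_kechain_env():
    import os
    def _val(member):
        return getattr(member, "value", member)
    url = os.environ.get(_val(KechainEnv.KECHAIN_URL), "https://example.ke-chain.com")
    token = os.environ.get(_val(KechainEnv.KECHAIN_TOKEN))
    force = os.environ.get(_val(KechainEnv.KECHAIN_FORCE_ENV_USE), "")
    use_env = force.lower() in ("true", "1", "ok", "yes")  # per the docstring above
    return url, token, use_env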
class SortTable(Enum):
"""The acceptable sorting options for a grid/table.
:cvar ASCENDING: Table is sorted in ASCENDING ORDER
:cvar DESCENDING: Table is sorted in DESCENDING ORDER
"""
ASCENDING = "ASC"
DESCENDING = "DESC"
class Alignment(Enum):
"""The acceptable alignment options for attachment viewer, navigation bar widgets and service widgets.
:cvar LEFT: Aligned to the left
:cvar CENTER: Aligned to the center
:cvar RIGHT: Aligned to the right
"""
LEFT = "left"
CENTER = "center"
RIGHT = "right"
class NavigationBarAlignment(Alignment):
"""The acceptable alignment options for a Navigation Bar Widget."""
pass
class PaperSize(Enum):
"""The acceptable paper sizes options for a downloaded PDF.
:cvar A0: Paper of size A0
:cvar A1: Paper of size A1
:cvar A2: Paper of size A2
:cvar A3: Paper of size A3
:cvar A4: Paper of size A4
:cvar AUTO: Paper size is determined automatically
"""
A0 = "a0paper"
A1 = "a1paper"
A2 = "a2paper"
A3 = "a3paper"
A4 = "a4paper"
AUTO = "automatic"
class PaperOrientation(Enum):
"""The acceptable paper orientation options for a downloaded PDF.
:cvar PORTRAIT: Paper of orientation 'portrait'
:cvar LANDSCAPE: Paper of orientation 'landscape'
"""
PORTRAIT = "portrait"
LANDSCAPE = "landscape"
class PropertyVTypes(Enum):
"""The VTypes (or validator types) that are allowed in the json.
This corresponds to the various validator classes which SHOULD be named:
`vtype[0].upper() + vtype[1:]`
e.g. 'numericRangeValidator' has an implementation class of 'NumericRangeValidator'
.. versionadded:: 2.2
:cvar NONEVALIDATOR: noneValidator - No validation is done
:cvar NUMERICRANGE: numericRangeValidator
:cvar BOOLEANFIELD: booleanFieldValidator
:cvar REQUIREDFIELD: requiredFieldValidator
:cvar EVENNUMBER: evenNumberValidator
:cvar ODDNUMBER: oddNumberValidator
:cvar REGEXSTRING: regexStringValidator
:cvar SINGLEREFERENCE: singleReferenceValidator
:cvar FILEEXTENSION: fileExtensionValidator
:cvar FILESIZE: fileSizeValidator
"""
NONEVALIDATOR = "noneValidator"
NUMERICRANGE = "numericRangeValidator"
BOOLEANFIELD = "booleanFieldValidator"
REQUIREDFIELD = "requiredFieldValidator"
EVENNUMBER = "evenNumberValidator"
ODDNUMBER = "oddNumberValidator"
REGEXSTRING = "regexStringValidator"
SINGLEREFERENCE = "singleReferenceValidator"
FILEEXTENSION = "fileExtensionValidator"
FILESIZE = "fileSizeValidator"
# fallback
ALWAYSALLOW = "alwaysAllowValidator"
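# Small sketch (not in the original module) of the naming convention stated
# in the docstring: a vtype maps to its implementation class name as
# `vtype[0].upper() + vtype[1:]`.
def _example_validator_class_name(vtype=PropertyVTypes.NUMERICRANGE):
    name = getattr(vtype, "value", vtype)  # plain string or enum.Enum member
    return name[0].upper() + name[1:]  # 'numericRangeValidator' -> 'NumericRangeValidator'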
class ValidatorEffectTypes(Enum):
"""The effects that can be attached to a validator.
.. versionadded:: 2.2
:cvar NONE_EFFECT: noneEffect
:cvar VISUALEFFECT: visualEffect
:cvar TEXT_EFFECT: textEffect
:cvar ERRORTEXT_EFFECT: errorTextEffect
:cvar HELPTEXT_EFFECT: helpTextEffect
"""
NONE_EFFECT = "noneEffect"
VISUALEFFECT = "visualEffect"
TEXT_EFFECT = "textEffect"
ERRORTEXT_EFFECT = "errorTextEffect"
HELPTEXT_EFFECT = "helpTextEffect"
class PropertyRepresentation(Enum):
"""
The Representation configuration to display a property value.
.. versionadded:: 3.0
.. versionchanged:: 3.11 added geocoordinate in line with KE-chain v2021.5.0
:cvar DECIMAL_PLACES: Amount of decimal places to show the number
:cvar SIGNIFICANT_DIGITS: Number (count) of significant digits to display the number
:cvar LINK_TARGET: configuration of a link to open the link in a new browsertab or not.
:cvar BUTTON: options to represent the choices of a select-list
:cvar THOUSANDS_SEPARATOR: option to display the thousand separator
:cvar AUTOFILL: option to autofill the content of the property
:cvar GEOCOORDINATE: option to display an alternative representation for the geocoordinate
:cvar USE_PROPERTY_NAME: option to display the name of a property for a part activity reference property
"""
DECIMAL_PLACES = "decimalPlaces"
SIGNIFICANT_DIGITS = "significantDigits"
LINK_TARGET = "linkTarget"
BUTTON = "buttonRepresentation"
THOUSANDS_SEPARATOR = "thousandsSeparator"
AUTOFILL = "autofill"
GEOCOORDINATE = "geoCoordinate"
USE_PROPERTY_NAME = "usePropertyName"
CAMERA_SCANNER_INPUT = "cameraScannerInput"
class GeoCoordinateConfig(Enum):
"""GeoCoordinate Configuration Enumerations.
:cvar APPROX_ADDRESS: represent the coordinate as approximate address (lookup by Google)
:cvar RD_AMERSFOORT: represent the coordinate as Amersfoort / RD New (epsg: 28992)
:cvar DD: represent the coordinate as Decimal Degrees (WGS84, epsg:4326)
:cvar DMS: represent the coordinate as Degrees Minutes Seconds (WGS84, epsg:4326)
"""
APPROX_ADDRESS = "approx_address" # As approximated address
# Amersfoort/RD (epsg: 28992) https://www.spatialreference.org/ref/epsg/amersfoort-rd-new/
RD_AMERSFOORT = "rd_amersfoort"
DD = "dd"  # As WGS84 (epsg:4326) decimal degrees representation: first lat (-90,+90), then lng (-180,+180)
DMS = "dms"  # As WGS84 (epsg:4326) degrees, minutes, seconds representation: first lat N/S, then lng E/W
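# Sketch (not part of the original module): convert a decimal-degrees value
# to the degrees/minutes/seconds tuple the DMS representation refers to.
def _example_dd_to_dms(dd=52.155172):
    degrees = int(dd)
    remainder = abs(dd - degrees) * 60
    minutes = int(remainder)
    seconds = round((remainder - minutes) * 60, 2)
    return degrees, minutes, seconds  # e.g. (52, 9, 18.62)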
class OtherRepresentations(Enum):
"""
Other representations used in KE-chain.
:cvar CUSTOM_ICON: different font-awesome icons
"""
CUSTOM_ICON = "customIcon"
class _AllRepresentations(PropertyRepresentation, OtherRepresentations):
pass
class ShowColumnTypes(Enum):
"""The columns that can be shown in a Property grid.
.. versionadded:: 2.3
:cvar UNIT: unit
:cvar DESCRIPTION: description
"""
UNIT = "unit"
DESCRIPTION = "description"
class ScopeWidgetColumnTypes(Enum):
"""The columns that can be shown in a Scope widget grid.
.. versionadded:: 3.0
:cvar PROJECT_NAME: Name
:cvar START_DATE: Start date
:cvar DUE_DATE: Due date
:cvar PROGRESS: Progress
:cvar STATUS: Status
:cvar TAGS: Tags
"""
PROJECT_NAME = "Name"
START_DATE = "Start date"
DUE_DATE = "Due date"
PROGRESS = "Progress"
STATUS = "Status"
TAGS = "Tags"
class FilterType(Enum):
"""The type of pre-filters that can be set on a Multi Reference Property.
.. versionadded:: 3.0
:cvar GREATER_THAN_EQUAL: 'gte'
:cvar LOWER_THAN_EQUAL: 'lte'
:cvar CONTAINS: 'icontains'
:cvar CONTAINS_SET: 'contains'
:cvar EXACT: 'exact'
"""
GREATER_THAN_EQUAL = "gte"
LOWER_THAN_EQUAL = "lte"
CONTAINS = "icontains"
CONTAINS_SET = "contains"
EXACT = "exact"
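# Illustrative sketch (not in the original module): pre-filters are commonly
# serialized as "<property_id>:<value>:<filter_type>" triplets. The exact
# wire format is an assumption here, shown only to illustrate where the
# FilterType values plug in.
def _example_prefilter(property_id="1234", value="10"):
    ftype = getattr(FilterType.GREATER_THAN_EQUAL, "value", FilterType.GREATER_THAN_EQUAL)
    return "{}:{}:{}".format(property_id, value, ftype)  # -> '1234:10:gte'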
class ProgressBarColors(Enum):
"""
Some basic colors that can be set on a Progress Bar inside a Progress Bar Widget.
.. versionadded:: 3.0
:cvar BLACK: '#000000'
:cvar WHITE: '#FFFFFF'
:cvar RED: '#FF0000'
:cvar LIME: '#00FF00'
:cvar BLUE: '#0000FF'
:cvar YELLOW: '#FFFF00'
:cvar CYAN: '#00FFFF'
:cvar MAGENTA: '#FF00FF'
:cvar SILVER: '#C0C0C0'
:cvar GRAY: '#808080'
:cvar MAROON: '#800000'
:cvar OLIVE: '#808000'
:cvar GREEN: '#008000'
:cvar PURPLE: '#800080'
:cvar TEAL: '#008080'
:cvar NAVY: '#000080'
:cvar DEFAULT_COMPLETED: '#339447'
:cvar DEFAULT_IN_PROGRESS: '#FF6600'
:cvar DEFAULT_NO_PROGRESS: '#EEEEEE'
:cvar DEFAULT_IN_PROGRESS_BACKGROUND: '#FC7C3D'
"""
BLACK = "#000000"
WHITE = "#FFFFFF"
RED = "#FF0000"
LIME = "#00FF00"
BLUE = "#0000FF"
YELLOW = "#FFFF00"
CYAN = "#00FFFF"
MAGENTA = "#FF00FF"
SILVER = "#C0C0C0"
GRAY = "#808080"
MAROON = "#800000"
OLIVE = "#808000"
GREEN = "#008000"
PURPLE = "#800080"
TEAL = "#008080"
NAVY = "#000080"
DEFAULT_COMPLETED = "#339447"
DEFAULT_IN_PROGRESS = "#FF6600"
DEFAULT_NO_PROGRESS = "#EEEEEE"
DEFAULT_IN_PROGRESS_BACKGROUND = "#FC7C3D"
class LinkTargets(Enum):
"""
Target for the CardWidget link and Link property representations.
.. versionadded:: 3.0
:cvar SAME_TAB: "_self"
:cvar NEW_TAB: "_blank"
"""
SAME_TAB = "_self"
NEW_TAB = "_blank"
class CardWidgetLinkTarget(LinkTargets):
"""Target for the CardWidget, remaining for backwards compatibility."""
pass
class CardWidgetLinkValue(Enum):
"""
Link Value for the CardWidget.
.. versionadded:: 3.0
:cvar EXTERNAL_LINK: "External link"
:cvar TASK_LINK: "Task link"
:cvar TREE_VIEW: "Tree view"
:cvar NO_LINK: "No link"
"""
EXTERNAL_LINK = "External link"
TASK_LINK = "Task link"
TREE_VIEW = "Tree view"
NO_LINK = "No link"
class CardWidgetImageValue(Enum):
"""
Image for the CardWidget.
.. versionadded:: 3.0
:cvar CUSTOM_IMAGE: "Custom image"
:cvar NO_IMAGE: "No image"
"""
CUSTOM_IMAGE = "Custom image"
NO_IMAGE = "No image"
class KEChainPages(Enum):
"""
URL names of built-in KE-chain pages.
:cvar DETAIL: "detail"
:cvar TASKS: "activities"
:cvar WORK_BREAKDOWN: "activitytree"
:cvar DATA_MODEL: "productmodel"
:cvar EXPLORER: "product"
:cvar SERVICES: "scripts"
:cvar CATALOG_WBS: "catalogtree"
:cvar APP_WBS: "apptree"
"""
DETAIL = "detail"
TASKS = "activities"
WORK_BREAKDOWN = "activitytree"
DATA_MODEL = "productmodel"
EXPLORER = "product"
SERVICES = "scripts"
CATALOG_WBS = "catalogtree"
APP_WBS = "apptree"
KEChainPageLabels = {
KEChainPages.DETAIL: "Project details",
KEChainPages.TASKS: "Tasks",
KEChainPages.WORK_BREAKDOWN: "Work Breakdown",
KEChainPages.CATALOG_WBS: "Catalog",
KEChainPages.APP_WBS: "App Screens",
KEChainPages.DATA_MODEL: "Data model",
KEChainPages.EXPLORER: "Explorer",
KEChainPages.SERVICES: "Scripts",
}
KEChainPageLabels_nl = {
KEChainPages.DETAIL: "Project details",
KEChainPages.TASKS: "Taken",
KEChainPages.WORK_BREAKDOWN: "Taakverdeling",
KEChainPages.CATALOG_WBS: "Catalogus",
KEChainPages.APP_WBS: "App schermen",
KEChainPages.DATA_MODEL: "Data model",
KEChainPages.EXPLORER: "Explorer",
KEChainPages.SERVICES: "Scripts",
}
CardWidgetKEChainPageLink = {
KEChainPages.DETAIL: "Project",
KEChainPages.TASKS: "Tasks",
KEChainPages.DATA_MODEL: "Model",
KEChainPages.EXPLORER: "Explorer",
KEChainPages.SERVICES: "Script",
KEChainPages.WORK_BREAKDOWN: "Work Breakdown",
KEChainPages.CATALOG_WBS: "Catalog Tasks",
KEChainPages.APP_WBS: "App Tasks",
}
KEChainPageIcons = {
KEChainPages.DETAIL: "bookmark",
KEChainPages.TASKS: "edit",
KEChainPages.WORK_BREAKDOWN: "sitemap",
KEChainPages.CATALOG_WBS: "books",
KEChainPages.APP_WBS: "tablet-alt",
KEChainPages.DATA_MODEL: "cube",
KEChainPages.EXPLORER: "folder",
KEChainPages.SERVICES: "file-code",
}
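# Illustrative helper (not in the original module): combine the lookup
# tables above to describe a built-in page in one place.
def _example_page_descriptor(page=KEChainPages.TASKS, dutch=False):
    labels = KEChainPageLabels_nl if dutch else KEChainPageLabels
    return {
        "url_name": getattr(page, "value", page),  # e.g. 'activities'
        "label": labels[page],  # e.g. 'Tasks' / 'Taken'
        "icon": KEChainPageIcons[page],  # e.g. 'edit'
    }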
class SubprocessDisplayMode(Enum):
"""
URL variations to vary the display of a subprocess activity.
:cvar ACTIVITIES: "activities"
:cvar TREEVIEW: "treeview"
"""
ACTIVITIES = "activities"
TREEVIEW = "treeview"
class URITarget(Enum):
"""
Side-bar button redirect options.
:cvar INTERNAL: "internal"
:cvar EXTERNAL: "external"
"""
INTERNAL = "internal"
EXTERNAL = "external"
class FontAwesomeMode(Enum):
"""
Options to display the same icon.
Source:
https://fontawesome.com/how-to-use/on-the-web/setup/getting-started
:cvar SOLID: "solid"
:cvar REGULAR: "regular"
:cvar LIGHT: "light"
"""
SOLID = "solid"
REGULAR = "regular"
LIGHT = "light"
class SelectListRepresentations(Enum):
"""
Options for how the options of a single-select list property are displayed.
:cvar DROP_DOWN: "dropdown"
:cvar CHECK_BOXES: "checkboxes"
:cvar BUTTONS: "buttons"
"""
DROP_DOWN = "dropdown"
CHECK_BOXES = "checkboxes"
BUTTONS = "buttons"
class ImageFitValue(Enum):
"""
Options to fit an image on a CardWidget or AttachmentViewerWidget.
This is a subset from the `object-fit property`_ in HTML.
:cvar CONTAIN: scale the image to fit within the widget
:cvar COVER: scale the image to cover the entire widget
.. _object-fit property: https://developer.mozilla.org/en-US/docs/Web/CSS/object-fit
"""
CONTAIN = "contain"
COVER = "cover"
class WidgetTitleValue(Enum):
"""
Options to configure the title of a widget.
:cvar DEFAULT: Use the default title of the widget type.
:cvar NO_TITLE: Show no title.
:cvar CUSTOM_TITLE: Show a custom title text.
"""
DEFAULT = "Default"
NO_TITLE = "No title"
CUSTOM_TITLE = "Custom title"
class NotificationStatus(Enum):
"""
Options to retrieve a Notification based on its status.
normal lifecycle:
- DRAFT, when a message is first saved to the backend and the status is still in draft.
next states: READY
- READY: when the message is ready for processing, it is complete and is to be processed
next states: PROCESSING
- PROCESSING: when the message is in the process of being sent out
next states: COMPLETED, FAILED
- COMPLETED: when the message is successfully sent out
next states: ARCHIVED
- FAILED: when the message is not successfully sent out
next states: ARCHIVED
- ARCHIVED: when the message is archived and awaiting deletion according to a retention policy
next states: None
:cvar ARCHIVED: "archived" notifications
:cvar COMPLETED: "completed" notifications
:cvar DRAFT: "draft" notifications
:cvar FAILED: "failed" notifications
:cvar PROCESSING: "processing" notifications
:cvar READY: "ready" notifications
"""
ARCHIVED = "ARCHIVED"
COMPLETED = "COMPLETED"
DRAFT = "DRAFT"
FAILED = "FAILED"
PROCESSING = "PROCESSING"
READY = "READY"
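# Sketch (not part of the original module) of the lifecycle described in the
# docstring above, expressed as an allowed-transitions table.
NOTIFICATION_STATUS_TRANSITIONS = {
    NotificationStatus.DRAFT: (NotificationStatus.READY,),
    NotificationStatus.READY: (NotificationStatus.PROCESSING,),
    NotificationStatus.PROCESSING: (NotificationStatus.COMPLETED, NotificationStatus.FAILED),
    NotificationStatus.COMPLETED: (NotificationStatus.ARCHIVED,),
    NotificationStatus.FAILED: (NotificationStatus.ARCHIVED,),
    NotificationStatus.ARCHIVED: (),
}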
class NotificationEvent(Enum):
"""
Options to retrieve a Notification based on its event.
:cvar SHARE_ACTIVITY_LINK: notifications generated by sharing the link of an `Activity`
:cvar EXPORT_ACTIVITY_ASYNC: notifications generated by exporting an `Activity`
:cvar SHARE_ACTIVITY_PDF: notifications generated by sharing the pdf of an `Activity`
"""
SHARE_ACTIVITY_LINK = "SHARE_ACTIVITY_LINK"
EXPORT_ACTIVITY_ASYNC = "EXPORT_ACTIVITY_ASYNC"
SHARE_ACTIVITY_PDF = "SHARE_ACTIVITY_PDF"
class NotificationChannels(Enum):
"""
Options to retrieve a Notification based on its channel.
:cvar EMAIL: email notification
:cvar APP: app notification
"""
EMAIL = "EMAIL"
APP = "APP"
class LanguageCodes(Enum):
"""
Options for the language setting of a user.
:cvar ENGLISH: English
:cvar FRENCH: French
:cvar GERMAN: German
:cvar DUTCH: Dutch
:cvar ITALIAN: Italian
"""
ENGLISH = "en"
FRENCH = "fr"
GERMAN = "de"
DUTCH = "nl"
ITALIAN = "it"
|
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Provider tests."""
from __future__ import absolute_import, print_function
import uuid
import pytest
from datacite.errors import DataCiteError, DataCiteGoneError, \
DataCiteNoContentError, DataCiteNotFoundError, HttpError
from mock import MagicMock, patch
from invenio_pidstore.models import PIDStatus
from invenio_pidstore.providers.base import BaseProvider
from invenio_pidstore.providers.datacite import DataCiteProvider
from invenio_pidstore.providers.recordid import RecordIdProvider
def test_base_provider(app, db):
"""Test base provider."""
with app.app_context():
provider = BaseProvider.create(pid_type='test', pid_value='test')
assert provider.pid
assert provider.pid.pid_type == 'test'
assert provider.pid.pid_value == 'test'
assert provider.pid.pid_provider is None
assert provider.pid.status == PIDStatus.NEW
assert provider.pid.object_type is None
assert provider.pid.object_uuid is None
provider.reserve()
assert provider.pid.is_reserved()
provider.register()
assert provider.pid.is_registered()
provider.delete()
assert provider.pid.is_deleted()
provider.sync_status()
provider.update()
class TestProvider(BaseProvider):
pid_type = 't'
pid_provider = 'testpr'
default_status = PIDStatus.RESERVED
with app.app_context():
provider = TestProvider.create(pid_value='test')
assert provider.pid
assert provider.pid.pid_type == 't'
assert provider.pid.pid_provider == 'testpr'
assert provider.pid.pid_value == 'test'
assert provider.pid.status == PIDStatus.RESERVED
assert TestProvider.get('test')
def test_recordid_provider(app, db):
"""Test record id provider."""
with app.app_context():
provider = RecordIdProvider.create()
assert provider.pid
assert provider.pid.pid_type == 'recid'
assert provider.pid.pid_value == '1'
assert provider.pid.pid_provider is None
assert provider.pid.status == PIDStatus.RESERVED
assert provider.pid.object_type is None
assert provider.pid.object_uuid is None
# Assign to object immediately
rec_uuid = uuid.uuid4()
provider = RecordIdProvider.create(
object_type='rec', object_uuid=rec_uuid)
assert provider.pid
assert provider.pid.pid_type == 'recid'
assert provider.pid.pid_value == '2'
assert provider.pid.pid_provider is None
assert provider.pid.status == PIDStatus.REGISTERED
assert provider.pid.object_type == 'rec'
assert provider.pid.object_uuid == rec_uuid
pytest.raises(AssertionError, RecordIdProvider.create, pid_value='3')
def test_datacite_create_get(app, db):
"""Test datacite provider create/get."""
with app.app_context():
provider = DataCiteProvider.create('10.1234/a')
assert provider.pid.status == PIDStatus.NEW
assert provider.pid.pid_provider == 'datacite'
# Create, passing the client kwarg to provider object creation
provider = DataCiteProvider.create('10.1234/b', client=MagicMock())
assert provider.pid.status == PIDStatus.NEW
assert provider.pid.pid_provider == 'datacite'
assert isinstance(provider.api, MagicMock)
provider = DataCiteProvider.get('10.1234/a')
assert provider.pid.status == PIDStatus.NEW
assert provider.pid.pid_provider == 'datacite'
provider = DataCiteProvider.get('10.1234/a', client=MagicMock())
assert isinstance(provider.api, MagicMock)
def test_datacite_reserve_register_update_delete(app, db):
"""Test datacite provider reserve."""
with app.app_context():
api = MagicMock()
provider = DataCiteProvider.create('10.1234/a', client=api)
assert provider.reserve('mydoc')
assert provider.pid.status == PIDStatus.RESERVED
api.metadata_post.assert_called_with('mydoc')
assert provider.register('myurl', 'anotherdoc')
assert provider.pid.status == PIDStatus.REGISTERED
api.metadata_post.assert_called_with('anotherdoc')
api.doi_post.assert_called_with('10.1234/a', 'myurl')
assert provider.update('anotherurl', 'yetanother')
assert provider.pid.status == PIDStatus.REGISTERED
api.metadata_post.assert_called_with('yetanother')
api.doi_post.assert_called_with('10.1234/a', 'anotherurl')
assert provider.delete()
assert provider.pid.status == PIDStatus.DELETED
api.metadata_delete.assert_called_with('10.1234/a')
assert provider.update('newurl', 'newdoc')
assert provider.pid.status == PIDStatus.REGISTERED
api.metadata_post.assert_called_with('newdoc')
api.doi_post.assert_called_with('10.1234/a', 'newurl')
@patch('invenio_pidstore.providers.datacite.logger')
def test_datacite_error_reserve(logger, app, db):
"""Test reserve errors."""
with app.app_context():
api = MagicMock()
provider = DataCiteProvider.create('10.1234/a', client=api)
api.metadata_post.side_effect = DataCiteError
pytest.raises(DataCiteError, provider.reserve, "testdoc")
assert logger.exception.call_args[0][0] == \
"Failed to reserve in DataCite"
@patch('invenio_pidstore.providers.datacite.logger')
def test_datacite_error_register_update(logger, app, db):
"""Test register errors."""
with app.app_context():
api = MagicMock()
provider = DataCiteProvider.create('10.1234/a', client=api)
api.doi_post.side_effect = DataCiteError
pytest.raises(DataCiteError, provider.register, "testurl", "testdoc")
assert logger.exception.call_args[0][0] == \
"Failed to register in DataCite"
pytest.raises(DataCiteError, provider.update, "testurl", "testdoc")
assert logger.exception.call_args[0][0] == \
"Failed to update in DataCite"
@patch('invenio_pidstore.providers.datacite.logger')
def test_datacite_error_delete(logger, app, db):
"""Test reserve errors."""
with app.app_context():
api = MagicMock()
provider = DataCiteProvider.create('10.1234/a', client=api)
# DOIs in NEW state don't contact DataCite
api.metadata_delete.side_effect = DataCiteError
assert provider.delete()
# Already registered DOIs do contact DataCite to delete
provider = DataCiteProvider.create('10.1234/b', client=api,
status=PIDStatus.REGISTERED)
api.metadata_delete.side_effect = DataCiteError
pytest.raises(DataCiteError, provider.delete)
assert logger.exception.call_args[0][0] == \
"Failed to delete in DataCite"
@patch('invenio_pidstore.providers.datacite.logger')
def test_datacite_sync(logger, app, db):
"""Test sync."""
with app.app_context():
api = MagicMock()
provider = DataCiteProvider.create('10.1234/a', client=api)
assert provider.pid.status == PIDStatus.NEW
# Status can be set from api.doi_get reply
assert provider.sync_status()
assert provider.pid.status == PIDStatus.REGISTERED
api.doi_get.assert_called_with(provider.pid.pid_value)
api.doi_get.side_effect = DataCiteGoneError
assert provider.sync_status()
assert provider.pid.status == PIDStatus.DELETED
api.doi_get.side_effect = DataCiteNoContentError
assert provider.sync_status()
assert provider.pid.status == PIDStatus.REGISTERED
# Status *cannot* be set from api.doi_get reply
# Try with api.metadata_get
api.doi_get.side_effect = DataCiteNotFoundError
assert provider.sync_status()
assert provider.pid.status == PIDStatus.RESERVED
api.metadata_get.assert_called_with(provider.pid.pid_value)
api.doi_get.side_effect = DataCiteNotFoundError
api.metadata_get.side_effect = DataCiteGoneError
assert provider.sync_status()
assert provider.pid.status == PIDStatus.DELETED
api.doi_get.side_effect = DataCiteNotFoundError
api.metadata_get.side_effect = DataCiteNoContentError
assert provider.sync_status()
assert provider.pid.status == PIDStatus.REGISTERED
api.doi_get.side_effect = DataCiteNotFoundError
api.metadata_get.side_effect = DataCiteNotFoundError
assert provider.sync_status()
assert provider.pid.status == PIDStatus.NEW
api.doi_get.side_effect = HttpError
assert provider.pid.status == PIDStatus.NEW
pytest.raises(HttpError, provider.sync_status)
assert provider.pid.status == PIDStatus.NEW
assert logger.exception.call_args[0][0] == \
"Failed to sync status from DataCite"
|
|
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
from twilio.rest.proxy.v1.service.phone_number import PhoneNumberList
from twilio.rest.proxy.v1.service.session import SessionList
from twilio.rest.proxy.v1.service.short_code import ShortCodeList
class ServiceList(ListResource):
""" PLEASE NOTE that this class contains beta products that are subject to
change. Use them with caution. """
def __init__(self, version):
"""
Initialize the ServiceList
:param Version version: Version that contains the resource
:returns: twilio.rest.proxy.v1.service.ServiceList
:rtype: twilio.rest.proxy.v1.service.ServiceList
"""
super(ServiceList, self).__init__(version)
# Path Solution
self._solution = {}
self._uri = '/Services'.format(**self._solution)
def stream(self, limit=None, page_size=None):
"""
Streams ServiceInstance records from the API as a generator stream.
This operation lazily loads records as efficiently as possible until the limit
is reached.
The results are returned as a generator, so this operation is memory efficient.
:param int limit: Upper limit for the number of records to return. stream()
guarantees to never return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, stream() will attempt to read the
limit with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.proxy.v1.service.ServiceInstance]
"""
limits = self._version.read_limits(limit, page_size)
page = self.page(page_size=limits['page_size'], )
return self._version.stream(page, limits['limit'], limits['page_limit'])
def list(self, limit=None, page_size=None):
"""
Lists ServiceInstance records from the API as a list.
Unlike stream(), this operation is eager and will load `limit` records into
memory before returning.
:param int limit: Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)
:returns: List containing up to limit results
:rtype: list[twilio.rest.proxy.v1.service.ServiceInstance]
"""
return list(self.stream(limit=limit, page_size=page_size, ))
def page(self, page_token=values.unset, page_number=values.unset,
page_size=values.unset):
"""
Retrieve a single page of ServiceInstance records from the API.
Request is executed immediately
:param str page_token: PageToken provided by the API
:param int page_number: Page Number, this value is simply for client state
:param int page_size: Number of records to return, defaults to 50
:returns: Page of ServiceInstance
:rtype: twilio.rest.proxy.v1.service.ServicePage
"""
params = values.of({'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, })
response = self._version.page(
'GET',
self._uri,
params=params,
)
return ServicePage(self._version, response, self._solution)
def get_page(self, target_url):
"""
Retrieve a specific page of ServiceInstance records from the API.
Request is executed immediately
:param str target_url: API-generated URL for the requested results page
:returns: Page of ServiceInstance
:rtype: twilio.rest.proxy.v1.service.ServicePage
"""
response = self._version.domain.twilio.request(
'GET',
target_url,
)
return ServicePage(self._version, response, self._solution)
def create(self, unique_name, default_ttl=values.unset,
callback_url=values.unset, geo_match_level=values.unset,
number_selection_behavior=values.unset,
intercept_callback_url=values.unset,
out_of_session_callback_url=values.unset,
chat_instance_sid=values.unset):
"""
Create a new ServiceInstance
:param unicode unique_name: An application-defined string that uniquely identifies the resource
:param unicode default_ttl: Default TTL for a Session, in seconds
:param unicode callback_url: The URL we should call when the interaction status changes
:param ServiceInstance.GeoMatchLevel geo_match_level: Where a proxy number must be located relative to the participant identifier
:param ServiceInstance.NumberSelectionBehavior number_selection_behavior: The preference for Proxy Number selection for the Service instance
:param unicode intercept_callback_url: The URL we call on each interaction
:param unicode out_of_session_callback_url: The URL we call when an inbound call or SMS action occurs on a closed or non-existent Session
:param unicode chat_instance_sid: The SID of the Chat Service Instance
:returns: Newly created ServiceInstance
:rtype: twilio.rest.proxy.v1.service.ServiceInstance
"""
data = values.of({
'UniqueName': unique_name,
'DefaultTtl': default_ttl,
'CallbackUrl': callback_url,
'GeoMatchLevel': geo_match_level,
'NumberSelectionBehavior': number_selection_behavior,
'InterceptCallbackUrl': intercept_callback_url,
'OutOfSessionCallbackUrl': out_of_session_callback_url,
'ChatInstanceSid': chat_instance_sid,
})
payload = self._version.create(
'POST',
self._uri,
data=data,
)
return ServiceInstance(self._version, payload, )
def get(self, sid):
"""
Constructs a ServiceContext
:param sid: The unique string that identifies the resource
:returns: twilio.rest.proxy.v1.service.ServiceContext
:rtype: twilio.rest.proxy.v1.service.ServiceContext
"""
return ServiceContext(self._version, sid=sid, )
def __call__(self, sid):
"""
Constructs a ServiceContext
:param sid: The unique string that identifies the resource
:returns: twilio.rest.proxy.v1.service.ServiceContext
:rtype: twilio.rest.proxy.v1.service.ServiceContext
"""
return ServiceContext(self._version, sid=sid, )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Proxy.V1.ServiceList>'
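# Minimal usage sketch (not part of the generated bindings). Assumes a
# configured twilio.rest.Client is passed in; the unique_name below is a
# placeholder value.
def _example_list_and_create_services(client):
    # Create a Proxy Service, then lazily stream up to 20 existing services.
    service = client.proxy.services.create(unique_name='my-proxy-service')
    for record in client.proxy.services.stream(limit=20):
        print(record.sid, record.unique_name)
    return service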
class ServicePage(Page):
""" PLEASE NOTE that this class contains beta products that are subject to
change. Use them with caution. """
def __init__(self, version, response, solution):
"""
Initialize the ServicePage
:param Version version: Version that contains the resource
:param Response response: Response from the API
:returns: twilio.rest.proxy.v1.service.ServicePage
:rtype: twilio.rest.proxy.v1.service.ServicePage
"""
super(ServicePage, self).__init__(version, response)
# Path Solution
self._solution = solution
def get_instance(self, payload):
"""
Build an instance of ServiceInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.proxy.v1.service.ServiceInstance
:rtype: twilio.rest.proxy.v1.service.ServiceInstance
"""
return ServiceInstance(self._version, payload, )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Proxy.V1.ServicePage>'
class ServiceContext(InstanceContext):
""" PLEASE NOTE that this class contains beta products that are subject to
change. Use them with caution. """
def __init__(self, version, sid):
"""
Initialize the ServiceContext
:param Version version: Version that contains the resource
:param sid: The unique string that identifies the resource
:returns: twilio.rest.proxy.v1.service.ServiceContext
:rtype: twilio.rest.proxy.v1.service.ServiceContext
"""
super(ServiceContext, self).__init__(version)
# Path Solution
self._solution = {'sid': sid, }
self._uri = '/Services/{sid}'.format(**self._solution)
# Dependents
self._sessions = None
self._phone_numbers = None
self._short_codes = None
def fetch(self):
"""
Fetch a ServiceInstance
:returns: Fetched ServiceInstance
:rtype: twilio.rest.proxy.v1.service.ServiceInstance
"""
params = values.of({})
payload = self._version.fetch(
'GET',
self._uri,
params=params,
)
return ServiceInstance(self._version, payload, sid=self._solution['sid'], )
def delete(self):
"""
Deletes the ServiceInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._version.delete('delete', self._uri)
def update(self, unique_name=values.unset, default_ttl=values.unset,
callback_url=values.unset, geo_match_level=values.unset,
number_selection_behavior=values.unset,
intercept_callback_url=values.unset,
out_of_session_callback_url=values.unset,
chat_instance_sid=values.unset):
"""
Update the ServiceInstance
:param unicode unique_name: An application-defined string that uniquely identifies the resource
:param unicode default_ttl: Default TTL for a Session, in seconds
:param unicode callback_url: The URL we should call when the interaction status changes
:param ServiceInstance.GeoMatchLevel geo_match_level: Where a proxy number must be located relative to the participant identifier
:param ServiceInstance.NumberSelectionBehavior number_selection_behavior: The preference for Proxy Number selection for the Service instance
:param unicode intercept_callback_url: The URL we call on each interaction
:param unicode out_of_session_callback_url: The URL we call when an inbound call or SMS action occurs on a closed or non-existent Session
:param unicode chat_instance_sid: The SID of the Chat Service Instance
:returns: Updated ServiceInstance
:rtype: twilio.rest.proxy.v1.service.ServiceInstance
"""
data = values.of({
'UniqueName': unique_name,
'DefaultTtl': default_ttl,
'CallbackUrl': callback_url,
'GeoMatchLevel': geo_match_level,
'NumberSelectionBehavior': number_selection_behavior,
'InterceptCallbackUrl': intercept_callback_url,
'OutOfSessionCallbackUrl': out_of_session_callback_url,
'ChatInstanceSid': chat_instance_sid,
})
payload = self._version.update(
'POST',
self._uri,
data=data,
)
return ServiceInstance(self._version, payload, sid=self._solution['sid'], )
@property
def sessions(self):
"""
Access the sessions
:returns: twilio.rest.proxy.v1.service.session.SessionList
:rtype: twilio.rest.proxy.v1.service.session.SessionList
"""
if self._sessions is None:
self._sessions = SessionList(self._version, service_sid=self._solution['sid'], )
return self._sessions
@property
def phone_numbers(self):
"""
Access the phone_numbers
:returns: twilio.rest.proxy.v1.service.phone_number.PhoneNumberList
:rtype: twilio.rest.proxy.v1.service.phone_number.PhoneNumberList
"""
if self._phone_numbers is None:
self._phone_numbers = PhoneNumberList(self._version, service_sid=self._solution['sid'], )
return self._phone_numbers
@property
def short_codes(self):
"""
Access the short_codes
:returns: twilio.rest.proxy.v1.service.short_code.ShortCodeList
:rtype: twilio.rest.proxy.v1.service.short_code.ShortCodeList
"""
if self._short_codes is None:
self._short_codes = ShortCodeList(self._version, service_sid=self._solution['sid'], )
return self._short_codes
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Proxy.V1.ServiceContext {}>'.format(context)
class ServiceInstance(InstanceResource):
""" PLEASE NOTE that this class contains beta products that are subject to
change. Use them with caution. """
class GeoMatchLevel(object):
AREA_CODE = "area-code"
OVERLAY = "overlay"
RADIUS = "radius"
COUNTRY = "country"
class NumberSelectionBehavior(object):
AVOID_STICKY = "avoid-sticky"
PREFER_STICKY = "prefer-sticky"
def __init__(self, version, payload, sid=None):
"""
Initialize the ServiceInstance
:returns: twilio.rest.proxy.v1.service.ServiceInstance
:rtype: twilio.rest.proxy.v1.service.ServiceInstance
"""
super(ServiceInstance, self).__init__(version)
# Marshaled Properties
self._properties = {
'sid': payload.get('sid'),
'unique_name': payload.get('unique_name'),
'account_sid': payload.get('account_sid'),
'chat_instance_sid': payload.get('chat_instance_sid'),
'callback_url': payload.get('callback_url'),
'default_ttl': deserialize.integer(payload.get('default_ttl')),
'number_selection_behavior': payload.get('number_selection_behavior'),
'geo_match_level': payload.get('geo_match_level'),
'intercept_callback_url': payload.get('intercept_callback_url'),
'out_of_session_callback_url': payload.get('out_of_session_callback_url'),
'date_created': deserialize.iso8601_datetime(payload.get('date_created')),
'date_updated': deserialize.iso8601_datetime(payload.get('date_updated')),
'url': payload.get('url'),
'links': payload.get('links'),
}
# Context
self._context = None
self._solution = {'sid': sid or self._properties['sid'], }
@property
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: ServiceContext for this ServiceInstance
:rtype: twilio.rest.proxy.v1.service.ServiceContext
"""
if self._context is None:
self._context = ServiceContext(self._version, sid=self._solution['sid'], )
return self._context
@property
def sid(self):
"""
:returns: The unique string that identifies the resource
:rtype: unicode
"""
return self._properties['sid']
@property
def unique_name(self):
"""
:returns: An application-defined string that uniquely identifies the resource
:rtype: unicode
"""
return self._properties['unique_name']
@property
def account_sid(self):
"""
:returns: The SID of the Account that created the resource
:rtype: unicode
"""
return self._properties['account_sid']
@property
def chat_instance_sid(self):
"""
:returns: The SID of the Chat Service Instance
:rtype: unicode
"""
return self._properties['chat_instance_sid']
@property
def callback_url(self):
"""
:returns: The URL we call when the interaction status changes
:rtype: unicode
"""
return self._properties['callback_url']
@property
def default_ttl(self):
"""
:returns: Default TTL for a Session, in seconds
:rtype: unicode
"""
return self._properties['default_ttl']
@property
def number_selection_behavior(self):
"""
:returns: The preference for Proxy Number selection for the Service instance
:rtype: ServiceInstance.NumberSelectionBehavior
"""
return self._properties['number_selection_behavior']
@property
def geo_match_level(self):
"""
:returns: Where a proxy number must be located relative to the participant identifier
:rtype: ServiceInstance.GeoMatchLevel
"""
return self._properties['geo_match_level']
@property
def intercept_callback_url(self):
"""
:returns: The URL we call on each interaction
:rtype: unicode
"""
return self._properties['intercept_callback_url']
@property
def out_of_session_callback_url(self):
"""
:returns: The URL we call when an inbound call or SMS action occurs on a closed or non-existent Session
:rtype: unicode
"""
return self._properties['out_of_session_callback_url']
@property
def date_created(self):
"""
:returns: The ISO 8601 date and time in GMT when the resource was created
:rtype: datetime
"""
return self._properties['date_created']
@property
def date_updated(self):
"""
:returns: The ISO 8601 date and time in GMT when the resource was last updated
:rtype: datetime
"""
return self._properties['date_updated']
@property
def url(self):
"""
:returns: The absolute URL of the Service resource
:rtype: unicode
"""
return self._properties['url']
@property
def links(self):
"""
:returns: The URLs of resources related to the Service
:rtype: unicode
"""
return self._properties['links']
def fetch(self):
"""
Fetch a ServiceInstance
:returns: Fetched ServiceInstance
:rtype: twilio.rest.proxy.v1.service.ServiceInstance
"""
return self._proxy.fetch()
def delete(self):
"""
Deletes the ServiceInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._proxy.delete()
def update(self, unique_name=values.unset, default_ttl=values.unset,
callback_url=values.unset, geo_match_level=values.unset,
number_selection_behavior=values.unset,
intercept_callback_url=values.unset,
out_of_session_callback_url=values.unset,
chat_instance_sid=values.unset):
"""
Update the ServiceInstance
:param unicode unique_name: An application-defined string that uniquely identifies the resource
:param unicode default_ttl: Default TTL for a Session, in seconds
:param unicode callback_url: The URL we should call when the interaction status changes
:param ServiceInstance.GeoMatchLevel geo_match_level: Where a proxy number must be located relative to the participant identifier
:param ServiceInstance.NumberSelectionBehavior number_selection_behavior: The preference for Proxy Number selection for the Service instance
:param unicode intercept_callback_url: The URL we call on each interaction
:param unicode out_of_session_callback_url: The URL we call when an inbound call or SMS action occurs on a closed or non-existent Session
:param unicode chat_instance_sid: The SID of the Chat Service Instance
:returns: Updated ServiceInstance
:rtype: twilio.rest.proxy.v1.service.ServiceInstance
"""
return self._proxy.update(
unique_name=unique_name,
default_ttl=default_ttl,
callback_url=callback_url,
geo_match_level=geo_match_level,
number_selection_behavior=number_selection_behavior,
intercept_callback_url=intercept_callback_url,
out_of_session_callback_url=out_of_session_callback_url,
chat_instance_sid=chat_instance_sid,
)
@property
def sessions(self):
"""
Access the sessions
:returns: twilio.rest.proxy.v1.service.session.SessionList
:rtype: twilio.rest.proxy.v1.service.session.SessionList
"""
return self._proxy.sessions
@property
def phone_numbers(self):
"""
Access the phone_numbers
:returns: twilio.rest.proxy.v1.service.phone_number.PhoneNumberList
:rtype: twilio.rest.proxy.v1.service.phone_number.PhoneNumberList
"""
return self._proxy.phone_numbers
@property
def short_codes(self):
"""
Access the short_codes
:returns: twilio.rest.proxy.v1.service.short_code.ShortCodeList
:rtype: twilio.rest.proxy.v1.service.short_code.ShortCodeList
"""
return self._proxy.short_codes
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Proxy.V1.ServiceInstance {}>'.format(context)
|
|
import functools
from datetime import datetime
from functools import partial
from django import http
from django.conf import settings
from django.contrib import auth
from django.contrib.auth.forms import PasswordResetForm
from django.contrib.auth.tokens import default_token_generator
from django.db import IntegrityError
from django.db.models import Q, Sum
from django.db.transaction import non_atomic_requests
from django.shortcuts import (get_list_or_404, get_object_or_404, redirect,
render)
from django.template import Context, loader
from django.utils.http import is_safe_url, urlsafe_base64_decode
from django.views.decorators.cache import never_cache
import commonware.log
import waffle
from mobility.decorators import mobile_template
from session_csrf import anonymous_csrf, anonymous_csrf_exempt
from tower import ugettext as _
from waffle.decorators import waffle_switch
from olympia import amo
from olympia.users import notifications as notifications
from olympia.abuse.models import send_abuse_report
from olympia.access import acl
from olympia.access.middleware import ACLMiddleware
from olympia.addons.decorators import addon_view_factory
from olympia.addons.models import Addon, AddonUser, Category
from olympia.amo import messages
from olympia.amo.decorators import (
json_view, login_required, permission_required,
post_required, write)
from olympia.amo.forms import AbuseForm
from olympia.amo.urlresolvers import get_url_prefix, reverse
from olympia.amo.utils import escape_all, log_cef, send_mail, urlparams
from olympia.bandwagon.models import Collection
from olympia.browse.views import PersonasFilter
from olympia.translations.query import order_by_translation
from olympia.users.models import UserNotification
from . import forms, tasks
from .models import UserProfile
from .signals import logged_out
from .utils import EmailResetCode, UnsubscribeCode
log = commonware.log.getLogger('z.users')
addon_view = addon_view_factory(qs=Addon.objects.valid)
THEMES_LIMIT = 20
def user_view(f):
@functools.wraps(f)
def wrapper(request, user_id, *args, **kw):
"""Provides a user object given a user ID or username."""
if user_id.isdigit():
key = 'id'
else:
key = 'username'
# If the username is `me` then show the current user's profile.
if (user_id == 'me' and request.user and
request.user.username):
user_id = request.user.username
user = get_object_or_404(UserProfile, **{key: user_id})
return f(request, user, *args, **kw)
return wrapper
@login_required(redirect=False)
@json_view
@non_atomic_requests
def ajax(request):
"""Query for a user matching a given email."""
if 'q' not in request.GET:
raise http.Http404()
data = {'status': 0, 'message': ''}
email = request.GET.get('q', '').strip()
if not email:
data.update(message=_('An email address is required.'))
return data
user = UserProfile.objects.filter(email=email)
msg = _('A user with that email address does not exist.')
if user:
data.update(status=1, id=user[0].id, name=user[0].name)
else:
data['message'] = msg
return escape_all(data)
@user_view
def confirm(request, user, token):
if not user.confirmationcode:
return redirect('users.login')
if user.confirmationcode != token:
log.info(u"Account confirmation failed for user (%s)", user)
messages.error(request, _('Invalid confirmation code!'))
return redirect('users.login')
user.confirmationcode = ''
user.save()
messages.success(request, _('Successfully verified!'))
log.info(u"Account confirmed for user (%s)", user)
return redirect('users.login')
@user_view
def confirm_resend(request, user):
if not user.confirmationcode:
return redirect('users.login')
# Potential for flood here if someone requests a confirmationcode and then
# re-requests confirmations. We may need to track requests in the future.
log.info(u"Account confirm re-requested for user (%s)", user)
user.email_confirmation_code()
msg = _(u'An email has been sent to your address to confirm '
u'your account. Before you can log in, you have to activate '
u'your account by clicking on the link provided in this '
u'email.')
messages.info(request, _('Confirmation Email Sent'), msg)
return redirect('users.login')
@login_required
def delete(request):
amouser = request.user
if request.method == 'POST':
form = forms.UserDeleteForm(request.POST, request=request)
if form.is_valid():
messages.success(request, _('Profile Deleted'))
amouser.anonymize()
logout(request)
form = None
return http.HttpResponseRedirect(reverse('users.login'))
else:
form = forms.UserDeleteForm(request=request)
return render(request, 'users/delete.html',
{'form': form, 'amouser': amouser})
@login_required
def delete_photo(request):
u = request.user
if request.method == 'POST':
u.picture_type = ''
u.save()
log.debug(u"User (%s) deleted photo" % u)
tasks.delete_photo.delay(u.picture_path)
messages.success(request, _('Photo Deleted'))
return http.HttpResponseRedirect(reverse('users.edit') +
'#user-profile')
return render(request, 'users/delete_photo.html', dict(user=u))
@write
@login_required
def edit(request):
# Don't use request.user since it has too much caching.
amouser = UserProfile.objects.get(pk=request.user.id)
if request.method == 'POST':
# ModelForm alters the instance you pass in. We need to keep a copy
# around in case we need to use it below (to email the user)
original_email = amouser.email
form = forms.UserEditForm(request.POST, request.FILES, request=request,
instance=amouser)
if form.is_valid():
messages.success(request, _('Profile Updated'))
if amouser.email != original_email:
l = {'user': amouser,
'mail1': original_email,
'mail2': amouser.email}
log.info(u"User (%(user)s) has requested email change from "
u"(%(mail1)s) to (%(mail2)s)" % l)
messages.info(
request, _('Email Confirmation Sent'),
_(u'An email has been sent to {0} to confirm your new '
u'email address. For the change to take effect, you '
u'need to click on the link provided in this email. '
u'Until then, you can keep logging in with your '
u'current email address.').format(amouser.email))
token, hash_ = EmailResetCode.create(amouser.id, amouser.email)
url = '%s%s' % (settings.SITE_URL,
reverse('users.emailchange',
args=[amouser.id, token, hash_]))
t = loader.get_template('users/email/emailchange.ltxt')
c = {'domain': settings.DOMAIN, 'url': url}
send_mail(
_('Please confirm your email address '
'change at %s' % settings.DOMAIN),
t.render(Context(c)), None, [amouser.email],
use_blacklist=False, real_email=True)
# Reset the original email back. We aren't changing their
# address until they confirm the new one
amouser.email = original_email
form.save()
return redirect('users.edit')
else:
messages.error(
request,
_('Errors Found'),
_('There were errors in the changes you made. Please correct '
'them and resubmit.'))
else:
form = forms.UserEditForm(instance=amouser, request=request)
return render(request, 'users/edit.html',
{'form': form, 'amouser': amouser})
def tshirt_eligible(user):
MIN_PERSONA_ADU = 10000
return (
user.t_shirt_requested or
AddonUser.objects.filter(
user=user,
role__in=(amo.AUTHOR_ROLE_OWNER, amo.AUTHOR_ROLE_DEV),
addon__type=amo.ADDON_EXTENSION,
addon__disabled_by_user=False)
.filter(
Q(addon__is_listed=True,
addon___current_version__files__status__in=amo.REVIEWED_STATUSES,
addon__status__in=amo.REVIEWED_STATUSES) |
Q(addon__is_listed=False,
addon__versions__files__is_signed=True))
.exists() or
Addon.objects.filter(
authors=user,
type=amo.ADDON_PERSONA,
status=amo.STATUS_PUBLIC,
disabled_by_user=False)
.aggregate(users=Sum('average_daily_users'))['users'] >=
MIN_PERSONA_ADU)
@write
@login_required
def t_shirt(request):
if not waffle.switch_is_active('t-shirt-orders'):
raise http.Http404()
user = request.user
eligible = tshirt_eligible(user)
if request.method == 'POST':
if not eligible:
messages.error(request,
_("We're sorry, but you are not eligible to "
"request a t-shirt at this time."))
return redirect('users.t-shirt')
if not user.t_shirt_requested:
user.update(t_shirt_requested=datetime.now())
return render(request, 'users/t-shirt.html',
{'eligible': eligible, 'user': user})
@write
@login_required
@permission_required('Users', 'Edit')
@user_view
def admin_edit(request, user):
if request.method == 'POST':
form = forms.AdminUserEditForm(request.POST, request.FILES,
request=request, instance=user)
if form.is_valid():
form.save()
messages.success(request, _('Profile Updated'))
return http.HttpResponseRedirect(reverse('zadmin.index'))
else:
form = forms.AdminUserEditForm(instance=user, request=request)
return render(request, 'users/edit.html', {'form': form, 'amouser': user})
@user_view
def emailchange(request, user, token, hash):
try:
_uid, newemail = EmailResetCode.parse(token, hash)
except ValueError:
return http.HttpResponse(status=400)
if _uid != user.id:
# I'm calling this a warning because invalid hashes up to this point
# could be any number of things, but this is a targeted attack from
# one user account to another
log.warning((u"[Tampering] Valid email reset code for UID (%s) "
u"attempted to change email address for user (%s)") %
(_uid, user))
return http.HttpResponse(status=400)
if UserProfile.objects.filter(email=newemail).exists():
log.warning((u"[Tampering] User (%s) tries to change his email to "
u"an existing account with the same email address (%s)") %
(user, newemail))
return http.HttpResponse(status=400)
user.email = newemail
user.save()
l = {'user': user, 'newemail': newemail}
log.info(u"User (%(user)s) confirmed new email address (%(newemail)s)" % l)
messages.success(
request, _('Your email address was changed successfully'),
_(u'From now on, please use {0} to log in.').format(newemail))
return http.HttpResponseRedirect(reverse('users.edit'))
def _clean_next_url(request):
gets = request.GET.copy()
url = gets.get('to', settings.LOGIN_REDIRECT_URL)
if not is_safe_url(url, host=request.get_host()):
log.info(u'Unsafe redirect to %s' % url)
url = settings.LOGIN_REDIRECT_URL
domain = gets.get('domain', None)
if domain in settings.VALID_LOGIN_REDIRECTS.keys():
url = settings.VALID_LOGIN_REDIRECTS[domain] + url
gets['to'] = url
request.GET = gets
return request
@anonymous_csrf
@mobile_template('users/{mobile/}login_modal.html')
def login_modal(request, template=None):
return _login(request, template=template)
@anonymous_csrf
@mobile_template('users/{mobile/}login.html')
def login(request, template=None):
return _login(request, template=template)
def _login(request, template=None, data=None, dont_redirect=False):
data = data or {}
# In case we need it later. See below.
get_copy = request.GET.copy()
if 'to' in request.GET:
request = _clean_next_url(request)
if request.user.is_authenticated():
return http.HttpResponseRedirect(
request.GET.get('to', settings.LOGIN_REDIRECT_URL))
data['login_source_form'] = (waffle.switch_is_active('fxa-auth') and
not request.POST)
limited = getattr(request, 'limited', 'recaptcha_shown' in request.POST)
user = None
login_status = None
if 'username' in request.POST:
try:
# We are doing all this before we try and validate the form.
user = UserProfile.objects.get(email=request.POST['username'])
limited = ((user.failed_login_attempts >=
settings.LOGIN_RATELIMIT_USER) or limited)
login_status = False
except UserProfile.DoesNotExist:
log_cef('Authentication Failure', 5, request,
username=request.POST['username'],
signature='AUTHFAIL',
msg='The username was invalid')
partial_form = partial(forms.AuthenticationForm, use_recaptcha=limited)
r = auth.views.login(request, template_name=template,
redirect_field_name='to',
authentication_form=partial_form,
extra_context=data)
if isinstance(r, http.HttpResponseRedirect):
# Django's auth.views.login has security checks to prevent someone from
# redirecting to another domain. Since we want to allow this in
# certain cases, we have to make a new response object here to replace
# the above.
request.GET = get_copy
request = _clean_next_url(request)
next_path = request.GET['to']
if waffle.switch_is_active('fxa-auth'):
if next_path == '/':
next_path = None
next_path = urlparams(reverse('users.migrate'), to=next_path)
r = http.HttpResponseRedirect(next_path)
    # Successful login according to Django. Now we do our own checks. The
    # checks live here instead of the form's clean() because we want to use
    # the messages framework, which isn't available in the form's request.
if user.deleted:
logout(request)
log.warning(u'Attempt to log in with deleted account (%s)' % user)
messages.error(request, _('Wrong email address or password!'))
data.update({'form': partial_form()})
user.log_login_attempt(False)
log_cef('Authentication Failure', 5, request,
username=request.user,
signature='AUTHFAIL',
msg='Account is deactivated')
return render(request, template, data)
if user.confirmationcode:
logout(request)
log.info(u'Attempt to log in with unconfirmed account (%s)' % user)
msg1 = _(u'A link to activate your user account was sent by email '
u'to your address {0}. You have to click it before you '
u'can log in.').format(user.email)
url = "%s%s" % (settings.SITE_URL,
reverse('users.confirm.resend', args=[user.id]))
msg2 = _('If you did not receive the confirmation email, make '
'sure your email service did not mark it as "junk '
'mail" or "spam". If you need to, you can have us '
'<a href="%s">resend the confirmation message</a> '
'to your email address mentioned above.') % url
messages.error(request, _('Activation Email Sent'), msg1)
messages.info(request, _('Having Trouble?'), msg2,
title_safe=True, message_safe=True)
data.update({'form': partial_form()})
user.log_login_attempt(False)
return render(request, template, data)
rememberme = request.POST.get('rememberme', None)
if rememberme:
request.session.set_expiry(settings.SESSION_COOKIE_AGE)
log.debug(
u'User (%s) logged in successfully with "remember me" set' %
user)
login_status = True
if dont_redirect:
# We're recalling the middleware to re-initialize user
ACLMiddleware().process_request(request)
r = render(request, template, data)
if login_status is not None:
user.log_login_attempt(login_status)
log_cef('Authentication Failure', 5, request,
username=request.POST['username'],
signature='AUTHFAIL',
msg='The password was incorrect')
return r
def logout(request):
user = request.user
if not user.is_anonymous():
log.debug(u"User (%s) logged out" % user)
auth.logout(request)
if 'to' in request.GET:
request = _clean_next_url(request)
next = request.GET.get('to')
if not next:
next = settings.LOGOUT_REDIRECT_URL
prefixer = get_url_prefix()
if prefixer:
next = prefixer.fix(next)
response = http.HttpResponseRedirect(next)
# Fire logged out signal.
logged_out.send(None, request=request, response=response)
return response
@user_view
@non_atomic_requests
def profile(request, user):
# Get user's own and favorite collections, if they allowed that.
own_coll = fav_coll = []
if user.display_collections:
own_coll = (Collection.objects.listed().filter(author=user)
.order_by('-created'))[:10]
if user.display_collections_fav:
fav_coll = (Collection.objects.listed()
.filter(following__user=user)
.order_by('-following__created'))[:10]
edit_any_user = acl.action_allowed(request, 'Users', 'Edit')
own_profile = (request.user.is_authenticated() and
request.user.id == user.id)
addons = []
personas = []
limited_personas = False
if user.is_developer:
addons = user.addons.reviewed().filter(
addonuser__user=user, addonuser__listed=True)
personas = addons.filter(type=amo.ADDON_PERSONA).order_by(
'-persona__popularity')
if personas.count() > THEMES_LIMIT:
limited_personas = True
personas = personas[:THEMES_LIMIT]
addons = addons.exclude(type=amo.ADDON_PERSONA).order_by(
'-weekly_downloads')
addons = amo.utils.paginate(request, addons, 5)
reviews = amo.utils.paginate(request, user.reviews.all())
data = {'profile': user, 'own_coll': own_coll, 'reviews': reviews,
'fav_coll': fav_coll, 'edit_any_user': edit_any_user,
'addons': addons, 'own_profile': own_profile,
'personas': personas, 'limited_personas': limited_personas,
'THEMES_LIMIT': THEMES_LIMIT}
if not own_profile:
data['abuse_form'] = AbuseForm(request=request)
return render(request, 'users/profile.html', data)
@user_view
@non_atomic_requests
def themes(request, user, category=None):
cats = Category.objects.filter(type=amo.ADDON_PERSONA)
ctx = {
'profile': user,
'categories': order_by_translation(cats, 'name'),
'search_cat': 'themes'
}
if user.is_artist:
base = user.addons.reviewed().filter(
type=amo.ADDON_PERSONA,
addonuser__user=user, addonuser__listed=True)
if category:
qs = cats.filter(slug=category)
ctx['category'] = cat = get_list_or_404(qs)[0]
base = base.filter(categories__id=cat.id)
else:
base = Addon.objects.none()
filter_ = PersonasFilter(request, base, key='sort',
default='popular')
addons = amo.utils.paginate(request, filter_.qs, 30,
count=base.count())
ctx.update({
'addons': addons,
'filter': filter_,
'sorting': filter_.field,
'sort_opts': filter_.opts
})
return render(request, 'browse/personas/grid.html', ctx)
@anonymous_csrf
def register(request):
if waffle.switch_is_active('fxa-auth'):
return login(request)
if request.user.is_authenticated():
messages.info(request, _('You are already logged in to an account.'))
form = None
elif request.method == 'POST':
form = forms.UserRegisterForm(request.POST)
mkt_user = UserProfile.objects.filter(email=form.data['email'],
password='')
if form.is_valid():
try:
u = form.save(commit=False)
u.set_password(form.cleaned_data['password'])
u.generate_confirmationcode()
u.lang = request.LANG
u.save()
log.info(u'Registered new account for user (%s)', u)
log_cef('New Account', 5, request, username=u.username,
signature='AUTHNOTICE',
msg='User created a new account')
u.email_confirmation_code()
msg = _('Congratulations! Your user account was '
'successfully created.')
messages.success(request, msg)
msg = _(u'An email has been sent to your address {0} to '
'confirm your account. Before you can log in, you '
'have to activate your account by clicking on the '
'link provided in this email.').format(u.email)
messages.info(request, _('Confirmation Email Sent'), msg)
except IntegrityError, e:
# I was unable to reproduce this, but I suspect it happens
# when they POST twice quickly and the slaves don't have the
                # new info yet (total guess). Anyway, I'm assuming the
                # first one worked properly, so this is still a success
                # case for the end user; we just log it...
log.error('Failed to register new user (%s): %s' % (u, e))
return http.HttpResponseRedirect(reverse('users.login'))
elif mkt_user.exists():
f = PasswordResetForm()
f.users_cache = [mkt_user[0]]
f.save(use_https=request.is_secure(),
email_template_name='users/email/pwreset.ltxt',
request=request)
return render(request, 'users/newpw_sent.html', {})
else:
messages.error(request, _('There are errors in this form'),
_('Please correct them and resubmit.'))
else:
form = forms.UserRegisterForm()
reg_action = reverse('users.register')
return render(request, 'users/register.html',
{'form': form, 'register_action': reg_action})
@anonymous_csrf_exempt
@user_view
def report_abuse(request, user):
form = AbuseForm(request.POST or None, request=request)
if request.method == 'POST' and form.is_valid():
send_abuse_report(request, user, form.cleaned_data['text'])
messages.success(request, _('User reported.'))
else:
return render(request, 'users/report_abuse_full.html',
{'profile': user, 'abuse_form': form})
return redirect(user.get_url_path())
@post_required
@user_view
def remove_locale(request, user):
"""Remove a locale from the user's translations."""
POST = request.POST
if 'locale' in POST and POST['locale'] != settings.LANGUAGE_CODE:
user.remove_locale(POST['locale'])
return http.HttpResponse()
return http.HttpResponseBadRequest()
@never_cache
@anonymous_csrf
def password_reset_confirm(request, uidb64=None, token=None):
"""
Pulled from django contrib so that we can add user into the form
so then we can show relevant messages about the user.
"""
assert uidb64 is not None and token is not None
user = None
try:
uid_int = urlsafe_base64_decode(uidb64)
user = UserProfile.objects.get(id=uid_int)
except (ValueError, UserProfile.DoesNotExist, TypeError):
pass
if (user is not None and user.fxa_migrated()
and waffle.switch_is_active('fxa-auth')):
migrated = True
validlink = False
form = None
elif user is not None and default_token_generator.check_token(user, token):
migrated = False
validlink = True
if request.method == 'POST':
form = forms.SetPasswordForm(user, request.POST)
if form.is_valid():
form.save()
log_cef('Password Changed', 5, request,
username=user.username,
signature='PASSWORDCHANGED',
msg='User changed password')
return redirect(reverse('django.contrib.auth.'
'views.password_reset_complete'))
else:
form = forms.SetPasswordForm(user)
else:
migrated = False
validlink = False
form = None
return render(request, 'users/pwreset_confirm.html',
{'form': form, 'validlink': validlink, 'migrated': migrated})
@never_cache
def unsubscribe(request, hash=None, token=None, perm_setting=None):
"""
Pulled from django contrib so that we can add user into the form
so then we can show relevant messages about the user.
"""
assert hash is not None and token is not None
user = None
try:
email = UnsubscribeCode.parse(token, hash)
user = UserProfile.objects.get(email=email)
except (ValueError, UserProfile.DoesNotExist):
pass
perm_settings = []
if user is not None:
unsubscribed = True
if not perm_setting:
# TODO: make this work. nothing currently links to it, though.
perm_settings = [l for l in notifications.NOTIFICATIONS
if not l.mandatory]
else:
perm_setting = notifications.NOTIFICATIONS_BY_SHORT[perm_setting]
UserNotification.update_or_create(
update={'enabled': False},
user=user, notification_id=perm_setting.id)
perm_settings = [perm_setting]
else:
unsubscribed = False
email = ''
return render(request, 'users/unsubscribe.html',
{'unsubscribed': unsubscribed, 'email': email,
'perm_settings': perm_settings})
@waffle_switch('fxa-auth')
@mobile_template('users/{mobile/}fxa_migration.html')
def migrate(request, template=None):
next_path = request.GET.get('to')
if not next_path or not is_safe_url(next_path):
next_path = reverse('home')
if not request.user.is_authenticated() or request.user.fxa_migrated():
return redirect(next_path)
else:
return render(request, template, {'to': next_path})
|
|
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
# Due to the PEX_ properties, disable checkstyle.
# checkstyle: noqa
import os
import sys
from contextlib import contextmanager
from .common import die
__all__ = ('ENV', 'Variables')
class Variables(object):
"""Environment variables supported by the PEX runtime."""
@classmethod
def process_pydoc(cls, pydoc):
if pydoc is None:
return 'Unknown', 'Unknown'
pydoc = pydoc.splitlines()
variable_type = pydoc[0]
variable_text = ' '.join(filter(None, (line.strip() for line in pydoc[2:])))
return variable_type, variable_text
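    # A minimal sketch of the docstring shape process_pydoc parses; the sample
    # text below is illustrative, not a real PEX_ variable's docstring.
    @classmethod
    def _demo_process_pydoc(cls):
        assert cls.process_pydoc('Integer\n\nHow verbose to be.') == \
            ('Integer', 'How verbose to be.')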
@classmethod
def iter_help(cls):
for variable_name, value in sorted(cls.__dict__.items()):
if not variable_name.startswith('PEX_'):
continue
variable_type, variable_text = cls.process_pydoc(getattr(value, '__doc__'))
yield variable_name, variable_type, variable_text
def __init__(self, environ=None, rc='~/.pexrc', use_defaults=True):
self._use_defaults = use_defaults
self._environ = environ.copy() if environ else os.environ
if not self.PEX_IGNORE_RCFILES:
rc_values = self._from_rc(rc).copy()
rc_values.update(self._environ)
self._environ = rc_values
def copy(self):
return self._environ.copy()
def delete(self, variable):
self._environ.pop(variable, None)
def set(self, variable, value):
self._environ[variable] = str(value)
def _from_rc(self, rc):
ret_vars = {}
for filename in [rc, os.path.join(os.path.dirname(sys.argv[0]), '.pexrc')]:
try:
with open(os.path.expanduser(filename)) as fh:
rc_items = map(self._get_kv, fh)
ret_vars.update(dict(filter(None, rc_items)))
except IOError:
continue
return ret_vars
    def _get_kv(self, variable):
        # Split on the first '=' only, so values may themselves contain '='.
        kv = variable.strip().split('=', 1)
        if len(list(filter(None, kv))) == 2:
            return kv
def _defaulted(self, default):
return default if self._use_defaults else None
def _get_bool(self, variable, default=False):
value = self._environ.get(variable)
if value is not None:
if value.lower() in ('0', 'false'):
return False
elif value.lower() in ('1', 'true'):
return True
else:
die('Invalid value for %s, must be 0/1/false/true, got %r' % (variable, value))
else:
return self._defaulted(default)
def _get_string(self, variable, default=None):
return self._environ.get(variable, self._defaulted(default))
def _get_path(self, variable, default=None):
value = self._get_string(variable, default=default)
if value is not None:
return os.path.realpath(os.path.expanduser(value))
def _get_int(self, variable, default=None):
try:
return int(self._environ[variable])
except ValueError:
die('Invalid value for %s, must be an integer, got %r' % (variable, self._environ[variable]))
except KeyError:
return self._defaulted(default)
def strip_defaults(self):
"""Returns a copy of these variables but with defaults stripped.
Any variables not explicitly set in the environment will have a value of `None`.
"""
return Variables(environ=self.copy(), use_defaults=False)
@contextmanager
def patch(self, **kw):
"""Update the environment for the duration of a context."""
old_environ = self._environ
self._environ = self._environ.copy()
self._environ.update(kw)
yield
self._environ = old_environ
@property
def PEX_ALWAYS_CACHE(self):
"""Boolean
    Always write PEX dependencies to disk prior to invocation, regardless of whether or not the
dependencies are zip-safe. For certain dependencies that are very large such as numpy, this
can reduce the RAM necessary to launch the PEX. The data will be written into $PEX_ROOT,
which by default is $HOME/.pex. Default: false.
"""
return self._get_bool('PEX_ALWAYS_CACHE', default=False)
@property
def PEX_COVERAGE(self):
"""Boolean
Enable coverage reporting for this PEX file. This requires that the "coverage" module is
available in the PEX environment. Default: false.
"""
return self._get_bool('PEX_COVERAGE', default=False)
@property
def PEX_COVERAGE_FILENAME(self):
"""Filename
Write the coverage data to the specified filename. If PEX_COVERAGE_FILENAME is not specified
but PEX_COVERAGE is, coverage information will be printed to stdout and not saved.
"""
return self._get_path('PEX_COVERAGE_FILENAME', default=None)
@property
def PEX_FORCE_LOCAL(self):
"""Boolean
Force this PEX to be not-zip-safe. This forces all code and dependencies to be written into
$PEX_ROOT prior to invocation. This is an option for applications with static assets that
refer to paths relative to __file__ instead of using pkgutil/pkg_resources. Default: false.
"""
return self._get_bool('PEX_FORCE_LOCAL', default=False)
@property
def PEX_IGNORE_ERRORS(self):
"""Boolean
Ignore any errors resolving dependencies when invoking the PEX file. This can be useful if you
know that a particular failing dependency is not necessary to run the application. Default:
false.
"""
return self._get_bool('PEX_IGNORE_ERRORS', default=False)
@property
def PEX_INHERIT_PATH(self):
"""Boolean
Allow inheriting packages from site-packages. By default, PEX scrubs any packages and
namespace packages from sys.path prior to invoking the application. This is generally not
advised, but can be used in situations when certain dependencies do not conform to standard
packaging practices and thus cannot be bundled into PEX files. Default: false.
"""
return self._get_bool('PEX_INHERIT_PATH', default=False)
@property
def PEX_INTERPRETER(self):
"""Boolean
Drop into a REPL instead of invoking the predefined entry point of this PEX. This can be
useful for inspecting the PEX environment interactively. It can also be used to treat the PEX
file as an interpreter in order to execute other scripts in the context of the PEX file, e.g.
"PEX_INTERPRETER=1 ./app.pex my_script.py". Equivalent to setting PEX_MODULE to empty.
Default: false.
"""
return self._get_bool('PEX_INTERPRETER', default=False)
@property
def PEX_MODULE(self):
"""String
Override the entry point into the PEX file. Can either be a module, e.g. 'SimpleHTTPServer',
or a specific entry point in module:symbol form, e.g. "myapp.bin:main".
"""
return self._get_string('PEX_MODULE', default=None)
@property
def PEX_PROFILE(self):
"""Boolean
Enable application profiling. If specified and PEX_PROFILE_FILENAME is not specified, PEX will
print profiling information to stdout.
"""
return self._get_path('PEX_PROFILE', default=None)
@property
def PEX_PROFILE_FILENAME(self):
"""Filename
Profile the application and dump a profile into the specified filename in the standard
"profile" module format.
"""
return self._get_path('PEX_PROFILE_FILENAME', default=None)
@property
def PEX_PROFILE_SORT(self):
"""String
Toggle the profile sorting algorithm used to print out profile columns. Default:
'cumulative'.
"""
return self._get_string('PEX_PROFILE_SORT', default='cumulative')
@property
def PEX_PYTHON(self):
"""String
Override the Python interpreter used to invoke this PEX. Can be either an absolute path to an
interpreter or a base name e.g. "python3.3". If a base name is provided, the $PATH will be
searched for an appropriate match.
"""
return self._get_string('PEX_PYTHON', default=None)
@property
def PEX_ROOT(self):
"""Directory
The directory location for PEX to cache any dependencies and code. PEX must write
not-zip-safe eggs and all wheels to disk in order to activate them. Default: ~/.pex
"""
return self._get_path('PEX_ROOT', default=None)
@property
def PEX_PATH(self):
"""A set of one or more PEX files
Merge the packages from other PEX files into the current environment. This allows you to
do things such as create a PEX file containing the "coverage" module or create PEX files
containing plugin entry points to be consumed by a main application. Paths should be
specified in the same manner as $PATH, e.g. PEX_PATH=/path/to/pex1.pex:/path/to/pex2.pex
and so forth.
"""
return self._get_string('PEX_PATH', default='')
@property
def PEX_SCRIPT(self):
"""String
The script name within the PEX environment to execute. This must either be an entry point as
defined in a distribution's console_scripts, or a script as defined in a distribution's
scripts section. While Python supports any script including shell scripts, PEX only supports
invocation of Python scripts in this fashion.
"""
return self._get_string('PEX_SCRIPT', default=None)
@property
def PEX_TEARDOWN_VERBOSE(self):
"""Boolean
Enable verbosity for when the interpreter shuts down. This is mostly only useful for
debugging PEX itself. Default: false.
"""
return self._get_bool('PEX_TEARDOWN_VERBOSE', default=False)
@property
def PEX_VERBOSE(self):
"""Integer
Set the verbosity level of PEX debug logging. The higher the number, the more logging, with 0
being disabled. This environment variable can be extremely useful in debugging PEX
environment issues. Default: 0
"""
return self._get_int('PEX_VERBOSE', default=0)
# TODO(wickman) Remove and push into --flags. #94
@property
def PEX_HTTP_RETRIES(self):
"""Integer
The number of HTTP retries when performing dependency resolution when building a PEX file.
Default: 5.
"""
return self._get_int('PEX_HTTP_RETRIES', default=5)
@property
def PEX_IGNORE_RCFILES(self):
"""Boolean
Explicitly disable the reading/parsing of pexrc files (~/.pexrc). Default: false.
"""
return self._get_bool('PEX_IGNORE_RCFILES', default=False)
# Global singleton environment
ENV = Variables()
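# A minimal usage sketch for Variables (illustrative only; the helper name and
# the variable values below are assumptions, not part of the PEX API):
def _demo_variables():
    env = Variables(environ={'PEX_VERBOSE': '3', 'PEX_IGNORE_RCFILES': '1'})
    assert env.PEX_VERBOSE == 3           # _get_int parses the raw string
    assert env.PEX_COVERAGE is False      # unset booleans fall back to defaults
    assert env.strip_defaults().PEX_COVERAGE is None  # ...unless defaults are stripped
    with env.patch(PEX_VERBOSE='9'):      # patch() scopes overrides to a block
        assert env.PEX_VERBOSE == 9
    assert env.PEX_VERBOSE == 3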
|
|
# Copyright 2011-2015 ZackZK
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. moduleauthor:: ZackZK <silajoin@sina.com>
"""
import Queue
import datetime
import threading
import time
from collections import deque
import pytz
import tushare as ts
# from tushare.util.dateu import is_holiday  # use our own is_holiday for now, as tushare does not include the 2016 holidays
import pyalgotrade.logger
from pyalgotrade import bar
from pyalgotrade import barfeed
from pyalgotrade import dataseries
from pyalgotrade import resamplebase
from pyalgotrade.utils import dt
from pyalgotrade.bar import Frequency
from pyalgotrade.xignite.barfeed import utcnow
logger = pyalgotrade.logger.getLogger("tushare")
def to_market_datetime(dateTime):
timezone = pytz.timezone('Asia/Shanghai')
return dt.localize(dateTime, timezone)
holiday = ['2015-01-01', '2015-01-02', '2015-02-18', '2015-02-19', '2015-02-20', '2015-02-23', '2015-02-24',
'2015-04-06', '2015-05-01', '2015-06-22', '2015-09-03', '2015-09-04', '2015-10-01', '2015-10-02',
'2015-10-05', '2015-10-06', '2015-10-07',
'2016-01-01', '2016-02-08', '2016-02-09', '2016-02-10', '2016-02-11', '2016-02-12', '2016-04-04',
'2016-05-02', '2016-06-09', '2016-06-10', '2016-09-15', '2016-09-16', '2016-10-03', '2016-10-04',
'2016-10-05', '2016-10-06', '2016-10-07']
def is_holiday(date):
    if isinstance(date, str):
        date_str = date
        day = datetime.datetime.strptime(date, '%Y-%m-%d')
    else:
        day = date
        date_str = date.strftime('%Y-%m-%d')
    return day.isoweekday() in (6, 7) or date_str in holiday
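# A small illustrative check of is_holiday (dates chosen by hand):
def _demo_is_holiday():
    assert is_holiday('2016-01-01')       # listed holiday
    assert is_holiday('2016-01-02')       # Saturday
    assert not is_holiday('2016-01-04')   # ordinary trading Monday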
class TickDataSeries(object):
def __init__(self):
self.__priceDS = deque()
self.__volumeDS = deque()
self.__amountDS = deque()
self.__dateTimes = deque() # just for debug
def reset(self):
self.__priceDS.clear()
self.__volumeDS.clear()
self.__amountDS.clear()
self.__dateTimes.clear()
def getPriceDS(self):
return self.__priceDS
def getAmountDS(self):
return self.__amountDS
def getVolumeDS(self):
return self.__volumeDS
def getDateTimes(self):
return self.__dateTimes
def append(self, price, volume, amount, dateTime):
        assert dateTime is not None
self.__priceDS.append(price)
self.__volumeDS.append(volume)
self.__amountDS.append(amount)
self.__dateTimes.append(dateTime)
def empty(self):
return len(self.__priceDS) == 0
def get_trading_days(start_day, days):
try:
df = ts.get_hist_data('sh')
except Exception, e:
logger.error("Tushare get hist data exception", exc_info=e)
return []
trading_days = list()
    skipped = 0  # count of non-trading days stepped over so far
    for i in range(days):
        while True:
            day = start_day - datetime.timedelta(days=i + 1 + skipped)
            if day.date().isoformat() in df.index:
                trading_days.append(day)
                break
            else:
                skipped += 1
trading_days.reverse() # oldest date is put to head
return trading_days
def build_bar(dateTime, ds):
    # tushare delivers string fields; convert to float before comparing, since
    # a lexicographic max/min would order '9.99' above '10.01'.
    prices = [float(p) for p in ds.getPriceDS()]
    open_ = prices[0]
    high = max(prices)
    low = min(prices)
    close = prices[-1]
    volume = sum(int(v) for v in ds.getVolumeDS())
    amount = sum(float(a) for a in ds.getAmountDS())
    return bar.BasicBar(dateTime, open_, high, low, close, volume, None, Frequency.DAY, amount)
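# A minimal sketch of how TickDataSeries and build_bar fit together; the tick
# values below are made up for illustration:
def _demo_build_bar():
    ds = TickDataSeries()
    ds.append('9.9', '100', '990.0', '09:30:01')
    ds.append('10.5', '200', '2100.0', '09:30:02')
    ds.append('10.2', '150', '1530.0', '09:30:03')
    # -> open=9.9, high=10.5, low=9.9, close=10.2, volume=450
    return build_bar(to_market_datetime(datetime.datetime(2016, 1, 4, 15, 0)), ds)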
class TuSharePollingThread(threading.Thread):
    # We don't reuse the xignite polling thread because its double-underscore
    # (name-mangled) methods, e.g. __wait(), can't be overridden.
    TUSHARE_INQUIRY_PERIOD = 3  # tushare polling period, default is 3s
def __init__(self, identifiers):
super(TuSharePollingThread, self).__init__()
self._identifiers = identifiers
self._tickDSDict = {}
self._last_quotation_time = {}
for identifier in self._identifiers:
self._tickDSDict[identifier] = TickDataSeries()
self._last_quotation_time[identifier] = None
self.__stopped = False
def __wait(self):
        # Reset each identifier's tick series at the start of the cycle; if
        # there is NO quotation in this period we may want to keep the old
        # ticks instead.
for identifier in self._identifiers:
self._tickDSDict[identifier].reset()
nextCall = self.getNextCallDateTime()
while not self.__stopped and utcnow() < nextCall:
start_time = datetime.datetime.now()
self.get_tushare_tick_data()
end_time = datetime.datetime.now()
time_diff = (end_time - start_time).seconds
            if time_diff < TuSharePollingThread.TUSHARE_INQUIRY_PERIOD:
                time.sleep(TuSharePollingThread.TUSHARE_INQUIRY_PERIOD - time_diff)
def valid_tick_data(self, identifier, tick_info):
if self._last_quotation_time[identifier] is None or \
self._last_quotation_time[identifier] < tick_info.time:
self._last_quotation_time[identifier] = tick_info.time
else:
return False
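        # A-share prices may move at most 10% from the previous close within a
        # session, so quotes outside that band are treated as bad ticks.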
return float(tick_info.pre_close) * 0.9 <= float(tick_info.price) <= float(tick_info.pre_close) * 1.1
def get_tushare_tick_data(self):
try:
df = ts.get_realtime_quotes(self._identifiers)
for index, identifier in enumerate(self._identifiers):
tick_info = df.ix[index]
if self.valid_tick_data(identifier, tick_info):
                    # tushare returns unicode fields; alternatively convert
                    # them to int/float here (see build_bar).
self._tickDSDict[identifier].append(tick_info.price, tick_info.volume, tick_info.amount,
tick_info.time)
except Exception, e:
logger.error("Tushare polling exception", exc_info=e)
def stop(self):
self.__stopped = True
def stopped(self):
return self.__stopped
def run(self):
logger.debug("Thread started.")
while not self.__stopped:
self.__wait()
if not self.__stopped:
try:
self.doCall()
except Exception, e:
logger.critical("Unhandled exception", exc_info=e)
logger.debug("Thread finished.")
# Must return a non-naive datetime.
def getNextCallDateTime(self):
raise NotImplementedError()
def doCall(self):
raise NotImplementedError()
class TushareBarFeedThread(TuSharePollingThread):
# Events
ON_BARS = 1
def __init__(self, queue, identifiers, frequency):
super(TushareBarFeedThread, self).__init__(identifiers)
self.__queue = queue
self.__frequency = frequency
self.__updateNextBarClose()
def __updateNextBarClose(self):
self.__nextBarClose = resamplebase.build_range(utcnow(), self.__frequency).getEnding()
def getNextCallDateTime(self):
return self.__nextBarClose
def doCall(self):
endDateTime = self.__nextBarClose
self.__updateNextBarClose()
bar_dict = {}
for identifier in self._identifiers:
try:
if not self._tickDSDict[identifier].empty():
bar_dict[identifier] = build_bar(to_market_datetime(endDateTime), self._tickDSDict[identifier])
except Exception, e:
logger.error(e)
if len(bar_dict):
bars = bar.Bars(bar_dict)
self.__queue.put((TushareBarFeedThread.ON_BARS, bars))
def get_bar_list(df, frequency, date=None):
bar_list = []
end_time = df.ix[0].time
if date is None:
date = datetime.datetime.now()
    slice_start_time = to_market_datetime(datetime.datetime(date.year, date.month, date.day, 9, 30, 0))
while slice_start_time.strftime("%H:%M:%S") < end_time:
slice_end_time = slice_start_time + datetime.timedelta(seconds=frequency)
ticks_slice = df.ix[(df.time < slice_end_time.strftime("%H:%M:%S")) &
(df.time >= slice_start_time.strftime("%H:%M:%S"))]
if not ticks_slice.empty:
open_ = ticks_slice.price.get_values()[-1]
high = max(ticks_slice.price)
low = min(ticks_slice.price)
close = ticks_slice.price.get_values()[0]
volume = sum(ticks_slice.volume)
amount = sum(ticks_slice.amount)
            # NOTE: the original code passed 10 arguments, but bar.BasicBar
            # takes exactly 9, so the trailing amount argument is dropped:
            # bar_list.append(bar.BasicBar(slice_start_time, open_, high, low, close, volume, 0, frequency, amount))
            bar_list.append(bar.BasicBar(slice_start_time, open_, high, low, close, volume, 0, frequency))
else:
bar_list.append(None)
slice_start_time = slice_end_time
return bar_list
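# A minimal sketch of get_bar_list. It assumes an old pandas where
# DataFrame.ix accepts boolean masks (as the code above does); the tick rows
# are made up, newest first as tushare returns them:
def _demo_get_bar_list():
    import pandas as pd
    df = pd.DataFrame({'time': ['09:30:40', '09:30:10'],
                       'price': [10.2, 10.0],
                       'volume': [5, 3],
                       'amount': [51.0, 30.0]})
    # Both ticks fall into the first 60s slice -> a single bar with
    # open=10.0, high=10.2, low=10.0, close=10.2, volume=8.
    return get_bar_list(df, 60, datetime.datetime(2016, 1, 4))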
class TuShareLiveFeed(barfeed.BaseBarFeed):
QUEUE_TIMEOUT = 0.01
def __init__(self, identifiers, frequency, maxLen=dataseries.DEFAULT_MAX_LEN, replayDays=-1):
barfeed.BaseBarFeed.__init__(self, frequency, maxLen)
if not isinstance(identifiers, list):
raise Exception("identifiers must be a list")
self.__identifiers = identifiers
self.__frequency = frequency
self.__queue = Queue.Queue()
self.__fill_today_history_bars(replayDays) # should run before polling thread start
self.__thread = TushareBarFeedThread(self.__queue, identifiers, frequency)
for instrument in identifiers:
self.registerInstrument(instrument)
######################################################################
# observer.Subject interface
def start(self):
if self.__thread.is_alive():
raise Exception("Already strated")
# Start the thread that runs the client.
self.__thread.start()
def stop(self):
self.__thread.stop()
def join(self):
if self.__thread.is_alive():
self.__thread.join()
def eof(self):
return self.__thread.stopped()
def peekDateTime(self):
return None
######################################################################
# barfeed.BaseBarFeed interface
def getCurrentDateTime(self):
return utcnow()
def barsHaveAdjClose(self):
return False
def getNextBars(self):
ret = None
try:
eventType, eventData = self.__queue.get(True, TuShareLiveFeed.QUEUE_TIMEOUT)
if eventType == TushareBarFeedThread.ON_BARS:
ret = eventData
else:
logger.error("Invalid event received: %s - %s" % (eventType, eventData))
except Queue.Empty:
pass
return ret
######################################################################
# TuShareLiveFeed own interface
def _fill_today_bars(self):
today = datetime.date.today().isoformat()
if is_holiday(today): # do nothing if holiday
return
today_bars = {}
for identifier in self.__identifiers:
try:
df = ts.get_today_ticks(identifier)
today_bars[identifier] = get_bar_list(df, self.__frequency, None)
except Exception, e:
logger.error(e)
self.__fill_bars(today_bars)
def __fill_bars(self, bars_dict):
for index, value in enumerate(bars_dict[self.__identifiers[0]]):
bar_dict = dict()
for identifier in self.__identifiers:
if bars_dict[identifier][index] is not None:
bar_dict[identifier] = bars_dict[identifier][index]
if len(bar_dict):
bars = bar.Bars(bar_dict)
self.__queue.put((TushareBarFeedThread.ON_BARS, bars))
def _fill_history_bars(self, replay_days):
now = datetime.datetime.now()
for day in get_trading_days(now, replay_days):
bars_dict = {}
for identifier in self.__identifiers:
df = ts.get_tick_data(identifier, date=day.date().isoformat())
bars_dict[identifier] = get_bar_list(df, self.__frequency, day)
self.__fill_bars(bars_dict)
def __fill_today_history_bars(self, replayDays):
if replayDays < 0: # only allow -1 and >=0 integer value
replayDays = -1
if replayDays == -1:
pass
elif replayDays == 0: # replay today's quotation
self._fill_today_bars()
else:
self._fill_history_bars(replayDays)
self._fill_today_bars()
if __name__ == '__main__':
liveFeed = TuShareLiveFeed(['000581'], Frequency.MINUTE, dataseries.DEFAULT_MAX_LEN, 2)
liveFeed.start()
while not liveFeed.eof():
bars = liveFeed.getNextBars()
if bars is not None:
print bars['000581'].getHigh(), bars['000581'].getDateTime()
# test/
|
|
# -*- coding: utf-8 -*-
"""Maximum flow algorithms test suite.
"""
from nose.tools import *
import networkx as nx
from networkx.algorithms.flow import build_flow_dict, build_residual_network
from networkx.algorithms.flow import boykov_kolmogorov
from networkx.algorithms.flow import edmonds_karp
from networkx.algorithms.flow import preflow_push
from networkx.algorithms.flow import shortest_augmenting_path
from networkx.algorithms.flow import dinitz
flow_funcs = [boykov_kolmogorov, dinitz, edmonds_karp, preflow_push, shortest_augmenting_path]
max_min_funcs = [nx.maximum_flow, nx.minimum_cut]
flow_value_funcs = [nx.maximum_flow_value, nx.minimum_cut_value]
interface_funcs = sum([max_min_funcs, flow_value_funcs], [])
all_funcs = sum([flow_funcs, interface_funcs], [])
msg = "Assertion failed in function: {0}"
msgi = "Assertion failed in function: {0} in interface {1}"
def compute_cutset(G, partition):
reachable, non_reachable = partition
cutset = set()
for u, nbrs in ((n, G[n]) for n in reachable):
cutset.update((u, v) for v in nbrs if v in non_reachable)
return cutset
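# A minimal illustration of compute_cutset (not part of the test suite): in a
# path s->m->t partitioned as ({s}, {m, t}), the only crossing edge is (s, m).
def _demo_compute_cutset():
    G = nx.DiGraph()
    G.add_edge('s', 'm', capacity=1)
    G.add_edge('m', 't', capacity=1)
    assert compute_cutset(G, ({'s'}, {'m', 't'})) == {('s', 'm')}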
def validate_flows(G, s, t, flowDict, solnValue, capacity, flow_func):
assert_equal(set(G), set(flowDict), msg=msg.format(flow_func.__name__))
for u in G:
assert_equal(set(G[u]), set(flowDict[u]),
msg=msg.format(flow_func.__name__))
excess = dict((u, 0) for u in flowDict)
for u in flowDict:
for v, flow in flowDict[u].items():
if capacity in G[u][v]:
ok_(flow <= G[u][v][capacity])
ok_(flow >= 0, msg=msg.format(flow_func.__name__))
excess[u] -= flow
excess[v] += flow
for u, exc in excess.items():
if u == s:
assert_equal(exc, -solnValue, msg=msg.format(flow_func.__name__))
elif u == t:
assert_equal(exc, solnValue, msg=msg.format(flow_func.__name__))
else:
assert_equal(exc, 0, msg=msg.format(flow_func.__name__))
def validate_cuts(G, s, t, solnValue, partition, capacity, flow_func):
assert_true(all(n in G for n in partition[0]),
msg=msg.format(flow_func.__name__))
assert_true(all(n in G for n in partition[1]),
msg=msg.format(flow_func.__name__))
cutset = compute_cutset(G, partition)
assert_true(all(G.has_edge(u, v) for (u, v) in cutset),
msg=msg.format(flow_func.__name__))
assert_equal(solnValue, sum(G[u][v][capacity] for (u, v) in cutset),
msg=msg.format(flow_func.__name__))
H = G.copy()
H.remove_edges_from(cutset)
if not G.is_directed():
assert_false(nx.is_connected(H), msg=msg.format(flow_func.__name__))
else:
assert_false(nx.is_strongly_connected(H),
msg=msg.format(flow_func.__name__))
def compare_flows_and_cuts(G, s, t, solnFlows, solnValue, capacity='capacity'):
for flow_func in flow_funcs:
R = flow_func(G, s, t, capacity)
# Test both legacy and new implementations.
flow_value = R.graph['flow_value']
flow_dict = build_flow_dict(G, R)
assert_equal(flow_value, solnValue, msg=msg.format(flow_func.__name__))
validate_flows(G, s, t, flow_dict, solnValue, capacity, flow_func)
# Minimum cut
cut_value, partition = nx.minimum_cut(G, s, t, capacity=capacity,
flow_func=flow_func)
validate_cuts(G, s, t, solnValue, partition, capacity, flow_func)
class TestMaxflowMinCutCommon:
def test_graph1(self):
# Trivial undirected graph
G = nx.Graph()
G.add_edge(1,2, capacity = 1.0)
solnFlows = {1: {2: 1.0},
2: {1: 1.0}}
compare_flows_and_cuts(G, 1, 2, solnFlows, 1.0)
def test_graph2(self):
# A more complex undirected graph
# adapted from www.topcoder.com/tc?module=Statc&d1=tutorials&d2=maxFlow
G = nx.Graph()
G.add_edge('x','a', capacity = 3.0)
G.add_edge('x','b', capacity = 1.0)
G.add_edge('a','c', capacity = 3.0)
G.add_edge('b','c', capacity = 5.0)
G.add_edge('b','d', capacity = 4.0)
G.add_edge('d','e', capacity = 2.0)
G.add_edge('c','y', capacity = 2.0)
G.add_edge('e','y', capacity = 3.0)
H = {'x': {'a': 3, 'b': 1},
'a': {'c': 3, 'x': 3},
'b': {'c': 1, 'd': 2, 'x': 1},
'c': {'a': 3, 'b': 1, 'y': 2},
'd': {'b': 2, 'e': 2},
'e': {'d': 2, 'y': 2},
'y': {'c': 2, 'e': 2}}
compare_flows_and_cuts(G, 'x', 'y', H, 4.0)
def test_digraph1(self):
# The classic directed graph example
G = nx.DiGraph()
G.add_edge('a','b', capacity = 1000.0)
G.add_edge('a','c', capacity = 1000.0)
G.add_edge('b','c', capacity = 1.0)
G.add_edge('b','d', capacity = 1000.0)
G.add_edge('c','d', capacity = 1000.0)
H = {'a': {'b': 1000.0, 'c': 1000.0},
'b': {'c': 0, 'd': 1000.0},
'c': {'d': 1000.0},
'd': {}}
compare_flows_and_cuts(G, 'a', 'd', H, 2000.0)
def test_digraph2(self):
# An example in which some edges end up with zero flow.
G = nx.DiGraph()
G.add_edge('s', 'b', capacity = 2)
G.add_edge('s', 'c', capacity = 1)
G.add_edge('c', 'd', capacity = 1)
G.add_edge('d', 'a', capacity = 1)
G.add_edge('b', 'a', capacity = 2)
G.add_edge('a', 't', capacity = 2)
H = {'s': {'b': 2, 'c': 0},
'c': {'d': 0},
'd': {'a': 0},
'b': {'a': 2},
'a': {'t': 2},
't': {}}
compare_flows_and_cuts(G, 's', 't', H, 2)
def test_digraph3(self):
# A directed graph example from Cormen et al.
G = nx.DiGraph()
G.add_edge('s','v1', capacity = 16.0)
G.add_edge('s','v2', capacity = 13.0)
G.add_edge('v1','v2', capacity = 10.0)
G.add_edge('v2','v1', capacity = 4.0)
G.add_edge('v1','v3', capacity = 12.0)
G.add_edge('v3','v2', capacity = 9.0)
G.add_edge('v2','v4', capacity = 14.0)
G.add_edge('v4','v3', capacity = 7.0)
G.add_edge('v3','t', capacity = 20.0)
G.add_edge('v4','t', capacity = 4.0)
H = {'s': {'v1': 12.0, 'v2': 11.0},
'v2': {'v1': 0, 'v4': 11.0},
'v1': {'v2': 0, 'v3': 12.0},
'v3': {'v2': 0, 't': 19.0},
'v4': {'v3': 7.0, 't': 4.0},
't': {}}
compare_flows_and_cuts(G, 's', 't', H, 23.0)
def test_digraph4(self):
# A more complex directed graph
# from www.topcoder.com/tc?module=Statc&d1=tutorials&d2=maxFlow
G = nx.DiGraph()
G.add_edge('x','a', capacity = 3.0)
G.add_edge('x','b', capacity = 1.0)
G.add_edge('a','c', capacity = 3.0)
G.add_edge('b','c', capacity = 5.0)
G.add_edge('b','d', capacity = 4.0)
G.add_edge('d','e', capacity = 2.0)
G.add_edge('c','y', capacity = 2.0)
G.add_edge('e','y', capacity = 3.0)
H = {'x': {'a': 2.0, 'b': 1.0},
'a': {'c': 2.0},
'b': {'c': 0, 'd': 1.0},
'c': {'y': 2.0},
'd': {'e': 1.0},
'e': {'y': 1.0},
'y': {}}
compare_flows_and_cuts(G, 'x', 'y', H, 3.0)
def test_wikipedia_dinitz_example(self):
# Nice example from https://en.wikipedia.org/wiki/Dinic's_algorithm
G = nx.DiGraph()
G.add_edge('s', 1, capacity=10)
G.add_edge('s', 2, capacity=10)
G.add_edge(1, 3, capacity=4)
G.add_edge(1, 4, capacity=8)
G.add_edge(1, 2, capacity=2)
G.add_edge(2, 4, capacity=9)
G.add_edge(3, 't', capacity=10)
G.add_edge(4, 3, capacity=6)
G.add_edge(4, 't', capacity=10)
solnFlows = {1: {2: 0, 3: 4, 4: 6},
2: {4: 9},
3: {'t': 9},
4: {3: 5, 't': 10},
's': {1: 10, 2: 9},
't': {}}
compare_flows_and_cuts(G, 's', 't', solnFlows, 19)
def test_optional_capacity(self):
# Test optional capacity parameter.
G = nx.DiGraph()
G.add_edge('x','a', spam = 3.0)
G.add_edge('x','b', spam = 1.0)
G.add_edge('a','c', spam = 3.0)
G.add_edge('b','c', spam = 5.0)
G.add_edge('b','d', spam = 4.0)
G.add_edge('d','e', spam = 2.0)
G.add_edge('c','y', spam = 2.0)
G.add_edge('e','y', spam = 3.0)
solnFlows = {'x': {'a': 2.0, 'b': 1.0},
'a': {'c': 2.0},
'b': {'c': 0, 'd': 1.0},
'c': {'y': 2.0},
'd': {'e': 1.0},
'e': {'y': 1.0},
'y': {}}
solnValue = 3.0
s = 'x'
t = 'y'
compare_flows_and_cuts(G, s, t, solnFlows, solnValue, capacity = 'spam')
def test_digraph_infcap_edges(self):
# DiGraph with infinite capacity edges
G = nx.DiGraph()
G.add_edge('s', 'a')
G.add_edge('s', 'b', capacity = 30)
G.add_edge('a', 'c', capacity = 25)
G.add_edge('b', 'c', capacity = 12)
G.add_edge('a', 't', capacity = 60)
G.add_edge('c', 't')
H = {'s': {'a': 85, 'b': 12},
'a': {'c': 25, 't': 60},
'b': {'c': 12},
'c': {'t': 37},
't': {}}
compare_flows_and_cuts(G, 's', 't', H, 97)
# DiGraph with infinite capacity digon
G = nx.DiGraph()
G.add_edge('s', 'a', capacity = 85)
G.add_edge('s', 'b', capacity = 30)
G.add_edge('a', 'c')
G.add_edge('c', 'a')
G.add_edge('b', 'c', capacity = 12)
G.add_edge('a', 't', capacity = 60)
G.add_edge('c', 't', capacity = 37)
H = {'s': {'a': 85, 'b': 12},
'a': {'c': 25, 't': 60},
'c': {'a': 0, 't': 37},
'b': {'c': 12},
't': {}}
compare_flows_and_cuts(G, 's', 't', H, 97)
def test_digraph_infcap_path(self):
# Graph with infinite capacity (s, t)-path
G = nx.DiGraph()
G.add_edge('s', 'a')
G.add_edge('s', 'b', capacity = 30)
G.add_edge('a', 'c')
G.add_edge('b', 'c', capacity = 12)
G.add_edge('a', 't', capacity = 60)
G.add_edge('c', 't')
for flow_func in all_funcs:
assert_raises(nx.NetworkXUnbounded,
flow_func, G, 's', 't')
def test_graph_infcap_edges(self):
# Undirected graph with infinite capacity edges
G = nx.Graph()
G.add_edge('s', 'a')
G.add_edge('s', 'b', capacity = 30)
G.add_edge('a', 'c', capacity = 25)
G.add_edge('b', 'c', capacity = 12)
G.add_edge('a', 't', capacity = 60)
G.add_edge('c', 't')
H = {'s': {'a': 85, 'b': 12},
'a': {'c': 25, 's': 85, 't': 60},
'b': {'c': 12, 's': 12},
'c': {'a': 25, 'b': 12, 't': 37},
't': {'a': 60, 'c': 37}}
compare_flows_and_cuts(G, 's', 't', H, 97)
    def test_digraph5(self):
# From ticket #429 by mfrasca.
G = nx.DiGraph()
G.add_edge('s', 'a', capacity = 2)
G.add_edge('s', 'b', capacity = 2)
G.add_edge('a', 'b', capacity = 5)
G.add_edge('a', 't', capacity = 1)
G.add_edge('b', 'a', capacity = 1)
G.add_edge('b', 't', capacity = 3)
flowSoln = {'a': {'b': 1, 't': 1},
'b': {'a': 0, 't': 3},
's': {'a': 2, 'b': 2},
't': {}}
compare_flows_and_cuts(G, 's', 't', flowSoln, 4)
def test_disconnected(self):
G = nx.Graph()
G.add_weighted_edges_from([(0,1,1),(1,2,1),(2,3,1)],weight='capacity')
G.remove_node(1)
assert_equal(nx.maximum_flow_value(G,0,3), 0)
flowSoln = {0: {}, 2: {3: 0}, 3: {2: 0}}
compare_flows_and_cuts(G, 0, 3, flowSoln, 0)
def test_source_target_not_in_graph(self):
G = nx.Graph()
G.add_weighted_edges_from([(0,1,1),(1,2,1),(2,3,1)],weight='capacity')
G.remove_node(0)
for flow_func in all_funcs:
assert_raises(nx.NetworkXError, flow_func, G, 0, 3)
G.add_weighted_edges_from([(0,1,1),(1,2,1),(2,3,1)],weight='capacity')
G.remove_node(3)
for flow_func in all_funcs:
assert_raises(nx.NetworkXError, flow_func, G, 0, 3)
def test_source_target_coincide(self):
G = nx.Graph()
G.add_node(0)
for flow_func in all_funcs:
assert_raises(nx.NetworkXError, flow_func, G, 0, 0)
    def test_multigraphs_raise(self):
        G = nx.MultiGraph()
        M = nx.MultiDiGraph()
        G.add_edges_from([(0, 1), (1, 0)], capacity=True)
        M.add_edges_from([(0, 1), (1, 0)], capacity=True)
        for flow_func in all_funcs:
            for graph in (G, M):
                assert_raises(nx.NetworkXError, flow_func, graph, 0, 0)
class TestMaxFlowMinCutInterface:
def setup(self):
G = nx.DiGraph()
G.add_edge('x','a', capacity = 3.0)
G.add_edge('x','b', capacity = 1.0)
G.add_edge('a','c', capacity = 3.0)
G.add_edge('b','c', capacity = 5.0)
G.add_edge('b','d', capacity = 4.0)
G.add_edge('d','e', capacity = 2.0)
G.add_edge('c','y', capacity = 2.0)
G.add_edge('e','y', capacity = 3.0)
self.G = G
H = nx.DiGraph()
H.add_edge(0, 1, capacity = 1.0)
H.add_edge(1, 2, capacity = 1.0)
self.H = H
def test_flow_func_not_callable(self):
elements = ['this_should_be_callable', 10, set([1,2,3])]
G = nx.Graph()
G.add_weighted_edges_from([(0,1,1),(1,2,1),(2,3,1)], weight='capacity')
for flow_func in interface_funcs:
for element in elements:
assert_raises(nx.NetworkXError,
flow_func, G, 0, 1, flow_func=element)
def test_flow_func_parameters(self):
G = self.G
fv = 3.0
for interface_func in interface_funcs:
for flow_func in flow_funcs:
result = interface_func(G, 'x', 'y', flow_func=flow_func)
if interface_func in max_min_funcs:
result = result[0]
assert_equal(fv, result, msg=msgi.format(flow_func.__name__,
interface_func.__name__))
def test_minimum_cut_no_cutoff(self):
G = self.G
for flow_func in flow_funcs:
assert_raises(nx.NetworkXError, nx.minimum_cut, G, 'x', 'y',
flow_func=flow_func, cutoff=1.0)
assert_raises(nx.NetworkXError, nx.minimum_cut_value, G, 'x', 'y',
flow_func=flow_func, cutoff=1.0)
def test_kwargs(self):
G = self.H
fv = 1.0
to_test = (
(shortest_augmenting_path, dict(two_phase=True)),
(preflow_push, dict(global_relabel_freq=5)),
)
for interface_func in interface_funcs:
for flow_func, kwargs in to_test:
result = interface_func(G, 0, 2, flow_func=flow_func, **kwargs)
if interface_func in max_min_funcs:
result = result[0]
assert_equal(fv, result, msg=msgi.format(flow_func.__name__,
interface_func.__name__))
def test_kwargs_default_flow_func(self):
G = self.H
for interface_func in interface_funcs:
assert_raises(nx.NetworkXError, interface_func,
G, 0, 1, global_relabel_freq=2)
def test_reusing_residual(self):
G = self.G
fv = 3.0
s, t = 'x', 'y'
R = build_residual_network(G, 'capacity')
for interface_func in interface_funcs:
for flow_func in flow_funcs:
for i in range(3):
result = interface_func(G, 'x', 'y', flow_func=flow_func,
residual=R)
if interface_func in max_min_funcs:
result = result[0]
assert_equal(fv, result,
msg=msgi.format(flow_func.__name__,
interface_func.__name__))
# Tests specific to one algorithm
def test_preflow_push_global_relabel_freq():
G = nx.DiGraph()
G.add_edge(1, 2, capacity=1)
R = preflow_push(G, 1, 2, global_relabel_freq=None)
assert_equal(R.graph['flow_value'], 1)
assert_raises(nx.NetworkXError, preflow_push, G, 1, 2,
global_relabel_freq=-1)
def test_preflow_push_makes_enough_space():
#From ticket #1542
G = nx.DiGraph()
nx.add_path(G, [0, 1, 3], capacity=1)
nx.add_path(G, [1, 2, 3], capacity=1)
R = preflow_push(G, 0, 3, value_only=False)
assert_equal(R.graph['flow_value'], 1)
def test_shortest_augmenting_path_two_phase():
k = 5
p = 1000
G = nx.DiGraph()
for i in range(k):
G.add_edge('s', (i, 0), capacity=1)
nx.add_path(G, ((i, j) for j in range(p)), capacity=1)
G.add_edge((i, p - 1), 't', capacity=1)
R = shortest_augmenting_path(G, 's', 't', two_phase=True)
assert_equal(R.graph['flow_value'], k)
R = shortest_augmenting_path(G, 's', 't', two_phase=False)
assert_equal(R.graph['flow_value'], k)
class TestCutoff:
def test_cutoff(self):
k = 5
p = 1000
G = nx.DiGraph()
for i in range(k):
G.add_edge('s', (i, 0), capacity=2)
nx.add_path(G, ((i, j) for j in range(p)), capacity=2)
G.add_edge((i, p - 1), 't', capacity=2)
R = shortest_augmenting_path(G, 's', 't', two_phase=True, cutoff=k)
ok_(k <= R.graph['flow_value'] <= 2 * k)
R = shortest_augmenting_path(G, 's', 't', two_phase=False, cutoff=k)
ok_(k <= R.graph['flow_value'] <= 2 * k)
R = edmonds_karp(G, 's', 't', cutoff=k)
ok_(k <= R.graph['flow_value'] <= 2 * k)
def test_complete_graph_cutoff(self):
G = nx.complete_graph(5)
nx.set_edge_attributes(G, 'capacity',
dict(((u, v), 1) for u, v in G.edges()))
for flow_func in [shortest_augmenting_path, edmonds_karp]:
for cutoff in [3, 2, 1]:
result = nx.maximum_flow_value(G, 0, 4, flow_func=flow_func,
cutoff=cutoff)
assert_equal(cutoff, result,
msg="cutoff error in {0}".format(flow_func.__name__))
|
|
#!/usr/bin/env python3
# Copyright (c) 2014 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Run this script from the root of the repository to update all translations from
transifex.
It will do the following automatically:
- fetch all translations using the tx tool
- post-process them into valid and committable format
- remove invalid control characters
- remove location tags (makes diffs less noisy)
TODO:
- auto-add new translations to the build system according to the translation process
'''
import subprocess
import re
import sys
import os
import io
import xml.etree.ElementTree as ET
# Name of transifex tool
TX = 'tx'
# Name of source language file
SOURCE_LANG = 'bitsend_en.ts'
# Directory with locale files
LOCALE_DIR = 'src/qt/locale'
# Minimum number of messages for translation to be considered at all
MIN_NUM_MESSAGES = 10
# Regexp to check for Bitsend addresses
ADDRESS_REGEXP = re.compile('([13]|bc1)[a-zA-Z0-9]{30,}')
def check_at_repository_root():
if not os.path.exists('.git'):
print('No .git directory found')
print('Execute this script at the root of the repository', file=sys.stderr)
sys.exit(1)
def fetch_all_translations():
if subprocess.call([TX, 'pull', '-f', '-a']):
print('Error while fetching translations', file=sys.stderr)
sys.exit(1)
def find_format_specifiers(s):
'''Find all format specifiers in a string.'''
pos = 0
specifiers = []
while True:
percent = s.find('%', pos)
if percent < 0:
break
specifiers.append(s[percent+1])
pos = percent+2
return specifiers
def split_format_specifiers(specifiers):
'''Split format specifiers between numeric (Qt) and others (strprintf)'''
numeric = []
other = []
for s in specifiers:
if s in {'1','2','3','4','5','6','7','8','9'}:
numeric.append(s)
else:
other.append(s)
# If both numeric format specifiers and "others" are used, assume we're dealing
# with a Qt-formatted message. In the case of Qt formatting (see https://doc.qt.io/qt-5/qstring.html#arg)
# only numeric formats are replaced at all. This means "(percentage: %1%)" is valid, without needing
# any kind of escaping that would be necessary for strprintf. Without this, this function
# would wrongly detect '%)' as a printf format specifier.
if numeric:
other = []
# numeric (Qt) can be present in any order, others (strprintf) must be in specified order
    return set(numeric), other
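# A small illustrative check of the two helpers above (sample strings are made
# up): numeric specifiers are Qt-style, everything else is strprintf-style.
def _demo_format_specifiers():
    assert find_format_specifiers('%1 of %2') == ['1', '2']
    assert split_format_specifiers(['1', '2']) == ({'1', '2'}, [])
    assert split_format_specifiers(['s', 'd']) == (set(), ['s', 'd'])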
def sanitize_string(s):
'''Sanitize string for printing'''
return s.replace('\n',' ')
def check_format_specifiers(source, translation, errors, numerus):
source_f = split_format_specifiers(find_format_specifiers(source))
# assert that no source messages contain both Qt and strprintf format specifiers
# if this fails, go change the source as this is hacky and confusing!
assert(not(source_f[0] and source_f[1]))
try:
translation_f = split_format_specifiers(find_format_specifiers(translation))
except IndexError:
errors.append("Parse error in translation for '%s': '%s'" % (sanitize_string(source), sanitize_string(translation)))
return False
else:
if source_f != translation_f:
if numerus and source_f == (set(), ['n']) and translation_f == (set(), []) and translation.find('%') == -1:
# Allow numerus translations to omit %n specifier (usually when it only has one possible value)
return True
errors.append("Mismatch between '%s' and '%s'" % (sanitize_string(source), sanitize_string(translation)))
return False
return True
def all_ts_files(suffix=''):
for filename in os.listdir(LOCALE_DIR):
# process only language files, and do not process source language
if not filename.endswith('.ts'+suffix) or filename == SOURCE_LANG+suffix:
continue
if suffix: # remove provided suffix
filename = filename[0:-len(suffix)]
filepath = os.path.join(LOCALE_DIR, filename)
yield(filename, filepath)
FIX_RE = re.compile(b'[\x00-\x09\x0b\x0c\x0e-\x1f]')
def remove_invalid_characters(s):
'''Remove invalid characters from translation string'''
return FIX_RE.sub(b'', s)
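# Illustrative example: control bytes are stripped, while newlines and
# carriage returns survive because the XML parser accepts them.
def _demo_remove_invalid_characters():
    assert remove_invalid_characters(b'a\x00b\nc') == b'ab\nc'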
# Override the cdata escape function to make our output match Qt's (optional,
# just for cleaner diffs; disabled by default).
_orig_escape_cdata = None
def escape_cdata(text):
text = _orig_escape_cdata(text)
text = text.replace("'", ''')
text = text.replace('"', '"')
return text
def contains_bitsend_addr(text, errors):
    if text is not None and ADDRESS_REGEXP.search(text) is not None:
errors.append('Translation "%s" contains a bitsend address. This will be removed.' % (text))
return True
return False
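# A small illustrative check of the address detector (sample strings are made
# up): base58-looking runs of 30+ characters after a 1/3/bc1 prefix match.
def _demo_contains_bitsend_addr():
    errors = []
    assert contains_bitsend_addr('1' + 'A' * 33, errors)
    assert not contains_bitsend_addr('hello world', errors)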
def postprocess_translations(reduce_diff_hacks=False):
print('Checking and postprocessing...')
if reduce_diff_hacks:
global _orig_escape_cdata
_orig_escape_cdata = ET._escape_cdata
ET._escape_cdata = escape_cdata
for (filename,filepath) in all_ts_files():
os.rename(filepath, filepath+'.orig')
have_errors = False
for (filename,filepath) in all_ts_files('.orig'):
# pre-fixups to cope with transifex output
        parser = ET.XMLParser(encoding='utf-8')  # override encoding: the parser understands 'utf-8' but not 'utf8'
with open(filepath + '.orig', 'rb') as f:
data = f.read()
# remove control characters; this must be done over the entire file otherwise the XML parser will fail
data = remove_invalid_characters(data)
tree = ET.parse(io.BytesIO(data), parser=parser)
# iterate over all messages in file
root = tree.getroot()
for context in root.findall('context'):
for message in context.findall('message'):
numerus = message.get('numerus') == 'yes'
source = message.find('source').text
translation_node = message.find('translation')
# pick all numerusforms
if numerus:
translations = [i.text for i in translation_node.findall('numerusform')]
else:
translations = [translation_node.text]
for translation in translations:
if translation is None:
continue
errors = []
valid = check_format_specifiers(source, translation, errors, numerus) and not contains_bitsend_addr(translation, errors)
for error in errors:
print('%s: %s' % (filename, error))
if not valid: # set type to unfinished and clear string if invalid
translation_node.clear()
translation_node.set('type', 'unfinished')
have_errors = True
# Remove location tags
for location in message.findall('location'):
message.remove(location)
# Remove entire message if it is an unfinished translation
if translation_node.get('type') == 'unfinished':
context.remove(message)
# check if document is (virtually) empty, and remove it if so
num_messages = 0
for context in root.findall('context'):
for message in context.findall('message'):
num_messages += 1
if num_messages < MIN_NUM_MESSAGES:
print('Removing %s, as it contains only %i messages' % (filepath, num_messages))
continue
# write fixed-up tree
# if diff reduction requested, replace some XML to 'sanitize' to qt formatting
if reduce_diff_hacks:
out = io.BytesIO()
tree.write(out, encoding='utf-8')
out = out.getvalue()
out = out.replace(b' />', b'/>')
with open(filepath, 'wb') as f:
f.write(out)
else:
tree.write(filepath, encoding='utf-8')
return have_errors
if __name__ == '__main__':
check_at_repository_root()
fetch_all_translations()
postprocess_translations()
|
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import grpc
from grpc.experimental import aio
from collections.abc import Iterable
import json
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from requests import Response
from requests import Request, PreparedRequest
from requests.sessions import Session
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.compute_v1.services.node_types import NodeTypesClient
from google.cloud.compute_v1.services.node_types import pagers
from google.cloud.compute_v1.services.node_types import transports
from google.cloud.compute_v1.types import compute
from google.oauth2 import service_account
import google.auth
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
return (
"foo.googleapis.com"
if ("localhost" in client.DEFAULT_ENDPOINT)
else client.DEFAULT_ENDPOINT
)
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
assert NodeTypesClient._get_default_mtls_endpoint(None) is None
assert NodeTypesClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
assert (
NodeTypesClient._get_default_mtls_endpoint(api_mtls_endpoint)
== api_mtls_endpoint
)
assert (
NodeTypesClient._get_default_mtls_endpoint(sandbox_endpoint)
== sandbox_mtls_endpoint
)
assert (
NodeTypesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
== sandbox_mtls_endpoint
)
assert NodeTypesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
@pytest.mark.parametrize("client_class,transport_name", [(NodeTypesClient, "rest"),])
def test_node_types_client_from_service_account_info(client_class, transport_name):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_info"
) as factory:
factory.return_value = creds
info = {"valid": True}
client = client_class.from_service_account_info(info, transport=transport_name)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == (
"compute.googleapis.com{}".format(":443")
if transport_name in ["grpc", "grpc_asyncio"]
else "https://{}".format("compute.googleapis.com")
)
@pytest.mark.parametrize(
"transport_class,transport_name", [(transports.NodeTypesRestTransport, "rest"),]
)
def test_node_types_client_service_account_always_use_jwt(
transport_class, transport_name
):
with mock.patch.object(
service_account.Credentials, "with_always_use_jwt_access", create=True
) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=True)
use_jwt.assert_called_once_with(True)
with mock.patch.object(
service_account.Credentials, "with_always_use_jwt_access", create=True
) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=False)
use_jwt.assert_not_called()
@pytest.mark.parametrize("client_class,transport_name", [(NodeTypesClient, "rest"),])
def test_node_types_client_from_service_account_file(client_class, transport_name):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_file"
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file(
"dummy/file/path.json", transport=transport_name
)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
client = client_class.from_service_account_json(
"dummy/file/path.json", transport=transport_name
)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == (
"compute.googleapis.com{}".format(":443")
if transport_name in ["grpc", "grpc_asyncio"]
else "https://{}".format("compute.googleapis.com")
)
def test_node_types_client_get_transport_class():
transport = NodeTypesClient.get_transport_class()
available_transports = [
transports.NodeTypesRestTransport,
]
assert transport in available_transports
transport = NodeTypesClient.get_transport_class("rest")
assert transport == transports.NodeTypesRestTransport
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[(NodeTypesClient, transports.NodeTypesRestTransport, "rest"),],
)
@mock.patch.object(
NodeTypesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NodeTypesClient)
)
def test_node_types_client_client_options(
client_class, transport_class, transport_name
):
    # Check that if a transport instance is provided we won't create a new one.
with mock.patch.object(NodeTypesClient, "get_transport_class") as gtc:
transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
client = client_class(transport=transport)
gtc.assert_not_called()
    # Check that if the transport is specified by name (str) we will create a new one.
with mock.patch.object(NodeTypesClient, "get_transport_class") as gtc:
client = client_class(transport=transport_name)
gtc.assert_called()
# Check the case api_endpoint is provided.
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(transport=transport_name, client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
with pytest.raises(MutualTLSChannelError):
client = client_class(transport=transport_name)
# Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
):
with pytest.raises(ValueError):
client = client_class(transport=transport_name)
# Check the case quota_project_id is provided
options = client_options.ClientOptions(quota_project_id="octopus")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name,use_client_cert_env",
[
(NodeTypesClient, transports.NodeTypesRestTransport, "rest", "true"),
(NodeTypesClient, transports.NodeTypesRestTransport, "rest", "false"),
],
)
@mock.patch.object(
NodeTypesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NodeTypesClient)
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_node_types_client_mtls_env_auto(
client_class, transport_class, transport_name, use_client_cert_env
):
# This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
# mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
# Check the case client_cert_source is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
options = client_options.ClientOptions(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
if use_client_cert_env == "false":
expected_client_cert_source = None
expected_host = client.DEFAULT_ENDPOINT
else:
expected_client_cert_source = client_cert_source_callback
expected_host = client.DEFAULT_MTLS_ENDPOINT
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=True,
):
with mock.patch(
"google.auth.transport.mtls.default_client_cert_source",
return_value=client_cert_source_callback,
):
if use_client_cert_env == "false":
expected_host = client.DEFAULT_ENDPOINT
expected_client_cert_source = None
else:
expected_host = client.DEFAULT_MTLS_ENDPOINT
expected_client_cert_source = client_cert_source_callback
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
# Check the case client_cert_source and ADC client cert are not provided.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=False,
):
patched.return_value = None
client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
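    # Summary of the auto-switch behavior exercised above (illustrative; with
    # GOOGLE_API_USE_MTLS_ENDPOINT set to "auto"):
    #   GOOGLE_API_USE_CLIENT_CERTIFICATE | client cert available | host                  | cert source
    #   "false"                           | any                   | DEFAULT_ENDPOINT      | None
    #   "true"                            | yes                   | DEFAULT_MTLS_ENDPOINT | callback
    #   "true"                            | no                    | DEFAULT_ENDPOINT      | None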
@pytest.mark.parametrize("client_class", [NodeTypesClient])
@mock.patch.object(
NodeTypesClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NodeTypesClient)
)
def test_node_types_client_get_mtls_endpoint_and_cert_source(client_class):
mock_client_cert_source = mock.Mock()
# Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
mock_api_endpoint = "foo"
options = client_options.ClientOptions(
client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
)
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
options
)
assert api_endpoint == mock_api_endpoint
assert cert_source == mock_client_cert_source
# Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
mock_client_cert_source = mock.Mock()
mock_api_endpoint = "foo"
options = client_options.ClientOptions(
client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
)
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
options
)
assert api_endpoint == mock_api_endpoint
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_ENDPOINT
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=False,
):
api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_ENDPOINT
assert cert_source is None
# Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=True,
):
with mock.patch(
"google.auth.transport.mtls.default_client_cert_source",
return_value=mock_client_cert_source,
):
(
api_endpoint,
cert_source,
) = client_class.get_mtls_endpoint_and_cert_source()
assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
assert cert_source == mock_client_cert_source
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[(NodeTypesClient, transports.NodeTypesRestTransport, "rest"),],
)
def test_node_types_client_client_options_scopes(
client_class, transport_class, transport_name
):
# Check the case scopes are provided.
options = client_options.ClientOptions(scopes=["1", "2"],)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name,grpc_helpers",
[(NodeTypesClient, transports.NodeTypesRestTransport, "rest", None),],
)
def test_node_types_client_client_options_credentials_file(
client_class, transport_class, transport_name, grpc_helpers
):
# Check the case credentials file is provided.
options = client_options.ClientOptions(credentials_file="credentials.json")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
@pytest.mark.parametrize(
"request_type", [compute.AggregatedListNodeTypesRequest, dict,]
)
def test_aggregated_list_rest(request_type):
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {"project": "sample1"}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.NodeTypeAggregatedList(
id="id_value",
kind="kind_value",
next_page_token="next_page_token_value",
self_link="self_link_value",
unreachables=["unreachables_value"],
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.NodeTypeAggregatedList.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.aggregated_list(request)
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.AggregatedListPager)
assert response.id == "id_value"
assert response.kind == "kind_value"
assert response.next_page_token == "next_page_token_value"
assert response.self_link == "self_link_value"
assert response.unreachables == ["unreachables_value"]
def test_aggregated_list_rest_required_fields(
request_type=compute.AggregatedListNodeTypesRequest,
):
transport_class = transports.NodeTypesRestTransport
request_init = {}
request_init["project"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).aggregated_list._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["project"] = "project_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).aggregated_list._get_unset_required_fields(jsonified_request)
# Check that path parameters and body parameters are not mixing in.
assert not set(unset_fields) - set(
(
"filter",
"include_all_scopes",
"max_results",
"order_by",
"page_token",
"return_partial_success",
)
)
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.NodeTypeAggregatedList()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because providing default values
# for required fields will fail the real version if the http_options
# expect actual values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A uri without fields and an empty body will force all the
# request fields to show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "get",
"query_params": request_init,
}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.NodeTypeAggregatedList.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.aggregated_list(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_aggregated_list_rest_unset_required_fields():
transport = transports.NodeTypesRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
)
unset_fields = transport.aggregated_list._get_unset_required_fields({})
assert set(unset_fields) == (
set(
(
"filter",
"includeAllScopes",
"maxResults",
"orderBy",
"pageToken",
"returnPartialSuccess",
)
)
& set(("project",))
)
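    # (The assertion reduces to the empty set: the optional query parameters in
    # the first operand and the required "project" path parameter in the second
    # are disjoint.)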
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_aggregated_list_rest_interceptors(null_interceptor):
transport = transports.NodeTypesRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
interceptor=None if null_interceptor else transports.NodeTypesRestInterceptor(),
)
client = NodeTypesClient(transport=transport)
with mock.patch.object(
type(client.transport._session), "request"
) as req, mock.patch.object(
path_template, "transcode"
) as transcode, mock.patch.object(
transports.NodeTypesRestInterceptor, "post_aggregated_list"
) as post, mock.patch.object(
transports.NodeTypesRestInterceptor, "pre_aggregated_list"
) as pre:
pre.assert_not_called()
post.assert_not_called()
transcode.return_value = {
"method": "post",
"uri": "my_uri",
"body": None,
"query_params": {},
}
req.return_value = Response()
req.return_value.status_code = 200
req.return_value.request = PreparedRequest()
req.return_value._content = compute.NodeTypeAggregatedList.to_json(
compute.NodeTypeAggregatedList()
)
request = compute.AggregatedListNodeTypesRequest()
metadata = [
("key", "val"),
("cephalopod", "squid"),
]
pre.return_value = request, metadata
        post.return_value = compute.NodeTypeAggregatedList()
client.aggregated_list(
request, metadata=[("key", "val"), ("cephalopod", "squid"),]
)
pre.assert_called_once()
post.assert_called_once()
def test_aggregated_list_rest_bad_request(
transport: str = "rest", request_type=compute.AggregatedListNodeTypesRequest
):
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {"project": "sample1"}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.aggregated_list(request)
def test_aggregated_list_rest_flattened():
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.NodeTypeAggregatedList()
# get arguments that satisfy an http rule for this method
sample_request = {"project": "sample1"}
# get truthy value for each flattened field
mock_args = dict(project="project_value",)
mock_args.update(sample_request)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.NodeTypeAggregatedList.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
client.aggregated_list(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"%s/compute/v1/projects/{project}/aggregated/nodeTypes"
% client.transport._host,
args[1],
)
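    # For reference, the flattened call above is transcoded into a plain HTTP
    # GET, roughly (illustrative URL):
    #   GET https://compute.googleapis.com/compute/v1/projects/project_value/aggregated/nodeTypes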
def test_aggregated_list_rest_flattened_error(transport: str = "rest"):
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.aggregated_list(
compute.AggregatedListNodeTypesRequest(), project="project_value",
)
def test_aggregated_list_rest_pager(transport: str = "rest"):
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# TODO(kbandes): remove this mock unless there's a good reason for it.
# with mock.patch.object(path_template, 'transcode') as transcode:
# Set the response as a series of pages
response = (
compute.NodeTypeAggregatedList(
items={
"a": compute.NodeTypesScopedList(),
"b": compute.NodeTypesScopedList(),
"c": compute.NodeTypesScopedList(),
},
next_page_token="abc",
),
compute.NodeTypeAggregatedList(items={}, next_page_token="def",),
compute.NodeTypeAggregatedList(
items={"g": compute.NodeTypesScopedList(),}, next_page_token="ghi",
),
compute.NodeTypeAggregatedList(
items={
"h": compute.NodeTypesScopedList(),
"i": compute.NodeTypesScopedList(),
},
),
)
# Two responses for two calls
response = response + response
# Wrap the values into proper Response objs
response = tuple(compute.NodeTypeAggregatedList.to_json(x) for x in response)
return_values = tuple(Response() for i in response)
for return_val, response_val in zip(return_values, response):
return_val._content = response_val.encode("UTF-8")
return_val.status_code = 200
req.side_effect = return_values
sample_request = {"project": "sample1"}
pager = client.aggregated_list(request=sample_request)
assert isinstance(pager.get("a"), compute.NodeTypesScopedList)
assert pager.get("h") is None
results = list(pager)
assert len(results) == 6
assert all(isinstance(i, tuple) for i in results)
for result in results:
assert isinstance(result, tuple)
assert tuple(type(t) for t in result) == (str, compute.NodeTypesScopedList)
assert pager.get("a") is None
assert isinstance(pager.get("h"), compute.NodeTypesScopedList)
pages = list(client.aggregated_list(request=sample_request).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
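    # A minimal sketch of driving the pager by hand, assuming the standard
    # google-api-core pagers interface used above:
    #   pager = client.aggregated_list(request={"project": "sample1"})
    #   for zone, scoped_list in pager:   # yields (str, NodeTypesScopedList) pairs
    #       ...
    #   for page in client.aggregated_list(request={"project": "sample1"}).pages:
    #       token = page.raw_page.next_page_token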
@pytest.mark.parametrize("request_type", [compute.GetNodeTypeRequest, dict,])
def test_get_rest(request_type):
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {"project": "sample1", "zone": "sample2", "node_type": "sample3"}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.NodeType(
cpu_platform="cpu_platform_value",
creation_timestamp="creation_timestamp_value",
description="description_value",
guest_cpus=1090,
id=205,
kind="kind_value",
local_ssd_gb=1244,
memory_mb=967,
name="name_value",
self_link="self_link_value",
zone="zone_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.NodeType.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.get(request)
# Establish that the response is the type that we expect.
assert isinstance(response, compute.NodeType)
assert response.cpu_platform == "cpu_platform_value"
assert response.creation_timestamp == "creation_timestamp_value"
assert response.description == "description_value"
assert response.guest_cpus == 1090
assert response.id == 205
assert response.kind == "kind_value"
assert response.local_ssd_gb == 1244
assert response.memory_mb == 967
assert response.name == "name_value"
assert response.self_link == "self_link_value"
assert response.zone == "zone_value"
def test_get_rest_required_fields(request_type=compute.GetNodeTypeRequest):
transport_class = transports.NodeTypesRestTransport
request_init = {}
request_init["node_type"] = ""
request_init["project"] = ""
request_init["zone"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).get._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["nodeType"] = "node_type_value"
jsonified_request["project"] = "project_value"
jsonified_request["zone"] = "zone_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).get._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "nodeType" in jsonified_request
assert jsonified_request["nodeType"] == "node_type_value"
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "zone" in jsonified_request
assert jsonified_request["zone"] == "zone_value"
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.NodeType()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because providing default values
# for required fields will fail the real version if the http_options
# expect actual values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A uri without fields and an empty body will force all the
# request fields to show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "get",
"query_params": request_init,
}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.NodeType.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.get(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_get_rest_unset_required_fields():
transport = transports.NodeTypesRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
)
unset_fields = transport.get._get_unset_required_fields({})
assert set(unset_fields) == (set(()) & set(("nodeType", "project", "zone",)))
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_get_rest_interceptors(null_interceptor):
transport = transports.NodeTypesRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
interceptor=None if null_interceptor else transports.NodeTypesRestInterceptor(),
)
client = NodeTypesClient(transport=transport)
with mock.patch.object(
type(client.transport._session), "request"
) as req, mock.patch.object(
path_template, "transcode"
) as transcode, mock.patch.object(
transports.NodeTypesRestInterceptor, "post_get"
) as post, mock.patch.object(
transports.NodeTypesRestInterceptor, "pre_get"
) as pre:
pre.assert_not_called()
post.assert_not_called()
transcode.return_value = {
"method": "post",
"uri": "my_uri",
"body": None,
"query_params": {},
}
req.return_value = Response()
req.return_value.status_code = 200
req.return_value.request = PreparedRequest()
req.return_value._content = compute.NodeType.to_json(compute.NodeType())
request = compute.GetNodeTypeRequest()
metadata = [
("key", "val"),
("cephalopod", "squid"),
]
pre.return_value = request, metadata
        post.return_value = compute.NodeType()
client.get(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
pre.assert_called_once()
post.assert_called_once()
def test_get_rest_bad_request(
transport: str = "rest", request_type=compute.GetNodeTypeRequest
):
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {"project": "sample1", "zone": "sample2", "node_type": "sample3"}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.get(request)
def test_get_rest_flattened():
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.NodeType()
# get arguments that satisfy an http rule for this method
sample_request = {
"project": "sample1",
"zone": "sample2",
"node_type": "sample3",
}
# get truthy value for each flattened field
mock_args = dict(
project="project_value", zone="zone_value", node_type="node_type_value",
)
mock_args.update(sample_request)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.NodeType.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
client.get(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"%s/compute/v1/projects/{project}/zones/{zone}/nodeTypes/{node_type}"
% client.transport._host,
args[1],
)
def test_get_rest_flattened_error(transport: str = "rest"):
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get(
compute.GetNodeTypeRequest(),
project="project_value",
zone="zone_value",
node_type="node_type_value",
)
def test_get_rest_error():
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
@pytest.mark.parametrize("request_type", [compute.ListNodeTypesRequest, dict,])
def test_list_rest(request_type):
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# send a request that will satisfy transcoding
request_init = {"project": "sample1", "zone": "sample2"}
request = request_type(request_init)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.NodeTypeList(
id="id_value",
kind="kind_value",
next_page_token="next_page_token_value",
self_link="self_link_value",
)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.NodeTypeList.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.list(request)
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListPager)
assert response.id == "id_value"
assert response.kind == "kind_value"
assert response.next_page_token == "next_page_token_value"
assert response.self_link == "self_link_value"
def test_list_rest_required_fields(request_type=compute.ListNodeTypesRequest):
transport_class = transports.NodeTypesRestTransport
request_init = {}
request_init["project"] = ""
request_init["zone"] = ""
request = request_type(request_init)
jsonified_request = json.loads(
request_type.to_json(
request, including_default_value_fields=False, use_integers_for_enums=False
)
)
# verify fields with default values are dropped
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).list._get_unset_required_fields(jsonified_request)
jsonified_request.update(unset_fields)
# verify required fields with default values are now present
jsonified_request["project"] = "project_value"
jsonified_request["zone"] = "zone_value"
unset_fields = transport_class(
credentials=ga_credentials.AnonymousCredentials()
).list._get_unset_required_fields(jsonified_request)
# Check that path parameters and body parameters are not mixing in.
assert not set(unset_fields) - set(
("filter", "max_results", "order_by", "page_token", "return_partial_success",)
)
jsonified_request.update(unset_fields)
# verify required fields with non-default values are left alone
assert "project" in jsonified_request
assert jsonified_request["project"] == "project_value"
assert "zone" in jsonified_request
assert jsonified_request["zone"] == "zone_value"
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
request = request_type(request_init)
# Designate an appropriate value for the returned response.
return_value = compute.NodeTypeList()
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# We need to mock transcode() because providing default values
# for required fields will fail the real version if the http_options
# expect actual values for those fields.
with mock.patch.object(path_template, "transcode") as transcode:
# A uri without fields and an empty body will force all the
# request fields to show up in the query_params.
transcode_result = {
"uri": "v1/sample_method",
"method": "get",
"query_params": request_init,
}
transcode.return_value = transcode_result
response_value = Response()
response_value.status_code = 200
json_return_value = compute.NodeTypeList.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
response = client.list(request)
expected_params = []
actual_params = req.call_args.kwargs["params"]
assert expected_params == actual_params
def test_list_rest_unset_required_fields():
transport = transports.NodeTypesRestTransport(
        credentials=ga_credentials.AnonymousCredentials()
)
unset_fields = transport.list._get_unset_required_fields({})
assert set(unset_fields) == (
set(("filter", "maxResults", "orderBy", "pageToken", "returnPartialSuccess",))
& set(("project", "zone",))
)
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_rest_interceptors(null_interceptor):
transport = transports.NodeTypesRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
interceptor=None if null_interceptor else transports.NodeTypesRestInterceptor(),
)
client = NodeTypesClient(transport=transport)
with mock.patch.object(
type(client.transport._session), "request"
) as req, mock.patch.object(
path_template, "transcode"
) as transcode, mock.patch.object(
transports.NodeTypesRestInterceptor, "post_list"
) as post, mock.patch.object(
transports.NodeTypesRestInterceptor, "pre_list"
) as pre:
pre.assert_not_called()
post.assert_not_called()
transcode.return_value = {
"method": "post",
"uri": "my_uri",
"body": None,
"query_params": {},
}
req.return_value = Response()
req.return_value.status_code = 200
req.return_value.request = PreparedRequest()
req.return_value._content = compute.NodeTypeList.to_json(compute.NodeTypeList())
request = compute.ListNodeTypesRequest()
metadata = [
("key", "val"),
("cephalopod", "squid"),
]
pre.return_value = request, metadata
        post.return_value = compute.NodeTypeList()
client.list(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
pre.assert_called_once()
post.assert_called_once()
def test_list_rest_bad_request(
transport: str = "rest", request_type=compute.ListNodeTypesRequest
):
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# send a request that will satisfy transcoding
request_init = {"project": "sample1", "zone": "sample2"}
request = request_type(request_init)
# Mock the http request call within the method and fake a BadRequest error.
with mock.patch.object(Session, "request") as req, pytest.raises(
core_exceptions.BadRequest
):
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 400
response_value.request = Request()
req.return_value = response_value
client.list(request)
def test_list_rest_flattened():
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(type(client.transport._session), "request") as req:
# Designate an appropriate value for the returned response.
return_value = compute.NodeTypeList()
# get arguments that satisfy an http rule for this method
sample_request = {"project": "sample1", "zone": "sample2"}
# get truthy value for each flattened field
mock_args = dict(project="project_value", zone="zone_value",)
mock_args.update(sample_request)
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
json_return_value = compute.NodeTypeList.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")
req.return_value = response_value
client.list(**mock_args)
# Establish that the underlying call was made with the expected
# request object values.
assert len(req.mock_calls) == 1
_, args, _ = req.mock_calls[0]
assert path_template.validate(
"%s/compute/v1/projects/{project}/zones/{zone}/nodeTypes"
% client.transport._host,
args[1],
)
def test_list_rest_flattened_error(transport: str = "rest"):
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list(
compute.ListNodeTypesRequest(), project="project_value", zone="zone_value",
)
def test_list_rest_pager(transport: str = "rest"):
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Mock the http request call within the method and fake a response.
with mock.patch.object(Session, "request") as req:
# TODO(kbandes): remove this mock unless there's a good reason for it.
# with mock.patch.object(path_template, 'transcode') as transcode:
# Set the response as a series of pages
response = (
compute.NodeTypeList(
items=[compute.NodeType(), compute.NodeType(), compute.NodeType(),],
next_page_token="abc",
),
compute.NodeTypeList(items=[], next_page_token="def",),
compute.NodeTypeList(items=[compute.NodeType(),], next_page_token="ghi",),
compute.NodeTypeList(items=[compute.NodeType(), compute.NodeType(),],),
)
# Two responses for two calls
response = response + response
# Wrap the values into proper Response objs
response = tuple(compute.NodeTypeList.to_json(x) for x in response)
return_values = tuple(Response() for i in response)
for return_val, response_val in zip(return_values, response):
return_val._content = response_val.encode("UTF-8")
return_val.status_code = 200
req.side_effect = return_values
sample_request = {"project": "sample1", "zone": "sample2"}
pager = client.list(request=sample_request)
results = list(pager)
assert len(results) == 6
assert all(isinstance(i, compute.NodeType) for i in results)
pages = list(client.list(request=sample_request).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.NodeTypesRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# It is an error to provide a credentials file and a transport instance.
transport = transports.NodeTypesRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = NodeTypesClient(
client_options={"credentials_file": "credentials.json"},
transport=transport,
)
# It is an error to provide an api_key and a transport instance.
transport = transports.NodeTypesRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
options = client_options.ClientOptions()
options.api_key = "api_key"
with pytest.raises(ValueError):
client = NodeTypesClient(client_options=options, transport=transport,)
# It is an error to provide an api_key and a credential.
options = mock.Mock()
options.api_key = "api_key"
with pytest.raises(ValueError):
client = NodeTypesClient(
client_options=options, credentials=ga_credentials.AnonymousCredentials()
)
# It is an error to provide scopes and a transport instance.
transport = transports.NodeTypesRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = NodeTypesClient(
client_options={"scopes": ["1", "2"]}, transport=transport,
)
def test_transport_instance():
# A client may be instantiated with a custom transport instance.
transport = transports.NodeTypesRestTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
client = NodeTypesClient(transport=transport)
assert client.transport is transport
@pytest.mark.parametrize("transport_class", [transports.NodeTypesRestTransport,])
def test_transport_adc(transport_class):
# Test default credentials are used if not provided.
with mock.patch.object(google.auth, "default") as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class()
adc.assert_called_once()
def test_node_types_base_transport_error():
# Passing both a credentials object and credentials_file should raise an error
with pytest.raises(core_exceptions.DuplicateCredentialArgs):
transport = transports.NodeTypesTransport(
credentials=ga_credentials.AnonymousCredentials(),
credentials_file="credentials.json",
)
def test_node_types_base_transport():
# Instantiate the base transport.
with mock.patch(
"google.cloud.compute_v1.services.node_types.transports.NodeTypesTransport.__init__"
) as Transport:
Transport.return_value = None
transport = transports.NodeTypesTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
# Every method on the transport should just blindly
# raise NotImplementedError.
methods = (
"aggregated_list",
"get",
"list",
)
for method in methods:
with pytest.raises(NotImplementedError):
getattr(transport, method)(request=object())
with pytest.raises(NotImplementedError):
transport.close()
def test_node_types_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
with mock.patch.object(
google.auth, "load_credentials_from_file", autospec=True
) as load_creds, mock.patch(
"google.cloud.compute_v1.services.node_types.transports.NodeTypesTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.NodeTypesTransport(
credentials_file="credentials.json", quota_project_id="octopus",
)
load_creds.assert_called_once_with(
"credentials.json",
scopes=None,
default_scopes=(
"https://www.googleapis.com/auth/compute.readonly",
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/cloud-platform",
),
quota_project_id="octopus",
)
def test_node_types_base_transport_with_adc():
# Test the default credentials are used if credentials and credentials_file are None.
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
"google.cloud.compute_v1.services.node_types.transports.NodeTypesTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.NodeTypesTransport()
adc.assert_called_once()
def test_node_types_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
NodeTypesClient()
adc.assert_called_once_with(
scopes=None,
default_scopes=(
"https://www.googleapis.com/auth/compute.readonly",
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/cloud-platform",
),
quota_project_id=None,
)
def test_node_types_http_transport_client_cert_source_for_mtls():
cred = ga_credentials.AnonymousCredentials()
with mock.patch(
"google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
) as mock_configure_mtls_channel:
transports.NodeTypesRestTransport(
credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
)
mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
@pytest.mark.parametrize("transport_name", ["rest",])
def test_node_types_host_no_port(transport_name):
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="compute.googleapis.com"
),
transport=transport_name,
)
assert client.transport._host == (
"compute.googleapis.com:443"
if transport_name in ["grpc", "grpc_asyncio"]
else "https://compute.googleapis.com"
)
@pytest.mark.parametrize("transport_name", ["rest",])
def test_node_types_host_with_port(transport_name):
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="compute.googleapis.com:8000"
),
transport=transport_name,
)
assert client.transport._host == (
"compute.googleapis.com:8000"
if transport_name in ["grpc", "grpc_asyncio"]
else "https://compute.googleapis.com:8000"
)
def test_common_billing_account_path():
billing_account = "squid"
expected = "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
actual = NodeTypesClient.common_billing_account_path(billing_account)
assert expected == actual
def test_parse_common_billing_account_path():
expected = {
"billing_account": "clam",
}
path = NodeTypesClient.common_billing_account_path(**expected)
# Check that the path construction is reversible.
actual = NodeTypesClient.parse_common_billing_account_path(path)
assert expected == actual
def test_common_folder_path():
folder = "whelk"
expected = "folders/{folder}".format(folder=folder,)
actual = NodeTypesClient.common_folder_path(folder)
assert expected == actual
def test_parse_common_folder_path():
expected = {
"folder": "octopus",
}
path = NodeTypesClient.common_folder_path(**expected)
# Check that the path construction is reversible.
actual = NodeTypesClient.parse_common_folder_path(path)
assert expected == actual
def test_common_organization_path():
organization = "oyster"
expected = "organizations/{organization}".format(organization=organization,)
actual = NodeTypesClient.common_organization_path(organization)
assert expected == actual
def test_parse_common_organization_path():
expected = {
"organization": "nudibranch",
}
path = NodeTypesClient.common_organization_path(**expected)
# Check that the path construction is reversible.
actual = NodeTypesClient.parse_common_organization_path(path)
assert expected == actual
def test_common_project_path():
project = "cuttlefish"
expected = "projects/{project}".format(project=project,)
actual = NodeTypesClient.common_project_path(project)
assert expected == actual
def test_parse_common_project_path():
expected = {
"project": "mussel",
}
path = NodeTypesClient.common_project_path(**expected)
# Check that the path construction is reversible.
actual = NodeTypesClient.parse_common_project_path(path)
assert expected == actual
def test_common_location_path():
project = "winkle"
location = "nautilus"
expected = "projects/{project}/locations/{location}".format(
project=project, location=location,
)
actual = NodeTypesClient.common_location_path(project, location)
assert expected == actual
def test_parse_common_location_path():
expected = {
"project": "scallop",
"location": "abalone",
}
path = NodeTypesClient.common_location_path(**expected)
# Check that the path construction is reversible.
actual = NodeTypesClient.parse_common_location_path(path)
assert expected == actual
def test_client_with_default_client_info():
client_info = gapic_v1.client_info.ClientInfo()
with mock.patch.object(
transports.NodeTypesTransport, "_prep_wrapped_messages"
) as prep:
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
with mock.patch.object(
transports.NodeTypesTransport, "_prep_wrapped_messages"
) as prep:
transport_class = NodeTypesClient.get_transport_class()
transport = transport_class(
credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
def test_transport_close():
transports = {
"rest": "_session",
}
for transport, close_name in transports.items():
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport
)
with mock.patch.object(
type(getattr(client.transport, close_name)), "close"
) as close:
with client:
close.assert_not_called()
close.assert_called_once()
def test_client_ctx():
transports = [
"rest",
]
for transport in transports:
client = NodeTypesClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport
)
# Test client calls underlying transport.
with mock.patch.object(type(client.transport), "close") as close:
close.assert_not_called()
with client:
pass
close.assert_called()
@pytest.mark.parametrize(
"client_class,transport_class",
[(NodeTypesClient, transports.NodeTypesRestTransport),],
)
def test_api_key_credentials(client_class, transport_class):
with mock.patch.object(
google.auth._default, "get_api_key_credentials", create=True
) as get_api_key_credentials:
mock_cred = mock.Mock()
get_api_key_credentials.return_value = mock_cred
options = client_options.ClientOptions()
options.api_key = "api_key"
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=mock_cred,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)
|
|
#! /usr/bin/env python3
'''SMTP/ESMTP client class.
This should follow RFC 821 (SMTP), RFC 1869 (ESMTP), RFC 2554 (SMTP
Authentication) and RFC 2487 (Secure SMTP over TLS).
Notes:
Please remember, when doing ESMTP, that the names of the SMTP service
extensions are NOT the same thing as the option keywords for the RCPT
and MAIL commands!
Example:
>>> import smtplib
>>> s=smtplib.SMTP("localhost")
>>> print(s.help())
This is Sendmail version 8.8.4
Topics:
HELO EHLO MAIL RCPT DATA
RSET NOOP QUIT HELP VRFY
EXPN VERB ETRN DSN
For more info use "HELP <topic>".
To report bugs in the implementation send email to
sendmail-bugs@sendmail.org.
For local information send email to Postmaster at your site.
End of HELP info
>>> s.putcmd("vrfy","someone@here")
>>> s.getreply()
(250, "Somebody OverHere <somebody@here.my.org>")
>>> s.quit()
'''
# Author: The Dragon De Monsyne <dragondm@integral.org>
# ESMTP support, test code and doc fixes added by
# Eric S. Raymond <esr@thyrsus.com>
# Better RFC 821 compliance (MAIL and RCPT, and CRLF in data)
# by Carey Evans <c.evans@clear.net.nz>, for picky mail servers.
# RFC 2554 (authentication) support by Gerhard Haering <gerhard@bigfoot.de>.
#
# This was modified from the Python 1.5 library HTTP lib.
import socket
import io
import re
import email.utils
import email.message
import email.generator
import base64
import hmac
import copy
from email.base64mime import body_encode as encode_base64
from sys import stderr
__all__ = ["SMTPException", "SMTPServerDisconnected", "SMTPResponseException",
"SMTPSenderRefused", "SMTPRecipientsRefused", "SMTPDataError",
"SMTPConnectError", "SMTPHeloError", "SMTPAuthenticationError",
"quoteaddr", "quotedata", "SMTP"]
SMTP_PORT = 25
SMTP_SSL_PORT = 465
CRLF = "\r\n"
bCRLF = b"\r\n"
_MAXLINE = 8192  # more than 8 times the 1000-character line limit of RFC 821, 4.5.3
OLDSTYLE_AUTH = re.compile(r"auth=(.*)", re.I)
# Exception classes used by this module.
class SMTPException(Exception):
"""Base class for all exceptions raised by this module."""
class SMTPServerDisconnected(SMTPException):
"""Not connected to any SMTP server.
This exception is raised when the server unexpectedly disconnects,
or when an attempt is made to use the SMTP instance before
connecting it to a server.
"""
class SMTPResponseException(SMTPException):
"""Base class for all exceptions that include an SMTP error code.
These exceptions are generated in some instances when the SMTP
server returns an error code. The error code is stored in the
`smtp_code' attribute of the error, and the `smtp_error' attribute
is set to the error message.
"""
def __init__(self, code, msg):
self.smtp_code = code
self.smtp_error = msg
self.args = (code, msg)
class SMTPSenderRefused(SMTPResponseException):
"""Sender address refused.
    In addition to the attributes set on all SMTPResponseException
exceptions, this sets `sender' to the string that the SMTP refused.
"""
def __init__(self, code, msg, sender):
self.smtp_code = code
self.smtp_error = msg
self.sender = sender
self.args = (code, msg, sender)
class SMTPRecipientsRefused(SMTPException):
"""All recipient addresses refused.
The errors for each recipient are accessible through the attribute
'recipients', which is a dictionary of exactly the same sort as
SMTP.sendmail() returns.
"""
def __init__(self, recipients):
self.recipients = recipients
self.args = (recipients,)
class SMTPDataError(SMTPResponseException):
"""The SMTP server didn't accept the data."""
class SMTPConnectError(SMTPResponseException):
"""Error during connection establishment."""
class SMTPHeloError(SMTPResponseException):
"""The server refused our HELO reply."""
class SMTPAuthenticationError(SMTPResponseException):
"""Authentication error.
Most probably the server didn't accept the username/password
combination provided.
"""
def quoteaddr(addrstring):
"""Quote a subset of the email addresses defined by RFC 821.
Should be able to handle anything email.utils.parseaddr can handle.
"""
displayname, addr = email.utils.parseaddr(addrstring)
if (displayname, addr) == ('', ''):
# parseaddr couldn't parse it, use it as is and hope for the best.
if addrstring.strip().startswith('<'):
return addrstring
return "<%s>" % addrstring
return "<%s>" % addr
def _addr_only(addrstring):
displayname, addr = email.utils.parseaddr(addrstring)
if (displayname, addr) == ('', ''):
# parseaddr couldn't parse it, so use it as is.
return addrstring
return addr
# Legacy method kept for backward compatibility.
def quotedata(data):
"""Quote data for email.
Double leading '.', and change Unix newline '\\n', or Mac '\\r' into
Internet CRLF end-of-line.
"""
return re.sub(r'(?m)^\.', '..',
re.sub(r'(?:\r\n|\n|\r(?!\n))', CRLF, data))
def _quote_periods(bindata):
return re.sub(br'(?m)^\.', b'..', bindata)
def _fix_eols(data):
return re.sub(r'(?:\r\n|\n|\r(?!\n))', CRLF, data)
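# A small self-check sketch for the quoting helpers above (illustrative only;
# the addresses and strings are placeholders, and nothing runs on import):
def _quote_examples():
    # parseaddr extracts the bare address, which is then angle-bracketed
    assert quoteaddr('Somebody <someone@example.com>') == '<someone@example.com>'
    # a lone '\n' becomes CRLF and a leading '.' is doubled per RFC 821
    assert quotedata('line1\n.dot') == 'line1\r\n..dot'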
try:
import ssl
except ImportError:
_have_ssl = False
else:
_have_ssl = True
class SMTP:
"""This class manages a connection to an SMTP or ESMTP server.
SMTP Objects:
SMTP objects have the following attributes:
helo_resp
This is the message given by the server in response to the
most recent HELO command.
ehlo_resp
This is the message given by the server in response to the
most recent EHLO command. This is usually multiline.
does_esmtp
This is a True value _after you do an EHLO command_, if the
server supports ESMTP.
esmtp_features
This is a dictionary, which, if the server supports ESMTP,
will _after you do an EHLO command_, contain the names of the
SMTP service extensions this server supports, and their
parameters (if any).
Note, all extension names are mapped to lower case in the
dictionary.
See each method's docstrings for details. In general, there is a
method of the same name to perform each SMTP command. There is also a
method called 'sendmail' that will do an entire mail transaction.
"""
debuglevel = 0
file = None
helo_resp = None
ehlo_msg = "ehlo"
ehlo_resp = None
does_esmtp = 0
default_port = SMTP_PORT
def __init__(self, host='', port=0, local_hostname=None,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
source_address=None):
"""Initialize a new instance.
If specified, `host' is the name of the remote host to which to
connect. If specified, `port' specifies the port to which to connect.
By default, smtplib.SMTP_PORT is used. If a host is specified the
connect method is called, and if it returns anything other than a
success code an SMTPConnectError is raised. If specified,
`local_hostname` is used as the FQDN of the local host in the HELO/EHLO
command. Otherwise, the local hostname is found using
socket.getfqdn(). The `source_address` parameter takes a 2-tuple (host,
port) for the socket to bind to as its source address before
connecting. If the host is '' and port is 0, the OS default behavior
will be used.
"""
self.timeout = timeout
self.esmtp_features = {}
self.source_address = source_address
if host:
(code, msg) = self.connect(host, port)
if code != 220:
raise SMTPConnectError(code, msg)
if local_hostname is not None:
self.local_hostname = local_hostname
else:
# RFC 2821 says we should use the fqdn in the EHLO/HELO verb, and
# if that can't be calculated, that we should use a domain literal
# instead (essentially an encoded IP address like [A.B.C.D]).
fqdn = socket.getfqdn()
if '.' in fqdn:
self.local_hostname = fqdn
else:
# We can't find an fqdn hostname, so use a domain literal
addr = '127.0.0.1'
try:
addr = socket.gethostbyname(socket.gethostname())
except socket.gaierror:
pass
self.local_hostname = '[%s]' % addr
def __enter__(self):
return self
def __exit__(self, *args):
try:
code, message = self.docmd("QUIT")
if code != 221:
raise SMTPResponseException(code, message)
except SMTPServerDisconnected:
pass
finally:
self.close()
def set_debuglevel(self, debuglevel):
"""Set the debug output level.
A non-false value results in debug messages for connection and for all
messages sent to and received from the server.
"""
self.debuglevel = debuglevel
def _get_socket(self, host, port, timeout):
# This makes it simpler for SMTP_SSL to use the SMTP connect code
# and just alter the socket connection bit.
if self.debuglevel > 0:
print('connect: to', (host, port), self.source_address,
file=stderr)
return socket.create_connection((host, port), timeout,
self.source_address)
def connect(self, host='localhost', port=0, source_address=None):
"""Connect to a host on a given port.
If the hostname ends with a colon (`:') followed by a number, and
there is no port specified, that suffix will be stripped off and the
number interpreted as the port number to use.
Note: This method is automatically invoked by __init__, if a host is
specified during instantiation.
"""
if source_address:
self.source_address = source_address
if not port and (host.find(':') == host.rfind(':')):
i = host.rfind(':')
if i >= 0:
host, port = host[:i], host[i + 1:]
try:
port = int(port)
except ValueError:
raise socket.error("nonnumeric port")
if not port:
port = self.default_port
if self.debuglevel > 0:
print('connect:', (host, port), file=stderr)
self.sock = self._get_socket(host, port, self.timeout)
self.file = None
(code, msg) = self.getreply()
if self.debuglevel > 0:
print("connect:", msg, file=stderr)
return (code, msg)
def send(self, s):
"""Send `s' to the server."""
if self.debuglevel > 0:
print('send:', repr(s), file=stderr)
if hasattr(self, 'sock') and self.sock:
if isinstance(s, str):
s = s.encode("ascii")
try:
self.sock.sendall(s)
except socket.error:
self.close()
raise SMTPServerDisconnected('Server not connected')
else:
raise SMTPServerDisconnected('please run connect() first')
def putcmd(self, cmd, args=""):
"""Send a command to the server."""
if args == "":
str = '%s%s' % (cmd, CRLF)
else:
str = '%s %s%s' % (cmd, args, CRLF)
self.send(str)
def getreply(self):
"""Get a reply from the server.
Returns a tuple consisting of:
- server response code (e.g. '250', or such, if all goes well)
Note: returns -1 if it can't read response code.
- server response string corresponding to response code (multiline
responses are converted to a single, multiline string).
Raises SMTPServerDisconnected if end-of-file is reached.
"""
resp = []
if self.file is None:
self.file = self.sock.makefile('rb')
while 1:
try:
line = self.file.readline(_MAXLINE + 1)
except socket.error as e:
self.close()
raise SMTPServerDisconnected("Connection unexpectedly closed: "
+ str(e))
if not line:
self.close()
raise SMTPServerDisconnected("Connection unexpectedly closed")
if self.debuglevel > 0:
print('reply:', repr(line), file=stderr)
if len(line) > _MAXLINE:
raise SMTPResponseException(500, "Line too long.")
resp.append(line[4:].strip(b' \t\r\n'))
code = line[:3]
# Check that the error code is syntactically correct.
# Don't attempt to read a continuation line if it is broken.
try:
errcode = int(code)
except ValueError:
errcode = -1
break
# Check if multiline response.
if line[3:4] != b"-":
break
errmsg = b"\n".join(resp)
if self.debuglevel > 0:
print('reply: retcode (%s); Msg: %s' % (errcode, errmsg),
file=stderr)
return errcode, errmsg
def docmd(self, cmd, args=""):
"""Send a command, and return its response code."""
self.putcmd(cmd, args)
return self.getreply()
# std smtp commands
def helo(self, name=''):
"""SMTP 'helo' command.
Hostname to send for this command defaults to the FQDN of the local
host.
"""
self.putcmd("helo", name or self.local_hostname)
(code, msg) = self.getreply()
self.helo_resp = msg
return (code, msg)
def ehlo(self, name=''):
""" SMTP 'ehlo' command.
Hostname to send for this command defaults to the FQDN of the local
host.
"""
self.esmtp_features = {}
self.putcmd(self.ehlo_msg, name or self.local_hostname)
(code, msg) = self.getreply()
# According to RFC1869 some (badly written)
# MTA's will disconnect on an ehlo. Toss an exception if
# that happens -ddm
if code == -1 and len(msg) == 0:
self.close()
raise SMTPServerDisconnected("Server not connected")
self.ehlo_resp = msg
if code != 250:
return (code, msg)
self.does_esmtp = 1
        # parse the ehlo response -ddm
assert isinstance(self.ehlo_resp, bytes), repr(self.ehlo_resp)
resp = self.ehlo_resp.decode("latin-1").split('\n')
del resp[0]
for each in resp:
# To be able to communicate with as many SMTP servers as possible,
# we have to take the old-style auth advertisement into account,
# because:
# 1) Else our SMTP feature parser gets confused.
# 2) There are some servers that only advertise the auth methods we
# support using the old style.
auth_match = OLDSTYLE_AUTH.match(each)
if auth_match:
# This doesn't remove duplicates, but that's no problem
self.esmtp_features["auth"] = self.esmtp_features.get("auth", "") \
+ " " + auth_match.groups(0)[0]
continue
# RFC 1869 requires a space between ehlo keyword and parameters.
# It's actually stricter, in that only spaces are allowed between
            # parameters, but we're not going to check for that here. Note
# that the space isn't present if there are no parameters.
m = re.match(r'(?P<feature>[A-Za-z0-9][A-Za-z0-9\-]*) ?', each)
if m:
feature = m.group("feature").lower()
params = m.string[m.end("feature"):].strip()
if feature == "auth":
self.esmtp_features[feature] = self.esmtp_features.get(feature, "") \
+ " " + params
else:
self.esmtp_features[feature] = params
return (code, msg)
def has_extn(self, opt):
"""Does the server support a given SMTP service extension?"""
return opt.lower() in self.esmtp_features
def help(self, args=''):
"""SMTP 'help' command.
Returns help text from server."""
self.putcmd("help", args)
return self.getreply()[1]
def rset(self):
"""SMTP 'rset' command -- resets session."""
return self.docmd("rset")
def noop(self):
"""SMTP 'noop' command -- doesn't do anything :>"""
return self.docmd("noop")
def mail(self, sender, options=[]):
"""SMTP 'mail' command -- begins mail xfer session."""
optionlist = ''
if options and self.does_esmtp:
optionlist = ' ' + ' '.join(options)
self.putcmd("mail", "FROM:%s%s" % (quoteaddr(sender), optionlist))
return self.getreply()
def rcpt(self, recip, options=[]):
"""SMTP 'rcpt' command -- indicates 1 recipient for this mail."""
optionlist = ''
if options and self.does_esmtp:
optionlist = ' ' + ' '.join(options)
self.putcmd("rcpt", "TO:%s%s" % (quoteaddr(recip), optionlist))
return self.getreply()
def data(self, msg):
"""SMTP 'DATA' command -- sends message data to server.
Automatically quotes lines beginning with a period per rfc821.
Raises SMTPDataError if there is an unexpected reply to the
DATA command; the return value from this method is the final
        response code received when all the data is sent. If msg
is a string, lone '\r' and '\n' characters are converted to
'\r\n' characters. If msg is bytes, it is transmitted as is.
"""
self.putcmd("data")
(code, repl) = self.getreply()
if self.debuglevel > 0:
print("data:", (code, repl), file=stderr)
if code != 354:
raise SMTPDataError(code, repl)
else:
if isinstance(msg, str):
msg = _fix_eols(msg).encode('ascii')
q = _quote_periods(msg)
if q[-2:] != bCRLF:
q = q + bCRLF
q = q + b"." + bCRLF
self.send(q)
(code, msg) = self.getreply()
if self.debuglevel > 0:
print("data:", (code, msg), file=stderr)
return (code, msg)
def verify(self, address):
"""SMTP 'verify' command -- checks for address validity."""
self.putcmd("vrfy", _addr_only(address))
return self.getreply()
# a.k.a.
vrfy = verify
def expn(self, address):
"""SMTP 'expn' command -- expands a mailing list."""
self.putcmd("expn", _addr_only(address))
return self.getreply()
# some useful methods
def ehlo_or_helo_if_needed(self):
"""Call self.ehlo() and/or self.helo() if needed.
If there has been no previous EHLO or HELO command this session, this
method tries ESMTP EHLO first.
This method may raise the following exceptions:
SMTPHeloError The server didn't reply properly to
the helo greeting.
"""
if self.helo_resp is None and self.ehlo_resp is None:
if not (200 <= self.ehlo()[0] <= 299):
(code, resp) = self.helo()
if not (200 <= code <= 299):
raise SMTPHeloError(code, resp)
def login(self, user, password):
"""Log in on an SMTP server that requires authentication.
The arguments are:
- user: The user name to authenticate with.
- password: The password for the authentication.
If there has been no previous EHLO or HELO command this session, this
method tries ESMTP EHLO first.
This method will return normally if the authentication was successful.
This method may raise the following exceptions:
SMTPHeloError The server didn't reply properly to
the helo greeting.
SMTPAuthenticationError The server didn't accept the username/
password combination.
SMTPException No suitable authentication method was
found.
"""
def encode_cram_md5(challenge, user, password):
challenge = base64.decodebytes(challenge)
response = user + " " + hmac.HMAC(password.encode('ascii'),
challenge).hexdigest()
return encode_base64(response.encode('ascii'), eol='')
def encode_plain(user, password):
s = "\0%s\0%s" % (user, password)
return encode_base64(s.encode('ascii'), eol='')
AUTH_PLAIN = "PLAIN"
AUTH_CRAM_MD5 = "CRAM-MD5"
AUTH_LOGIN = "LOGIN"
self.ehlo_or_helo_if_needed()
if not self.has_extn("auth"):
raise SMTPException("SMTP AUTH extension not supported by server.")
# Authentication methods the server claims to support
advertised_authlist = self.esmtp_features["auth"].split()
# List of authentication methods we support: from preferred to
# less preferred methods. Except for the purpose of testing the weaker
# ones, we prefer stronger methods like CRAM-MD5:
preferred_auths = [AUTH_CRAM_MD5, AUTH_PLAIN, AUTH_LOGIN]
# We try the authentication methods the server advertises, but only the
# ones *we* support. And in our preferred order.
authlist = [auth for auth in preferred_auths if auth in advertised_authlist]
if not authlist:
raise SMTPException("No suitable authentication method found.")
# Some servers advertise authentication methods they don't really
# support, so if authentication fails, we continue until we've tried
# all methods.
for authmethod in authlist:
if authmethod == AUTH_CRAM_MD5:
(code, resp) = self.docmd("AUTH", AUTH_CRAM_MD5)
if code == 334:
(code, resp) = self.docmd(encode_cram_md5(resp, user, password))
elif authmethod == AUTH_PLAIN:
(code, resp) = self.docmd("AUTH",
AUTH_PLAIN + " " + encode_plain(user, password))
elif authmethod == AUTH_LOGIN:
(code, resp) = self.docmd("AUTH",
"%s %s" % (AUTH_LOGIN, encode_base64(user.encode('ascii'), eol='')))
if code == 334:
(code, resp) = self.docmd(encode_base64(password.encode('ascii'), eol=''))
# 235 == 'Authentication successful'
# 503 == 'Error: already authenticated'
if code in (235, 503):
return (code, resp)
        # We could not log in successfully. Return the result of the last attempt.
raise SMTPAuthenticationError(code, resp)
def starttls(self, keyfile=None, certfile=None, context=None):
"""Puts the connection to the SMTP server into TLS mode.
If there has been no previous EHLO or HELO command this session, this
method tries ESMTP EHLO first.
If the server supports TLS, this will encrypt the rest of the SMTP
session. If you provide the keyfile and certfile parameters,
the identity of the SMTP server and client can be checked. This,
however, depends on whether the socket module really checks the
certificates.
This method may raise the following exceptions:
SMTPHeloError The server didn't reply properly to
the helo greeting.
"""
self.ehlo_or_helo_if_needed()
if not self.has_extn("starttls"):
raise SMTPException("STARTTLS extension not supported by server.")
(resp, reply) = self.docmd("STARTTLS")
if resp == 220:
if not _have_ssl:
raise RuntimeError("No SSL support included in this Python")
if context is not None and keyfile is not None:
raise ValueError("context and keyfile arguments are mutually "
"exclusive")
if context is not None and certfile is not None:
raise ValueError("context and certfile arguments are mutually "
"exclusive")
if context is not None:
self.sock = context.wrap_socket(self.sock)
else:
self.sock = ssl.wrap_socket(self.sock, keyfile, certfile)
self.file = None
# RFC 3207:
# The client MUST discard any knowledge obtained from
# the server, such as the list of SMTP service extensions,
# which was not obtained from the TLS negotiation itself.
self.helo_resp = None
self.ehlo_resp = None
self.esmtp_features = {}
self.does_esmtp = 0
return (resp, reply)
def sendmail(self, from_addr, to_addrs, msg, mail_options=[],
rcpt_options=[]):
"""This command performs an entire mail transaction.
The arguments are:
- from_addr : The address sending this mail.
- to_addrs : A list of addresses to send this mail to. A bare
string will be treated as a list with 1 address.
- msg : The message to send.
- mail_options : List of ESMTP options (such as 8bitmime) for the
mail command.
- rcpt_options : List of ESMTP options (such as DSN commands) for
all the rcpt commands.
msg may be a string containing characters in the ASCII range, or a byte
string. A string is encoded to bytes using the ascii codec, and lone
\\r and \\n characters are converted to \\r\\n characters.
If there has been no previous EHLO or HELO command this session, this
method tries ESMTP EHLO first. If the server does ESMTP, message size
and each of the specified options will be passed to it. If EHLO
fails, HELO will be tried and ESMTP options suppressed.
This method will return normally if the mail is accepted for at least
one recipient. It returns a dictionary, with one entry for each
recipient that was refused. Each entry contains a tuple of the SMTP
error code and the accompanying error message sent by the server.
This method may raise the following exceptions:
SMTPHeloError The server didn't reply properly to
the helo greeting.
SMTPRecipientsRefused The server rejected ALL recipients
(no mail was sent).
SMTPSenderRefused The server didn't accept the from_addr.
SMTPDataError The server replied with an unexpected
error code (other than a refusal of
a recipient).
Note: the connection will be open even after an exception is raised.
Example:
>>> import smtplib
>>> s=smtplib.SMTP("localhost")
>>> tolist=["one@one.org","two@two.org","three@three.org","four@four.org"]
>>> msg = '''\\
... From: Me@my.org
... Subject: testin'...
...
... This is a test '''
>>> s.sendmail("me@my.org",tolist,msg)
{ "three@three.org" : ( 550 ,"User unknown" ) }
>>> s.quit()
In the above example, the message was accepted for delivery to three
of the four addresses, and one was rejected, with the error code
550. If all addresses are accepted, then the method will return an
empty dictionary.
"""
self.ehlo_or_helo_if_needed()
esmtp_opts = []
if isinstance(msg, str):
msg = _fix_eols(msg).encode('ascii')
if self.does_esmtp:
# Hmmm? what's this? -ddm
# self.esmtp_features['7bit']=""
if self.has_extn('size'):
esmtp_opts.append("size=%d" % len(msg))
for option in mail_options:
esmtp_opts.append(option)
(code, resp) = self.mail(from_addr, esmtp_opts)
if code != 250:
if code == 421:
self.close()
else:
self.rset()
raise SMTPSenderRefused(code, resp, from_addr)
senderrs = {}
if isinstance(to_addrs, str):
to_addrs = [to_addrs]
for each in to_addrs:
(code, resp) = self.rcpt(each, rcpt_options)
if (code != 250) and (code != 251):
senderrs[each] = (code, resp)
if code == 421:
self.close()
raise SMTPRecipientsRefused(senderrs)
if len(senderrs) == len(to_addrs):
# the server refused all our recipients
self.rset()
raise SMTPRecipientsRefused(senderrs)
(code, resp) = self.data(msg)
if code != 250:
if code == 421:
self.close()
else:
self.rset()
raise SMTPDataError(code, resp)
        # if we got here then somebody got our mail
return senderrs
def send_message(self, msg, from_addr=None, to_addrs=None,
mail_options=[], rcpt_options={}):
"""Converts message to a bytestring and passes it to sendmail.
The arguments are as for sendmail, except that msg is an
email.message.Message object. If from_addr is None or to_addrs is
None, these arguments are taken from the headers of the Message as
described in RFC 2822 (a ValueError is raised if there is more than
one set of 'Resent-' headers). Regardless of the values of from_addr and
        to_addrs, any Bcc field (or Resent-Bcc field, when the Message is
        resent) of the Message object won't be transmitted. The Message
object is then serialized using email.generator.BytesGenerator and
sendmail is called to transmit the message.
"""
# 'Resent-Date' is a mandatory field if the Message is resent (RFC 2822
# Section 3.6.6). In such a case, we use the 'Resent-*' fields. However,
# if there is more than one 'Resent-' block there's no way to
# unambiguously determine which one is the most recent in all cases,
# so rather than guess we raise a ValueError in that case.
#
# TODO implement heuristics to guess the correct Resent-* block with an
# option allowing the user to enable the heuristics. (It should be
# possible to guess correctly almost all of the time.)
resent = msg.get_all('Resent-Date')
if resent is None:
header_prefix = ''
elif len(resent) == 1:
header_prefix = 'Resent-'
else:
raise ValueError("message has more than one 'Resent-' header block")
if from_addr is None:
# Prefer the sender field per RFC 2822:3.6.2.
from_addr = (msg[header_prefix + 'Sender']
if (header_prefix + 'Sender') in msg
else msg[header_prefix + 'From'])
if to_addrs is None:
addr_fields = [f for f in (msg[header_prefix + 'To'],
msg[header_prefix + 'Bcc'],
msg[header_prefix + 'Cc']) if f is not None]
to_addrs = [a[1] for a in email.utils.getaddresses(addr_fields)]
# Make a local copy so we can delete the bcc headers.
msg_copy = copy.copy(msg)
del msg_copy['Bcc']
del msg_copy['Resent-Bcc']
with io.BytesIO() as bytesmsg:
g = email.generator.BytesGenerator(bytesmsg)
g.flatten(msg_copy, linesep='\r\n')
flatmsg = bytesmsg.getvalue()
return self.sendmail(from_addr, to_addrs, flatmsg, mail_options,
rcpt_options)
def close(self):
"""Close the connection to the SMTP server."""
if self.file:
self.file.close()
self.file = None
if self.sock:
self.sock.close()
self.sock = None
def quit(self):
"""Terminate the SMTP session."""
res = self.docmd("quit")
self.close()
return res
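# A minimal end-to-end sketch of driving the SMTP class above (host, port and
# credentials are placeholders; nothing here runs on import):
def _smtp_usage_sketch():
    with SMTP('mail.example.com', 587) as client:
        client.starttls()                # upgrade the session per RFC 3207
        client.login('user', 'secret')   # requires the AUTH extension
        client.sendmail('me@example.com', ['you@example.com'],
                        'Subject: hi\r\n\r\nHello!')
    # __exit__ sends QUIT and closes the socket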
if _have_ssl:
class SMTP_SSL(SMTP):
""" This is a subclass derived from SMTP that connects over an SSL
encrypted socket (to use this class you need a socket module that was
compiled with SSL support). If host is not specified, '' (the local
host) is used. If port is omitted, the standard SMTP-over-SSL port
(465) is used. local_hostname and source_address have the same meaning
as they do in the SMTP class. keyfile and certfile are also optional -
they can contain a PEM formatted private key and certificate chain file
        for the SSL connection. context is also optional and can contain an
        SSLContext; it is an alternative to keyfile and certfile. If it is
        specified, both keyfile and certfile must be None.
"""
default_port = SMTP_SSL_PORT
def __init__(self, host='', port=0, local_hostname=None,
keyfile=None, certfile=None,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
source_address=None, context=None):
if context is not None and keyfile is not None:
raise ValueError("context and keyfile arguments are mutually "
"exclusive")
if context is not None and certfile is not None:
raise ValueError("context and certfile arguments are mutually "
"exclusive")
self.keyfile = keyfile
self.certfile = certfile
self.context = context
SMTP.__init__(self, host, port, local_hostname, timeout,
source_address)
def _get_socket(self, host, port, timeout):
if self.debuglevel > 0:
print('connect:', (host, port), file=stderr)
new_socket = socket.create_connection((host, port), timeout,
self.source_address)
if self.context is not None:
new_socket = self.context.wrap_socket(new_socket)
else:
new_socket = ssl.wrap_socket(new_socket, self.keyfile, self.certfile)
return new_socket
__all__.append("SMTP_SSL")
#
# LMTP extension
#
LMTP_PORT = 2003
class LMTP(SMTP):
"""LMTP - Local Mail Transfer Protocol
The LMTP protocol, which is very similar to ESMTP, is heavily based
on the standard SMTP client. It's common to use Unix sockets for
LMTP, so our connect() method must support that as well as a regular
host:port server. local_hostname and source_address have the same
meaning as they do in the SMTP class. To specify a Unix socket,
you must use an absolute path as the host, starting with a '/'.
Authentication is supported, using the regular SMTP mechanism. When
    using a Unix socket, LMTP daemons generally don't support or require any
authentication, but your mileage might vary."""
ehlo_msg = "lhlo"
def __init__(self, host='', port=LMTP_PORT, local_hostname=None,
source_address=None):
"""Initialize a new instance."""
SMTP.__init__(self, host, port, local_hostname=local_hostname,
source_address=source_address)
def connect(self, host='localhost', port=0, source_address=None):
"""Connect to the LMTP daemon, on either a Unix or a TCP socket."""
if host[0] != '/':
return SMTP.connect(self, host, port, source_address=source_address)
# Handle Unix-domain sockets.
try:
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.file = None
self.sock.connect(host)
except socket.error:
if self.debuglevel > 0:
print('connect fail:', host, file=stderr)
if self.sock:
self.sock.close()
self.sock = None
raise
(code, msg) = self.getreply()
if self.debuglevel > 0:
print('connect:', msg, file=stderr)
return (code, msg)
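# A minimal LMTP sketch over a Unix-domain socket (the socket path is a
# placeholder; per the class docstring, such daemons rarely require AUTH):
def _lmtp_usage_sketch():
    client = LMTP('/var/run/lmtp.sock')  # hypothetical socket path
    client.sendmail('me@example.com', ['you@example.com'],
                    'Subject: local\r\n\r\nDelivered via LMTP.')
    client.quit()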
# Test the sendmail method, which tests most of the others.
# Note: This always sends to localhost.
if __name__ == '__main__':
import sys
def prompt(prompt):
sys.stdout.write(prompt + ": ")
sys.stdout.flush()
return sys.stdin.readline().strip()
fromaddr = prompt("From")
toaddrs = prompt("To").split(',')
print("Enter message, end with ^D:")
msg = ''
while 1:
line = sys.stdin.readline()
if not line:
break
msg = msg + line
print("Message length is %d" % len(msg))
server = SMTP('localhost')
server.set_debuglevel(1)
server.sendmail(fromaddr, toaddrs, msg)
server.quit()
|
|
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-argument
import json
import os
import semantic_version
from platformio.clients.registry import RegistryClient
from platformio.commands.lib.command import cli as cmd_lib
from platformio.package.meta import PackageType
from platformio.package.vcsclient import VCSClientFactory
from platformio.project.config import ProjectConfig
def test_saving_deps(clirunner, validate_cliresult, isolated_pio_core, tmpdir_factory):
regclient = RegistryClient()
project_dir = tmpdir_factory.mktemp("project")
project_dir.join("platformio.ini").write(
"""
[env]
lib_deps = ArduinoJson
[env:one]
board = devkit
[env:two]
framework = foo
lib_deps =
CustomLib
ArduinoJson @ 5.10.1
"""
)
result = clirunner.invoke(
cmd_lib,
["-d", str(project_dir), "install", "64", "knolleary/PubSubClient@~2.7"],
)
validate_cliresult(result)
aj_pkg_data = regclient.get_package(PackageType.LIBRARY, "bblanchon", "ArduinoJson")
config = ProjectConfig(os.path.join(str(project_dir), "platformio.ini"))
assert sorted(config.get("env:one", "lib_deps")) == sorted(
[
"bblanchon/ArduinoJson@^%s" % aj_pkg_data["version"]["name"],
"knolleary/PubSubClient@~2.7",
]
)
assert sorted(config.get("env:two", "lib_deps")) == sorted(
[
"CustomLib",
"bblanchon/ArduinoJson@^%s" % aj_pkg_data["version"]["name"],
"knolleary/PubSubClient@~2.7",
]
)
# ensure "build" version without NPM spec
result = clirunner.invoke(
cmd_lib,
["-d", str(project_dir), "-e", "one", "install", "mbed-sam-grove/LinkedList"],
)
validate_cliresult(result)
ll_pkg_data = regclient.get_package(
PackageType.LIBRARY, "mbed-sam-grove", "LinkedList"
)
config = ProjectConfig(os.path.join(str(project_dir), "platformio.ini"))
assert sorted(config.get("env:one", "lib_deps")) == sorted(
[
"bblanchon/ArduinoJson@^%s" % aj_pkg_data["version"]["name"],
"knolleary/PubSubClient@~2.7",
"mbed-sam-grove/LinkedList@%s" % ll_pkg_data["version"]["name"],
]
)
# check external package via Git repo
result = clirunner.invoke(
cmd_lib,
[
"-d",
str(project_dir),
"-e",
"one",
"install",
"https://github.com/OttoWinter/async-mqtt-client.git#v0.8.3 @ 0.8.3",
],
)
validate_cliresult(result)
config = ProjectConfig(os.path.join(str(project_dir), "platformio.ini"))
assert len(config.get("env:one", "lib_deps")) == 4
assert config.get("env:one", "lib_deps")[3] == (
"https://github.com/OttoWinter/async-mqtt-client.git#v0.8.3 @ 0.8.3"
)
# test uninstalling
# from all envs
result = clirunner.invoke(
cmd_lib, ["-d", str(project_dir), "uninstall", "ArduinoJson"]
)
validate_cliresult(result)
# from "one" env
result = clirunner.invoke(
cmd_lib,
[
"-d",
str(project_dir),
"-e",
"one",
"uninstall",
"knolleary/PubSubClient@~2.7",
],
)
validate_cliresult(result)
config = ProjectConfig(os.path.join(str(project_dir), "platformio.ini"))
assert len(config.get("env:one", "lib_deps")) == 2
assert len(config.get("env:two", "lib_deps")) == 2
assert config.get("env:one", "lib_deps") == [
"mbed-sam-grove/LinkedList@%s" % ll_pkg_data["version"]["name"],
"https://github.com/OttoWinter/async-mqtt-client.git#v0.8.3 @ 0.8.3",
]
assert config.get("env:two", "lib_deps") == [
"CustomLib",
"knolleary/PubSubClient@~2.7",
]
# test list
result = clirunner.invoke(cmd_lib, ["-d", str(project_dir), "list"])
validate_cliresult(result)
assert "Version: 0.8.3+sha." in result.stdout
assert (
"Source: git+https://github.com/OttoWinter/async-mqtt-client.git#v0.8.3"
in result.stdout
)
result = clirunner.invoke(
cmd_lib, ["-d", str(project_dir), "list", "--json-output"]
)
validate_cliresult(result)
data = {}
for key, value in json.loads(result.stdout).items():
data[os.path.basename(key)] = value
ame_lib = next(
item for item in data["one"] if item["name"] == "AsyncMqttClient-esphome"
)
ame_vcs = VCSClientFactory.new(ame_lib["__pkg_dir"], ame_lib["__src_url"])
assert len(data["two"]) == 1
assert data["two"][0]["name"] == "PubSubClient"
assert "__pkg_dir" in data["one"][0]
assert (
ame_lib["__src_url"]
== "git+https://github.com/OttoWinter/async-mqtt-client.git#v0.8.3"
)
assert ame_lib["version"] == ("0.8.3+sha.%s" % ame_vcs.get_current_revision())
def test_update(clirunner, validate_cliresult, isolated_pio_core, tmpdir_factory):
storage_dir = tmpdir_factory.mktemp("test-updates")
result = clirunner.invoke(
cmd_lib,
["-d", str(storage_dir), "install", "ArduinoJson @ 5.10.1", "Blynk @ ~0.5.0"],
)
validate_cliresult(result)
result = clirunner.invoke(
cmd_lib, ["-d", str(storage_dir), "update", "--dry-run", "--json-output"]
)
validate_cliresult(result)
outdated = json.loads(result.stdout)
assert len(outdated) == 2
# ArduinoJson
assert outdated[0]["version"] == "5.10.1"
assert outdated[0]["versionWanted"] is None
assert semantic_version.Version(
outdated[0]["versionLatest"]
) > semantic_version.Version("6.16.0")
# Blynk
assert outdated[1]["version"] == "0.5.4"
assert outdated[1]["versionWanted"] is None
assert semantic_version.Version(
outdated[1]["versionLatest"]
) > semantic_version.Version("0.6.0")
# check with spec
result = clirunner.invoke(
cmd_lib,
[
"-d",
str(storage_dir),
"update",
"--dry-run",
"--json-output",
"ArduinoJson @ ^5",
],
)
validate_cliresult(result)
outdated = json.loads(result.stdout)
assert outdated[0]["version"] == "5.10.1"
assert outdated[0]["versionWanted"] == "5.13.4"
assert semantic_version.Version(
outdated[0]["versionLatest"]
) > semantic_version.Version("6.16.0")
# update with spec
result = clirunner.invoke(
cmd_lib, ["-d", str(storage_dir), "update", "--silent", "ArduinoJson @ ^5.10.1"]
)
validate_cliresult(result)
result = clirunner.invoke(
cmd_lib, ["-d", str(storage_dir), "list", "--json-output"]
)
validate_cliresult(result)
items = json.loads(result.stdout)
assert len(items) == 2
assert items[0]["version"] == "5.13.4"
assert items[1]["version"] == "0.5.4"
# Check incompatible
result = clirunner.invoke(
cmd_lib, ["-d", str(storage_dir), "update", "--dry-run", "ArduinoJson @ ^5"]
)
validate_cliresult(result)
assert "Incompatible" in result.stdout
|
|
'''
Box Layout
==========
.. only:: html
.. image:: images/boxlayout.gif
:align: right
.. only:: latex
.. image:: images/boxlayout.png
:align: right
:class:`BoxLayout` arranges children in a vertical or horizontal box.
To position widgets above/below each other, use a vertical BoxLayout::
layout = BoxLayout(orientation='vertical')
btn1 = Button(text='Hello')
btn2 = Button(text='World')
layout.add_widget(btn1)
layout.add_widget(btn2)
To position widgets next to each other, use a horizontal BoxLayout. In this
example, we use 10 pixel spacing between children; the first button covers
70% of the horizontal space, the second covers 30%::
layout = BoxLayout(spacing=10)
btn1 = Button(text='Hello', size_hint=(.7, 1))
btn2 = Button(text='World', size_hint=(.3, 1))
layout.add_widget(btn1)
layout.add_widget(btn2)
Position hints are partially working, depending on the orientation:
* If the orientation is `vertical`: `x`, `right` and `center_x` will be used.
* If the orientation is `horizontal`: `y`, `top` and `center_y` will be used.
See `examples/widgets/boxlayout_poshint.py` for a live example.
.. note::
The `size_hint` uses the available space after subtracting all the
fixed-size widgets. For example, if you have a layout that is 800px
    wide, and add three buttons like this::
btn1 = Button(text='Hello', size=(200, 100), size_hint=(None, None))
btn2 = Button(text='Kivy', size_hint=(.5, 1))
btn3 = Button(text='World', size_hint=(.5, 1))
The first button will be 200px wide as specified, the second and third
    will be 300px each, i.e. (800 - 200) * 0.5.
.. versionchanged:: 1.4.1
Added support for `pos_hint`.
'''
__all__ = ('BoxLayout', )
from kivy.uix.layout import Layout
from kivy.properties import (NumericProperty, OptionProperty,
VariableListProperty)
class BoxLayout(Layout):
'''Box layout class. See module documentation for more information.
'''
spacing = NumericProperty(0)
'''Spacing between children, in pixels.
:data:`spacing` is a :class:`~kivy.properties.NumericProperty` and defaults
to 0.
'''
padding = VariableListProperty([0, 0, 0, 0])
'''Padding between layout box and children: [padding_left, padding_top,
padding_right, padding_bottom].
padding also accepts a two argument form [padding_horizontal,
padding_vertical] and a one argument form [padding].
.. versionchanged:: 1.7.0
Replaced NumericProperty with VariableListProperty.
:data:`padding` is a :class:`~kivy.properties.VariableListProperty` and
defaults to [0, 0, 0, 0].
'''
orientation = OptionProperty('horizontal', options=(
'horizontal', 'vertical'))
'''Orientation of the layout.
:data:`orientation` is an :class:`~kivy.properties.OptionProperty` and
defaults to 'horizontal'. Can be 'vertical' or 'horizontal'.
'''
def __init__(self, **kwargs):
super(BoxLayout, self).__init__(**kwargs)
self.bind(
spacing=self._trigger_layout,
padding=self._trigger_layout,
children=self._trigger_layout,
orientation=self._trigger_layout,
parent=self._trigger_layout,
size=self._trigger_layout,
pos=self._trigger_layout)
def do_layout(self, *largs):
        # optimize layout by avoiding repeated attribute lookups inside the loop
len_children = len(self.children)
if len_children == 0:
return
selfx = self.x
selfy = self.y
selfw = self.width
selfh = self.height
padding_left = self.padding[0]
padding_top = self.padding[1]
padding_right = self.padding[2]
padding_bottom = self.padding[3]
spacing = self.spacing
orientation = self.orientation
padding_x = padding_left + padding_right
padding_y = padding_top + padding_bottom
# calculate maximum space used by size_hint
stretch_weight_x = 0.
stretch_weight_y = 0.
minimum_size_x = padding_x + spacing * (len_children - 1)
minimum_size_y = padding_y + spacing * (len_children - 1)
for w in self.children:
shw = w.size_hint_x
shh = w.size_hint_y
if shw is None:
minimum_size_x += w.width
else:
stretch_weight_x += shw
if shh is None:
minimum_size_y += w.height
else:
stretch_weight_y += shh
if orientation == 'horizontal':
x = padding_left
stretch_space = max(0.0, selfw - minimum_size_x)
for c in reversed(self.children):
shw = c.size_hint_x
shh = c.size_hint_y
w = c.width
h = c.height
cx = selfx + x
cy = selfy + padding_bottom
if shw:
w = stretch_space * shw / stretch_weight_x
if shh:
h = max(0, shh * (selfh - padding_y))
for key, value in c.pos_hint.items():
posy = value * (selfh - padding_y)
if key == 'y':
cy += padding_bottom + posy
elif key == 'top':
cy += padding_bottom + posy - h
elif key == 'center_y':
cy += padding_bottom - h / 2. + posy
c.x = cx
c.y = cy
c.width = w
c.height = h
x += w + spacing
if orientation == 'vertical':
y = padding_bottom
stretch_space = max(0.0, selfh - minimum_size_y)
for c in self.children:
shw = c.size_hint_x
shh = c.size_hint_y
w = c.width
h = c.height
cx = selfx + padding_left
cy = selfy + y
if shh:
h = stretch_space * shh / stretch_weight_y
if shw:
w = max(0, shw * (selfw - padding_x))
for key, value in c.pos_hint.items():
posx = value * (selfw - padding_x)
if key == 'x':
cx += padding_left + posx
elif key == 'right':
cx += padding_left + posx - w
elif key == 'center_x':
cx += padding_left - w / 2. + posx
c.x = cx
c.y = cy
c.width = w
c.height = h
y += h + spacing
def add_widget(self, widget, index=0):
widget.bind(
pos_hint=self._trigger_layout)
return super(BoxLayout, self).add_widget(widget, index)
def remove_widget(self, widget):
widget.unbind(
pos_hint=self._trigger_layout)
return super(BoxLayout, self).remove_widget(widget)
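# A tiny numeric sketch of the size_hint arithmetic documented above: in an
# 800px-wide horizontal layout, fixed-size widgets are subtracted first and
# the remainder is split by size_hint_x, mirroring do_layout() (illustrative):
def _size_hint_example():
    layout_width = 800
    fixed = 200                                 # btn1: size_hint=(None, None)
    stretch_space = layout_width - fixed        # 600px left to distribute
    stretch_weight = .5 + .5                    # sum of size_hint_x (btn2, btn3)
    btn2 = stretch_space * .5 / stretch_weight  # 300px
    btn3 = stretch_space * .5 / stretch_weight  # 300px
    return btn2, btn3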
|
|
from guardian.shortcuts import get_perms
from rest_framework import serializers as ser
from rest_framework.exceptions import ValidationError
from api.actions.serializers import ReviewableCountsRelationshipField
from api.base.utils import absolute_reverse, get_user_auth
from api.base.serializers import JSONAPISerializer, IDField, LinksField, RelationshipField, TypeField, TypedRelationshipField
from api.providers.permissions import GROUPS
from api.providers.workflows import Workflows
from osf.models.user import Email, OSFUser
from osf.models.validators import validate_email
from website import mails
from website.settings import DOMAIN
class ProviderSerializer(JSONAPISerializer):
class Meta:
type_ = 'providers'
name = ser.CharField(read_only=True)
description = ser.CharField(read_only=True)
id = ser.CharField(read_only=True, max_length=200, source='_id')
advisory_board = ser.CharField(read_only=True)
example = ser.CharField(read_only=True, allow_null=True)
domain = ser.CharField(read_only=True, allow_null=False)
domain_redirect_enabled = ser.BooleanField(read_only=True)
footer_links = ser.CharField(read_only=True)
email_support = ser.CharField(read_only=True, allow_null=True)
facebook_app_id = ser.IntegerField(read_only=True, allow_null=True)
allow_submissions = ser.BooleanField(read_only=True)
allow_commenting = ser.BooleanField(read_only=True)
assets = ser.SerializerMethodField(read_only=True)
links = LinksField({
'self': 'get_absolute_url',
'external_url': 'get_external_url'
})
taxonomies = TypedRelationshipField(
related_view='providers:taxonomy-list',
related_view_kwargs={'provider_id': '<_id>'}
)
highlighted_taxonomies = TypedRelationshipField(
related_view='providers:highlighted-taxonomy-list',
related_view_kwargs={'provider_id': '<_id>'},
related_meta={'has_highlighted_subjects': 'get_has_highlighted_subjects'}
)
licenses_acceptable = TypedRelationshipField(
related_view='providers:license-list',
related_view_kwargs={'provider_id': '<_id>'}
)
def get_has_highlighted_subjects(self, obj):
return obj.has_highlighted_subjects
def get_absolute_url(self, obj):
return obj.absolute_api_v2_url
def get_external_url(self, obj):
return obj.external_url
def get_assets(self, obj):
return {asset.name: asset.file.url for asset in obj.asset_files.all()} or None
class CollectionProviderSerializer(ProviderSerializer):
class Meta:
type_ = 'collection-providers'
primary_collection = RelationshipField(
related_view='collections:collection-detail',
related_view_kwargs={'collection_id': '<primary_collection._id>'}
)
filterable_fields = frozenset([
'allow_submissions',
'allow_commenting',
'description',
'domain',
'domain_redirect_enabled',
'id',
'name',
])
class PreprintProviderSerializer(ProviderSerializer):
class Meta:
type_ = 'preprint-providers'
filterable_fields = frozenset([
'allow_submissions',
'allow_commenting',
'description',
'domain',
'domain_redirect_enabled',
'id',
'name',
'share_publish_type',
'reviews_workflow',
'permissions',
])
share_source = ser.CharField(read_only=True)
share_publish_type = ser.CharField(read_only=True)
preprint_word = ser.CharField(read_only=True, allow_null=True)
additional_providers = ser.ListField(read_only=True, child=ser.CharField())
permissions = ser.SerializerMethodField()
# Reviews settings are the only writable fields
reviews_workflow = ser.ChoiceField(choices=Workflows.choices())
reviews_comments_private = ser.BooleanField()
reviews_comments_anonymous = ser.BooleanField()
links = LinksField({
'self': 'get_absolute_url',
'preprints': 'get_preprints_url',
'external_url': 'get_external_url'
})
preprints = ReviewableCountsRelationshipField(
related_view='providers:preprint-providers:preprints-list',
related_view_kwargs={'provider_id': '<_id>'}
)
def get_preprints_url(self, obj):
return absolute_reverse('providers:preprint-providers:preprints-list', kwargs={
'provider_id': obj._id,
'version': self.context['request'].parser_context['kwargs']['version']
})
def get_permissions(self, obj):
auth = get_user_auth(self.context['request'])
if not auth.user:
return []
return get_perms(auth.user, obj)
def validate(self, data):
required_fields = ('reviews_workflow', 'reviews_comments_private', 'reviews_comments_anonymous')
for field in required_fields:
if data.get(field) is None:
raise ValidationError('All reviews fields must be set at once: `{}`'.format('`, `'.join(required_fields)))
return data
def update(self, instance, validated_data):
instance.reviews_workflow = validated_data['reviews_workflow']
instance.reviews_comments_private = validated_data['reviews_comments_private']
instance.reviews_comments_anonymous = validated_data['reviews_comments_anonymous']
instance.save()
return instance
class ModeratorSerializer(JSONAPISerializer):
filterable_fields = frozenset([
'full_name',
'id',
'permission_group'
])
id = IDField(source='_id', required=False, allow_null=True)
type = TypeField()
full_name = ser.CharField(source='fullname', required=False, label='Full name', help_text='Display name used in the general user interface', max_length=186)
permission_group = ser.CharField(required=True)
email = ser.EmailField(required=False, write_only=True, validators=[validate_email])
class Meta:
type_ = 'moderators'
def get_absolute_url(self, obj):
return absolute_reverse('moderators:provider-moderator-detail', kwargs={
            'provider_id': self.context['request'].parser_context['kwargs']['provider_id'],
'moderator_id': obj._id,
'version': self.context['request'].parser_context['kwargs']['version']})
def create(self, validated_data):
auth = get_user_auth(self.context['request'])
user_id = validated_data.pop('_id', '')
address = validated_data.pop('email', '')
provider = self.context['provider']
context = {
'referrer': auth.user
}
if user_id and address:
raise ValidationError('Cannot specify both "id" and "email".')
user = None
if user_id:
user = OSFUser.load(user_id)
elif address:
try:
email = Email.objects.get(address=address.lower())
except Email.DoesNotExist:
full_name = validated_data.pop('fullname', '')
if not full_name:
raise ValidationError('"full_name" is required when adding a moderator via email.')
user = OSFUser.create_unregistered(full_name, email=address)
user.add_unclaimed_record(provider, referrer=auth.user,
given_name=full_name, email=address)
user.save()
claim_url = user.get_claim_url(provider._id, external=True)
context['claim_url'] = claim_url
else:
user = email.user
else:
raise ValidationError('Must specify either "id" or "email".')
if not user:
raise ValidationError('Unable to find specified user.')
context['user'] = user
context['provider'] = provider
if bool(get_perms(user, provider)):
raise ValidationError('Specified user is already a moderator.')
if 'claim_url' in context:
template = mails.CONFIRM_EMAIL_MODERATION(provider)
else:
template = mails.MODERATOR_ADDED(provider)
perm_group = validated_data.pop('permission_group', '')
if perm_group not in GROUPS:
raise ValidationError('Unrecognized permission_group')
context['notification_settings_url'] = '{}reviews/preprints/{}/notifications'.format(DOMAIN, provider._id)
context['provider_name'] = provider.name
context['is_reviews_moderator_notification'] = True
context['is_admin'] = perm_group == 'admin'
provider.add_to_group(user, perm_group)
setattr(user, 'permission_group', perm_group) # Allows reserialization
mails.send_mail(
user.username,
template,
mimetype='html',
**context
)
return user
def update(self, instance, validated_data):
provider = self.context['provider']
perm_group = validated_data.get('permission_group')
if perm_group == instance.permission_group:
return instance
try:
provider.remove_from_group(instance, instance.permission_group, unsubscribe=False)
except ValueError as e:
            # ValueError has no `.message` attribute on Python 3; stringify it.
            raise ValidationError(str(e))
provider.add_to_group(instance, perm_group)
setattr(instance, 'permission_group', perm_group)
return instance
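# A minimal sketch of exercising ModeratorSerializer from a view using the
# standard DRF flow (`request`, `provider` and the payload are stand-ins, and
# 'moderator' is assumed to be a valid entry in GROUPS):
def _add_moderator_sketch(request, provider):
    serializer = ModeratorSerializer(
        data={'email': 'mod@example.com', 'full_name': 'Mod Erator',
              'permission_group': 'moderator'},  # hypothetical values
        context={'request': request, 'provider': provider})
    serializer.is_valid(raise_exception=True)
    return serializer.save()  # dispatches to create() above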
|
|
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import uuid
from six.moves import http_client
from testtools import matchers
from keystone import catalog
from keystone.tests import unit
from keystone.tests.unit.ksfixtures import database
from keystone.tests.unit import test_v3
class CatalogTestCase(test_v3.RestfulTestCase):
"""Test service & endpoint CRUD."""
# region crud tests
def test_create_region_with_id(self):
"""Call ``PUT /regions/{region_id}`` w/o an ID in the request body."""
ref = self.new_region_ref()
region_id = ref.pop('id')
r = self.put(
'/regions/%s' % region_id,
body={'region': ref},
expected_status=http_client.CREATED)
self.assertValidRegionResponse(r, ref)
# Double-check that the region ID was kept as-is and not
# populated with a UUID, as is the case with POST /v3/regions
self.assertEqual(region_id, r.json['region']['id'])
def test_create_region_with_matching_ids(self):
"""Call ``PUT /regions/{region_id}`` with an ID in the request body."""
ref = self.new_region_ref()
region_id = ref['id']
r = self.put(
'/regions/%s' % region_id,
body={'region': ref},
expected_status=http_client.CREATED)
self.assertValidRegionResponse(r, ref)
# Double-check that the region ID was kept as-is and not
# populated with a UUID, as is the case with POST /v3/regions
self.assertEqual(region_id, r.json['region']['id'])
def test_create_region_with_duplicate_id(self):
"""Call ``PUT /regions/{region_id}``."""
ref = dict(description="my region")
self.put(
'/regions/myregion',
body={'region': ref}, expected_status=http_client.CREATED)
# Create region again with duplicate id
self.put(
'/regions/myregion',
            body={'region': ref}, expected_status=http_client.CONFLICT)
def test_create_region(self):
"""Call ``POST /regions`` with an ID in the request body."""
# the ref will have an ID defined on it
ref = self.new_region_ref()
r = self.post(
'/regions',
body={'region': ref})
self.assertValidRegionResponse(r, ref)
# we should be able to get the region, having defined the ID ourselves
r = self.get(
'/regions/%(region_id)s' % {
'region_id': ref['id']})
self.assertValidRegionResponse(r, ref)
def test_create_region_with_empty_id(self):
"""Call ``POST /regions`` with an empty ID in the request body."""
ref = self.new_region_ref()
ref['id'] = ''
r = self.post('/regions', body={'region': ref})
self.assertValidRegionResponse(r, ref)
self.assertNotEmpty(r.result['region'].get('id'))
def test_create_region_without_id(self):
"""Call ``POST /regions`` without an ID in the request body."""
ref = self.new_region_ref()
# instead of defining the ID ourselves...
del ref['id']
# let the service define the ID
r = self.post('/regions', body={'region': ref})
self.assertValidRegionResponse(r, ref)
def test_create_region_without_description(self):
"""Call ``POST /regions`` without description in the request body."""
ref = self.new_region_ref()
del ref['description']
r = self.post('/regions', body={'region': ref})
# Create the description in the reference to compare to since the
# response should now have a description, even though we didn't send
# it with the original reference.
ref['description'] = ''
self.assertValidRegionResponse(r, ref)
def test_create_regions_with_same_description_string(self):
"""Call ``POST /regions`` with same description in the request bodies.
"""
# NOTE(lbragstad): Make sure we can create two regions that have the
# same description.
ref1 = self.new_region_ref()
ref2 = self.new_region_ref()
region_desc = 'Some Region Description'
ref1['description'] = region_desc
ref2['description'] = region_desc
resp1 = self.post('/regions', body={'region': ref1})
self.assertValidRegionResponse(resp1, ref1)
resp2 = self.post('/regions', body={'region': ref2})
self.assertValidRegionResponse(resp2, ref2)
def test_create_regions_without_descriptions(self):
"""Call ``POST /regions`` with no description in the request bodies.
"""
# NOTE(lbragstad): Make sure we can create two regions that have
# no description in the request body. The description should be
# populated by Catalog Manager.
ref1 = self.new_region_ref()
ref2 = self.new_region_ref()
del ref1['description']
ref2['description'] = None
resp1 = self.post('/regions', body={'region': ref1})
resp2 = self.post('/regions', body={'region': ref2})
# Create the descriptions in the references to compare to since the
# responses should now have descriptions, even though we didn't send
# a description with the original references.
ref1['description'] = ''
ref2['description'] = ''
self.assertValidRegionResponse(resp1, ref1)
self.assertValidRegionResponse(resp2, ref2)
def test_create_region_with_conflicting_ids(self):
"""Call ``PUT /regions/{region_id}`` with conflicting region IDs."""
# the region ref is created with an ID
ref = self.new_region_ref()
# but instead of using that ID, make up a new, conflicting one
self.put(
'/regions/%s' % uuid.uuid4().hex,
body={'region': ref},
expected_status=http_client.BAD_REQUEST)
def test_list_regions(self):
"""Call ``GET /regions``."""
r = self.get('/regions')
self.assertValidRegionListResponse(r, ref=self.region)
def _create_region_with_parent_id(self, parent_id=None):
ref = self.new_region_ref()
ref['parent_region_id'] = parent_id
return self.post(
'/regions',
body={'region': ref})
def test_list_regions_filtered_by_parent_region_id(self):
"""Call ``GET /regions?parent_region_id={parent_region_id}``."""
new_region = self._create_region_with_parent_id()
parent_id = new_region.result['region']['id']
new_region = self._create_region_with_parent_id(parent_id)
new_region = self._create_region_with_parent_id(parent_id)
r = self.get('/regions?parent_region_id=%s' % parent_id)
for region in r.result['regions']:
self.assertEqual(parent_id, region['parent_region_id'])
def test_get_region(self):
"""Call ``GET /regions/{region_id}``."""
r = self.get('/regions/%(region_id)s' % {
'region_id': self.region_id})
self.assertValidRegionResponse(r, self.region)
def test_update_region(self):
"""Call ``PATCH /regions/{region_id}``."""
region = self.new_region_ref()
del region['id']
r = self.patch('/regions/%(region_id)s' % {
'region_id': self.region_id},
body={'region': region})
self.assertValidRegionResponse(r, region)
def test_update_region_without_description_keeps_original(self):
"""Call ``PATCH /regions/{region_id}``."""
region_ref = self.new_region_ref()
resp = self.post('/regions', body={'region': region_ref})
region_updates = {
# update with something that's not the description
'parent_region_id': self.region_id,
}
resp = self.patch('/regions/%s' % region_ref['id'],
body={'region': region_updates})
# NOTE(dstanek): Keystone should keep the original description.
self.assertEqual(region_ref['description'],
resp.result['region']['description'])
def test_update_region_with_null_description(self):
"""Call ``PATCH /regions/{region_id}``."""
region = self.new_region_ref()
del region['id']
region['description'] = None
r = self.patch('/regions/%(region_id)s' % {
'region_id': self.region_id},
body={'region': region})
# NOTE(dstanek): Keystone should turn the provided None value into
# an empty string before storing in the backend.
region['description'] = ''
self.assertValidRegionResponse(r, region)
def test_delete_region(self):
"""Call ``DELETE /regions/{region_id}``."""
ref = self.new_region_ref()
r = self.post(
'/regions',
body={'region': ref})
self.assertValidRegionResponse(r, ref)
self.delete('/regions/%(region_id)s' % {
'region_id': ref['id']})
# service crud tests
def test_create_service(self):
"""Call ``POST /services``."""
ref = self.new_service_ref()
r = self.post(
'/services',
body={'service': ref})
self.assertValidServiceResponse(r, ref)
def test_create_service_no_name(self):
"""Call ``POST /services``."""
ref = self.new_service_ref()
del ref['name']
r = self.post(
'/services',
body={'service': ref})
ref['name'] = ''
self.assertValidServiceResponse(r, ref)
def test_create_service_no_enabled(self):
"""Call ``POST /services``."""
ref = self.new_service_ref()
del ref['enabled']
r = self.post(
'/services',
body={'service': ref})
ref['enabled'] = True
self.assertValidServiceResponse(r, ref)
self.assertIs(True, r.result['service']['enabled'])
def test_create_service_enabled_false(self):
"""Call ``POST /services``."""
ref = self.new_service_ref()
ref['enabled'] = False
r = self.post(
'/services',
body={'service': ref})
self.assertValidServiceResponse(r, ref)
self.assertIs(False, r.result['service']['enabled'])
def test_create_service_enabled_true(self):
"""Call ``POST /services``."""
ref = self.new_service_ref()
ref['enabled'] = True
r = self.post(
'/services',
body={'service': ref})
self.assertValidServiceResponse(r, ref)
self.assertIs(True, r.result['service']['enabled'])
def test_create_service_enabled_str_true(self):
"""Call ``POST /services``."""
ref = self.new_service_ref()
ref['enabled'] = 'True'
self.post('/services', body={'service': ref},
expected_status=http_client.BAD_REQUEST)
def test_create_service_enabled_str_false(self):
"""Call ``POST /services``."""
ref = self.new_service_ref()
ref['enabled'] = 'False'
self.post('/services', body={'service': ref},
expected_status=http_client.BAD_REQUEST)
def test_create_service_enabled_str_random(self):
"""Call ``POST /services``."""
ref = self.new_service_ref()
ref['enabled'] = 'puppies'
self.post('/services', body={'service': ref},
expected_status=http_client.BAD_REQUEST)
def test_list_services(self):
"""Call ``GET /services``."""
r = self.get('/services')
self.assertValidServiceListResponse(r, ref=self.service)
def _create_random_service(self):
ref = self.new_service_ref()
ref['enabled'] = True
response = self.post(
'/services',
body={'service': ref})
return response.json['service']
def test_filter_list_services_by_type(self):
"""Call ``GET /services?type=<some type>``."""
target_ref = self._create_random_service()
# create unrelated services
self._create_random_service()
self._create_random_service()
response = self.get('/services?type=' + target_ref['type'])
self.assertValidServiceListResponse(response, ref=target_ref)
filtered_service_list = response.json['services']
self.assertEqual(1, len(filtered_service_list))
filtered_service = filtered_service_list[0]
self.assertEqual(target_ref['type'], filtered_service['type'])
def test_filter_list_services_by_name(self):
"""Call ``GET /services?name=<some name>``."""
target_ref = self._create_random_service()
# create unrelated services
self._create_random_service()
self._create_random_service()
response = self.get('/services?name=' + target_ref['name'])
self.assertValidServiceListResponse(response, ref=target_ref)
filtered_service_list = response.json['services']
self.assertEqual(1, len(filtered_service_list))
filtered_service = filtered_service_list[0]
self.assertEqual(target_ref['name'], filtered_service['name'])
def test_get_service(self):
"""Call ``GET /services/{service_id}``."""
r = self.get('/services/%(service_id)s' % {
'service_id': self.service_id})
self.assertValidServiceResponse(r, self.service)
def test_update_service(self):
"""Call ``PATCH /services/{service_id}``."""
service = self.new_service_ref()
del service['id']
r = self.patch('/services/%(service_id)s' % {
'service_id': self.service_id},
body={'service': service})
self.assertValidServiceResponse(r, service)
def test_delete_service(self):
"""Call ``DELETE /services/{service_id}``."""
self.delete('/services/%(service_id)s' % {
'service_id': self.service_id})
# endpoint CRUD tests
def test_list_endpoints(self):
"""Call ``GET /endpoints``."""
r = self.get('/endpoints')
self.assertValidEndpointListResponse(r, ref=self.endpoint)
def _create_random_endpoint(self, interface='public',
parent_region_id=None):
region = self._create_region_with_parent_id(
parent_id=parent_region_id)
service = self._create_random_service()
ref = self.new_endpoint_ref(
service_id=service['id'],
interface=interface,
region_id=region.result['region']['id'])
response = self.post(
'/endpoints',
body={'endpoint': ref})
return response.json['endpoint']
def test_list_endpoints_filtered_by_interface(self):
"""Call ``GET /endpoints?interface={interface}``."""
ref = self._create_random_endpoint(interface='internal')
response = self.get('/endpoints?interface=%s' % ref['interface'])
self.assertValidEndpointListResponse(response, ref=ref)
for endpoint in response.json['endpoints']:
self.assertEqual(ref['interface'], endpoint['interface'])
def test_list_endpoints_filtered_by_service_id(self):
"""Call ``GET /endpoints?service_id={service_id}``."""
ref = self._create_random_endpoint()
response = self.get('/endpoints?service_id=%s' % ref['service_id'])
self.assertValidEndpointListResponse(response, ref=ref)
for endpoint in response.json['endpoints']:
self.assertEqual(ref['service_id'], endpoint['service_id'])
def test_list_endpoints_filtered_by_region_id(self):
"""Call ``GET /endpoints?region_id={region_id}``."""
ref = self._create_random_endpoint()
response = self.get('/endpoints?region_id=%s' % ref['region_id'])
self.assertValidEndpointListResponse(response, ref=ref)
for endpoint in response.json['endpoints']:
self.assertEqual(ref['region_id'], endpoint['region_id'])
def test_list_endpoints_filtered_by_parent_region_id(self):
"""Call ``GET /endpoints?region_id={region_id}``.
Ensure that passing the parent_region_id as a filter returns an
empty list.
"""
parent_region = self._create_region_with_parent_id()
parent_region_id = parent_region.result['region']['id']
self._create_random_endpoint(parent_region_id=parent_region_id)
response = self.get('/endpoints?region_id=%s' % parent_region_id)
self.assertEqual(0, len(response.json['endpoints']))
def test_list_endpoints_with_multiple_filters(self):
"""Call ``GET /endpoints?interface={interface}...``.
Ensure that different combinations of interface, region_id and
service_id filters return the correct result.
"""
# interface and region_id specified
ref = self._create_random_endpoint(interface='internal')
response = self.get('/endpoints?interface=%s&region_id=%s' %
(ref['interface'], ref['region_id']))
self.assertValidEndpointListResponse(response, ref=ref)
for endpoint in response.json['endpoints']:
self.assertEqual(ref['interface'], endpoint['interface'])
self.assertEqual(ref['region_id'], endpoint['region_id'])
# interface and service_id specified
ref = self._create_random_endpoint(interface='internal')
response = self.get('/endpoints?interface=%s&service_id=%s' %
(ref['interface'], ref['service_id']))
self.assertValidEndpointListResponse(response, ref=ref)
for endpoint in response.json['endpoints']:
self.assertEqual(ref['interface'], endpoint['interface'])
self.assertEqual(ref['service_id'], endpoint['service_id'])
# region_id and service_id specified
ref = self._create_random_endpoint(interface='internal')
response = self.get('/endpoints?region_id=%s&service_id=%s' %
(ref['region_id'], ref['service_id']))
self.assertValidEndpointListResponse(response, ref=ref)
for endpoint in response.json['endpoints']:
self.assertEqual(ref['region_id'], endpoint['region_id'])
self.assertEqual(ref['service_id'], endpoint['service_id'])
# interface, region_id and service_id specified
ref = self._create_random_endpoint(interface='internal')
response = self.get(('/endpoints?interface=%s&region_id=%s'
'&service_id=%s') %
(ref['interface'], ref['region_id'],
ref['service_id']))
self.assertValidEndpointListResponse(response, ref=ref)
for endpoint in response.json['endpoints']:
self.assertEqual(ref['interface'], endpoint['interface'])
self.assertEqual(ref['region_id'], endpoint['region_id'])
self.assertEqual(ref['service_id'], endpoint['service_id'])
def test_list_endpoints_with_random_filter_values(self):
"""Call ``GET /endpoints?interface={interface}...``.
Ensure that random values for interface, region_id and
service_id return an empty list.
"""
self._create_random_endpoint(interface='internal')
response = self.get('/endpoints?interface=%s' % uuid.uuid4().hex)
self.assertEqual(0, len(response.json['endpoints']))
response = self.get('/endpoints?region_id=%s' % uuid.uuid4().hex)
self.assertEqual(0, len(response.json['endpoints']))
response = self.get('/endpoints?service_id=%s' % uuid.uuid4().hex)
self.assertEqual(0, len(response.json['endpoints']))
def test_create_endpoint_no_enabled(self):
"""Call ``POST /endpoints``."""
ref = self.new_endpoint_ref(service_id=self.service_id)
r = self.post(
'/endpoints',
body={'endpoint': ref})
ref['enabled'] = True
self.assertValidEndpointResponse(r, ref)
def test_create_endpoint_enabled_true(self):
"""Call ``POST /endpoints`` with enabled: true."""
ref = self.new_endpoint_ref(service_id=self.service_id,
enabled=True)
r = self.post(
'/endpoints',
body={'endpoint': ref})
self.assertValidEndpointResponse(r, ref)
def test_create_endpoint_enabled_false(self):
"""Call ``POST /endpoints`` with enabled: false."""
ref = self.new_endpoint_ref(service_id=self.service_id,
enabled=False)
r = self.post(
'/endpoints',
body={'endpoint': ref})
self.assertValidEndpointResponse(r, ref)
def test_create_endpoint_enabled_str_true(self):
"""Call ``POST /endpoints`` with enabled: 'True'."""
ref = self.new_endpoint_ref(service_id=self.service_id,
enabled='True')
self.post(
'/endpoints',
body={'endpoint': ref},
expected_status=http_client.BAD_REQUEST)
def test_create_endpoint_enabled_str_false(self):
"""Call ``POST /endpoints`` with enabled: 'False'."""
ref = self.new_endpoint_ref(service_id=self.service_id,
enabled='False')
self.post(
'/endpoints',
body={'endpoint': ref},
expected_status=http_client.BAD_REQUEST)
def test_create_endpoint_enabled_str_random(self):
"""Call ``POST /endpoints`` with enabled: 'puppies'."""
ref = self.new_endpoint_ref(service_id=self.service_id,
enabled='puppies')
self.post(
'/endpoints',
body={'endpoint': ref},
expected_status=http_client.BAD_REQUEST)
def test_create_endpoint_with_invalid_region_id(self):
"""Call ``POST /endpoints``."""
ref = self.new_endpoint_ref(service_id=self.service_id)
ref["region_id"] = uuid.uuid4().hex
self.post('/endpoints', body={'endpoint': ref},
expected_status=http_client.BAD_REQUEST)
def test_create_endpoint_with_region(self):
"""EndpointV3 creates the region before creating the endpoint, if
endpoint is provided with 'region' and no 'region_id'
"""
ref = self.new_endpoint_ref(service_id=self.service_id)
ref["region"] = uuid.uuid4().hex
ref.pop('region_id')
self.post('/endpoints', body={'endpoint': ref})
# Make sure the region is created
self.get('/regions/%(region_id)s' % {
'region_id': ref["region"]})
def test_create_endpoint_with_no_region(self):
"""EndpointV3 allows to creates the endpoint without region."""
ref = self.new_endpoint_ref(service_id=self.service_id)
ref.pop('region_id')
self.post('/endpoints', body={'endpoint': ref})
def test_create_endpoint_with_empty_url(self):
"""Call ``POST /endpoints``."""
ref = self.new_endpoint_ref(service_id=self.service_id)
ref["url"] = ''
self.post('/endpoints', body={'endpoint': ref},
expected_status=http_client.BAD_REQUEST)
def test_get_endpoint(self):
"""Call ``GET /endpoints/{endpoint_id}``."""
r = self.get(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id})
self.assertValidEndpointResponse(r, self.endpoint)
def test_update_endpoint(self):
"""Call ``PATCH /endpoints/{endpoint_id}``."""
ref = self.new_endpoint_ref(service_id=self.service_id)
del ref['id']
r = self.patch(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id},
body={'endpoint': ref})
ref['enabled'] = True
self.assertValidEndpointResponse(r, ref)
def test_update_endpoint_enabled_true(self):
"""Call ``PATCH /endpoints/{endpoint_id}`` with enabled: True."""
r = self.patch(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id},
body={'endpoint': {'enabled': True}})
self.assertValidEndpointResponse(r, self.endpoint)
def test_update_endpoint_enabled_false(self):
"""Call ``PATCH /endpoints/{endpoint_id}`` with enabled: False."""
r = self.patch(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id},
body={'endpoint': {'enabled': False}})
exp_endpoint = copy.copy(self.endpoint)
exp_endpoint['enabled'] = False
self.assertValidEndpointResponse(r, exp_endpoint)
def test_update_endpoint_enabled_str_true(self):
"""Call ``PATCH /endpoints/{endpoint_id}`` with enabled: 'True'."""
self.patch(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id},
body={'endpoint': {'enabled': 'True'}},
expected_status=http_client.BAD_REQUEST)
def test_update_endpoint_enabled_str_false(self):
"""Call ``PATCH /endpoints/{endpoint_id}`` with enabled: 'False'."""
self.patch(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id},
body={'endpoint': {'enabled': 'False'}},
expected_status=http_client.BAD_REQUEST)
def test_update_endpoint_enabled_str_random(self):
"""Call ``PATCH /endpoints/{endpoint_id}`` with enabled: 'kitties'."""
self.patch(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id},
body={'endpoint': {'enabled': 'kitties'}},
expected_status=http_client.BAD_REQUEST)
def test_delete_endpoint(self):
"""Call ``DELETE /endpoints/{endpoint_id}``."""
self.delete(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id})
def test_create_endpoint_on_v2(self):
# clear the v3 endpoint so we only have endpoints created on v2
self.delete(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id})
# create a v3 endpoint ref, and then tweak it back to a v2-style ref
ref = self.new_endpoint_ref(service_id=self.service['id'])
del ref['id']
del ref['interface']
ref['publicurl'] = ref.pop('url')
ref['internalurl'] = None
ref['region'] = ref['region_id']
del ref['region_id']
# don't set adminurl to ensure its absence is handled like internalurl
# create the endpoint on v2 (using a v3 token)
r = self.admin_request(
method='POST',
path='/v2.0/endpoints',
token=self.get_scoped_token(),
body={'endpoint': ref})
endpoint_v2 = r.result['endpoint']
# test the endpoint on v3
r = self.get('/endpoints')
endpoints = self.assertValidEndpointListResponse(r)
self.assertEqual(1, len(endpoints))
endpoint_v3 = endpoints.pop()
# these attributes are identical between both APIs
self.assertEqual(ref['region'], endpoint_v3['region_id'])
self.assertEqual(ref['service_id'], endpoint_v3['service_id'])
self.assertEqual(ref['description'], endpoint_v3['description'])
# a v2 endpoint is not quite the same concept as a v3 endpoint, so they
# receive different identifiers
self.assertNotEqual(endpoint_v2['id'], endpoint_v3['id'])
# v2 has a publicurl; v3 has a url + interface type
self.assertEqual(ref['publicurl'], endpoint_v3['url'])
self.assertEqual('public', endpoint_v3['interface'])
# tests for bug 1152632 -- these attributes were being returned by v3
self.assertNotIn('publicurl', endpoint_v3)
self.assertNotIn('adminurl', endpoint_v3)
self.assertNotIn('internalurl', endpoint_v3)
# test for bug 1152635 -- this attribute was being returned by v3
self.assertNotIn('legacy_endpoint_id', endpoint_v3)
self.assertEqual(endpoint_v2['region'], endpoint_v3['region_id'])
def test_deleting_endpoint_with_space_in_url(self):
# create a v3 endpoint ref
ref = self.new_endpoint_ref(service_id=self.service['id'])
# add a space to all urls (intentional "i d" to test bug)
url_with_space = "http://127.0.0.1:8774 /v1.1/\$(tenant_i d)s"
ref['publicurl'] = url_with_space
ref['internalurl'] = url_with_space
ref['adminurl'] = url_with_space
ref['url'] = url_with_space
# add the endpoint to the database
self.catalog_api.create_endpoint(ref['id'], ref)
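# note: calling catalog_api directly presumably skips the HTTP-layer URL
# validation that would otherwise reject this malformed URL on POST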
# delete the endpoint
self.delete('/endpoints/%s' % ref['id'])
# make sure it's deleted (GET should return Not Found)
self.get('/endpoints/%s' % ref['id'],
expected_status=http_client.NOT_FOUND)
def test_endpoint_create_with_valid_url(self):
"""Create endpoint with valid url should be tested,too."""
# list one valid url is enough, no need to list too much
valid_url = 'http://127.0.0.1:8774/v1.1/$(tenant_id)s'
ref = self.new_endpoint_ref(self.service_id)
ref['url'] = valid_url
self.post('/endpoints', body={'endpoint': ref})
def test_endpoint_create_with_invalid_url(self):
"""Test the invalid cases: substitutions is not exactly right.
"""
invalid_urls = [
# using a substitution that is not whitelisted - KeyError
'http://127.0.0.1:8774/v1.1/$(nonexistent)s',
# invalid formatting - ValueError
'http://127.0.0.1:8774/v1.1/$(tenant_id)',
'http://127.0.0.1:8774/v1.1/$(tenant_id)t',
'http://127.0.0.1:8774/v1.1/$(tenant_id',
# invalid type specifier - TypeError
# admin_url is a string not an int
'http://127.0.0.1:8774/v1.1/$(admin_url)d',
]
ref = self.new_endpoint_ref(self.service_id)
for invalid_url in invalid_urls:
ref['url'] = invalid_url
self.post('/endpoints',
body={'endpoint': ref},
expected_status=http_client.BAD_REQUEST)
class TestCatalogAPISQL(unit.TestCase):
"""Tests for the catalog Manager against the SQL backend.
"""
def setUp(self):
super(TestCatalogAPISQL, self).setUp()
self.useFixture(database.Database())
self.catalog_api = catalog.Manager()
self.service_id = uuid.uuid4().hex
service = {'id': self.service_id, 'name': uuid.uuid4().hex}
self.catalog_api.create_service(self.service_id, service)
endpoint = self.new_endpoint_ref(service_id=self.service_id)
self.catalog_api.create_endpoint(endpoint['id'], endpoint)
def config_overrides(self):
super(TestCatalogAPISQL, self).config_overrides()
self.config_fixture.config(group='catalog', driver='sql')
def new_endpoint_ref(self, service_id):
return {
'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex,
'description': uuid.uuid4().hex,
'interface': uuid.uuid4().hex[:8],
'service_id': service_id,
'url': uuid.uuid4().hex,
'region': uuid.uuid4().hex,
}
def test_get_catalog_ignores_endpoints_with_invalid_urls(self):
user_id = uuid.uuid4().hex
tenant_id = uuid.uuid4().hex
# the only endpoint in the catalog is the one created in setUp
catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
self.assertEqual(1, len(catalog[0]['endpoints']))
# it's also the only endpoint in the backend
self.assertEqual(1, len(self.catalog_api.list_endpoints()))
# create a new, invalid endpoint - malformed type declaration
ref = self.new_endpoint_ref(self.service_id)
ref['url'] = 'http://keystone/%(tenant_id)'
self.catalog_api.create_endpoint(ref['id'], ref)
# create a new, invalid endpoint - nonexistent key
ref = self.new_endpoint_ref(self.service_id)
ref['url'] = 'http://keystone/%(you_wont_find_me)s'
self.catalog_api.create_endpoint(ref['id'], ref)
# verify that the invalid endpoints don't appear in the catalog
catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
self.assertEqual(1, len(catalog[0]['endpoints']))
# all three appear in the backend
self.assertEqual(3, len(self.catalog_api.list_endpoints()))
# create another valid endpoint - tenant_id will be replaced
ref = self.new_endpoint_ref(self.service_id)
ref['url'] = 'http://keystone/%(tenant_id)s'
self.catalog_api.create_endpoint(ref['id'], ref)
# there are two valid endpoints, positive check
catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
self.assertThat(catalog[0]['endpoints'], matchers.HasLength(2))
# If there is no tenant_id to substitute into the URL, the endpoint
# containing that substitution is skipped (negative check).
catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id=None)
self.assertThat(catalog[0]['endpoints'], matchers.HasLength(1))
def test_get_catalog_always_returns_service_name(self):
user_id = uuid.uuid4().hex
tenant_id = uuid.uuid4().hex
# create a service, with a name
named_svc = {
'id': uuid.uuid4().hex,
'type': uuid.uuid4().hex,
'name': uuid.uuid4().hex,
}
self.catalog_api.create_service(named_svc['id'], named_svc)
endpoint = self.new_endpoint_ref(service_id=named_svc['id'])
self.catalog_api.create_endpoint(endpoint['id'], endpoint)
# create a service, with no name
unnamed_svc = {
'id': uuid.uuid4().hex,
'type': uuid.uuid4().hex
}
self.catalog_api.create_service(unnamed_svc['id'], unnamed_svc)
endpoint = self.new_endpoint_ref(service_id=unnamed_svc['id'])
self.catalog_api.create_endpoint(endpoint['id'], endpoint)
catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
named_endpoint = [ep for ep in catalog
if ep['type'] == named_svc['type']][0]
self.assertEqual(named_svc['name'], named_endpoint['name'])
unnamed_endpoint = [ep for ep in catalog
if ep['type'] == unnamed_svc['type']][0]
self.assertEqual('', unnamed_endpoint['name'])
# TODO(dstanek): this needs refactoring with the test above, but we are in a
# crunch so that will happen in a future patch.
class TestCatalogAPISQLRegions(unit.TestCase):
"""Tests for the catalog Manager against the SQL backend.
"""
def setUp(self):
super(TestCatalogAPISQLRegions, self).setUp()
self.useFixture(database.Database())
self.catalog_api = catalog.Manager()
def config_overrides(self):
super(TestCatalogAPISQLRegions, self).config_overrides()
self.config_fixture.config(group='catalog', driver='sql')
def new_endpoint_ref(self, service_id):
return {
'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex,
'description': uuid.uuid4().hex,
'interface': uuid.uuid4().hex[:8],
'service_id': service_id,
'url': uuid.uuid4().hex,
'region_id': uuid.uuid4().hex,
}
def test_get_catalog_returns_proper_endpoints_with_no_region(self):
service_id = uuid.uuid4().hex
service = {'id': service_id, 'name': uuid.uuid4().hex}
self.catalog_api.create_service(service_id, service)
endpoint = self.new_endpoint_ref(service_id=service_id)
del endpoint['region_id']
self.catalog_api.create_endpoint(endpoint['id'], endpoint)
user_id = uuid.uuid4().hex
tenant_id = uuid.uuid4().hex
catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
self.assertValidCatalogEndpoint(
catalog[0]['endpoints'][0], ref=endpoint)
def test_get_catalog_returns_proper_endpoints_with_region(self):
service_id = uuid.uuid4().hex
service = {'id': service_id, 'name': uuid.uuid4().hex}
self.catalog_api.create_service(service_id, service)
endpoint = self.new_endpoint_ref(service_id=service_id)
self.catalog_api.create_region({'id': endpoint['region_id']})
self.catalog_api.create_endpoint(endpoint['id'], endpoint)
endpoint = self.catalog_api.get_endpoint(endpoint['id'])
user_id = uuid.uuid4().hex
tenant_id = uuid.uuid4().hex
catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
self.assertValidCatalogEndpoint(
catalog[0]['endpoints'][0], ref=endpoint)
def assertValidCatalogEndpoint(self, entity, ref=None):
keys = ['description', 'id', 'interface', 'name', 'region_id', 'url']
for k in keys:
self.assertEqual(ref.get(k), entity[k], k)
self.assertEqual(entity['region_id'], entity['region'])
"""User input parameter validation.
This module handles user input parameter validation
against a provided input model.
Note that the objects in this module do *not* mutate any
arguments. No type conversion happens here. It is up to another
layer to properly convert arguments to any required types.
Validation Errors
-----------------
"""
import six
import decimal
from datetime import datetime
from .utils import parse_timestamp
from .exceptions import ParamValidationError
def validate_parameters(params, shape):
"""Validates input parameters against a schema.
This is a convenience function that validates parameters against a schema.
You can also instantiate and use the ParamValidator class directly if you
want more control.
If there are any validation errors then a ParamValidationError
will be raised. If there are no validation errors then no exception
is raised and a value of None is returned.
:param params: The user provided input parameters.
:type shape: botocore.model.Shape
:param shape: The schema which the input parameters should
adhere to.
:raise: ParamValidationError
"""
validator = ParamValidator()
report = validator.validate(params, shape)
if report.has_errors():
raise ParamValidationError(report=report.generate_report())
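# A hedged usage sketch (not part of this module's API): ParamValidator only
# needs an object exposing ``type_name``, ``metadata`` and, for structures,
# ``members``, so a tiny stand-in shape is enough to see validation work.
# The _Demo* classes below are assumptions made purely for illustration.
#
# class _DemoStringShape(object):
#     type_name = 'string'
#     metadata = {'min': 1, 'max': 10}
#
# class _DemoStructShape(object):
#     type_name = 'structure'
#     metadata = {'required': ['Name']}
#     members = {'Name': _DemoStringShape()}
#
# validate_parameters({'Name': 'ok'}, _DemoStructShape())  # passes
# validate_parameters({}, _DemoStructShape())  # raises ParamValidationError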
def type_check(valid_types):
def _create_type_check_guard(func):
def _on_passes_type_check(self, param, shape, errors, name):
if _type_check(param, errors, name):
return func(self, param, shape, errors, name)
def _type_check(param, errors, name):
if not isinstance(param, valid_types):
valid_type_names = [six.text_type(t) for t in valid_types]
errors.report(name, 'invalid type', param=param,
valid_types=valid_type_names)
return False
return True
return _on_passes_type_check
return _create_type_check_guard
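# A hedged sketch of how ``type_check`` is meant to be applied: it wraps a
# validator method so the body only runs when ``param`` already has one of
# the accepted types; otherwise an 'invalid type' error is reported and the
# wrapped validator is skipped. Illustrative only:
#
# @type_check(valid_types=(dict,))
# def _validate_map(self, param, shape, errors, name):
#     ...  # only reached when param is a dict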
def range_check(name, value, shape, error_type, errors):
failed = False
min_allowed = float('-inf')
max_allowed = float('inf')
if 'min' in shape.metadata:
min_allowed = shape.metadata['min']
if value < min_allowed:
failed = True
if 'max' in shape.metadata:
max_allowed = shape.metadata['max']
if value > max_allowed:
failed = True
if failed:
errors.report(name, error_type, param=value,
valid_range=[min_allowed, max_allowed])
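# Illustration under an assumed shape: with shape.metadata == {'min': 1,
# 'max': 3}, range_check(name, 5, shape, 'invalid range', errors) reports
# an 'invalid range' error with valid_range [1, 3]; a value of 2 passes.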
class ValidationErrors(object):
def __init__(self):
self._errors = []
def has_errors(self):
if self._errors:
return True
return False
def generate_report(self):
error_messages = []
for error in self._errors:
error_messages.append(self._format_error(error))
return '\n'.join(error_messages)
def _format_error(self, error):
error_type, name, additional = error
name = self._get_name(name)
if error_type == 'missing required field':
return 'Missing required parameter in %s: "%s"' % (
name, additional['required_name'])
elif error_type == 'unknown field':
return 'Unknown parameter in %s: "%s", must be one of: %s' % (
name, additional['unknown_param'], ', '.join(additional['valid_names']))
elif error_type == 'invalid type':
return 'Invalid type for parameter %s, value: %s, type: %s, valid types: %s' % (
name, additional['param'],
str(type(additional['param'])),
', '.join(additional['valid_types']))
elif error_type == 'invalid range':
min_allowed = additional['valid_range'][0]
max_allowed = additional['valid_range'][1]
return ('Invalid range for parameter %s, value: %s, valid range: '
'%s-%s' % (name, additional['param'],
min_allowed, max_allowed))
elif error_type == 'invalid length':
min_allowed = additional['valid_range'][0]
max_allowed = additional['valid_range'][1]
return ('Invalid length for parameter %s, value: %s, valid range: '
'%s-%s' % (name, additional['param'],
min_allowed, max_allowed))
def _get_name(self, name):
if not name:
return 'input'
elif name.startswith('.'):
return name[1:]
else:
return name
def report(self, name, reason, **kwargs):
self._errors.append((reason, name, kwargs))
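# A minimal sketch of the error-collection flow, using only names defined
# in this module:
#
# errors = ValidationErrors()
# errors.report('Foo.Bar', 'invalid range', param=5, valid_range=[1, 3])
# if errors.has_errors():
#     print(errors.generate_report())
# # -> Invalid range for parameter Foo.Bar, value: 5, valid range: 1-3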
class ParamValidator(object):
"""Validates parameters against a shape model."""
def validate(self, params, shape):
"""Validate parameters against a shape model.
This method will validate the parameters against a provided shape model.
All errors will be collected before returning to the caller. This means
that this method will not stop at the first error; it will collect all
possible errors.
:param params: User provided dict of parameters
:param shape: A shape model describing the expected input.
:return: A ValidationErrors instance containing any errors found.
"""
errors = ValidationErrors()
self._validate(params, shape, errors, name='')
return errors
def _validate(self, params, shape, errors, name):
getattr(self, '_validate_%s' % shape.type_name)(params, shape, errors, name)
@type_check(valid_types=(dict,))
def _validate_structure(self, params, shape, errors, name):
# Validate required fields.
for required_member in shape.metadata.get('required', []):
if required_member not in params:
errors.report(name, 'missing required field',
required_name=required_member, user_params=params)
members = shape.members
known_params = []
# Validate known params.
for param in params:
if param not in members:
errors.report(name, 'unknown field', unknown_param=param,
valid_names=list(members))
else:
known_params.append(param)
# Validate structure members.
for param in known_params:
self._validate(params[param], shape.members[param],
errors, '%s.%s' % (name, param))
@type_check(valid_types=six.string_types)
def _validate_string(self, param, shape, errors, name):
# Validate range. For a string, the min/max constraints
# are of the string length.
# Looks like:
# "WorkflowId":{
# "type":"string",
# "min":1,
# "max":256
# }
range_check(name, len(param), shape, 'invalid length', errors)
@type_check(valid_types=(list, tuple))
def _validate_list(self, param, shape, errors, name):
member_shape = shape.member
range_check(name, len(param), shape, 'invalid length', errors)
for i, item in enumerate(param):
self._validate(item, member_shape, errors, '%s[%s]' % (name, i))
@type_check(valid_types=(dict,))
def _validate_map(self, param, shape, errors, name):
key_shape = shape.key
value_shape = shape.value
for key, value in param.items():
self._validate(key, key_shape, errors, "%s (key: %s)"
% (name, key))
self._validate(value, value_shape, errors, '%s.%s' % (name, key))
@type_check(valid_types=six.integer_types)
def _validate_integer(self, param, shape, errors, name):
range_check(name, param, shape, 'invalid range', errors)
def _validate_blob(self, param, shape, errors, name):
if isinstance(param, (bytes, bytearray, six.text_type)):
return
elif hasattr(param, 'read'):
# File like objects are also allowed for blob types.
return
else:
errors.report(name, 'invalid type', param=param,
valid_types=[str(bytes), str(bytearray),
'file-like object'])
@type_check(valid_types=(bool,))
def _validate_boolean(self, param, shape, errors, name):
pass
@type_check(valid_types=(float, decimal.Decimal) + six.integer_types)
def _validate_double(self, param, shape, errors, name):
range_check(name, param, shape, 'invalid range', errors)
_validate_float = _validate_double
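# Floats and doubles share the same validation rules, so the float
# validator is simply an alias for the double validator above.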
@type_check(valid_types=six.integer_types)
def _validate_long(self, param, shape, errors, name):
range_check(name, param, shape, 'invalid range', errors)
def _validate_timestamp(self, param, shape, errors, name):
# We don't use @type_check because datetimes are a bit
# more flexible. You can either provide a datetime
# object, or a string that parses to a datetime.
is_valid_type = self._type_check_datetime(param)
if not is_valid_type:
valid_type_names = [six.text_type(datetime), 'timestamp-string']
errors.report(name, 'invalid type', param=param,
valid_types=valid_type_names)
def _type_check_datetime(self, value):
if isinstance(value, datetime):
return True
try:
parse_timestamp(value)
return True
except (TypeError, ValueError):
return False
class ParamValidationDecorator(object):
def __init__(self, param_validator, serializer):
self._param_validator = param_validator
self._serializer = serializer
def serialize_to_request(self, parameters, operation_model):
input_shape = operation_model.input_shape
if input_shape is not None:
report = self._param_validator.validate(parameters,
operation_model.input_shape)
if report.has_errors():
raise ParamValidationError(report=report.generate_report())
return self._serializer.serialize_to_request(parameters,
operation_model)
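# A hedged sketch of wiring the decorator in front of a serializer; the
# ``serializer``, ``params`` and ``operation_model`` objects are assumed
# here (in botocore they come from the serialize and model layers):
#
# validating = ParamValidationDecorator(ParamValidator(), serializer)
# request = validating.serialize_to_request(params, operation_model)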
# -*- encoding: utf-8 -*-
# This file is part of the GBI project.
# Copyright (C) 2012 Omniscale GmbH & Co. KG <http://omniscale.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from flask import render_template, abort, flash, g, request, redirect, url_for, Blueprint, jsonify, current_app
from flaskext.babel import _
from ...model.sources import LocalWMTSSource
from geobox.model import ExternalWMTSSource, ExternalWFSSource
from geobox.web.forms import RasterSourceForm, WMSForm, UnlockRasterSourceForm
from geobox.lib.capabilities import parse_capabilities_url
from geobox.lib.coverage import llbbox_to_geojson
from geobox.lib.couchdb import VectorCouchDB
raster = Blueprint('raster', __name__)
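# A hedged usage note: this blueprint only becomes active once it is
# registered on a Flask application elsewhere in the project, e.g.:
#
# app.register_blueprint(raster)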
@raster.route('/admin/raster/list', methods=["GET"])
def raster_list():
external_sources = g.db.query(ExternalWMTSSource).filter_by(is_user_defined=False).filter_by(active=True).all()
external_wfs_sources = g.db.query(ExternalWFSSource).filter_by(active=True).all()
user_sources = g.db.query(ExternalWMTSSource).filter_by(is_user_defined=True).filter_by(background_layer=False).all()
local_sources = g.db.query(LocalWMTSSource).all()
return render_template('admin/external_raster_list.html', external_sources=external_sources,
user_sources=user_sources, external_wfs_sources=external_wfs_sources, local_sources=local_sources)
@raster.route('/admin/<_type>/unlock/<int:id>', methods=["GET", "POST"])
def unlock_source(_type, id):
if _type == 'wmts':
source = g.db.query(ExternalWMTSSource).filter_by(id=id).first()
elif _type == 'wfs':
source = g.db.query(ExternalWFSSource).filter_by(id=id).first()
else:
source = None
if source is None:
abort(404)
form = UnlockRasterSourceForm(request.form, source)
if form.validate_on_submit():
source.username = form.data['username']
source.password = form.data['password']
flash(_('update WMTS'), 'success')
g.db.commit()
return redirect(url_for('.raster_list'))
return render_template('admin/external_unlock.html', source=source, form=form)
@raster.route('/admin/wms/edit', methods=["GET", "POST"])
@raster.route('/admin/wms/edit/<int:id>', methods=["GET", "POST"])
def wms_edit(id=None):
wms = g.db.query(ExternalWMTSSource).filter_by(id=id).first() if id else None
if wms and not wms.is_user_defined:
abort(404)
edit_mode = bool(wms)
form = WMSForm(request.form, wms)
if form.validate_on_submit():
llbbox = form.data['llbbox']
try:
bbox_coverage = llbbox_to_geojson(llbbox)
except ValueError:
try:
bbox_coverage = json.dumps(json.loads(llbbox))
except ValueError:
flash(_('invalid bbox'), 'error')
return render_template('admin/external_wms.html', form=form, edit_mode=edit_mode)
if not wms:
wms = ExternalWMTSSource(
name=form.data['name'],
title=form.data['title'],
url=form.data['url'],
layer=form.data['layer'],
format=form.data['format'],
srs=form.data['srs'],
username=form.data['username'],
password=form.data['password'],
prefix='local',
is_user_defined=True,
source_type='wms',
download_level_start=0,
download_level_end=20,
active=True,
download_coverage=bbox_coverage,
)
g.db.add(wms)
flash(_('Save local WMS'), 'success')
else:
wms.name = form.data['name']
wms.title = form.data['title']
wms.url = form.data['url']
wms.layer = form.data['layer']
wms.format = form.data['format']
wms.version = form.data['version']
wms.srs = form.data['srs']
wms.username = form.data['username']
wms.password = form.data['password']
wms.prefix = 'local'
wms.active = True
wms.download_coverage = bbox_coverage
flash(_('update WMS'), 'success')
g.db.commit()
return redirect(url_for('.raster_list'))
if wms:
form.llbbox.data = wms.download_coverage
return render_template('admin/external_wms.html', form=form, edit_mode=edit_mode)
@raster.route('/admin/wms/capabilities', methods=["GET"])
def wms_capabilities():
url = request.args.get('url', False)
if not url:
return jsonify(error=_('Need url for capabilities'))
try:
data = parse_capabilities_url(url)
except Exception:
data = {'error': 'could not load capabilities'}
return jsonify(data=data)
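# Illustrative request (host and URL are assumptions): a GET to
# /admin/wms/capabilities?url=http://example.com/service returns JSON of
# the form {"data": {...}} on success or {"data": {"error": ...}} when the
# capabilities document cannot be loaded.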
@raster.route('/admin/wmts/edit', methods=["GET", "POST"])
@raster.route('/admin/wmts/edit/<int:id>', methods=["GET", "POST"])
def wmts_edit(id=None):
wmts = g.db.query(ExternalWMTSSource).filter_by(id=id).first() if id else None
if wmts and not wmts.is_user_defined:
abort(404)
form = RasterSourceForm(request.form, wmts)
if form.validate_on_submit():
llbbox = form.data['llbbox']
try:
bbox_coverage = llbbox_to_geojson(llbbox)
except ValueError:
try:
bbox_coverage = json.dumps(json.loads(llbbox))
except ValueError:
flash(_('invalid bbox'), 'error')
return render_template('admin/external_wmts.html', form=form)
if not wmts:
wmts = ExternalWMTSSource(
name=form.data['name'],
title=form.data['title'],
url=form.data['url'],
format=form.data['format'],
username=form.data['username'],
password=form.data['password'],
is_user_defined=True,
prefix='local',
source_type='wmts',
download_level_start=0,
download_level_end=20,
download_coverage=bbox_coverage,
active=True,
)
g.db.add(wmts)
flash(_('Save local WMTS'), 'success')
else:
wmts.name = form.data['name']
wmts.title = form.data['title']
wmts.url = form.data['url']
wmts.format = form.data['format']
wmts.username = form.data['username']
wmts.password = form.data['password']
wmts.download_coverage = bbox_coverage
wmts.prefix = 'local'
flash(_('update WMTS'), 'success')
g.db.commit()
return redirect(url_for('.raster_list'))
if wmts:
form.llbbox.data = wmts.download_coverage
return render_template('admin/external_wmts.html', form=form)
@raster.route('/admin/localraster/remove/<int:id>', methods=["GET", "POST"])
def local_raster_remove(id):
raster_source = g.db.query(LocalWMTSSource).with_polymorphic('*').filter_by(id=id).first()
if not raster_source:
abort(404)
couch_url = 'http://%s:%s' % ('127.0.0.1', current_app.config.geobox_state.config.get('couchdb', 'port'))
couch = VectorCouchDB(couch_url, raster_source.name, raster_source.name)
couch.delete_db()
g.db.delete(raster_source)
g.db.commit()
flash(_('local source deleted successfully'), 'success')
return redirect(url_for('.raster_list'))
@raster.route('/admin/raster/remove/<int:id>', methods=["GET", "POST"])
def raster_remove(id):
raster_source = g.db.query(ExternalWMTSSource).with_polymorphic('*').filter_by(id=id).filter_by(is_user_defined=True).first()
if not raster_source:
abort(404)
g.db.delete(raster_source)
g.db.commit()
flash(_('source deleted successfully'), 'success')
return redirect(url_for('.raster_list'))