hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f72baab36e3a21c7ba43e643724c016ec8ee0837 | 8,858 | py | Python | cinder/openstack/common/config/generator.py | sombrafam/cinder | 60ffb0eedf41b56f3c6af5b301400ced95762194 | [
"Apache-2.0"
] | null | null | null | cinder/openstack/common/config/generator.py | sombrafam/cinder | 60ffb0eedf41b56f3c6af5b301400ced95762194 | [
"Apache-2.0"
] | null | null | null | cinder/openstack/common/config/generator.py | sombrafam/cinder | 60ffb0eedf41b56f3c6af5b301400ced95762194 | [
"Apache-2.0"
] | null | null | null | # Copyright 2012 SINA Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Extracts OpenStack config option info from module(s)."""
from __future__ import print_function
import imp
import os
import re
import socket
import sys
import textwrap
from oslo.config import cfg
import six
from cinder.openstack.common import gettextutils
from cinder.openstack.common import importutils
gettextutils.install('cinder')
# oslo.config option class names; matched against str(type(opt)) by
# OPTION_REGEX below to classify each discovered option.
STROPT = "StrOpt"
BOOLOPT = "BoolOpt"
INTOPT = "IntOpt"
FLOATOPT = "FloatOpt"
LISTOPT = "ListOpt"
MULTISTROPT = "MultiStrOpt"
# Human-readable type labels appended to each option's help comment.
OPT_TYPES = {
    STROPT: 'string value',
    BOOLOPT: 'boolean value',
    INTOPT: 'integer value',
    FLOATOPT: 'floating point value',
    LISTOPT: 'list value',
    MULTISTROPT: 'multi valued',
}
# Matches any of the known option class names inside str(type(opt)).
OPTION_REGEX = re.compile(r"(%s)" % "|".join([STROPT, BOOLOPT, INTOPT,
                                              FLOATOPT, LISTOPT,
                                              MULTISTROPT]))
PY_EXT = ".py"
# Project root (four directory levels above this file); used by
# _sanitize_default() to relocate defaults embedding the source tree path.
BASEDIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                       "../../../../"))
# Wrap width for the generated help comments.
WORDWRAP_WIDTH = 60
def generate(srcfiles):
    """Print a sample config file built from the options in *srcfiles*.

    :param srcfiles: list of relative source file paths (e.g.
        ``cinder/foo/bar.py``); each is imported and scanned for
        oslo.config options, which are printed grouped by section.
    :raises RuntimeError: if one of the modules cannot be imported.
    """
    mods_by_pkg = dict()
    for filepath in srcfiles:
        pkg_name = filepath.split(os.sep)[1]
        mod_str = '.'.join(['.'.join(filepath.split(os.sep)[:-1]),
                            os.path.basename(filepath).split('.')[0]])
        mods_by_pkg.setdefault(pkg_name, list()).append(mod_str)
    # NOTE(lzyeval): place top level modules before packages
    # sorted() materializes the results immediately; the previous
    # filter(...).sort() idiom breaks on Python 3, where filter()
    # returns an iterator that has no sort() method.
    pkg_names = sorted(x for x in mods_by_pkg.keys() if x.endswith(PY_EXT))
    ext_names = sorted(x for x in mods_by_pkg.keys() if x not in pkg_names)
    pkg_names.extend(ext_names)
    # opts_by_group is a mapping of group name to an options list
    # The options list is a list of (module, options) tuples
    opts_by_group = {'DEFAULT': []}
    extra_modules = os.getenv("OSLO_CONFIG_GENERATOR_EXTRA_MODULES", "")
    if extra_modules:
        for module_name in extra_modules.split(','):
            module_name = module_name.strip()
            module = _import_module(module_name)
            if module:
                for group, opts in _list_opts(module):
                    opts_by_group.setdefault(group, []).append((module_name,
                                                                opts))
    for pkg_name in pkg_names:
        mods = mods_by_pkg.get(pkg_name)
        mods.sort()
        for mod_str in mods:
            if mod_str.endswith('.__init__'):
                mod_str = mod_str[:mod_str.rfind(".")]
            mod_obj = _import_module(mod_str)
            if not mod_obj:
                raise RuntimeError("Unable to import module %s" % mod_str)
            for group, opts in _list_opts(mod_obj):
                opts_by_group.setdefault(group, []).append((mod_str, opts))
    # DEFAULT always comes first; the remaining groups follow in dict order.
    print_group_opts('DEFAULT', opts_by_group.pop('DEFAULT', []))
    for group, opts in opts_by_group.items():
        print_group_opts(group, opts)
def _import_module(mod_str):
    """Import *mod_str* and return the module object, or None on failure.

    Names prefixed with ``bin.`` are loaded as scripts from the ``bin``
    directory; anything else goes through importutils.
    """
    try:
        if not mod_str.startswith('bin.'):
            return importutils.import_module(mod_str)
        script_name = mod_str[4:]
        imp.load_source(script_name, os.path.join('bin', script_name))
        return sys.modules[script_name]
    except Exception as e:
        sys.stderr.write("Error importing module %s: %s\n" % (mod_str, str(e)))
        return None
def _is_in_group(opt, group):
    """Return True if option *opt* is registered in option group *group*."""
    return any(info['opt'] == opt for info in group._opts.values())
def _guess_groups(opt, mod_obj):
    """Return the name of the config group *opt* is registered under.

    Searches the global cfg.CONF first (the DEFAULT group), then every
    named group registered on it.  Raises RuntimeError when the option
    cannot be located anywhere.
    """
    # is it in the DEFAULT group?
    if _is_in_group(opt, cfg.CONF):
        return 'DEFAULT'
    # what other groups is it in?
    for key, value in cfg.CONF.items():
        if isinstance(value, cfg.CONF.GroupAttr):
            if _is_in_group(opt, value._group):
                return value._group.name
    raise RuntimeError(
        "Unable to find group for option %s, "
        "maybe it's defined twice in the same group?"
        % opt.name
    )
def _list_opts(obj):
    """Collect the cfg.Opt instances exposed by module *obj*.

    Scans every attribute of *obj* for single options or lists of
    options (SubCommandOpt is excluded) and returns ``(group_name,
    options)`` pairs, grouping each option by the config group it is
    registered under.
    """
    def is_opt(candidate):
        return (isinstance(candidate, cfg.Opt) and
                not isinstance(candidate, cfg.SubCommandOpt))
    collected = []
    for attr_name in dir(obj):
        attr_value = getattr(obj, attr_name)
        if is_opt(attr_value):
            collected.append(attr_value)
        elif (isinstance(attr_value, list) and
                all(is_opt(item) for item in attr_value)):
            collected.extend(attr_value)
    grouped = {}
    for opt in collected:
        grouped.setdefault(_guess_groups(opt, obj), []).append(opt)
    return grouped.items()
def print_group_opts(group, opts_by_module):
    """Print one config-file section.

    Emits the ``[group]`` header followed, for each ``(module, options)``
    pair, by a comment banner naming the module and the rendered options.
    """
    print("[%s]" % group)
    print('')
    for mod, opts in opts_by_module:
        for banner_line in ('#', '# Options defined in %s' % mod, '#', ''):
            print(banner_line)
        for opt in opts:
            _print_opt(opt)
        print('')
def _get_my_ip():
    """Return this host's outbound IP address, or None on failure.

    Opens a UDP socket "towards" a public address (no packet is actually
    sent for UDP connect) and reads the local address the OS picked.
    """
    try:
        csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        csock.connect(('8.8.8.8', 80))
        (addr, port) = csock.getsockname()
        csock.close()
        return addr
    except socket.error:
        return None
def _sanitize_default(name, value):
    """Set up a reasonably sensible default for pybasedir, my_ip and host.

    Rewrites environment-specific defaults (interpreter prefix, source
    tree path, local IP, hostname) to generic placeholders, and quotes
    values with leading/trailing whitespace.
    """
    if value.startswith(sys.prefix):
        # NOTE(jd) Don't use os.path.join, because it is likely to think the
        # second part is an absolute pathname and therefore drop the first
        # part.
        return os.path.normpath("/usr/" + value[len(sys.prefix):])
    if value.startswith(BASEDIR):
        return value.replace(BASEDIR, '/usr/lib/python/site-packages')
    if BASEDIR in value:
        return value.replace(BASEDIR, '')
    if value == _get_my_ip():
        return '10.0.0.1'
    if value == socket.gethostname() and 'host' in name:
        return 'cinder'
    if value.strip() != value:
        return '"%s"' % value
    return value
def _print_opt(opt):
    """Print one option as a commented sample config entry.

    Emits the wrapped help text (with the option type appended), any
    deprecated group/name aliases, and a ``#name=default`` line whose
    formatting depends on the option type.  Exits the process on an
    unrecognized option type or a default that fails its type assertion.
    """
    opt_name, opt_default, opt_help = opt.dest, opt.default, opt.help
    if not opt_help:
        sys.stderr.write('WARNING: "%s" is missing help string.\n' % opt_name)
        opt_help = ""
    opt_type = None
    try:
        # The option class name (e.g. "StrOpt") is extracted from the
        # repr of the option's type.
        opt_type = OPTION_REGEX.search(str(type(opt))).group(0)
    except (ValueError, AttributeError) as err:
        sys.stderr.write("%s\n" % str(err))
        sys.exit(1)
    opt_help += ' (' + OPT_TYPES[opt_type] + ')'
    print('#', "\n# ".join(textwrap.wrap(opt_help, WORDWRAP_WIDTH)))
    if opt.deprecated_opts:
        for deprecated_opt in opt.deprecated_opts:
            if deprecated_opt.name:
                deprecated_group = (deprecated_opt.group if
                                    deprecated_opt.group else "DEFAULT")
                print('# Deprecated group/name - [%s]/%s' %
                      (deprecated_group,
                       deprecated_opt.name))
    try:
        # Render the default commented out, formatted per option type.
        if opt_default is None:
            print('#%s=<None>' % opt_name)
        elif opt_type == STROPT:
            assert(isinstance(opt_default, six.string_types))
            print('#%s=%s' % (opt_name, _sanitize_default(opt_name,
                                                          opt_default)))
        elif opt_type == BOOLOPT:
            assert(isinstance(opt_default, bool))
            print('#%s=%s' % (opt_name, str(opt_default).lower()))
        elif opt_type == INTOPT:
            # bool is a subclass of int, so exclude it explicitly.
            assert(isinstance(opt_default, int) and
                   not isinstance(opt_default, bool))
            print('#%s=%s' % (opt_name, opt_default))
        elif opt_type == FLOATOPT:
            assert(isinstance(opt_default, float))
            print('#%s=%s' % (opt_name, opt_default))
        elif opt_type == LISTOPT:
            assert(isinstance(opt_default, list))
            print('#%s=%s' % (opt_name, ','.join(opt_default)))
        elif opt_type == MULTISTROPT:
            assert(isinstance(opt_default, list))
            if not opt_default:
                opt_default = ['']
            for default in opt_default:
                print('#%s=%s' % (opt_name, default))
        print('')
    except Exception:
        sys.stderr.write('Error in option "%s"\n' % opt_name)
        sys.exit(1)
def main():
    """Command-line entry point: forward the file arguments to generate()."""
    generate(sys.argv[1:])
if __name__ == '__main__':
    main()
| 32.929368 | 79 | 0.593362 |
from __future__ import print_function
import imp
import os
import re
import socket
import sys
import textwrap
from oslo.config import cfg
import six
from cinder.openstack.common import gettextutils
from cinder.openstack.common import importutils
gettextutils.install('cinder')
STROPT = "StrOpt"
BOOLOPT = "BoolOpt"
INTOPT = "IntOpt"
FLOATOPT = "FloatOpt"
LISTOPT = "ListOpt"
MULTISTROPT = "MultiStrOpt"
OPT_TYPES = {
STROPT: 'string value',
BOOLOPT: 'boolean value',
INTOPT: 'integer value',
FLOATOPT: 'floating point value',
LISTOPT: 'list value',
MULTISTROPT: 'multi valued',
}
OPTION_REGEX = re.compile(r"(%s)" % "|".join([STROPT, BOOLOPT, INTOPT,
FLOATOPT, LISTOPT,
MULTISTROPT]))
PY_EXT = ".py"
BASEDIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
"../../../../"))
WORDWRAP_WIDTH = 60
def generate(srcfiles):
mods_by_pkg = dict()
for filepath in srcfiles:
pkg_name = filepath.split(os.sep)[1]
mod_str = '.'.join(['.'.join(filepath.split(os.sep)[:-1]),
os.path.basename(filepath).split('.')[0]])
mods_by_pkg.setdefault(pkg_name, list()).append(mod_str)
pkg_names = filter(lambda x: x.endswith(PY_EXT), mods_by_pkg.keys())
pkg_names.sort()
ext_names = filter(lambda x: x not in pkg_names, mods_by_pkg.keys())
ext_names.sort()
pkg_names.extend(ext_names)
opts_by_group = {'DEFAULT': []}
extra_modules = os.getenv("OSLO_CONFIG_GENERATOR_EXTRA_MODULES", "")
if extra_modules:
for module_name in extra_modules.split(','):
module_name = module_name.strip()
module = _import_module(module_name)
if module:
for group, opts in _list_opts(module):
opts_by_group.setdefault(group, []).append((module_name,
opts))
for pkg_name in pkg_names:
mods = mods_by_pkg.get(pkg_name)
mods.sort()
for mod_str in mods:
if mod_str.endswith('.__init__'):
mod_str = mod_str[:mod_str.rfind(".")]
mod_obj = _import_module(mod_str)
if not mod_obj:
raise RuntimeError("Unable to import module %s" % mod_str)
for group, opts in _list_opts(mod_obj):
opts_by_group.setdefault(group, []).append((mod_str, opts))
print_group_opts('DEFAULT', opts_by_group.pop('DEFAULT', []))
for group, opts in opts_by_group.items():
print_group_opts(group, opts)
def _import_module(mod_str):
try:
if mod_str.startswith('bin.'):
imp.load_source(mod_str[4:], os.path.join('bin', mod_str[4:]))
return sys.modules[mod_str[4:]]
else:
return importutils.import_module(mod_str)
except Exception as e:
sys.stderr.write("Error importing module %s: %s\n" % (mod_str, str(e)))
return None
def _is_in_group(opt, group):
for key, value in group._opts.items():
if value['opt'] == opt:
return True
return False
def _guess_groups(opt, mod_obj):
if _is_in_group(opt, cfg.CONF):
return 'DEFAULT'
for key, value in cfg.CONF.items():
if isinstance(value, cfg.CONF.GroupAttr):
if _is_in_group(opt, value._group):
return value._group.name
raise RuntimeError(
"Unable to find group for option %s, "
"maybe it's defined twice in the same group?"
% opt.name
)
def _list_opts(obj):
def is_opt(o):
return (isinstance(o, cfg.Opt) and
not isinstance(o, cfg.SubCommandOpt))
opts = list()
for attr_str in dir(obj):
attr_obj = getattr(obj, attr_str)
if is_opt(attr_obj):
opts.append(attr_obj)
elif (isinstance(attr_obj, list) and
all(map(lambda x: is_opt(x), attr_obj))):
opts.extend(attr_obj)
ret = {}
for opt in opts:
ret.setdefault(_guess_groups(opt, obj), []).append(opt)
return ret.items()
def print_group_opts(group, opts_by_module):
print("[%s]" % group)
print('')
for mod, opts in opts_by_module:
print('
print('
print('
print('')
for opt in opts:
_print_opt(opt)
print('')
def _get_my_ip():
try:
csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
csock.connect(('8.8.8.8', 80))
(addr, port) = csock.getsockname()
csock.close()
return addr
except socket.error:
return None
def _sanitize_default(name, value):
if value.startswith(sys.prefix):
# NOTE(jd) Don't use os.path.join, because it is likely to think the
value = os.path.normpath("/usr/" + value[len(sys.prefix):])
elif value.startswith(BASEDIR):
return value.replace(BASEDIR, '/usr/lib/python/site-packages')
elif BASEDIR in value:
return value.replace(BASEDIR, '')
elif value == _get_my_ip():
return '10.0.0.1'
elif value == socket.gethostname() and 'host' in name:
return 'cinder'
elif value.strip() != value:
return '"%s"' % value
return value
def _print_opt(opt):
opt_name, opt_default, opt_help = opt.dest, opt.default, opt.help
if not opt_help:
sys.stderr.write('WARNING: "%s" is missing help string.\n' % opt_name)
opt_help = ""
opt_type = None
try:
opt_type = OPTION_REGEX.search(str(type(opt))).group(0)
except (ValueError, AttributeError) as err:
sys.stderr.write("%s\n" % str(err))
sys.exit(1)
opt_help += ' (' + OPT_TYPES[opt_type] + ')'
print('#', "\n# ".join(textwrap.wrap(opt_help, WORDWRAP_WIDTH)))
if opt.deprecated_opts:
for deprecated_opt in opt.deprecated_opts:
if deprecated_opt.name:
deprecated_group = (deprecated_opt.group if
deprecated_opt.group else "DEFAULT")
print('# Deprecated group/name - [%s]/%s' %
(deprecated_group,
deprecated_opt.name))
try:
if opt_default is None:
print('#%s=<None>' % opt_name)
elif opt_type == STROPT:
assert(isinstance(opt_default, six.string_types))
print('#%s=%s' % (opt_name, _sanitize_default(opt_name,
opt_default)))
elif opt_type == BOOLOPT:
assert(isinstance(opt_default, bool))
print('#%s=%s' % (opt_name, str(opt_default).lower()))
elif opt_type == INTOPT:
assert(isinstance(opt_default, int) and
not isinstance(opt_default, bool))
print('#%s=%s' % (opt_name, opt_default))
elif opt_type == FLOATOPT:
assert(isinstance(opt_default, float))
print('#%s=%s' % (opt_name, opt_default))
elif opt_type == LISTOPT:
assert(isinstance(opt_default, list))
print('#%s=%s' % (opt_name, ','.join(opt_default)))
elif opt_type == MULTISTROPT:
assert(isinstance(opt_default, list))
if not opt_default:
opt_default = ['']
for default in opt_default:
print('#%s=%s' % (opt_name, default))
print('')
except Exception:
sys.stderr.write('Error in option "%s"\n' % opt_name)
sys.exit(1)
def main():
generate(sys.argv[1:])
if __name__ == '__main__':
main()
| true | true |
f72bab7ab1bca3789121e05f9da664664eeab617 | 3,732 | py | Python | GitHubUploader.py | ytyaru/GitHub.Uploader.AddFunction.Contributions.201705141424 | 06e44308b0eb132f0ce1e547cedc6b0f6e566c93 | [
"CC0-1.0"
] | null | null | null | GitHubUploader.py | ytyaru/GitHub.Uploader.AddFunction.Contributions.201705141424 | 06e44308b0eb132f0ce1e547cedc6b0f6e566c93 | [
"CC0-1.0"
] | null | null | null | GitHubUploader.py | ytyaru/GitHub.Uploader.AddFunction.Contributions.201705141424 | 06e44308b0eb132f0ce1e547cedc6b0f6e566c93 | [
"CC0-1.0"
] | null | null | null | #!/usr/bin/python3
#!python3
#encoding:utf-8
import sys
import os.path
import subprocess
import configparser
import argparse
import web.service.github.api.v3.AuthenticationsCreator
import web.service.github.api.v3.AuthenticationData
#import web.service.github.api.v3.CurrentUser
import web.service.github.api.v3.CurrentRepository
import web.service.github.api.v3.Client
import database.src.Database
import cui.uploader.Main
import web.log.Log
import database.src.contributions.Main
class Main:
    """Command-line front end that uploads a project directory to GitHub.

    Reads ``config.ini`` located next to this file, resolves the local
    SQLite database directory, refreshes the contributions backup for
    every registered account and finally launches the interactive
    uploader.
    """
    def __init__(self):
        pass
    def Run(self):
        """Parse CLI arguments and execute the upload workflow."""
        parser = argparse.ArgumentParser(
            description='GitHub Repository Uploader.',
        )
        # Positional: project directory to upload; optional metadata flags.
        parser.add_argument('path_dir_pj')
        parser.add_argument('-u', '--username')
        parser.add_argument('-d', '--description')
        parser.add_argument('-l', '--homepage', '--link', '--url')
        args = parser.parse_args()
        config = configparser.ConfigParser()
        config.read(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'config.ini'))
        # If the configured DB path is relative, resolve it against this
        # file's directory rather than the Python launch directory.
        if config['Path']['DB'].startswith('./'):
            path_dir_db = os.path.join(os.path.abspath(os.path.dirname(__file__)), config['Path']['DB'][2:])
        else:
            path_dir_db = os.path.abspath(config['Path']['DB'])
        web.log.Log.Log().Logger.debug(path_dir_db)
        # Fall back to the default username from config.ini.
        if None is args.username:
            args.username = config['GitHub']['User']
        self.__db = database.src.Database.Database(os.path.abspath(os.path.dirname(__file__)))
        self.__db.Initialize()
        if None is self.__db.Accounts['Accounts'].find_one(Username=args.username):
            # Warning text (Japanese): "The specified user {0} does not
            # exist in the DB. Register it with GitHubUserRegister.py."
            web.log.Log.Log().Logger.warning('指定したユーザ {0} はDBに存在しません。GitHubUserRegister.pyで登録してください。'.format(args.username))
            return
        # Back up the contributions data before uploading.
        self.__UpdateAllUserContributions(path_dir_db, username=args.username)
        # Start the uploader.
        creator = web.service.github.api.v3.AuthenticationsCreator.AuthenticationsCreator(self.__db, args.username)
        authentications = creator.Create()
        repo = web.service.github.api.v3.CurrentRepository.CurrentRepository(self.__db, args.path_dir_pj, description=args.description, homepage=args.homepage)
        authData = web.service.github.api.v3.AuthenticationData.AuthenticationData()
        authData.Load(self.__db.Accounts, args.username)
        client = web.service.github.api.v3.Client.Client(self.__db, authentications, authData=authData, repo=repo)
        main = cui.uploader.Main.Main(self.__db, client, authData, repo)
        main.Run()
    def __UpdateAllUserContributions(self, path_dir_db, username=None):
        """Refresh the contributions backup for every account in the DB.

        NOTE(review): *username* is currently unused — all registered
        accounts are refreshed regardless; confirm whether per-user
        filtering was intended.
        """
        m = database.src.contributions.Main.Main(path_dir_db)
        for a in self.__db.Accounts['Accounts'].find():
            m.Run(a['Username'])
if __name__ == '__main__':
    # Script entry point: build the front end and run the workflow.
    Main().Run()
| 42.409091 | 159 | 0.665327 |
import sys
import os.path
import subprocess
import configparser
import argparse
import web.service.github.api.v3.AuthenticationsCreator
import web.service.github.api.v3.AuthenticationData
import web.service.github.api.v3.CurrentRepository
import web.service.github.api.v3.Client
import database.src.Database
import cui.uploader.Main
import web.log.Log
import database.src.contributions.Main
class Main:
def __init__(self):
pass
def Run(self):
parser = argparse.ArgumentParser(
description='GitHub Repository Uploader.',
)
parser.add_argument('path_dir_pj')
parser.add_argument('-u', '--username')
parser.add_argument('-d', '--description')
parser.add_argument('-l', '--homepage', '--link', '--url')
args = parser.parse_args()
config = configparser.ConfigParser()
config.read(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'config.ini'))
if config['Path']['DB'].startswith('./'):
path_dir_db = os.path.join(os.path.abspath(os.path.dirname(__file__)), config['Path']['DB'][2:])
else:
path_dir_db = os.path.abspath(config['Path']['DB'])
web.log.Log.Log().Logger.debug(path_dir_db)
if None is args.username:
args.username = config['GitHub']['User']
self.__db = database.src.Database.Database(os.path.abspath(os.path.dirname(__file__)))
self.__db.Initialize()
if None is self.__db.Accounts['Accounts'].find_one(Username=args.username):
web.log.Log.Log().Logger.warning('指定したユーザ {0} はDBに存在しません。GitHubUserRegister.pyで登録してください。'.format(args.username))
return
self.__UpdateAllUserContributions(path_dir_db, username=args.username)
creator = web.service.github.api.v3.AuthenticationsCreator.AuthenticationsCreator(self.__db, args.username)
authentications = creator.Create()
repo = web.service.github.api.v3.CurrentRepository.CurrentRepository(self.__db, args.path_dir_pj, description=args.description, homepage=args.homepage)
authData = web.service.github.api.v3.AuthenticationData.AuthenticationData()
authData.Load(self.__db.Accounts, args.username)
client = web.service.github.api.v3.Client.Client(self.__db, authentications, authData=authData, repo=repo)
main = cui.uploader.Main.Main(self.__db, client, authData, repo)
main.Run()
def __UpdateAllUserContributions(self, path_dir_db, username=None):
m = database.src.contributions.Main.Main(path_dir_db)
for a in self.__db.Accounts['Accounts'].find():
m.Run(a['Username'])
if __name__ == '__main__':
main = Main()
main.Run()
| true | true |
f72bac46a7bf89e92fd86af932bfe8a7135e61d2 | 13,197 | py | Python | src/sardana/util/funcgenerator.py | schooft/sardana | 76287b416650f40da79871ee3849340d0ff31f1d | [
"CC-BY-3.0"
] | null | null | null | src/sardana/util/funcgenerator.py | schooft/sardana | 76287b416650f40da79871ee3849340d0ff31f1d | [
"CC-BY-3.0"
] | null | null | null | src/sardana/util/funcgenerator.py | schooft/sardana | 76287b416650f40da79871ee3849340d0ff31f1d | [
"CC-BY-3.0"
] | null | null | null | ##############################################################################
##
# This file is part of Sardana
##
# http://www.sardana-controls.org/
##
# Copyright 2011 CELLS / ALBA Synchrotron, Bellaterra, Spain
##
# Sardana is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
##
# Sardana is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
##
# You should have received a copy of the GNU Lesser General Public License
# along with Sardana. If not, see <http://www.gnu.org/licenses/>.
##
##############################################################################
import time
import threading
import math
import copy
import numpy
import traceback
from sardana import State
from sardana.sardanaevent import EventGenerator, EventType
from sardana.pool.pooldefs import SynchParam, SynchDomain
from taurus.core.util.log import Logger
def strictly_increasing(l):
    """Return True when every element of *l* is smaller than its successor."""
    for previous, current in zip(l, l[1:]):
        if previous >= current:
            return False
    return True
def strictly_decreasing(l):
    """Return True when every element of *l* is greater than its successor."""
    return not any(earlier <= later for earlier, later in zip(l, l[1:]))
class FunctionGenerator(EventGenerator, Logger):
    """Generator of active and passive events describing a rectangular
    function.
    .. note::
        The FunctionGenerator class has been included in Sardana
        on a provisional basis. Backwards incompatible changes
        (up to and including removal of the module) may occur if
        deemed necessary by the core developers.
    """
    # Longest single sleep/wait quantum (s) so stop() is noticed promptly.
    MAX_NAP_TIME = 0.1
    def __init__(self, name="FunctionGenerator"):
        EventGenerator.__init__(self)
        Logger.__init__(self, name)
        self._name = name
        # Domains requested by the user (SynchDomain.Time/Position).
        self._initial_domain = None
        self._active_domain = None
        # Latest synchronization value received via event_received().
        self._position_event = threading.Event()
        self._position = None
        # Domains actually resolved while parsing the configuration.
        self._initial_domain_in_use = None
        self._active_domain_in_use = None
        # Precomputed event values (filled in by set_configuration()).
        self._active_events = list()
        self._passive_events = list()
        # Generation state flags.
        self._started = False
        self._stopped = False
        self._running = False
        self._start_time = None
        # Scan direction (+1/-1) and the comparison used to decide
        # whether a position has reached/passed an event value.
        self._direction = None
        self._condition = None
        self._id = None
        self._start_fired = False
    def get_name(self):
        return self._name
    name = property(get_name)
    def set_initial_domain(self, domain):
        self._initial_domain = domain
    def get_initial_domain(self):
        return self._initial_domain
    initial_domain = property(get_initial_domain, set_initial_domain)
    def set_active_domain(self, domain):
        self._active_domain = domain
    def get_active_domain(self):
        return self._active_domain
    active_domain = property(get_active_domain, set_active_domain)
    def set_initial_domain_in_use(self, domain):
        self._initial_domain_in_use = domain
    def get_initial_domain_in_use(self):
        return self._initial_domain_in_use
    initial_domain_in_use = property(get_initial_domain_in_use,
                                     set_initial_domain_in_use)
    def set_active_domain_in_use(self, domain):
        self._active_domain_in_use = domain
    def get_active_domain_in_use(self):
        return self._active_domain_in_use
    active_domain_in_use = property(get_active_domain_in_use,
                                    set_active_domain_in_use)
    def add_active_event(self, event):
        self._active_events.append(event)
    def set_active_events(self, events):
        self._active_events = events
    def get_active_events(self):
        return self._active_events
    active_events = property(get_active_events, set_active_events)
    def add_passive_event(self, event):
        self._passive_events.append(event)
    def set_passive_events(self, events):
        self._passive_events = events
    def get_passive_events(self):
        return self._passive_events
    passive_events = property(get_passive_events, set_passive_events)
    def set_direction(self, direction):
        """Set scan direction: 1 (increasing values) or -1 (decreasing).
        Also selects the comparison operator used to decide whether the
        current value has reached/passed an event value.
        """
        self._direction = direction
        if direction == 1:
            self._condition = numpy.greater_equal
        elif direction == -1:
            self._condition = numpy.less_equal
        else:
            raise ValueError("direction can be -1 or 1 (negative or positive)")
    def get_direction(self):
        return self._direction
    direction = property(get_direction, set_direction)
    def event_received(self, *args, **kwargs):
        """Update the current position from the synchronization base
        attribute event; values in error are logged and discarded.
        """
        _, _, v = args
        if v.error:
            exc_info = v.exc_info
            self.error("Synchronization base attribute in error")
            msg = "Details: " + "".join(traceback.format_exception(*exc_info))
            self.debug(msg)
            return
        self._position = v.value
        self._position_event.set()
    def start(self):
        """Reset the generation state and announce the Moving state."""
        self._start_time = time.time()
        self._stopped = False
        self._started = True
        self._position = None
        self._start_fired = False
        self._position_event.clear()
        self._id = 0
        self.fire_event(EventType("state"), State.Moving)
    def stop(self):
        """Request the generation loop to terminate at the next check."""
        self._stopped = True
    def is_started(self):
        return self._started
    def is_stopped(self):
        return self._stopped
    def is_running(self):
        return self._running
    def run(self):
        """Main generation loop: fire active/passive event pairs until
        all events were generated or stop() was requested, then report
        the On state (even if the loop raised).
        """
        self._running = True
        try:
            while len(self.active_events) > 0 and not self.is_stopped():
                self.wait_active()
                self.fire_active()
                self.wait_passive()
                self.fire_passive()
                self._id += 1
        finally:
            self._started = False
            self._running = False
            self._stopped = False
            self.fire_event(EventType("state"), State.On)
    def sleep(self, period):
        """Sleep *period* seconds in MAX_NAP_TIME chunks so that a stop
        request can interrupt the wait quickly.
        """
        if period <= 0:
            return
        necessary_naps = int(math.ceil(period / self.MAX_NAP_TIME))
        if necessary_naps == 0:  # avoid ZeroDivisionError below
            nap = 0
        else:
            nap = period / necessary_naps
        for _ in xrange(necessary_naps):
            if self.is_stopped():
                break
            time.sleep(nap)
    def fire_start(self):
        """Emit the "start" event (once, before the first active event)."""
        self.fire_event(EventType("start"), self._id)
        self._start_fired = True
        if self._id > 0:
            msg = "start was fired with {0} delay".format(self._id)
            self.warning(msg)
    def wait_active(self):
        """Block until the next active event is due: sleep in the Time
        domain, or poll positions from event_received() otherwise.
        """
        candidate = self.active_events[0]
        if self.initial_domain_in_use == SynchDomain.Time:
            now = time.time()
            candidate += self._start_time
            self.sleep(candidate - now)
        else:
            while True:
                if self.is_stopped():
                    break
                if self._position_event.isSet():
                    self._position_event.clear()
                    now = self._position
                    if self._condition(now, candidate):
                        break
                else:
                    self._position_event.wait(self.MAX_NAP_TIME)
    def fire_active(self):
        """Emit the "active" event, first skipping any events whose
        time/position has already passed, to catch up.
        """
        # check if some events need to be skipped
        i = 0
        while i < len(self.active_events) - 1:
            candidate = self.active_events[i + 1]
            if self.initial_domain_in_use is SynchDomain.Time:
                candidate += self._start_time
                now = time.time()
            elif self.initial_domain_in_use is SynchDomain.Position:
                now = self._position
            if self._condition(now, candidate):
                i += 1
            else:
                break
        self._id += i
        if not self._start_fired:
            self.fire_start()
        self.fire_event(EventType("active"), self._id)
        # drop the skipped events together with the one just fired
        self.active_events = self.active_events[i + 1:]
        self.passive_events = self.passive_events[i:]
    def wait_passive(self):
        """Block until the passive event matching the last fired active
        event is due (time sleep or position polling).
        """
        if self.active_domain_in_use == SynchDomain.Time:
            now = time.time()
            candidate = self._start_time + self.passive_events[0]
            self.sleep(candidate - now)
        else:
            while True:
                if self._position_event.isSet():
                    self._position_event.clear()
                    if self._condition(self._position, self.passive_events[0]):
                        break
                else:
                    self._position_event.wait(self.MAX_NAP_TIME)
                if self.is_stopped():
                    break
    def fire_passive(self):
        """Emit the "passive" event and, after the last one, "end"."""
        self.fire_event(EventType("passive"), self._id)
        self.set_passive_events(self.passive_events[1:])
        if len(self.passive_events) == 0:
            self.fire_end()
    def fire_end(self):
        """Emit the "end" event closing the generation."""
        self.fire_event(EventType("end"), self._id)
    def set_configuration(self, configuration):
        """Translate a synchronization description (list of group dicts
        keyed by SynchParam) into flat lists of active and passive event
        values, resolving the domains in use and the scan direction.
        """
        # make a copy since we may inject the initial time
        configuration = copy.deepcopy(configuration)
        active_events = []
        passive_events = []
        self._direction = None
        # create short variables for commodity
        Time = SynchDomain.Time
        Position = SynchDomain.Position
        Initial = SynchParam.Initial
        Delay = SynchParam.Delay
        Active = SynchParam.Active
        Total = SynchParam.Total
        Repeats = SynchParam.Repeats
        for i, group in enumerate(configuration):
            # inject delay as initial time - generation will be
            # relative to the start time
            initial_param = group.get(Initial)
            if initial_param is None:
                initial_param = dict()
            if Time not in initial_param:
                delay_param = group.get(Delay)
                if Time in delay_param:
                    initial_param[Time] = delay_param[Time]
                group[Initial] = initial_param
            # determine active domain in use
            msg = "no initial value in group %d" % i
            if self.initial_domain in initial_param:
                self.initial_domain_in_use = self.initial_domain
            elif Position in initial_param:
                self.initial_domain_in_use = Position
            elif Time in initial_param:
                self.initial_domain_in_use = Time
            else:
                raise ValueError(msg)
            # determine passive domain in use
            active_param = group.get(Active)
            msg = "no active value in group %d" % i
            if self.active_domain is None:
                if Time in active_param:
                    self.active_domain_in_use = Time
                elif Position in active_param:
                    self.active_domain_in_use = Position
                else:
                    raise ValueError(msg)
            elif self.active_domain in active_param:
                self.active_domain_in_use = self.active_domain
            else:
                raise ValueError(msg)
            # create short variables for commodity
            initial_domain_in_use = self.initial_domain_in_use
            active_domain_in_use = self.active_domain_in_use
            repeats = group.get(Repeats, 1)
            active = active_param[active_domain_in_use]
            initial_in_initial_domain = initial_param[initial_domain_in_use]
            initial_in_active_domain = initial_param[active_domain_in_use]
            active_event_in_initial_domain = initial_in_initial_domain
            active_event_in_active_domain = initial_in_active_domain
            if repeats > 1:
                total_param = group[Total]
                total_in_initial_domain = total_param[initial_domain_in_use]
                total_in_active_domain = total_param[active_domain_in_use]
                for _ in xrange(repeats):
                    passive_event = active_event_in_active_domain + active
                    active_events.append(active_event_in_initial_domain)
                    passive_events.append(passive_event)
                    active_event_in_initial_domain += total_in_initial_domain
                    active_event_in_active_domain += total_in_active_domain
            else:
                active_events.append(active_event_in_initial_domain)
                passive_event = active_event_in_active_domain + active
                passive_events.append(passive_event)
        # determine direction
        if self.direction is None:
            if strictly_increasing(active_events):
                self.direction = 1
            elif strictly_decreasing(active_events):
                self.direction = -1
            else:
                msg = "active values indicate contradictory directions"
                raise ValueError(msg)
        self.active_events = active_events
        self.passive_events = passive_events
| 35.286096 | 79 | 0.608169 | elf._position_event.wait(self.MAX_NAP_TIME)
def fire_active(self):
    """Skip already-elapsed active events and emit one "active" event.

    Walks forward through ``self.active_events`` while the *next*
    candidate event is already reached (past timestamp in the time
    domain, passed position in the position domain), so only the most
    recent pending event is fired.  ``self._id`` advances by the number
    of skipped events and both event lists are trimmed accordingly.
    """
    i = 0
    while i < len(self.active_events) - 1:
        candidate = self.active_events[i + 1]
        if self.initial_domain_in_use is SynchDomain.Time:
            # Events are stored relative to the generation start time.
            candidate += self._start_time
            now = time.time()
        elif self.initial_domain_in_use is SynchDomain.Position:
            now = self._position
        # NOTE(review): if initial_domain_in_use is neither Time nor
        # Position, ``now`` is unbound here; presumably
        # set_configuration guarantees one of the two -- confirm.
        if self._condition(now, candidate):
            i += 1
        else:
            break
    self._id += i
    if not self._start_fired:
        # Emit the "start" event lazily, before the first active event.
        self.fire_start()
    self.fire_event(EventType("active"), self._id)
    # Asymmetric slicing keeps the passive event paired with the
    # active event that was just fired.
    self.active_events = self.active_events[i + 1:]
    self.passive_events = self.passive_events[i:]
def wait_passive(self):
    """Block until the first pending passive event is due.

    Time domain: plain sleep until the scheduled timestamp.
    Position domain: wait on ``self._position_event`` (set by position
    updates), re-checking the condition each time it fires and napping
    at most ``MAX_NAP_TIME`` between checks; an external stop request
    (``is_stopped``) also terminates the wait.
    """
    if self.active_domain_in_use == SynchDomain.Time:
        now = time.time()
        candidate = self._start_time + self.passive_events[0]
        self.sleep(candidate - now)
    else:
        while True:
            if self._position_event.isSet():
                self._position_event.clear()
                # Position update arrived: has the event been reached?
                if self._condition(self._position, self.passive_events[0]):
                    break
            else:
                # No update pending: nap, but never indefinitely, so the
                # stop flag below is polled regularly.
                self._position_event.wait(self.MAX_NAP_TIME)
            if self.is_stopped():
                break
def fire_passive(self):
    """Emit a "passive" event for the current id and consume it.

    When the last passive event has been consumed the generation is
    complete and the "end" event is emitted.
    """
    self.fire_event(EventType("passive"), self._id)
    self.set_passive_events(self.passive_events[1:])
    if len(self.passive_events) == 0:
        self.fire_end()
def fire_end(self):
    """Emit the "end" event, signalling that generation has finished."""
    self.fire_event(EventType("end"), self._id)
def set_configuration(self, configuration):
    """Translate a synchronization description into flat event lists.

    ``configuration`` is a sequence of group dictionaries keyed by
    ``SynchParam``.  For every group the absolute "active" events
    (expressed in the initial domain) and the matching "passive"
    events (active event + active interval, in the active domain) are
    accumulated, expanding groups with ``Repeats`` > 1 by their
    ``Total`` increment.  The method also resolves which domains
    (Time/Position) are in use and the overall direction of motion.

    Raises ValueError when a group lacks the required initial/active
    values or when the active events are not strictly monotonic in a
    single direction.
    """
    # Work on a copy: groups may be amended below (Initial filled from Delay).
    configuration = copy.deepcopy(configuration)
    active_events = []
    passive_events = []
    self._direction = None
    # Short aliases for readability.
    Time = SynchDomain.Time
    Position = SynchDomain.Position
    Initial = SynchParam.Initial
    Delay = SynchParam.Delay
    Active = SynchParam.Active
    Total = SynchParam.Total
    Repeats = SynchParam.Repeats
    for i, group in enumerate(configuration):
        # determine initial parameter, falling back to Delay for Time
        initial_param = group.get(Initial)
        if initial_param is None:
            initial_param = dict()
        if Time not in initial_param:
            delay_param = group.get(Delay)
            if Time in delay_param:
                initial_param[Time] = delay_param[Time]
            group[Initial] = initial_param
        msg = "no initial value in group %d" % i
        # determine initial domain in use: preferred domain first,
        # then Position, then Time
        if self.initial_domain in initial_param:
            self.initial_domain_in_use = self.initial_domain
        elif Position in initial_param:
            self.initial_domain_in_use = Position
        elif Time in initial_param:
            self.initial_domain_in_use = Time
        else:
            raise ValueError(msg)
        # determine active domain in use
        active_param = group.get(Active)
        msg = "no active value in group %d" % i
        if self.active_domain is None:
            if Time in active_param:
                self.active_domain_in_use = Time
            elif Position in active_param:
                self.active_domain_in_use = Position
            else:
                raise ValueError(msg)
        elif self.active_domain in active_param:
            self.active_domain_in_use = self.active_domain
        else:
            raise ValueError(msg)
        # create short variables for commodity
        initial_domain_in_use = self.initial_domain_in_use
        active_domain_in_use = self.active_domain_in_use
        repeats = group.get(Repeats, 1)
        active = active_param[active_domain_in_use]
        initial_in_initial_domain = initial_param[initial_domain_in_use]
        initial_in_active_domain = initial_param[active_domain_in_use]
        active_event_in_initial_domain = initial_in_initial_domain
        active_event_in_active_domain = initial_in_active_domain
        if repeats > 1:
            # expand the repeated group by the per-repeat Total increment
            total_param = group[Total]
            total_in_initial_domain = total_param[initial_domain_in_use]
            total_in_active_domain = total_param[active_domain_in_use]
            for _ in xrange(repeats):  # Python 2 (xrange)
                passive_event = active_event_in_active_domain + active
                active_events.append(active_event_in_initial_domain)
                passive_events.append(passive_event)
                active_event_in_initial_domain += total_in_initial_domain
                active_event_in_active_domain += total_in_active_domain
        else:
            active_events.append(active_event_in_initial_domain)
            passive_event = active_event_in_active_domain + active
            passive_events.append(passive_event)
    # determine direction from strict monotonicity of the active events
    if self.direction is None:
        if strictly_increasing(active_events):
            self.direction = 1
        elif strictly_decreasing(active_events):
            self.direction = -1
        else:
            msg = "active values indicate contradictory directions"
            raise ValueError(msg)
    self.active_events = active_events
    self.passive_events = passive_events
| true | true |
f72bac8581667e7935b4584979b404c4fdca5866 | 468 | py | Python | examples/trap.py | fthyssen/aiosnmp | 667a166214e2e70c333510a6d24208dd772a1f26 | [
"MIT"
] | null | null | null | examples/trap.py | fthyssen/aiosnmp | 667a166214e2e70c333510a6d24208dd772a1f26 | [
"MIT"
] | null | null | null | examples/trap.py | fthyssen/aiosnmp | 667a166214e2e70c333510a6d24208dd772a1f26 | [
"MIT"
] | null | null | null | import asyncio
import aiosnmp
async def handler(host: str, port: int, message: aiosnmp.SnmpV2TrapMessage) -> None:
print(f"got packet from {host}:{port}")
for d in message.data.varbinds:
print(f"oid: {d.oid}, value: {d.value}")
async def main():
p = aiosnmp.SnmpV2TrapServer(
host="127.0.0.1", port=162, communities=("public",), handler=handler
)
await p.run()
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
| 22.285714 | 84 | 0.662393 | import asyncio
import aiosnmp
async def handler(host: str, port: int, message: aiosnmp.SnmpV2TrapMessage) -> None:
print(f"got packet from {host}:{port}")
for d in message.data.varbinds:
print(f"oid: {d.oid}, value: {d.value}")
async def main():
p = aiosnmp.SnmpV2TrapServer(
host="127.0.0.1", port=162, communities=("public",), handler=handler
)
await p.run()
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
| true | true |
f72bacd78c4c106ed1626a1724d718b3f8d317dc | 481 | py | Python | _unittests/ut_plotting/test_dummy.py | sdpython/manyapi | dc2aadc58a5d72904f95424dbe57bb832d3ccd73 | [
"MIT"
] | 1 | 2020-08-08T13:38:45.000Z | 2020-08-08T13:38:45.000Z | _unittests/ut_plotting/test_dummy.py | sdpython/manyapi | dc2aadc58a5d72904f95424dbe57bb832d3ccd73 | [
"MIT"
] | 8 | 2018-05-05T10:03:04.000Z | 2019-06-08T10:21:30.000Z | _unittests/ut_plotting/test_dummy.py | sdpython/manyapi | dc2aadc58a5d72904f95424dbe57bb832d3ccd73 | [
"MIT"
] | null | null | null | """
@brief test log(time=13s)
"""
import unittest
from pyquickhelper.pycode import ExtTestCase
from manydataapi.plotting import plot_aggregated_ts, daily_timeseries
class TestDummm(ExtTestCase):
def test_agg_raise(self):
df = daily_timeseries()
from matplotlib import pyplot as plt
_, ax = plt.subplots(1, 1)
plot_aggregated_ts(df, ax=ax, value='X', agg='year')
plt.close('all')
if __name__ == "__main__":
unittest.main()
| 21.863636 | 69 | 0.675676 | import unittest
from pyquickhelper.pycode import ExtTestCase
from manydataapi.plotting import plot_aggregated_ts, daily_timeseries
class TestDummm(ExtTestCase):
def test_agg_raise(self):
df = daily_timeseries()
from matplotlib import pyplot as plt
_, ax = plt.subplots(1, 1)
plot_aggregated_ts(df, ax=ax, value='X', agg='year')
plt.close('all')
if __name__ == "__main__":
unittest.main()
| true | true |
f72bacee48dd612bab37d2f97351e60521569498 | 6,673 | py | Python | tests/test_lammps_cycle.py | irisTa56/ease4lmp | 0ad69632fbe0d8c2a55e58af13efd7be1d566394 | [
"MIT"
] | null | null | null | tests/test_lammps_cycle.py | irisTa56/ease4lmp | 0ad69632fbe0d8c2a55e58af13efd7be1d566394 | [
"MIT"
] | 2 | 2019-03-06T04:33:27.000Z | 2019-07-27T08:30:28.000Z | tests/test_lammps_cycle.py | irisTa56/ease4lmp | 0ad69632fbe0d8c2a55e58af13efd7be1d566394 | [
"MIT"
] | null | null | null | import unittest
from ease4lmp import (
BondedAtoms, LammpsWriter,
create_atoms_from_data, create_atoms_from_molecule)
from ase.build import bulk, molecule
import numpy as np
import os
import itertools
def write_files(atoms):
    """Write 'data.tmp' and 'molecule.tmp' Lammps files for *atoms*.

    Every topology category (bond/angle/dihedral/improper) gets its
    types numbered from 1 in the order the writer reports the patterns.
    """
    writer = LammpsWriter(atoms, atom_style="molecular")
    writer.set_atom_data(mol=[0] * len(atoms))
    # (pattern getter, type setter) pairs -- one per topology category.
    type_setters = (
        (writer.get_bond_patterns, writer.set_bond_types),
        (writer.get_angle_patterns, writer.set_angle_types),
        (writer.get_dihedral_patterns, writer.set_dihedral_types),
        (writer.get_improper_patterns, writer.set_improper_types),
    )
    for get_patterns, set_types in type_setters:
        set_types({seq: n + 1 for n, seq in enumerate(get_patterns())})
    writer.write_lammps_data("data.tmp", mass=True)
    writer.write_lammps_molecule("molecule.tmp", mass=True)
def remove_files():
    """Delete the temporary Lammps files produced by ``write_files``."""
    for name in ("data.tmp", "molecule.tmp"):
        os.remove(name)
class TestLammpsCycle(unittest.TestCase):
    """Round-trip tests: write atoms to Lammps files, read them back,
    and verify positions/masses/types/bonds survive the cycle."""

    def test_methanol(self):
        """CH3OH round trip through Lammps data and molecule files."""
        atoms = BondedAtoms(molecule("CH3OH"))
        # confirm atomic numbers (C, O, H x4)
        self.assertTrue(np.allclose(
            atoms.get_atomic_numbers(), np.array([6, 8, 1, 1, 1, 1])))
        # confirm O-H distance
        self.assertTrue(np.allclose(atoms.get_distance(1, 3), 0.97))
        atoms.set_types([1, 2, 3, 4, 3, 3])
        positions = atoms.get_positions()
        # pair every two atoms closer than 1.5 A (covalent-bond cutoff)
        bonded_pairs = [
            (i, j) for i, j in itertools.combinations(range(len(atoms)), 2)
            if np.linalg.norm(positions[i] - positions[j]) < 1.5]
        # there are five bonds in CH3OH
        self.assertEqual(len(bonded_pairs), 5)
        for pair in bonded_pairs:
            atoms.add_bond(*pair)
        atoms.sort_bonds()
        atoms.set_cell([[5., 0., 0.], [0., 5., 0.], [0., 0., 5.]])
        atoms.center()
        write_files(atoms)
        atoms_from_data = create_atoms_from_data("data.tmp", "molecular")
        atoms_from_molecule = create_atoms_from_molecule("molecule.tmp")
        # atoms from Lammps' data and molecule file must be equal.
        self.assertTrue(np.allclose(
            atoms_from_data.get_positions(), atoms_from_molecule.get_positions()))
        self.assertTrue(np.allclose(
            atoms_from_data.get_masses(), atoms_from_molecule.get_masses()))
        self.assertTrue(np.allclose(
            atoms_from_data.get_types(), atoms_from_molecule.get_types()))
        self.assertTrue(np.allclose(
            atoms_from_data.get_bonds(), atoms_from_molecule.get_bonds()))
        # comparison with original atoms
        self.assertTrue(np.allclose(
            atoms_from_data.get_positions(), atoms.get_positions()))
        self.assertTrue(np.allclose(
            atoms_from_data.get_masses(), atoms.get_masses()))
        self.assertTrue(np.allclose(
            atoms_from_data.get_types(), atoms.get_types()))
        # storing order of bonds might be changed
        atoms_from_data.sort_bonds()
        self.assertTrue(np.allclose(
            atoms_from_data.get_bonds(), atoms.get_bonds()))
        remove_files()

    def test_acetic(self):
        """CH3COOH round trip through Lammps data and molecule files."""
        atoms = BondedAtoms(molecule("CH3COOH"))
        # confirm atomic numbers (C, O, O, H, C, H x3)
        self.assertTrue(np.allclose(
            atoms.get_atomic_numbers(), np.array([6, 8, 8, 1, 6, 1, 1, 1])))
        # confirm O-H distance < C=H distance
        self.assertTrue(all(
            atoms.get_distance(2, 3) < atoms.get_distance(i, j)
            for i, j in [(4, 5), (4, 6), (4, 7)]))
        atoms.set_types([1, 2, 3, 4, 5, 6, 6, 6])
        positions = atoms.get_positions()
        # pair every two atoms closer than 1.5 A (covalent-bond cutoff)
        bonded_pairs = [
            (i, j) for i, j in itertools.combinations(range(len(atoms)), 2)
            if np.linalg.norm(positions[i] - positions[j]) < 1.5]
        # there are seven bonds in CH3COOH
        self.assertEqual(len(bonded_pairs), 7)
        for pair in bonded_pairs:
            atoms.add_bond(*pair)
        atoms.sort_bonds()
        atoms.set_cell([[5., 0., 0.], [0., 5., 0.], [0., 0., 5.]])
        atoms.center()
        write_files(atoms)
        atoms_from_data = create_atoms_from_data("data.tmp", "molecular")
        atoms_from_molecule = create_atoms_from_molecule("molecule.tmp")
        # atoms from Lammps' data and molecule file must be equal.
        self.assertTrue(np.allclose(
            atoms_from_data.get_positions(), atoms_from_molecule.get_positions()))
        self.assertTrue(np.allclose(
            atoms_from_data.get_masses(), atoms_from_molecule.get_masses()))
        self.assertTrue(np.allclose(
            atoms_from_data.get_types(), atoms_from_molecule.get_types()))
        self.assertTrue(np.allclose(
            atoms_from_data.get_bonds(), atoms_from_molecule.get_bonds()))
        # comparison with original atoms
        self.assertTrue(np.allclose(
            atoms_from_data.get_positions(), atoms.get_positions()))
        self.assertTrue(np.allclose(
            atoms_from_data.get_masses(), atoms.get_masses()))
        self.assertTrue(np.allclose(
            atoms_from_data.get_types(), atoms.get_types()))
        # storing order of bonds might be changed
        atoms_from_data.sort_bonds()
        self.assertTrue(np.allclose(
            atoms_from_data.get_bonds(), atoms.get_bonds()))
        remove_files()

    def test_nacl(self):
        """Periodic rocksalt NaCl round trip through a Lammps data file."""
        atoms = BondedAtoms(bulk("NaCl", "rocksalt", a=5.64, orthorhombic=True))
        # confirm atomic numbers (Na, Cl alternating)
        self.assertTrue(np.allclose(
            atoms.get_atomic_numbers(), np.array([11, 17, 11, 17])))
        atoms.set_types([1, 2, 1, 2])
        atoms.change_max_bonds(6)
        cell = atoms.get_cell()
        positions = atoms.get_positions()
        # bond nearest neighbors (2.82 A) across periodic images as well
        for i, j in itertools.combinations(range(len(atoms)), 2):
            r_original = positions[j] - positions[i]
            for ix, iy, iz in itertools.product(*[(-1, 0, 1)]*3):
                r = r_original + ix * cell[0] + iy * cell[1] + iz * cell[2]
                if np.isclose(np.linalg.norm(r), 2.82):
                    atoms.add_bond(i, j, img2=(ix, iy, iz))
        atoms *= 5
        atoms.sort_bonds()
        write_files(atoms)
        atoms_from_data = create_atoms_from_data(
            "data.tmp", "molecular", pbc=True)
        # comparison with original atoms
        self.assertTrue(np.allclose(
            atoms_from_data.get_positions(), atoms.get_positions()))
        self.assertTrue(np.allclose(
            atoms_from_data.get_masses(), atoms.get_masses()))
        self.assertTrue(np.allclose(
            atoms_from_data.get_types(), atoms.get_types()))
        # storing order of bonds might be changed
        atoms_from_data.sort_bonds()
        self.assertTrue(np.allclose(
            atoms_from_data.get_bonds(), atoms.get_bonds()))
        remove_files()
def suite():
    """Build a TestSuite that runs the three round-trip tests in order."""
    tests = unittest.TestSuite()
    for name in ("test_methanol", "test_acetic", "test_nacl"):
        tests.addTest(TestLammpsCycle(name))
    return tests
from ease4lmp import (
BondedAtoms, LammpsWriter,
create_atoms_from_data, create_atoms_from_molecule)
from ase.build import bulk, molecule
import numpy as np
import os
import itertools
def write_files(atoms):
writer = LammpsWriter(atoms, atom_style="molecular")
writer.set_atom_data(mol=[0]*len(atoms))
writer.set_bond_types({
seq: i+1 for i, seq in enumerate(writer.get_bond_patterns())
})
writer.set_angle_types({
seq: i+1 for i, seq in enumerate(writer.get_angle_patterns())
})
writer.set_dihedral_types({
seq: i+1 for i, seq in enumerate(writer.get_dihedral_patterns())
})
writer.set_improper_types({
seq: i+1 for i, seq in enumerate(writer.get_improper_patterns())
})
writer.write_lammps_data("data.tmp", mass=True)
writer.write_lammps_molecule("molecule.tmp", mass=True)
def remove_files():
os.remove("data.tmp")
os.remove("molecule.tmp")
class TestLammpsCycle(unittest.TestCase):
def test_methanol(self):
atoms = BondedAtoms(molecule("CH3OH"))
self.assertTrue(np.allclose(
atoms.get_atomic_numbers(), np.array([6, 8, 1, 1, 1, 1])))
self.assertTrue(np.allclose(atoms.get_distance(1, 3), 0.97))
atoms.set_types([1, 2, 3, 4, 3, 3])
positions = atoms.get_positions()
bonded_pairs = [
(i, j) for i, j in itertools.combinations(range(len(atoms)), 2)
if np.linalg.norm(positions[i] - positions[j]) < 1.5]
self.assertEqual(len(bonded_pairs), 5)
for pair in bonded_pairs:
atoms.add_bond(*pair)
atoms.sort_bonds()
atoms.set_cell([[5., 0., 0.], [0., 5., 0.], [0., 0., 5.]])
atoms.center()
write_files(atoms)
atoms_from_data = create_atoms_from_data("data.tmp", "molecular")
atoms_from_molecule = create_atoms_from_molecule("molecule.tmp")
self.assertTrue(np.allclose(
atoms_from_data.get_positions(), atoms_from_molecule.get_positions()))
self.assertTrue(np.allclose(
atoms_from_data.get_masses(), atoms_from_molecule.get_masses()))
self.assertTrue(np.allclose(
atoms_from_data.get_types(), atoms_from_molecule.get_types()))
self.assertTrue(np.allclose(
atoms_from_data.get_bonds(), atoms_from_molecule.get_bonds()))
# comparison with original atoms
self.assertTrue(np.allclose(
atoms_from_data.get_positions(), atoms.get_positions()))
self.assertTrue(np.allclose(
atoms_from_data.get_masses(), atoms.get_masses()))
self.assertTrue(np.allclose(
atoms_from_data.get_types(), atoms.get_types()))
# storing order of bonds might be changed
atoms_from_data.sort_bonds()
self.assertTrue(np.allclose(
atoms_from_data.get_bonds(), atoms.get_bonds()))
remove_files()
def test_acetic(self):
atoms = BondedAtoms(molecule("CH3COOH"))
# confirm atomic numbers
self.assertTrue(np.allclose(
atoms.get_atomic_numbers(), np.array([6, 8, 8, 1, 6, 1, 1, 1])))
# confirm O-H distance < C=H distance
self.assertTrue(all(
atoms.get_distance(2, 3) < atoms.get_distance(i, j)
for i, j in [(4, 5), (4, 6), (4, 7)]))
atoms.set_types([1, 2, 3, 4, 5, 6, 6, 6])
positions = atoms.get_positions()
bonded_pairs = [
(i, j) for i, j in itertools.combinations(range(len(atoms)), 2)
if np.linalg.norm(positions[i] - positions[j]) < 1.5]
# there are seven bonds in CH3COOH
self.assertEqual(len(bonded_pairs), 7)
for pair in bonded_pairs:
atoms.add_bond(*pair)
atoms.sort_bonds()
atoms.set_cell([[5., 0., 0.], [0., 5., 0.], [0., 0., 5.]])
atoms.center()
write_files(atoms)
atoms_from_data = create_atoms_from_data("data.tmp", "molecular")
atoms_from_molecule = create_atoms_from_molecule("molecule.tmp")
# atoms from Lammps' data and molecule file must be eaqual.
self.assertTrue(np.allclose(
atoms_from_data.get_positions(), atoms_from_molecule.get_positions()))
self.assertTrue(np.allclose(
atoms_from_data.get_masses(), atoms_from_molecule.get_masses()))
self.assertTrue(np.allclose(
atoms_from_data.get_types(), atoms_from_molecule.get_types()))
self.assertTrue(np.allclose(
atoms_from_data.get_bonds(), atoms_from_molecule.get_bonds()))
self.assertTrue(np.allclose(
atoms_from_data.get_positions(), atoms.get_positions()))
self.assertTrue(np.allclose(
atoms_from_data.get_masses(), atoms.get_masses()))
self.assertTrue(np.allclose(
atoms_from_data.get_types(), atoms.get_types()))
atoms_from_data.sort_bonds()
self.assertTrue(np.allclose(
atoms_from_data.get_bonds(), atoms.get_bonds()))
remove_files()
def test_nacl(self):
atoms = BondedAtoms(bulk("NaCl", "rocksalt", a=5.64, orthorhombic=True))
self.assertTrue(np.allclose(
atoms.get_atomic_numbers(), np.array([11, 17, 11, 17])))
atoms.set_types([1, 2, 1, 2])
atoms.change_max_bonds(6)
cell = atoms.get_cell()
positions = atoms.get_positions()
for i, j in itertools.combinations(range(len(atoms)), 2):
r_original = positions[j] - positions[i]
for ix, iy, iz in itertools.product(*[(-1, 0, 1)]*3):
r = r_original + ix * cell[0] + iy * cell[1] + iz * cell[2]
if np.isclose(np.linalg.norm(r), 2.82):
atoms.add_bond(i, j, img2=(ix, iy, iz))
atoms *= 5
atoms.sort_bonds()
write_files(atoms)
atoms_from_data = create_atoms_from_data(
"data.tmp", "molecular", pbc=True)
self.assertTrue(np.allclose(
atoms_from_data.get_positions(), atoms.get_positions()))
self.assertTrue(np.allclose(
atoms_from_data.get_masses(), atoms.get_masses()))
self.assertTrue(np.allclose(
atoms_from_data.get_types(), atoms.get_types()))
atoms_from_data.sort_bonds()
self.assertTrue(np.allclose(
atoms_from_data.get_bonds(), atoms.get_bonds()))
remove_files()
def suite():
suite = unittest.TestSuite()
suite.addTest(TestLammpsCycle("test_methanol"))
suite.addTest(TestLammpsCycle("test_acetic"))
suite.addTest(TestLammpsCycle("test_nacl"))
return suite | true | true |
f72bacff2befd45529dc6156485e5eaa617d18a2 | 611 | py | Python | api/models/post_rank/migrations/0001_initial.py | eggmoid/GalleryManage-FastAPI | fa50cef623a03aed2d7b4ac9c76d74cfb9d898eb | [
"MIT"
] | null | null | null | api/models/post_rank/migrations/0001_initial.py | eggmoid/GalleryManage-FastAPI | fa50cef623a03aed2d7b4ac9c76d74cfb9d898eb | [
"MIT"
] | 6 | 2021-08-06T16:30:03.000Z | 2021-12-11T05:30:02.000Z | api/models/post_rank/migrations/0001_initial.py | eggmoid/GalleryManage-FastAPI | fa50cef623a03aed2d7b4ac9c76d74cfb9d898eb | [
"MIT"
] | null | null | null | # Generated by Django 3.2.5 on 2021-08-25 04:03
from django.db import migrations
class Migration(migrations.Migration):
initial = True
dependencies = [
('post', '0005_alter_post_options'),
]
operations = [
migrations.CreateModel(
name='PostRank',
fields=[
],
options={
'verbose_name': '8월 갤창랭킹',
'verbose_name_plural': '8월 갤창랭킹',
'proxy': True,
'indexes': [],
'constraints': [],
},
bases=('post.post',),
),
]
| 21.068966 | 49 | 0.464812 |
from django.db import migrations
class Migration(migrations.Migration):
initial = True
dependencies = [
('post', '0005_alter_post_options'),
]
operations = [
migrations.CreateModel(
name='PostRank',
fields=[
],
options={
'verbose_name': '8월 갤창랭킹',
'verbose_name_plural': '8월 갤창랭킹',
'proxy': True,
'indexes': [],
'constraints': [],
},
bases=('post.post',),
),
]
| true | true |
f72bad3d0b4002ad1cb2fcc9489978b3fa4f88bf | 3,577 | py | Python | checkers/utils.py | gbravoi/monte-carlo-tree-search | 578df8df925e5f569e7354daff6642e1781389b6 | [
"MIT"
] | null | null | null | checkers/utils.py | gbravoi/monte-carlo-tree-search | 578df8df925e5f569e7354daff6642e1781389b6 | [
"MIT"
] | null | null | null | checkers/utils.py | gbravoi/monte-carlo-tree-search | 578df8df925e5f569e7354daff6642e1781389b6 | [
"MIT"
] | null | null | null | """
Martin Kersner, m.kersner@gmail.com
seoulai.com
2018
Adapted by Gabriela B. to work with python 2.7 and ROS
"""
import random
import numpy as np
from base import Constants
from rules import Rules
def _encoding_property(constant_name):
    """Build a property exposing the encoding value of one piece constant.

    *constant_name* is the attribute name on ``Constants`` (e.g. "EMPTY")
    whose value keys into the instance's ``_encoding`` dictionary.
    """
    def _get(self):
        return self._encoding[getattr(self._constants, constant_name)]

    def _set(self, value):
        self._encoding[getattr(self._constants, constant_name)] = value

    return property(_get, _set)


class BoardEncoding(object):
    """Mutable mapping from checkers piece constants to integer codes."""

    def __init__(self):
        self._constants = Constants()
        consts = self._constants
        # Default integer codes for every cell state.
        self._encoding = {
            consts.EMPTY: 0,
            consts.DARK: 20,
            consts.DARK_KING: 21,
            consts.LIGHT: 10,
            consts.LIGHT_KING: 11,
        }

    def __getitem__(self, name):
        return self._encoding[name]

    empty = _encoding_property("EMPTY")
    dark = _encoding_property("DARK")
    dark_king = _encoding_property("DARK_KING")
    light = _encoding_property("LIGHT")
    light_king = _encoding_property("LIGHT_KING")
def board_list2numpy(
        board_list,
        encoding=None):
    """Convert the state of game (`board_list`) into 2D NumPy Array using `encoding`.

    Args:
        board_list: (List[List[Piece]]) State of the game.
        encoding: (BoardEncoding) Optional argument. If not given default encoding will be utilized.

    Returns:
        board_numpy: (np.array)
    """
    # Fix: the docstring always promised an optional encoding, but the
    # signature required it (and print_board called with one argument,
    # raising TypeError).  Supply the documented default here.
    if encoding is None:
        encoding = BoardEncoding()

    board_size = len(board_list)
    constants = Constants()
    # Start from an all-EMPTY board and overwrite occupied squares.
    board_numpy = encoding[constants.EMPTY] * np.ones((board_size, board_size))

    for row in range(board_size):
        for col in range(board_size):
            piece = board_list[row][col]
            if piece is not None:
                king = piece.king
                if piece.ptype == constants.LIGHT:
                    piece_type = constants.LIGHT_KING if king else constants.LIGHT
                else:  # DARK
                    piece_type = constants.DARK_KING if king else constants.DARK
                board_numpy[row][col] = encoding[piece_type]

    return board_numpy
def generate_random_move(
        board,
        ptype,
        board_size):
    """Pick a uniformly random valid move for `ptype` pieces; do not execute it.

    Args:
        board: (List[List[Piece]]) State of the game.
        ptype: (int) type of piece for which a random move is generated.
        board_size: (int) size of board.

    Returns:
        Tuple ``(from_row, from_col, to_row, to_col)``.
    """
    valid_moves = Rules.generate_valid_moves(board, ptype, board_size)
    # First choose a random origin square, then one of its destinations.
    from_row, from_col = random.choice(list(valid_moves.keys()))
    to_row, to_col = random.choice(valid_moves[(from_row, from_col)])
    return from_row, from_col, to_row, to_col
#new functions
def print_board(board_list):
    """Print the board as a numpy array, for debugging purposes.

    Args:
        board_list: (List[List[Piece]]) State of the game.
    """
    # Fix: board_list2numpy requires an encoding argument, so calling it
    # with a single argument raised TypeError.  Pass the default encoding.
    numpy_board = board_list2numpy(board_list, BoardEncoding())
    print(numpy_board)
| 26.496296 | 100 | 0.632933 | import random
import numpy as np
from base import Constants
from rules import Rules
class BoardEncoding(object):
def __init__(self):
self._constants = Constants()
self._encoding = {}
self.empty = 0
self.dark = 20
self.dark_king = 21
self.light = 10
self.light_king = 11
def __getitem__(self, name):
return self._encoding[name]
@property
def empty(self):
return self._encoding[self._constants.EMPTY]
@empty.setter
def empty(self, value):
self._encoding[self._constants.EMPTY] = value
@property
def dark(self):
return self._encoding[self._constants.DARK]
@dark.setter
def dark(self, value):
self._encoding[self._constants.DARK] = value
@property
def dark_king(self):
return self._encoding[self._constants.DARK_KING]
@dark_king.setter
def dark_king(self, value):
self._encoding[self._constants.DARK_KING] = value
@property
def light(self):
return self._encoding[self._constants.LIGHT]
@light.setter
def light(self, value):
self._encoding[self._constants.LIGHT] = value
@property
def light_king(self):
return self._encoding[self._constants.LIGHT_KING]
@light_king.setter
def light_king(self, value):
self._encoding[self._constants.LIGHT_KING] = value
def board_list2numpy(
board_list,
encoding) :
board_size = len(board_list)
constants = Constants()
board_numpy = encoding[constants.EMPTY] * np.ones((board_size, board_size))
for row in range(board_size):
for col in range(board_size):
if board_list[row][col] is not None:
ptype = board_list[row][col].ptype
king = board_list[row][col].king
if ptype == constants.LIGHT:
if king:
piece_type = constants.LIGHT_KING
else:
piece_type = constants.LIGHT
else:
if king:
piece_type = constants.DARK_KING
else:
piece_type = constants.DARK
board_numpy[row][col] = encoding[piece_type]
return board_numpy
def generate_random_move(
board,
ptype,
board_size):
valid_moves = Rules.generate_valid_moves(board, ptype, board_size)
rand_from_row, rand_from_col = random.choice(list(valid_moves.keys()))
rand_to_row, rand_to_col = random.choice(valid_moves[(rand_from_row, rand_from_col)])
return rand_from_row, rand_from_col, rand_to_row, rand_to_col
def print_board(board_list):
numpy_board=board_list2numpy(board_list)
print(numpy_board)
| true | true |
f72bad4faefc64d772b991b54a1874f2f71d5c3b | 1,267 | py | Python | modules/AppTheme.py | Fa1c0n35/RootTheBoxs | 4f2a9886c8eedca3039604b93929c8c09866115e | [
"Apache-2.0"
] | 1 | 2019-06-29T08:40:54.000Z | 2019-06-29T08:40:54.000Z | modules/AppTheme.py | Fa1c0n35/RootTheBoxs | 4f2a9886c8eedca3039604b93929c8c09866115e | [
"Apache-2.0"
] | null | null | null | modules/AppTheme.py | Fa1c0n35/RootTheBoxs | 4f2a9886c8eedca3039604b93929c8c09866115e | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Mar 14, 2012
@author: moloch
Copyright 2012 Root the Box
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from tornado.web import UIModule
from tornado.options import options
from models.Theme import Theme
class AppTheme(UIModule):
    """UI module that renders the include tags for the active CSS theme."""

    # Resolved once at import time; used whenever no per-user theme applies.
    theme = Theme.by_name(options.default_theme)

    def render(self, *args, **kwargs):
        """Render theme includes, honoring per-session user preferences."""
        theme_files = self.theme
        if options.allow_user_to_change_theme and self.handler.session is not None:
            # NOTE(review): assumes the session always carries a "theme"
            # entry when sessions are enabled -- confirm against session setup.
            theme_files = self.handler.session["theme"]
        return self.render_string("theme/theme.html", theme_files=theme_files)
| 30.902439 | 85 | 0.697711 |
from tornado.web import UIModule
from tornado.options import options
from models.Theme import Theme
class AppTheme(UIModule):
theme = Theme.by_name(options.default_theme)
def render(self, *args, **kwargs):
if options.allow_user_to_change_theme and (self.handler.session is not None):
return self.render_string(
"theme/theme.html", theme_files=self.handler.session["theme"]
)
else:
return self.render_string("theme/theme.html", theme_files=self.theme)
| true | true |
f72bad64f0a9f77a3272ef388210f662dc6820c7 | 749 | py | Python | smbl/prog/plugins/picard.py | karel-brinda/snakemake-lib | 5922fa2fc4060d86172e991361a1cceb0af51af8 | [
"MIT"
] | 26 | 2015-03-16T03:37:02.000Z | 2021-01-18T17:34:16.000Z | smbl/prog/plugins/picard.py | karel-brinda/smbl | 5922fa2fc4060d86172e991361a1cceb0af51af8 | [
"MIT"
] | 12 | 2015-02-05T10:57:16.000Z | 2016-06-07T18:09:57.000Z | smbl/prog/plugins/picard.py | karel-brinda/snakemake-lib | 5922fa2fc4060d86172e991361a1cceb0af51af8 | [
"MIT"
] | 6 | 2015-06-03T20:06:49.000Z | 2020-12-13T09:48:03.000Z | import smbl
import snakemake
import os
from ._program import *
PICARD = get_bin_file_path("picard.jar")
##########################################
##########################################
class Picard(Program):
    """Installation plugin for the Picard toolkit (single-jar release)."""

    @classmethod
    def get_installation_files(cls):
        """Files whose presence marks Picard as installed."""
        return [
            PICARD,
        ]

    @classmethod
    def install(cls):
        """Download the Picard release zip and install picard.jar."""
        ver = "1.140"
        fn = cls.download_file(
            "https://github.com/broadinstitute/picard/releases/download/{ver}/picard-tools-{ver}.zip".format(ver=ver),
            "picard.zip")
        # 'dest_dir' (was 'dir') avoids shadowing the built-in dir().
        dest_dir = os.path.dirname(fn)
        # -j flattens the archive so picard.jar lands next to the zip.
        smbl.utils.shell('(cd "{dir}" && unzip -j picard.zip) > /dev/null'.format(dir=dest_dir))
        cls.install_file("picard.jar", PICARD)

    @classmethod
    def supported_platforms(cls):
        """Platforms on which this installer is known to work."""
        return ["osx", "linux", "cygwin"]
import snakemake
import os
from ._program import *
PICARD = get_bin_file_path("picard.jar")
| true | true |
f72badcfec08b1a7904807f002fbd7e59e89878d | 2,519 | py | Python | ckan/tests/cli/test_config_tool.py | jbrown-xentity/ckan | ecd9ac6bc6cfb5bb4fcbec82c431f4564ef649ed | [
"BSD-3-Clause"
] | 2,805 | 2015-01-02T18:13:15.000Z | 2022-03-31T03:35:01.000Z | ckan/tests/cli/test_config_tool.py | jbrown-xentity/ckan | ecd9ac6bc6cfb5bb4fcbec82c431f4564ef649ed | [
"BSD-3-Clause"
] | 3,801 | 2015-01-02T11:05:36.000Z | 2022-03-31T19:24:37.000Z | ckan/tests/cli/test_config_tool.py | jbrown-xentity/ckan | ecd9ac6bc6cfb5bb4fcbec82c431f4564ef649ed | [
"BSD-3-Clause"
] | 1,689 | 2015-01-02T19:46:43.000Z | 2022-03-28T14:59:43.000Z | # -*- coding: utf-8 -*-
import os
import pytest
from ckan.cli.cli import ckan
from configparser import ConfigParser, NoOptionError
@pytest.fixture
def config_file(tmp_path):
    """Copy the config-tool template into a per-test temporary .ini file."""
    template = os.path.join(
        os.path.dirname(__file__),
        u'templates/config_tool.ini.tpl')
    dest = tmp_path / u'config.ini'
    with open(template, u'rb') as src:
        dest.write_bytes(src.read())
    return dest
def _parse(config_file):
    """Read *config_file* from disk and return the populated ConfigParser."""
    parser = ConfigParser()
    # ConfigParser.read accepts a single path as well as an iterable.
    parser.read(str(config_file))
    return parser
def test_config_no_params(cli, config_file):
    """config-tool must fail when invoked without any update arguments."""
    res = cli.invoke(ckan, [u'config-tool', str(config_file)])
    assert res.exit_code
def test_config_unset_debug(cli, config_file):
    """An option already present in the file is overwritten in place."""
    assert _parse(config_file).get(u'app:main', u'debug') == u'true'
    res = cli.invoke(ckan, [u'config-tool', str(config_file), u'debug=false'])
    assert not res.exit_code, res.output
    assert _parse(config_file).get(u'app:main', u'debug') == u'false'
def test_config_create_custom_debug(cli, config_file):
    """An option absent from the file is appended by the tool."""
    with pytest.raises(NoOptionError):
        _parse(config_file).get(u'app:main', u'custom_debug')
    res = cli.invoke(
        ckan, [u'config-tool', str(config_file), u'custom_debug=false'])
    assert not res.exit_code, res.output
    assert _parse(config_file).get(u'app:main', u'custom_debug') == u'false'
def test_config_custom_section(cli, config_file):
    """The -s flag targets a section other than the default app:main."""
    assert _parse(config_file).get(u'server:main', u'port') == u'5000'
    res = cli.invoke(
        ckan,
        [u'config-tool', str(config_file), u'-s', u'server:main', u'port=8000'])
    assert not res.exit_code, res.output
    assert _parse(config_file).get(u'server:main', u'port') == u'8000'
def test_merge_into_new_file(cli, config_file, tmp_path):
    """The -f flag merges updates into a new file, leaving the source intact."""
    dest = tmp_path / u'new_config.ini'
    dest.touch()
    assert _parse(config_file).get(u'app:main', u'debug') == u'true'
    res = cli.invoke(
        ckan,
        [u'config-tool', str(dest), u'-f', str(config_file), u'debug=false'])
    assert not res.exit_code, res.output
    # Source file keeps its original value; only the new file is changed.
    assert _parse(config_file).get(u'app:main', u'debug') == u'true'
    assert _parse(dest).get(u'app:main', u'debug') == u'false'
import os
import pytest
from ckan.cli.cli import ckan
from configparser import ConfigParser, NoOptionError
@pytest.fixture
def config_file(tmp_path):
    """Copy the config_tool template into *tmp_path* and return its path."""
    template = os.path.join(
        os.path.dirname(__file__),
        u'templates/config_tool.ini.tpl')
    target = tmp_path / u'config.ini'
    with open(template, u'rb') as source:
        target.write_bytes(source.read())
    return target
def _parse(config_file):
parser = ConfigParser()
parser.read([str(config_file)])
return parser
def test_config_no_params(cli, config_file):
    """config-tool with no option pairs reports failure."""
    res = cli.invoke(ckan, [u'config-tool', str(config_file)])
    assert res.exit_code
def test_config_unset_debug(cli, config_file):
    """Overwriting an existing option succeeds and is persisted."""
    assert _parse(config_file).get(u'app:main', u'debug') == u'true'
    res = cli.invoke(ckan, [u'config-tool', str(config_file), u'debug=false'])
    assert not res.exit_code, res.output
    assert _parse(config_file).get(u'app:main', u'debug') == u'false'
def test_config_create_custom_debug(cli, config_file):
    """A previously missing option is created by config-tool."""
    with pytest.raises(NoOptionError):
        _parse(config_file).get(u'app:main', u'custom_debug')
    res = cli.invoke(ckan, [u'config-tool', str(config_file), u'custom_debug=false'])
    assert not res.exit_code, res.output
    assert _parse(config_file).get(u'app:main', u'custom_debug') == u'false'
def test_config_custom_section(cli, config_file):
    """Options land in the section selected with -s."""
    assert _parse(config_file).get(u'server:main', u'port') == u'5000'
    res = cli.invoke(
        ckan,
        [u'config-tool', str(config_file), u'-s', u'server:main', u'port=8000'])
    assert not res.exit_code, res.output
    assert _parse(config_file).get(u'server:main', u'port') == u'8000'
def test_merge_into_new_file(cli, config_file, tmp_path):
    """Merging with -f writes the new file and keeps the source unchanged."""
    dest = tmp_path / u'new_config.ini'
    dest.touch()
    assert _parse(config_file).get(u'app:main', u'debug') == u'true'
    res = cli.invoke(
        ckan,
        [u'config-tool', str(dest), u'-f', str(config_file), u'debug=false'])
    assert not res.exit_code, res.output
    assert _parse(config_file).get(u'app:main', u'debug') == u'true'
    assert _parse(dest).get(u'app:main', u'debug') == u'false'
| true | true |
f72badfdcc7c9b8294a786b553d4648ee517ad6c | 12,634 | py | Python | tests/python/pants_test/util/test_contextutil.py | ghthor/pants | 450de702414f87f563081ddefaefd8a554de07a3 | [
"Apache-2.0"
] | null | null | null | tests/python/pants_test/util/test_contextutil.py | ghthor/pants | 450de702414f87f563081ddefaefd8a554de07a3 | [
"Apache-2.0"
] | null | null | null | tests/python/pants_test/util/test_contextutil.py | ghthor/pants | 450de702414f87f563081ddefaefd8a554de07a3 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import pstats
import shutil
import signal
import sys
import unittest
import uuid
import zipfile
from builtins import next, object, range, str
from contextlib import contextmanager
import mock
from pants.util.contextutil import (HardSystemExit, InvalidZipPath, Timer, environment_as,
exception_logging, hard_exit_handler, hermetic_environment_as,
maybe_profiled, open_zip, pushd, signal_handler_as, stdio_as,
temporary_dir, temporary_file)
from pants.util.process_handler import subprocess
# Shared kwargs for mock.patch: autospec the target and reject unknown attributes.
PATCH_OPTS = dict(autospec=True, spec_set=True)
class ContextutilTest(unittest.TestCase):
    """Unit tests for the context managers in pants.util.contextutil."""

    def test_empty_environment(self):
        # environment_as with no overrides must be a no-op.
        with environment_as():
            pass

    def test_override_single_variable(self):
        with temporary_file() as output:
            # test that the override takes place
            with environment_as(HORK='BORK'):
                subprocess.Popen([sys.executable, '-c', 'import os; print(os.environ["HORK"])'],
                                 stdout=output).wait()
                output.seek(0)
                self.assertEquals('BORK\n', output.read())
            # test that the variable is cleared
            with temporary_file() as new_output:
                subprocess.Popen([sys.executable, '-c', 'import os; print("HORK" in os.environ)'],
                                 stdout=new_output).wait()
                new_output.seek(0)
                self.assertEquals('False\n', new_output.read())

    def test_environment_negation(self):
        with temporary_file() as output:
            with environment_as(HORK='BORK'):
                with environment_as(HORK=None):
                    # test that the variable is cleared
                    subprocess.Popen([sys.executable, '-c', 'import os; print("HORK" in os.environ)'],
                                     stdout=output).wait()
                    output.seek(0)
                    self.assertEquals('False\n', output.read())

    def test_hermetic_environment(self):
        # A hermetic environment with no overrides hides existing variables.
        self.assertIn('USER', os.environ)
        with hermetic_environment_as(**{}):
            self.assertNotIn('USER', os.environ)

    def test_hermetic_environment_subprocesses(self):
        # Subprocesses spawned inside the hermetic scope see only the overrides.
        self.assertIn('USER', os.environ)
        with hermetic_environment_as(**dict(AAA='333')):
            output = subprocess.check_output('env', shell=True)
            self.assertNotIn('USER=', output)
            self.assertIn('AAA', os.environ)
            self.assertEquals(os.environ['AAA'], '333')
        self.assertIn('USER', os.environ)
        self.assertNotIn('AAA', os.environ)

    def test_hermetic_environment_unicode(self):
        UNICODE_CHAR = '¡'
        ENCODED_CHAR = UNICODE_CHAR.encode('utf-8')
        with environment_as(**dict(XXX=UNICODE_CHAR)):
            self.assertEquals(os.environ['XXX'], ENCODED_CHAR)
            with hermetic_environment_as(**dict(AAA=UNICODE_CHAR)):
                self.assertIn('AAA', os.environ)
                self.assertEquals(os.environ['AAA'], ENCODED_CHAR)
            self.assertEquals(os.environ['XXX'], ENCODED_CHAR)

    def test_simple_pushd(self):
        pre_cwd = os.getcwd()
        with temporary_dir() as tempdir:
            with pushd(tempdir) as path:
                self.assertEquals(tempdir, path)
                self.assertEquals(os.path.realpath(tempdir), os.getcwd())
            self.assertEquals(pre_cwd, os.getcwd())
        self.assertEquals(pre_cwd, os.getcwd())

    def test_nested_pushd(self):
        pre_cwd = os.getcwd()
        with temporary_dir() as tempdir1:
            with pushd(tempdir1):
                self.assertEquals(os.path.realpath(tempdir1), os.getcwd())
                with temporary_dir(root_dir=tempdir1) as tempdir2:
                    with pushd(tempdir2):
                        self.assertEquals(os.path.realpath(tempdir2), os.getcwd())
                    self.assertEquals(os.path.realpath(tempdir1), os.getcwd())
                self.assertEquals(os.path.realpath(tempdir1), os.getcwd())
            self.assertEquals(pre_cwd, os.getcwd())
        self.assertEquals(pre_cwd, os.getcwd())

    def test_temporary_file_no_args(self):
        with temporary_file() as fp:
            self.assertTrue(os.path.exists(fp.name), 'Temporary file should exist within the context.')
        self.assertTrue(os.path.exists(fp.name) == False,
                        'Temporary file should not exist outside of the context.')

    def test_temporary_file_without_cleanup(self):
        with temporary_file(cleanup=False) as fp:
            self.assertTrue(os.path.exists(fp.name), 'Temporary file should exist within the context.')
        self.assertTrue(os.path.exists(fp.name),
                        'Temporary file should exist outside of context if cleanup=False.')
        os.unlink(fp.name)

    def test_temporary_file_within_other_dir(self):
        with temporary_dir() as path:
            with temporary_file(root_dir=path) as f:
                self.assertTrue(os.path.realpath(f.name).startswith(os.path.realpath(path)),
                                'file should be created in root_dir if specified.')

    def test_temporary_dir_no_args(self):
        with temporary_dir() as path:
            self.assertTrue(os.path.exists(path), 'Temporary dir should exist within the context.')
            self.assertTrue(os.path.isdir(path), 'Temporary dir should be a dir and not a file.')
        self.assertFalse(os.path.exists(path), 'Temporary dir should not exist outside of the context.')

    def test_temporary_dir_without_cleanup(self):
        with temporary_dir(cleanup=False) as path:
            self.assertTrue(os.path.exists(path), 'Temporary dir should exist within the context.')
        self.assertTrue(os.path.exists(path),
                        'Temporary dir should exist outside of context if cleanup=False.')
        shutil.rmtree(path)

    def test_temporary_dir_with_root_dir(self):
        with temporary_dir() as path1:
            with temporary_dir(root_dir=path1) as path2:
                self.assertTrue(os.path.realpath(path2).startswith(os.path.realpath(path1)),
                                'Nested temporary dir should be created within outer dir.')

    def test_timer(self):
        # Deterministic stand-in for the wall clock.
        class FakeClock(object):

            def __init__(self):
                self._time = 0.0

            def time(self):
                ret = self._time
                self._time += 0.0001  # Force a little time to elapse.
                return ret

            def sleep(self, duration):
                self._time += duration

        clock = FakeClock()

        # Note: to test with the real system clock, use this instead:
        # import time
        # clock = time

        with Timer(clock=clock) as t:
            self.assertLess(t.start, clock.time())
            self.assertGreater(t.elapsed, 0)
            clock.sleep(0.1)
            self.assertGreater(t.elapsed, 0.1)
            clock.sleep(0.1)
            self.assertTrue(t.finish is None)
        self.assertGreater(t.elapsed, 0.2)
        self.assertLess(t.finish, clock.time())

    def test_open_zipDefault(self):
        with temporary_dir() as tempdir:
            with open_zip(os.path.join(tempdir, 'test'), 'w') as zf:
                self.assertTrue(zf._allowZip64)

    def test_open_zipTrue(self):
        with temporary_dir() as tempdir:
            with open_zip(os.path.join(tempdir, 'test'), 'w', allowZip64=True) as zf:
                self.assertTrue(zf._allowZip64)

    def test_open_zipFalse(self):
        with temporary_dir() as tempdir:
            with open_zip(os.path.join(tempdir, 'test'), 'w', allowZip64=False) as zf:
                self.assertFalse(zf._allowZip64)

    def test_open_zip_raises_exception_on_falsey_paths(self):
        falsey = (None, '', False)
        for invalid in falsey:
            with self.assertRaises(InvalidZipPath):
                next(open_zip(invalid).gen)

    def test_open_zip_returns_realpath_on_badzipfile(self):
        # In case of file corruption, deleting a Pants-constructed symlink would not resolve the error.
        with temporary_file() as not_zip:
            with temporary_dir() as tempdir:
                file_symlink = os.path.join(tempdir, 'foo')
                os.symlink(not_zip.name, file_symlink)
                self.assertEquals(os.path.realpath(file_symlink), os.path.realpath(not_zip.name))
                with self.assertRaisesRegexp(zipfile.BadZipfile, r'{}'.format(not_zip.name)):
                    next(open_zip(file_symlink).gen)

    @contextmanager
    def _stdio_as_tempfiles(self):
        """Harness to replace `sys.std*` with tempfiles.

        Validates that all files are read/written/flushed correctly, and acts as a
        contextmanager to allow for recursive tests.
        """

        # Prefix contents written within this instance with a unique string to differentiate
        # them from other instances.
        uuid_str = str(uuid.uuid4())

        def u(string):
            return '{}#{}'.format(uuid_str, string)

        stdin_data = u('stdio')
        stdout_data = u('stdout')
        stderr_data = u('stderr')

        with temporary_file() as tmp_stdin,\
             temporary_file() as tmp_stdout,\
             temporary_file() as tmp_stderr:
            print(stdin_data, file=tmp_stdin)
            tmp_stdin.seek(0)
            # Read prepared content from stdin, and write content to stdout/stderr.
            with stdio_as(stdout_fd=tmp_stdout.fileno(),
                          stderr_fd=tmp_stderr.fileno(),
                          stdin_fd=tmp_stdin.fileno()):
                self.assertEquals(sys.stdin.fileno(), 0)
                self.assertEquals(sys.stdout.fileno(), 1)
                self.assertEquals(sys.stderr.fileno(), 2)

                self.assertEquals(stdin_data, sys.stdin.read().strip())
                print(stdout_data, file=sys.stdout)
                yield
                print(stderr_data, file=sys.stderr)

            tmp_stdout.seek(0)
            tmp_stderr.seek(0)
            self.assertEquals(stdout_data, tmp_stdout.read().strip())
            self.assertEquals(stderr_data, tmp_stderr.read().strip())

    def test_stdio_as(self):
        self.assertTrue(sys.stderr.fileno() > 2,
                        "Expected a pseudofile as stderr, got: {}".format(sys.stderr))
        old_stdout, old_stderr, old_stdin = sys.stdout, sys.stderr, sys.stdin
        # The first level tests that when `sys.std*` are file-likes (in particular, the ones set up in
        # pytest's harness) rather than actual files, we stash and restore them properly.
        with self._stdio_as_tempfiles():
            # The second level stashes the first level's actual file objects and then re-opens them.
            with self._stdio_as_tempfiles():
                pass

            # Validate that after the second level completes, the first level still sees valid
            # fds on `sys.std*`.
            self.assertEquals(sys.stdin.fileno(), 0)
            self.assertEquals(sys.stdout.fileno(), 1)
            self.assertEquals(sys.stderr.fileno(), 2)
        self.assertEquals(sys.stdout, old_stdout)
        self.assertEquals(sys.stderr, old_stderr)
        self.assertEquals(sys.stdin, old_stdin)

    def test_stdio_as_dev_null(self):
        # Capture output to tempfiles.
        with self._stdio_as_tempfiles():
            # Read/write from/to `/dev/null`, which will be validated by the harness as not
            # affecting the tempfiles.
            with stdio_as(stdout_fd=-1, stderr_fd=-1, stdin_fd=-1):
                self.assertEquals(b'', sys.stdin.read())
                print('garbage', file=sys.stdout)
                print('garbage', file=sys.stderr)

    def test_signal_handler_as(self):
        mock_initial_handler = 1
        mock_new_handler = 2
        with mock.patch('signal.signal', **PATCH_OPTS) as mock_signal:
            mock_signal.return_value = mock_initial_handler
            try:
                with signal_handler_as(signal.SIGUSR2, mock_new_handler):
                    raise NotImplementedError('blah')
            except NotImplementedError:
                pass
            # Installed on entry, restored on exit -- even when the body raises.
            self.assertEquals(mock_signal.call_count, 2)
            mock_signal.assert_has_calls([
                mock.call(signal.SIGUSR2, mock_new_handler),
                mock.call(signal.SIGUSR2, mock_initial_handler)
            ])

    def test_permissions(self):
        with temporary_file(permissions=0o700) as f:
            self.assertEquals(0o700, os.stat(f.name)[0] & 0o777)

        with temporary_dir(permissions=0o644) as path:
            self.assertEquals(0o644, os.stat(path)[0] & 0o777)

    def test_exception_logging(self):
        fake_logger = mock.Mock()

        with self.assertRaises(AssertionError):
            with exception_logging(fake_logger, 'error!'):
                assert True is False

        fake_logger.exception.assert_called_once_with('error!')

    def test_maybe_profiled(self):
        with temporary_dir() as td:
            profile_path = os.path.join(td, 'profile.prof')

            with maybe_profiled(profile_path):
                for _ in range(5):
                    print('test')

            # Ensure the profile data was written.
            self.assertTrue(os.path.exists(profile_path))

            # Ensure the profile data is valid.
            pstats.Stats(profile_path).print_stats()

    def test_hard_exit_handler(self):
        with mock.patch('os._exit', **PATCH_OPTS) as mock_exit:
            with hard_exit_handler():
                raise HardSystemExit()

        mock_exit.assert_called_once_with(0)
| 37.93994 | 100 | 0.679516 |
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import pstats
import shutil
import signal
import sys
import unittest
import uuid
import zipfile
from builtins import next, object, range, str
from contextlib import contextmanager
import mock
from pants.util.contextutil import (HardSystemExit, InvalidZipPath, Timer, environment_as,
exception_logging, hard_exit_handler, hermetic_environment_as,
maybe_profiled, open_zip, pushd, signal_handler_as, stdio_as,
temporary_dir, temporary_file)
from pants.util.process_handler import subprocess
# Shared kwargs for mock.patch: autospec the target and reject unknown attributes.
PATCH_OPTS = dict(autospec=True, spec_set=True)
class ContextutilTest(unittest.TestCase):
    """Unit tests for the context managers in pants.util.contextutil."""

    def test_empty_environment(self):
        # environment_as with no overrides must be a no-op.
        with environment_as():
            pass

    def test_override_single_variable(self):
        with temporary_file() as output:
            # Overridden variable is visible to a child process...
            with environment_as(HORK='BORK'):
                subprocess.Popen([sys.executable, '-c', 'import os; print(os.environ["HORK"])'],
                                 stdout=output).wait()
                output.seek(0)
                self.assertEquals('BORK\n', output.read())
            # ...and cleared again once the context exits.
            with temporary_file() as new_output:
                subprocess.Popen([sys.executable, '-c', 'import os; print("HORK" in os.environ)'],
                                 stdout=new_output).wait()
                new_output.seek(0)
                self.assertEquals('False\n', new_output.read())

    def test_environment_negation(self):
        with temporary_file() as output:
            with environment_as(HORK='BORK'):
                with environment_as(HORK=None):
                    # A None override removes the variable from the environment.
                    subprocess.Popen([sys.executable, '-c', 'import os; print("HORK" in os.environ)'],
                                     stdout=output).wait()
                    output.seek(0)
                    self.assertEquals('False\n', output.read())

    def test_hermetic_environment(self):
        # A hermetic environment with no overrides hides existing variables.
        self.assertIn('USER', os.environ)
        with hermetic_environment_as(**{}):
            self.assertNotIn('USER', os.environ)

    def test_hermetic_environment_subprocesses(self):
        # Subprocesses spawned inside the hermetic scope see only the overrides.
        self.assertIn('USER', os.environ)
        with hermetic_environment_as(**dict(AAA='333')):
            output = subprocess.check_output('env', shell=True)
            self.assertNotIn('USER=', output)
            self.assertIn('AAA', os.environ)
            self.assertEquals(os.environ['AAA'], '333')
        self.assertIn('USER', os.environ)
        self.assertNotIn('AAA', os.environ)

    def test_hermetic_environment_unicode(self):
        UNICODE_CHAR = '¡'
        ENCODED_CHAR = UNICODE_CHAR.encode('utf-8')
        with environment_as(**dict(XXX=UNICODE_CHAR)):
            self.assertEquals(os.environ['XXX'], ENCODED_CHAR)
            with hermetic_environment_as(**dict(AAA=UNICODE_CHAR)):
                self.assertIn('AAA', os.environ)
                self.assertEquals(os.environ['AAA'], ENCODED_CHAR)
            self.assertEquals(os.environ['XXX'], ENCODED_CHAR)

    def test_simple_pushd(self):
        pre_cwd = os.getcwd()
        with temporary_dir() as tempdir:
            with pushd(tempdir) as path:
                self.assertEquals(tempdir, path)
                self.assertEquals(os.path.realpath(tempdir), os.getcwd())
            self.assertEquals(pre_cwd, os.getcwd())
        self.assertEquals(pre_cwd, os.getcwd())

    def test_nested_pushd(self):
        pre_cwd = os.getcwd()
        with temporary_dir() as tempdir1:
            with pushd(tempdir1):
                self.assertEquals(os.path.realpath(tempdir1), os.getcwd())
                with temporary_dir(root_dir=tempdir1) as tempdir2:
                    with pushd(tempdir2):
                        self.assertEquals(os.path.realpath(tempdir2), os.getcwd())
                    self.assertEquals(os.path.realpath(tempdir1), os.getcwd())
                self.assertEquals(os.path.realpath(tempdir1), os.getcwd())
            self.assertEquals(pre_cwd, os.getcwd())
        self.assertEquals(pre_cwd, os.getcwd())

    def test_temporary_file_no_args(self):
        with temporary_file() as fp:
            self.assertTrue(os.path.exists(fp.name), 'Temporary file should exist within the context.')
        self.assertTrue(os.path.exists(fp.name) == False,
                        'Temporary file should not exist outside of the context.')

    def test_temporary_file_without_cleanup(self):
        with temporary_file(cleanup=False) as fp:
            self.assertTrue(os.path.exists(fp.name), 'Temporary file should exist within the context.')
        self.assertTrue(os.path.exists(fp.name),
                        'Temporary file should exist outside of context if cleanup=False.')
        os.unlink(fp.name)

    def test_temporary_file_within_other_dir(self):
        with temporary_dir() as path:
            with temporary_file(root_dir=path) as f:
                self.assertTrue(os.path.realpath(f.name).startswith(os.path.realpath(path)),
                                'file should be created in root_dir if specified.')

    def test_temporary_dir_no_args(self):
        with temporary_dir() as path:
            self.assertTrue(os.path.exists(path), 'Temporary dir should exist within the context.')
            self.assertTrue(os.path.isdir(path), 'Temporary dir should be a dir and not a file.')
        self.assertFalse(os.path.exists(path), 'Temporary dir should not exist outside of the context.')

    def test_temporary_dir_without_cleanup(self):
        with temporary_dir(cleanup=False) as path:
            self.assertTrue(os.path.exists(path), 'Temporary dir should exist within the context.')
        self.assertTrue(os.path.exists(path),
                        'Temporary dir should exist outside of context if cleanup=False.')
        shutil.rmtree(path)

    def test_temporary_dir_with_root_dir(self):
        with temporary_dir() as path1:
            with temporary_dir(root_dir=path1) as path2:
                self.assertTrue(os.path.realpath(path2).startswith(os.path.realpath(path1)),
                                'Nested temporary dir should be created within outer dir.')

    def test_timer(self):
        # Deterministic stand-in for the wall clock.
        class FakeClock(object):

            def __init__(self):
                self._time = 0.0

            def time(self):
                ret = self._time
                # Advance slightly on every read so successive readings differ.
                self._time += 0.0001
                return ret

            def sleep(self, duration):
                self._time += duration

        clock = FakeClock()
        with Timer(clock=clock) as t:
            self.assertLess(t.start, clock.time())
            self.assertGreater(t.elapsed, 0)
            clock.sleep(0.1)
            self.assertGreater(t.elapsed, 0.1)
            clock.sleep(0.1)
            self.assertTrue(t.finish is None)
        self.assertGreater(t.elapsed, 0.2)
        self.assertLess(t.finish, clock.time())

    def test_open_zipDefault(self):
        with temporary_dir() as tempdir:
            with open_zip(os.path.join(tempdir, 'test'), 'w') as zf:
                self.assertTrue(zf._allowZip64)

    def test_open_zipTrue(self):
        with temporary_dir() as tempdir:
            with open_zip(os.path.join(tempdir, 'test'), 'w', allowZip64=True) as zf:
                self.assertTrue(zf._allowZip64)

    def test_open_zipFalse(self):
        with temporary_dir() as tempdir:
            with open_zip(os.path.join(tempdir, 'test'), 'w', allowZip64=False) as zf:
                self.assertFalse(zf._allowZip64)

    def test_open_zip_raises_exception_on_falsey_paths(self):
        falsey = (None, '', False)
        for invalid in falsey:
            with self.assertRaises(InvalidZipPath):
                next(open_zip(invalid).gen)

    def test_open_zip_returns_realpath_on_badzipfile(self):
        # The BadZipfile error must mention the real path, not the symlink.
        with temporary_file() as not_zip:
            with temporary_dir() as tempdir:
                file_symlink = os.path.join(tempdir, 'foo')
                os.symlink(not_zip.name, file_symlink)
                self.assertEquals(os.path.realpath(file_symlink), os.path.realpath(not_zip.name))
                with self.assertRaisesRegexp(zipfile.BadZipfile, r'{}'.format(not_zip.name)):
                    next(open_zip(file_symlink).gen)

    @contextmanager
    def _stdio_as_tempfiles(self):
        """Harness replacing `sys.std*` with tempfiles; nestable for recursive tests."""
        # Unique prefix differentiates content written by nested instances.
        uuid_str = str(uuid.uuid4())

        def u(string):
            return '{}#{}'.format(uuid_str, string)

        stdin_data = u('stdio')
        stdout_data = u('stdout')
        stderr_data = u('stderr')
        with temporary_file() as tmp_stdin,\
             temporary_file() as tmp_stdout,\
             temporary_file() as tmp_stderr:
            print(stdin_data, file=tmp_stdin)
            tmp_stdin.seek(0)
            # Read prepared content from stdin, and write content to stdout/stderr.
            with stdio_as(stdout_fd=tmp_stdout.fileno(),
                          stderr_fd=tmp_stderr.fileno(),
                          stdin_fd=tmp_stdin.fileno()):
                self.assertEquals(sys.stdin.fileno(), 0)
                self.assertEquals(sys.stdout.fileno(), 1)
                self.assertEquals(sys.stderr.fileno(), 2)
                self.assertEquals(stdin_data, sys.stdin.read().strip())
                print(stdout_data, file=sys.stdout)
                yield
                print(stderr_data, file=sys.stderr)
            tmp_stdout.seek(0)
            tmp_stderr.seek(0)
            self.assertEquals(stdout_data, tmp_stdout.read().strip())
            self.assertEquals(stderr_data, tmp_stderr.read().strip())

    def test_stdio_as(self):
        self.assertTrue(sys.stderr.fileno() > 2,
                        "Expected a pseudofile as stderr, got: {}".format(sys.stderr))
        old_stdout, old_stderr, old_stdin = sys.stdout, sys.stderr, sys.stdin
        with self._stdio_as_tempfiles():
            # The second level stashes the first level's actual file objects and then re-opens them.
            with self._stdio_as_tempfiles():
                pass
            # After the second level completes the first level still sees valid fds.
            self.assertEquals(sys.stdin.fileno(), 0)
            self.assertEquals(sys.stdout.fileno(), 1)
            self.assertEquals(sys.stderr.fileno(), 2)
        self.assertEquals(sys.stdout, old_stdout)
        self.assertEquals(sys.stderr, old_stderr)
        self.assertEquals(sys.stdin, old_stdin)

    def test_stdio_as_dev_null(self):
        with self._stdio_as_tempfiles():
            # fd -1 routes to /dev/null; the harness verifies the tempfiles are untouched.
            with stdio_as(stdout_fd=-1, stderr_fd=-1, stdin_fd=-1):
                self.assertEquals(b'', sys.stdin.read())
                print('garbage', file=sys.stdout)
                print('garbage', file=sys.stderr)

    def test_signal_handler_as(self):
        mock_initial_handler = 1
        mock_new_handler = 2
        with mock.patch('signal.signal', **PATCH_OPTS) as mock_signal:
            mock_signal.return_value = mock_initial_handler
            try:
                with signal_handler_as(signal.SIGUSR2, mock_new_handler):
                    raise NotImplementedError('blah')
            except NotImplementedError:
                pass
            # Installed on entry, restored on exit -- even when the body raises.
            self.assertEquals(mock_signal.call_count, 2)
            mock_signal.assert_has_calls([
                mock.call(signal.SIGUSR2, mock_new_handler),
                mock.call(signal.SIGUSR2, mock_initial_handler)
            ])

    def test_permissions(self):
        with temporary_file(permissions=0o700) as f:
            self.assertEquals(0o700, os.stat(f.name)[0] & 0o777)
        with temporary_dir(permissions=0o644) as path:
            self.assertEquals(0o644, os.stat(path)[0] & 0o777)

    def test_exception_logging(self):
        fake_logger = mock.Mock()
        with self.assertRaises(AssertionError):
            with exception_logging(fake_logger, 'error!'):
                assert True is False
        fake_logger.exception.assert_called_once_with('error!')

    def test_maybe_profiled(self):
        with temporary_dir() as td:
            profile_path = os.path.join(td, 'profile.prof')
            with maybe_profiled(profile_path):
                for _ in range(5):
                    print('test')
            # Profile data was written and is parseable.
            self.assertTrue(os.path.exists(profile_path))
            pstats.Stats(profile_path).print_stats()

    def test_hard_exit_handler(self):
        with mock.patch('os._exit', **PATCH_OPTS) as mock_exit:
            with hard_exit_handler():
                raise HardSystemExit()
        mock_exit.assert_called_once_with(0)
| true | true |
f72bae6a1e0213eb229c54b75687299a18759829 | 1,090 | py | Python | IntroProPython/aula10-poo/ex10_9.py | SweydAbdul/estudos-python | b052708d0566a0afb9a1c04d035467d45f820879 | [
"MIT"
] | null | null | null | IntroProPython/aula10-poo/ex10_9.py | SweydAbdul/estudos-python | b052708d0566a0afb9a1c04d035467d45f820879 | [
"MIT"
] | null | null | null | IntroProPython/aula10-poo/ex10_9.py | SweydAbdul/estudos-python | b052708d0566a0afb9a1c04d035467d45f820879 | [
"MIT"
] | null | null | null | #Nilo soluction
class Estado:
    """A Brazilian state that aggregates the cities registered in it."""

    def __init__(self, nome, sigla):
        self.nome = nome
        self.sigla = sigla
        self.cidades = []

    def adiciona_cidades(self, cidade):
        """Register *cidade* in this state and back-link it via `estado`."""
        cidade.estado = self
        self.cidades.append(cidade)

    def populacao(self):
        """Total population of the state: sum over all registered cities."""
        total = 0
        for cidade in self.cidades:
            total += cidade.populacao
        return total
class Cidade:
    """A city with a name, a population count and an optional owning state."""

    def __init__(self, nome, populacao):
        self.nome = nome
        self.populacao = populacao
        # Filled in by Estado.adiciona_cidades when the city is registered.
        self.estado = None

    def __str__(self):
        return f'''Cidade (nome={self.nome}, populacao={self.populacao},
 estado={self.estado})'''
# Population figures taken from Wikipedia (IBGE 2012 estimate).
am = Estado('Amazonas', 'AM')
for nome_cidade, habitantes in [('manaus', 1861838),
                                ('Parintins', 103828),
                                ('Itacoatiara', 89064)]:
    am.adiciona_cidades(Cidade(nome_cidade, habitantes))
for estado in [am]:
    print(f'Estado: {estado.nome} Sigla: {estado.sigla}')
    for cidade in estado.cidades:
        print(f'Cidade: {cidade.nome}, Populacao: {cidade.populacao}')
    print(f'Populacaodo Estado: {estado.populacao()}\n')
class Estado:
    """A state holding its registered cities and aggregate population."""

    def __init__(self, nome, sigla):
        self.nome = nome
        self.sigla = sigla
        self.cidades = []

    def adiciona_cidades(self, cidade):
        """Append *cidade* to this state, setting its back-reference."""
        cidade.estado = self
        self.cidades.append(cidade)

    def populacao(self):
        """Sum of the populations of every registered city."""
        return sum(c.populacao for c in self.cidades)
class Cidade:
    """A named city; `estado` is assigned when registered with an Estado."""

    def __init__(self, nome, populacao):
        self.nome = nome
        self.populacao = populacao
        self.estado = None

    def __str__(self):
        return f'''Cidade (nome={self.nome}, populacao={self.populacao},
 estado={self.estado})'''
# Build the Amazonas state with its three largest cities and print a report.
am = Estado('Amazonas', 'AM')
am.adiciona_cidades(Cidade('manaus', 1861838))
am.adiciona_cidades(Cidade('Parintins', 103828))
am.adiciona_cidades(Cidade('Itacoatiara', 89064))
estados = [am]
for estado in estados:
    print(f'Estado: {estado.nome} Sigla: {estado.sigla}')
    for cidade in estado.cidades:
        print(f'Cidade: {cidade.nome}, Populacao: {cidade.populacao}')
    print(f'Populacaodo Estado: {estado.populacao()}\n')
f72baec4eeaab524e7f16dca567ab548d635f3bb | 3,949 | py | Python | samples/basic/crud/models/cisco-ios-xr/Cisco-IOS-XR-shellutil-oper/nc-read-xr-shellutil-oper-20-ydk.py | maccioni/ydk-py-samples | d1758694bef97327c5477e65649326c7595ce499 | [
"Apache-2.0"
] | 104 | 2016-03-15T17:04:01.000Z | 2021-12-31T06:09:35.000Z | samples/basic/crud/models/cisco-ios-xr/Cisco-IOS-XR-shellutil-oper/nc-read-xr-shellutil-oper-20-ydk.py | https-maxus-github-com/ydk-py-samples | 1ad6cc2b798f358ff835df93d12924df308b85fc | [
"Apache-2.0"
] | 15 | 2016-03-15T23:09:47.000Z | 2020-08-13T12:13:18.000Z | samples/basic/crud/models/cisco-ios-xr/Cisco-IOS-XR-shellutil-oper/nc-read-xr-shellutil-oper-20-ydk.py | https-maxus-github-com/ydk-py-samples | 1ad6cc2b798f358ff835df93d12924df308b85fc | [
"Apache-2.0"
] | 87 | 2016-04-15T16:59:23.000Z | 2021-09-18T18:05:47.000Z | #!/usr/bin/env python
#
# Copyright 2016 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Read all data for model Cisco-IOS-XR-shellutil-oper.
usage: nc-read-xr-shellutil-oper-20-ydk.py [-h] [-v] device
positional arguments:
device NETCONF device (ssh://user:password@host:port)
optional arguments:
-h, --help show this help message and exit
-v, --verbose print debugging messages
"""
from argparse import ArgumentParser
from urlparse import urlparse
from ydk.services import CRUDService
from ydk.providers import NetconfServiceProvider
from ydk.models.cisco_ios_xr import Cisco_IOS_XR_shellutil_oper \
as xr_shellutil_oper
import datetime
import textwrap
import logging
def process_system_time(system_time):
    """Render a Cisco-IOS-XR SystemTime object as a human-readable report.

    Args:
        system_time: populated ``Cisco_IOS_XR_shellutil_oper.SystemTime``
            instance; its ``clock`` and ``uptime`` sub-objects are read.

    Returns:
        A multi-line string with host name, system time, date, time source
        and uptime.
    """
    # format string for system time
    show_system_time = textwrap.dedent("""
        Host: {host}
        System time: {time} {tzone} {date}
        Time source: {source}
        System uptime: {uptime}
        """).strip()
    # create time object; the device reports milliseconds while datetime.time
    # expects microseconds, so multiply by 1000 (the previous `/ 1000`
    # truncated the fraction to 0 on Python 2 and raised TypeError on
    # Python 3 where it produces a float)
    clock_time = datetime.time(system_time.clock.hour,
                               system_time.clock.minute,
                               system_time.clock.second,
                               system_time.clock.millisecond * 1000)
    # create date object
    clock_date = datetime.date(system_time.clock.year,
                               system_time.clock.month,
                               system_time.clock.day)
    # convert uptime from seconds
    clock_delta = datetime.timedelta(seconds=system_time.uptime.uptime)
    # return formatted string
    return(show_system_time.format(host=system_time.uptime.host_name,
                                   time=clock_time,
                                   tzone=system_time.clock.time_zone,
                                   date=clock_date,
                                   source=system_time.clock.time_source.name,
                                   uptime=clock_delta))
# Script entry point: parse CLI arguments, connect to the device over
# NETCONF, read its system time and print a formatted report.
if __name__ == "__main__":
    """Execute main program."""
    parser = ArgumentParser()
    parser.add_argument("-v", "--verbose", help="print debugging messages",
                        action="store_true")
    parser.add_argument("device",
                        help="NETCONF device (ssh://user:password@host:port)")
    args = parser.parse_args()
    device = urlparse(args.device)

    # log debug messages if verbose argument specified
    if args.verbose:
        logger = logging.getLogger("ydk")
        logger.setLevel(logging.INFO)
        handler = logging.StreamHandler()
        formatter = logging.Formatter(("%(asctime)s - %(name)s - "
                                       "%(levelname)s - %(message)s"))
        handler.setFormatter(formatter)
        logger.addHandler(handler)

    # create NETCONF provider from the parsed URL components
    provider = NetconfServiceProvider(address=device.hostname,
                                      port=device.port,
                                      username=device.username,
                                      password=device.password,
                                      protocol=device.scheme)
    # create CRUD service
    crud = CRUDService()

    system_time = xr_shellutil_oper.SystemTime()  # create object

    # read data from NETCONF device
    system_time = crud.read(provider, system_time)
    print(process_system_time(system_time))  # process object data

    exit()
# End of script
| 34.946903 | 78 | 0.622436 |
from argparse import ArgumentParser
from urlparse import urlparse
from ydk.services import CRUDService
from ydk.providers import NetconfServiceProvider
from ydk.models.cisco_ios_xr import Cisco_IOS_XR_shellutil_oper \
as xr_shellutil_oper
import datetime
import textwrap
import logging
def process_system_time(system_time):
    """Format a SystemTime operational object into a readable summary.

    Args:
        system_time: ``Cisco_IOS_XR_shellutil_oper.SystemTime`` with
            populated ``clock`` and ``uptime`` sub-objects.

    Returns:
        Multi-line string: host, system time/zone/date, source and uptime.
    """
    # Template for the rendered report.
    show_system_time = textwrap.dedent("""
        Host: {host}
        System time: {time} {tzone} {date}
        Time source: {source}
        System uptime: {uptime}
        """).strip()
    # Milliseconds from the device become microseconds for datetime.time;
    # the original `/ 1000` lost the sub-second part (and breaks on Py3).
    clock_time = datetime.time(system_time.clock.hour,
                               system_time.clock.minute,
                               system_time.clock.second,
                               system_time.clock.millisecond * 1000)
    clock_date = datetime.date(system_time.clock.year,
                               system_time.clock.month,
                               system_time.clock.day)
    # Uptime is reported in seconds.
    clock_delta = datetime.timedelta(seconds=system_time.uptime.uptime)
    return(show_system_time.format(host=system_time.uptime.host_name,
                                   time=clock_time,
                                   tzone=system_time.clock.time_zone,
                                   date=clock_date,
                                   source=system_time.clock.time_source.name,
                                   uptime=clock_delta))
# Script entry point: parse CLI arguments, open a NETCONF session and
# print the device's system time.
if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("-v", "--verbose", help="print debugging messages",
                        action="store_true")
    parser.add_argument("device",
                        help="NETCONF device (ssh://user:password@host:port)")
    args = parser.parse_args()
    device = urlparse(args.device)
    # Attach a stream handler to the ydk logger when -v is given.
    if args.verbose:
        logger = logging.getLogger("ydk")
        logger.setLevel(logging.INFO)
        handler = logging.StreamHandler()
        formatter = logging.Formatter(("%(asctime)s - %(name)s - "
                                       "%(levelname)s - %(message)s"))
        handler.setFormatter(formatter)
        logger.addHandler(handler)
    # Build the NETCONF provider from the parsed URL components.
    provider = NetconfServiceProvider(address=device.hostname,
                                      port=device.port,
                                      username=device.username,
                                      password=device.password,
                                      protocol=device.scheme)
    crud = CRUDService()
    system_time = xr_shellutil_oper.SystemTime()
    # Fetch the operational data and render it.
    system_time = crud.read(provider, system_time)
    print(process_system_time(system_time))
    exit()
| true | true |
f72baf0f5132719036a4523e688ff00bace31589 | 6,130 | py | Python | scrape_mars.py | darrenluc93/web-scraping-challenge | 50a9a21161ab0920038c8e0d6a9390bb8e35c5f5 | [
"ADSL"
] | null | null | null | scrape_mars.py | darrenluc93/web-scraping-challenge | 50a9a21161ab0920038c8e0d6a9390bb8e35c5f5 | [
"ADSL"
] | null | null | null | scrape_mars.py | darrenluc93/web-scraping-challenge | 50a9a21161ab0920038c8e0d6a9390bb8e35c5f5 | [
"ADSL"
] | null | null | null | #Import Libraries
#Web Scraping tools
from bs4 import BeautifulSoup as bs
from selenium import webdriver
#from splinter import Browser
#DataFrame tools
import pandas as pd
#Misc tools for web scraping
import time
import requests
#Function to initianilze browser.
def init_browser():
    """Create and return a headless Chrome WebDriver instance."""
    # Run Chrome without a visible window.
    options = webdriver.ChromeOptions()
    options.add_argument('headless')
    # Driver binary path is hard-coded to /usr/local/bin/chromedriver.
    # NOTE(review): `chrome_options` is deprecated in newer Selenium releases
    # in favour of `options=` -- confirm against the pinned selenium version.
    browser = webdriver.Chrome("/usr/local/bin/chromedriver",chrome_options = options)
    # Caller is responsible for quitting the browser when done.
    return browser
def scrapper():
    """Scrape several Mars-related websites and collect the results.

    Each site is scraped inside its own try/except block so a failure on
    one site does not prevent the remaining sites from being scraped.

    Returns:
        dict: scraped values keyed by ``news_title``, ``news_p``,
        ``featured_image_url``, ``mars_weather``, ``html_outputFacts``,
        ``html_outputMarsEarth`` and ``hemisphere_image_urls``.  A key is
        absent when scraping its site failed.
    """
    # Headless Chrome session shared by all of the scraping steps below.
    browser = init_browser()
    # Dictionary to store all the results.
    marsInfo_dict = {}

    # NASA Mars News ---------------------------------------------------------
    try:
        url = "https://mars.nasa.gov/news/?page=0&per_page=40&order=publish_date+desc%2Ccreated_at+desc&search=&year=2020%3Apublish_date&category=19%2C165%2C184%2C204&blank_scope=Latest"
        browser.get(url)
        # Give the page time to load its dynamically rendered elements.
        time.sleep(4)
        html = browser.page_source
        soup = bs(html, "html.parser")
        # Collect the latest news title and teaser paragraph.
        news_title = soup.find_all('li', class_="slide")[0].find(class_="content_title").text
        news_p = soup.find_all('li', class_="slide")[0].text
        marsInfo_dict['news_title'] = news_title
        marsInfo_dict['news_p'] = news_p
    # ``except Exception`` (not a bare ``except``) so KeyboardInterrupt and
    # SystemExit still propagate; same change applied to every section below.
    except Exception:
        print(f"Problem at website {url}")

    # JPL Mars Space Images - featured image ---------------------------------
    try:
        url = "https://www.jpl.nasa.gov/spaceimages/?search=&category=Mars"
        browser.get(url)
        # Open the full-size image overlay before grabbing the page source.
        browser.find_element_by_id("full_image").click()
        time.sleep(4)
        html = browser.page_source
        soup = bs(html, "html.parser")
        featured_image_url = "https://www.jpl.nasa.gov/" + soup.find_all('img', class_="fancybox-image")[0]['src']
        marsInfo_dict['featured_image_url'] = featured_image_url
    except Exception:
        print(f"Problem at website {url}")

    # Mars weather (latest tweet) --------------------------------------------
    try:
        url = "https://twitter.com/marswxreport?lang=en"
        browser.get(url)
        time.sleep(4)
        html = browser.page_source
        soup = bs(html, "html.parser")
        mars_weather = soup.find_all('article', class_="css-1dbjc4n r-1loqt21 r-18u37iz r-1ny4l3l r-o7ynqc r-6416eg")[0].text.strip().replace('Mars Weather@MarsWxReport·19hInSight ','')
        marsInfo_dict['mars_weather'] = mars_weather
    except Exception:
        # Bug fix: the old handler printed ``mars_weather``, which is unbound
        # when the lookup above raised, turning the handler into a NameError.
        print(f"Problem at website {url}")

    # Mars facts tables ------------------------------------------------------
    try:
        url = 'http://space-facts.com/mars/'
        # pandas fetches the page and parses every <table> element.
        tables = pd.read_html(url)
        marsFacts_df = tables[0]
        earthMars_df = tables[1]
        marsFacts_df.columns = ['Facts', 'Values']
        # Render the tables as single-line HTML snippets for embedding.
        html_outputFacts = marsFacts_df.to_html(index = False)
        html_outputFacts = html_outputFacts.replace('\n', '')
        html_outputMarsEarth = earthMars_df.to_html(index = False)
        html_outputMarsEarth = html_outputMarsEarth.replace('\n', '')
        marsInfo_dict['html_outputFacts'] = html_outputFacts
        marsInfo_dict['html_outputMarsEarth'] = html_outputMarsEarth
    except Exception:
        print(f"Problem at website {url}")

    # Hemisphere images ------------------------------------------------------
    try:
        temp_list = []
        url = "https://astrogeology.usgs.gov/search/results?q=hemisphere+enhanced&k1=target&v1=Mars"
        browser.get(url)
        time.sleep(4)
        html = browser.page_source
        # Last use of the browser; close the window before parsing.
        browser.close()
        soup = bs(html, "html.parser")
        links = soup.find_all('div', class_="description")
        for link in links:
            # Follow each result page to find the full-resolution download.
            highDef_url = f"https://astrogeology.usgs.gov{link.find('a')['href']}"
            responseHighDef = requests.get(highDef_url)
            soupHighDef = bs(responseHighDef.text, 'html.parser')
            highDef_url = soupHighDef.find_all("div", class_="downloads")[0].find('a')['href']
            title = link.find('h3').text
            temp_list.append({"title" : title, "img_url" : highDef_url})
        marsInfo_dict['hemisphere_image_urls'] = temp_list
    except Exception:
        print(f"Problem at website {url}")

    return marsInfo_dict
from bs4 import BeautifulSoup as bs
from selenium import webdriver
import pandas as pd
import time
import requests
def init_browser():
    # Build a headless Chrome session (no visible window).
    options = webdriver.ChromeOptions()
    options.add_argument('headless')
    # NOTE(review): driver path is hard-coded — confirm chromedriver lives
    # at /usr/local/bin/chromedriver on the target machine.
    browser = webdriver.Chrome("/usr/local/bin/chromedriver",chrome_options = options)
    return browser
def scrapper():
    """Scrape Mars news, featured image, weather, facts and hemisphere
    image URLs; return them in one dict (a key is absent when its site
    failed to scrape)."""
    browser = init_browser()  # headless Chrome session shared below
    marsInfo_dict = {}  # accumulates every scraped value
    # --- NASA Mars News: latest headline and teaser paragraph ---
    try:
        url = "https://mars.nasa.gov/news/?page=0&per_page=40&order=publish_date+desc%2Ccreated_at+desc&search=&year=2020%3Apublish_date&category=19%2C165%2C184%2C204&blank_scope=Latest"
        browser.get(url)
        time.sleep(4)  # wait for dynamically rendered content
        html = browser.page_source
        soup = bs(html, "html.parser")
        news_title = soup.find_all('li', class_="slide")[0].find(class_="content_title").text
        news_p = soup.find_all('li', class_="slide")[0].text
        marsInfo_dict['news_title'] = news_title
        marsInfo_dict['news_p'] = news_p
    except :
        print(f"Problem at website {url}")
    # --- JPL featured image: open the full-size overlay, grab its src ---
    try:
        url = "https://www.jpl.nasa.gov/spaceimages/?search=&category=Mars"
        browser.get(url)
        browser.find_element_by_id("full_image").click()
        time.sleep(4)
        html = browser.page_source
        soup = bs(html, "html.parser")
        featured_image_url = "https://www.jpl.nasa.gov/" + soup.find_all('img', class_="fancybox-image")[0]['src']
        marsInfo_dict['featured_image_url'] = featured_image_url
    except :
        print(f"Problem at website {url}")
    # --- Mars weather: text of the latest @MarsWxReport tweet ---
    try:
        url = "https://twitter.com/marswxreport?lang=en"
        browser.get(url)
        time.sleep(4)
        html = browser.page_source
        soup = bs(html, "html.parser")
        mars_weather = soup.find_all('article', class_="css-1dbjc4n r-1loqt21 r-18u37iz r-1ny4l3l r-o7ynqc r-6416eg")[0].text.strip().replace('Mars Weather@MarsWxReport·19hInSight ','')
        marsInfo_dict['mars_weather'] = mars_weather
    except :
        # NOTE(review): if the lookup above raised, ``mars_weather`` is
        # unbound here and this print itself raises NameError — fix upstream.
        print(mars_weather)
        print(f"Problem at website {url}")
    # --- Mars facts: parse both <table> elements with pandas ---
    try:
        url = 'http://space-facts.com/mars/'
        tables = pd.read_html(url)
        marsFacts_df = tables[0]
        earthMars_df = tables[1]
        marsFacts_df.columns = ['Facts', 'Values']
        # Flatten each table into a single-line HTML snippet.
        html_outputFacts = marsFacts_df.to_html(index = False)
        html_outputFacts = html_outputFacts.replace('\n', '')
        html_outputMarsEarth = earthMars_df.to_html(index = False)
        html_outputMarsEarth = html_outputMarsEarth.replace('\n', '')
        marsInfo_dict['html_outputFacts'] = html_outputFacts
        marsInfo_dict['html_outputMarsEarth'] = html_outputMarsEarth
    except :
        print(f"Problem at website {url}")
    # --- Hemisphere images: follow each result to its full-res download ---
    try:
        temp_list = []
        url = "https://astrogeology.usgs.gov/search/results?q=hemisphere+enhanced&k1=target&v1=Mars"
        browser.get(url)
        time.sleep(4)
        html = browser.page_source
        browser.close()  # last use of the browser in this function
        soup = bs(html, "html.parser")
        links = soup.find_all('div', class_="description")
        for link in links:
            highDef_url = f"https://astrogeology.usgs.gov{link.find('a')['href']}"
            responseHighDef = requests.get(highDef_url)
            soupHighDef = bs(responseHighDef.text, 'html.parser')
            highDef_url = soupHighDef.find_all("div", class_="downloads")[0].find('a')['href']
            title = link.find('h3').text
            temp_list.append({"title" : title, "img_url" : highDef_url})
        marsInfo_dict['hemisphere_image_urls'] = temp_list
    except :
        print(f"Problem at website {url}")
return marsInfo_dict | true | true |
f72bb0dfab67291261f8d64decce38b078e31dc2 | 1,812 | py | Python | API/client/python-client-generated/test/test_model_flow_chart_node_component_api.py | zhuofusong/machine-fault-diagnosis | 4c35885e3fbb3c552f526019313a8eae9df28905 | [
"MIT"
] | 2 | 2020-04-30T01:06:55.000Z | 2020-06-08T04:11:28.000Z | API/client/python-client-generated/test/test_model_flow_chart_node_component_api.py | zhuofusong/machine-fault-diagnosis | 4c35885e3fbb3c552f526019313a8eae9df28905 | [
"MIT"
] | 5 | 2020-04-13T14:13:53.000Z | 2021-08-24T17:16:30.000Z | API/client/python-client-generated/test/test_model_flow_chart_node_component_api.py | zhuofusong/machine-fault-diagnosis | 4c35885e3fbb3c552f526019313a8eae9df28905 | [
"MIT"
] | null | null | null | # coding: utf-8
"""
Machine fault diagnosis
List of top level server APIs # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from api.model_flow_chart_node_component_api import ModelFlowChartNodeComponentApi # noqa: E501
from swagger_client.rest import ApiException
class TestModelFlowChartNodeComponentApi(unittest.TestCase):
"""ModelFlowChartNodeComponentApi unit test stubs"""
def setUp(self):
self.api = api.model_flow_chart_node_component_api.ModelFlowChartNodeComponentApi() # noqa: E501
def tearDown(self):
pass
def test_model_flow_node_model_flow_id_node_id_component_delete(self):
"""Test case for model_flow_node_model_flow_id_node_id_component_delete
delete a node's components information in a model flow chart # noqa: E501
"""
pass
def test_model_flow_node_model_flow_id_node_id_component_get(self):
"""Test case for model_flow_node_model_flow_id_node_id_component_get
retrieve a node's components information in a model flow chart # noqa: E501
"""
pass
def test_model_flow_node_model_flow_id_node_id_component_post(self):
"""Test case for model_flow_node_model_flow_id_node_id_component_post
create a node's components information in a model flow chart # noqa: E501
"""
pass
def test_model_flow_node_model_flow_id_node_id_component_put(self):
"""Test case for model_flow_node_model_flow_id_node_id_component_put
update a node's components information in a model flow chart # noqa: E501
"""
pass
if __name__ == '__main__':
    # Allow running this generated test module directly.
    unittest.main()
| 29.225806 | 105 | 0.734547 |
from __future__ import absolute_import
import unittest
import swagger_client
from api.model_flow_chart_node_component_api import ModelFlowChartNodeComponentApi
from swagger_client.rest import ApiException
class TestModelFlowChartNodeComponentApi(unittest.TestCase):
def setUp(self):
self.api = api.model_flow_chart_node_component_api.ModelFlowChartNodeComponentApi()
def tearDown(self):
pass
def test_model_flow_node_model_flow_id_node_id_component_delete(self):
pass
def test_model_flow_node_model_flow_id_node_id_component_get(self):
pass
def test_model_flow_node_model_flow_id_node_id_component_post(self):
pass
def test_model_flow_node_model_flow_id_node_id_component_put(self):
pass
if __name__ == '__main__':
    # Allow running this generated test module directly.
    unittest.main()
| true | true |
f72bb188e19f59d4f42c726c98471832b4cf7c08 | 62 | py | Python | run_ai_api.py | datesann0109/D_2117 | 07a94c65c622cf2aa9f2a852f1f28e647a5823bd | [
"MIT"
] | 1 | 2021-10-19T02:43:30.000Z | 2021-10-19T02:43:30.000Z | run_ai_api.py | datesann0109/D_2117 | 07a94c65c622cf2aa9f2a852f1f28e647a5823bd | [
"MIT"
] | 1 | 2021-10-30T04:46:00.000Z | 2021-10-30T04:46:00.000Z | run_ai_api.py | datesann0109/D_2117 | 07a94c65c622cf2aa9f2a852f1f28e647a5823bd | [
"MIT"
] | 2 | 2021-10-30T22:58:39.000Z | 2021-11-01T10:19:45.000Z | from ai.api import main
if __name__ == "__main__":
main() | 15.5 | 26 | 0.66129 | from ai.api import main
if __name__ == "__main__":
main() | true | true |
f72bb1d1d668b0faf7a48161762de174a03e8bff | 154 | py | Python | Cours-3/Programmes-Python/M3-2.py | Naereen/Introduction-au-Numerique-avec-Python-dpt-DEM-2020 | ee935f67970acbb3c8ba0373f57c21826340c3aa | [
"MIT"
] | 1 | 2020-10-07T19:44:29.000Z | 2020-10-07T19:44:29.000Z | Cours-3/Programmes-Python/M3-2.py | Naereen/Introduction-au-Num-rique-avec-Python-dpt-DEM-2020 | ee935f67970acbb3c8ba0373f57c21826340c3aa | [
"MIT"
] | null | null | null | Cours-3/Programmes-Python/M3-2.py | Naereen/Introduction-au-Num-rique-avec-Python-dpt-DEM-2020 | ee935f67970acbb3c8ba0373f57c21826340c3aa | [
"MIT"
] | null | null | null | somme = 0
# Teaching example: sum the integers 1..n with an explicit while loop.
# (``somme`` is initialised to 0 just above this block.)
n = 5  # arbitrary value: upper bound of the sum
i = 1
while i <= n:
    somme = somme + i  # accumulate the running total
    i = i + 1
print("La somme des", n, "premiers entiers est :", somme)
| 19.25 | 57 | 0.558442 | somme = 0
# Sum the integers 1..n with a while loop (``somme`` starts at 0 above).
n = 5  # upper bound of the sum
i = 1
while i <= n:
    somme = somme + i
    i = i + 1
print("La somme des", n, "premiers entiers est :", somme)
| true | true |
f72bb320c80fb2f7343a4f0f36be5c6b322a7a94 | 107,134 | py | Python | bigquery/google/cloud/bigquery/job.py | erikwebb/google-cloud-python | 288a878e9a07239015c78a193eca1cc15e926127 | [
"Apache-2.0"
] | 1 | 2019-01-23T21:54:51.000Z | 2019-01-23T21:54:51.000Z | bigquery/google/cloud/bigquery/job.py | erikwebb/google-cloud-python | 288a878e9a07239015c78a193eca1cc15e926127 | [
"Apache-2.0"
] | null | null | null | bigquery/google/cloud/bigquery/job.py | erikwebb/google-cloud-python | 288a878e9a07239015c78a193eca1cc15e926127 | [
"Apache-2.0"
] | 1 | 2020-11-15T11:44:36.000Z | 2020-11-15T11:44:36.000Z | # Copyright 2015 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Define API Jobs."""
import copy
import threading
from six.moves import http_client
import google.api_core.future.polling
from google.cloud import exceptions
from google.cloud.exceptions import NotFound
from google.cloud.bigquery.dataset import DatasetReference
from google.cloud.bigquery.external_config import ExternalConfig
from google.cloud.bigquery.query import _query_param_from_api_repr
from google.cloud.bigquery.query import ArrayQueryParameter
from google.cloud.bigquery.query import ScalarQueryParameter
from google.cloud.bigquery.query import StructQueryParameter
from google.cloud.bigquery.query import UDFResource
from google.cloud.bigquery.retry import DEFAULT_RETRY
from google.cloud.bigquery.schema import SchemaField
from google.cloud.bigquery.table import _EmptyRowIterator
from google.cloud.bigquery.table import EncryptionConfiguration
from google.cloud.bigquery.table import TableReference
from google.cloud.bigquery.table import Table
from google.cloud.bigquery.table import TimePartitioning
from google.cloud.bigquery import _helpers
_DONE_STATE = "DONE"  # terminal job state reported by the jobs API
_STOPPED_REASON = "stopped"  # error "reason" BigQuery uses for cancelled jobs
# NOTE(review): not referenced in this portion of the module; presumably pads
# user-supplied deadlines elsewhere — confirm before changing.
_TIMEOUT_BUFFER_SECS = 0.1

# Maps a BigQuery error "reason" string to the HTTP status code used to pick
# the google.cloud exception class (see ``_error_result_to_exception``).
_ERROR_REASON_TO_EXCEPTION = {
    "accessDenied": http_client.FORBIDDEN,
    "backendError": http_client.INTERNAL_SERVER_ERROR,
    "billingNotEnabled": http_client.FORBIDDEN,
    "billingTierLimitExceeded": http_client.BAD_REQUEST,
    "blocked": http_client.FORBIDDEN,
    "duplicate": http_client.CONFLICT,
    "internalError": http_client.INTERNAL_SERVER_ERROR,
    "invalid": http_client.BAD_REQUEST,
    "invalidQuery": http_client.BAD_REQUEST,
    "notFound": http_client.NOT_FOUND,
    "notImplemented": http_client.NOT_IMPLEMENTED,
    "quotaExceeded": http_client.FORBIDDEN,
    "rateLimitExceeded": http_client.FORBIDDEN,
    "resourceInUse": http_client.BAD_REQUEST,
    "resourcesExceeded": http_client.BAD_REQUEST,
    "responseTooLarge": http_client.FORBIDDEN,
    "stopped": http_client.OK,
    "tableUnavailable": http_client.BAD_REQUEST,
}
def _error_result_to_exception(error_result):
    """Convert a BigQuery error ``reason`` into a Google Cloud exception.

    The reasons and their matching HTTP status codes are documented on
    the `troubleshooting errors`_ page.

    .. _troubleshooting errors: https://cloud.google.com/bigquery\
        /troubleshooting-errors

    :type error_result: Mapping[str, str]
    :param error_result: The error result from BigQuery.

    :rtype google.cloud.exceptions.GoogleCloudError:
    :returns: The mapped exception.
    """
    # Unrecognized reasons fall back to a 500-style server error.
    status_code = _ERROR_REASON_TO_EXCEPTION.get(
        error_result.get("reason"), http_client.INTERNAL_SERVER_ERROR
    )
    message = error_result.get("message", "")
    return exceptions.from_http_status(status_code, message, errors=[error_result])
class Compression(object):
    """Compression type applied to exported files.

    The default value is :attr:`NONE`.  Only Avro exports support
    :attr:`DEFLATE` and :attr:`SNAPPY`.
    """

    GZIP = "GZIP"
    """GZIP compression."""

    DEFLATE = "DEFLATE"
    """DEFLATE compression (Avro exports only)."""

    SNAPPY = "SNAPPY"
    """SNAPPY compression (Avro exports only)."""

    NONE = "NONE"
    """No compression."""
class CreateDisposition(object):
    """Whether the job is allowed to create new tables.

    The default value is :attr:`CREATE_IF_NEEDED`.  Creation, truncation
    and append actions occur as one atomic update upon job completion.
    """

    CREATE_IF_NEEDED = "CREATE_IF_NEEDED"
    """BigQuery creates the table when it does not already exist."""

    CREATE_NEVER = "CREATE_NEVER"
    """The table must already exist; otherwise the job result carries a
    'notFound' error."""
class DestinationFormat(object):
    """Exported file format.

    The default value is :attr:`CSV`.  Tables containing nested or
    repeated fields cannot be exported as CSV.
    """

    CSV = "CSV"
    """CSV format."""

    NEWLINE_DELIMITED_JSON = "NEWLINE_DELIMITED_JSON"
    """Newline-delimited JSON format."""

    AVRO = "AVRO"
    """Avro format."""
class Encoding(object):
    """Character encoding of the data.

    The default is :attr:`UTF_8`.  BigQuery decodes the data after the
    raw, binary data has been split using the values of the quote and
    fieldDelimiter properties.
    """

    UTF_8 = "UTF-8"
    """UTF-8 encoding."""

    ISO_8859_1 = "ISO-8859-1"
    """ISO-8859-1 (Latin-1) encoding."""
class QueryPriority(object):
    """Priority assigned to a query.

    The default value is :attr:`INTERACTIVE`.
    """

    INTERACTIVE = "INTERACTIVE"
    """Interactive priority."""

    BATCH = "BATCH"
    """Batch priority."""
class SourceFormat(object):
    """Format of the data files to load.

    The default value is :attr:`CSV`.  Note that the set of allowed
    values for loading data is different than the set used for external
    data sources (see
    :class:`~google.cloud.bigquery.external_config.ExternalSourceFormat`).
    """

    CSV = "CSV"
    """CSV format."""

    DATASTORE_BACKUP = "DATASTORE_BACKUP"
    """Datastore backup format."""

    NEWLINE_DELIMITED_JSON = "NEWLINE_DELIMITED_JSON"
    """Newline-delimited JSON format."""

    AVRO = "AVRO"
    """Avro format."""

    PARQUET = "PARQUET"
    """Parquet format."""

    ORC = "ORC"
    """ORC format."""
class WriteDisposition(object):
    """Action taken when the destination table already exists.

    The default value is :attr:`WRITE_APPEND`.  Each action is atomic and
    only occurs if BigQuery is able to complete the job successfully.
    Creation, truncation and append actions occur as one atomic update
    upon job completion.
    """

    WRITE_APPEND = "WRITE_APPEND"
    """Append the data to the existing table."""

    WRITE_TRUNCATE = "WRITE_TRUNCATE"
    """Overwrite the existing table data."""

    WRITE_EMPTY = "WRITE_EMPTY"
    """Report a 'duplicate' error in the job result when the table already
    exists and contains data."""
class SchemaUpdateOption(object):
    """Update applied to the destination table schema as a side effect of
    a load job.
    """

    ALLOW_FIELD_ADDITION = "ALLOW_FIELD_ADDITION"
    """Allow adding a nullable field to the schema."""

    ALLOW_FIELD_RELAXATION = "ALLOW_FIELD_RELAXATION"
    """Allow relaxing a required field in the original schema to nullable."""
class _JobReference(object):
    """A reference to a job.

    Arguments:
        job_id (str): ID of the job to run.
        project (str): ID of the project where the job runs.
        location (str): Location of where the job runs.
    """

    def __init__(self, job_id, project, location):
        props = {"jobId": job_id, "projectId": project}
        # Only store a location when one was actually supplied; the API
        # resource omits the key entirely otherwise.
        if location:
            props["location"] = location
        self._properties = props

    @property
    def job_id(self):
        """str: ID of the job."""
        return self._properties.get("jobId")

    @property
    def project(self):
        """str: ID of the project where the job runs."""
        return self._properties.get("projectId")

    @property
    def location(self):
        """str: Location where the job runs."""
        return self._properties.get("location")

    def _to_api_repr(self):
        """Return the API resource representation of this job reference."""
        return copy.deepcopy(self._properties)

    @classmethod
    def _from_api_repr(cls, resource):
        """Build a job reference from an API resource representation."""
        return cls(
            resource.get("jobId"),
            resource.get("projectId"),
            resource.get("location"),
        )
class _AsyncJob(google.api_core.future.polling.PollingFuture):
"""Base class for asynchronous jobs.
Arguments:
job_id (Union[str, _JobReference]):
Job's ID in the project associated with the client or a
fully-qualified job reference.
client (google.cloud.bigquery.client.Client):
Client which holds credentials and project configuration.
"""
def __init__(self, job_id, client):
super(_AsyncJob, self).__init__()
# The job reference can be either a plain job ID or the full resource.
# Populate the properties dictionary consistently depending on what has
# been passed in.
job_ref = job_id
if not isinstance(job_id, _JobReference):
job_ref = _JobReference(job_id, client.project, None)
self._properties = {"jobReference": job_ref._to_api_repr()}
self._client = client
self._result_set = False
self._completion_lock = threading.Lock()
@property
def job_id(self):
"""str: ID of the job."""
return _helpers._get_sub_prop(self._properties, ["jobReference", "jobId"])
@property
def project(self):
"""Project bound to the job.
:rtype: str
:returns: the project (derived from the client).
"""
return _helpers._get_sub_prop(self._properties, ["jobReference", "projectId"])
@property
def location(self):
"""str: Location where the job runs."""
return _helpers._get_sub_prop(self._properties, ["jobReference", "location"])
def _require_client(self, client):
"""Check client or verify over-ride.
:type client: :class:`~google.cloud.bigquery.client.Client` or
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current dataset.
:rtype: :class:`google.cloud.bigquery.client.Client`
:returns: The client passed in or the currently bound client.
"""
if client is None:
client = self._client
return client
@property
def job_type(self):
"""Type of job
:rtype: str
:returns: one of 'load', 'copy', 'extract', 'query'
"""
return self._JOB_TYPE
@property
def path(self):
"""URL path for the job's APIs.
:rtype: str
:returns: the path based on project and job ID.
"""
return "/projects/%s/jobs/%s" % (self.project, self.job_id)
@property
def labels(self):
"""Dict[str, str]: Labels for the job."""
return self._properties.setdefault("labels", {})
@property
def etag(self):
"""ETag for the job resource.
:rtype: str, or ``NoneType``
:returns: the ETag (None until set from the server).
"""
return self._properties.get("etag")
@property
def self_link(self):
"""URL for the job resource.
:rtype: str, or ``NoneType``
:returns: the URL (None until set from the server).
"""
return self._properties.get("selfLink")
@property
def user_email(self):
"""E-mail address of user who submitted the job.
:rtype: str, or ``NoneType``
:returns: the URL (None until set from the server).
"""
return self._properties.get("user_email")
@property
def created(self):
"""Datetime at which the job was created.
:rtype: ``datetime.datetime``, or ``NoneType``
:returns: the creation time (None until set from the server).
"""
statistics = self._properties.get("statistics")
if statistics is not None:
millis = statistics.get("creationTime")
if millis is not None:
return _helpers._datetime_from_microseconds(millis * 1000.0)
@property
def started(self):
"""Datetime at which the job was started.
:rtype: ``datetime.datetime``, or ``NoneType``
:returns: the start time (None until set from the server).
"""
statistics = self._properties.get("statistics")
if statistics is not None:
millis = statistics.get("startTime")
if millis is not None:
return _helpers._datetime_from_microseconds(millis * 1000.0)
@property
def ended(self):
"""Datetime at which the job finished.
:rtype: ``datetime.datetime``, or ``NoneType``
:returns: the end time (None until set from the server).
"""
statistics = self._properties.get("statistics")
if statistics is not None:
millis = statistics.get("endTime")
if millis is not None:
return _helpers._datetime_from_microseconds(millis * 1000.0)
def _job_statistics(self):
"""Helper for job-type specific statistics-based properties."""
statistics = self._properties.get("statistics", {})
return statistics.get(self._JOB_TYPE, {})
@property
def error_result(self):
"""Error information about the job as a whole.
:rtype: mapping, or ``NoneType``
:returns: the error information (None until set from the server).
"""
status = self._properties.get("status")
if status is not None:
return status.get("errorResult")
@property
def errors(self):
"""Information about individual errors generated by the job.
:rtype: list of mappings, or ``NoneType``
:returns: the error information (None until set from the server).
"""
status = self._properties.get("status")
if status is not None:
return status.get("errors")
@property
def state(self):
"""Status of the job.
:rtype: str, or ``NoneType``
:returns: the state (None until set from the server).
"""
status = self._properties.get("status")
if status is not None:
return status.get("state")
def _scrub_local_properties(self, cleaned):
"""Helper: handle subclass properties in cleaned."""
pass
def _copy_configuration_properties(self, configuration):
"""Helper: assign subclass configuration properties in cleaned."""
raise NotImplementedError("Abstract")
def _set_properties(self, api_response):
"""Update properties from resource in body of ``api_response``
:type api_response: dict
:param api_response: response returned from an API call
"""
cleaned = api_response.copy()
self._scrub_local_properties(cleaned)
statistics = cleaned.get("statistics", {})
if "creationTime" in statistics:
statistics["creationTime"] = float(statistics["creationTime"])
if "startTime" in statistics:
statistics["startTime"] = float(statistics["startTime"])
if "endTime" in statistics:
statistics["endTime"] = float(statistics["endTime"])
self._properties.clear()
self._properties.update(cleaned)
self._copy_configuration_properties(cleaned.get("configuration", {}))
# For Future interface
self._set_future_result()
@classmethod
def _get_resource_config(cls, resource):
"""Helper for :meth:`from_api_repr`
:type resource: dict
:param resource: resource for the job
:rtype: dict
:returns: tuple (string, dict), where the first element is the
job ID and the second contains job-specific configuration.
:raises: :class:`KeyError` if the resource has no identifier, or
is missing the appropriate configuration.
"""
if "jobReference" not in resource or "jobId" not in resource["jobReference"]:
raise KeyError(
"Resource lacks required identity information: "
'["jobReference"]["jobId"]'
)
job_id = resource["jobReference"]["jobId"]
if (
"configuration" not in resource
or cls._JOB_TYPE not in resource["configuration"]
):
raise KeyError(
"Resource lacks required configuration: "
'["configuration"]["%s"]' % cls._JOB_TYPE
)
return job_id, resource["configuration"]
def to_api_repr(self):
"""Generate a resource for the job."""
raise NotImplementedError("Abstract")
_build_resource = to_api_repr # backward-compatibility alias
def _begin(self, client=None, retry=DEFAULT_RETRY):
"""API call: begin the job via a POST request
See
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/insert
:type client: :class:`~google.cloud.bigquery.client.Client` or
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current dataset.
:type retry: :class:`google.api_core.retry.Retry`
:param retry: (Optional) How to retry the RPC.
:raises: :exc:`ValueError` if the job has already begin.
"""
if self.state is not None:
raise ValueError("Job already begun.")
client = self._require_client(client)
path = "/projects/%s/jobs" % (self.project,)
# jobs.insert is idempotent because we ensure that every new
# job has an ID.
api_response = client._call_api(
retry, method="POST", path=path, data=self.to_api_repr()
)
self._set_properties(api_response)
def exists(self, client=None, retry=DEFAULT_RETRY):
"""API call: test for the existence of the job via a GET request
See
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/get
:type client: :class:`~google.cloud.bigquery.client.Client` or
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current dataset.
:type retry: :class:`google.api_core.retry.Retry`
:param retry: (Optional) How to retry the RPC.
:rtype: bool
:returns: Boolean indicating existence of the job.
"""
client = self._require_client(client)
extra_params = {"fields": "id"}
if self.location:
extra_params["location"] = self.location
try:
client._call_api(
retry, method="GET", path=self.path, query_params=extra_params
)
except NotFound:
return False
else:
return True
def reload(self, client=None, retry=DEFAULT_RETRY):
"""API call: refresh job properties via a GET request.
See
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/get
:type client: :class:`~google.cloud.bigquery.client.Client` or
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current dataset.
:type retry: :class:`google.api_core.retry.Retry`
:param retry: (Optional) How to retry the RPC.
"""
client = self._require_client(client)
extra_params = {}
if self.location:
extra_params["location"] = self.location
api_response = client._call_api(
retry, method="GET", path=self.path, query_params=extra_params
)
self._set_properties(api_response)
def cancel(self, client=None):
"""API call: cancel job via a POST request
See
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/cancel
:type client: :class:`~google.cloud.bigquery.client.Client` or
``NoneType``
:param client: the client to use. If not passed, falls back to the
``client`` stored on the current dataset.
:rtype: bool
:returns: Boolean indicating that the cancel request was sent.
"""
client = self._require_client(client)
extra_params = {}
if self.location:
extra_params["location"] = self.location
api_response = client._connection.api_request(
method="POST", path="%s/cancel" % (self.path,), query_params=extra_params
)
self._set_properties(api_response["job"])
# The Future interface requires that we return True if the *attempt*
# to cancel was successful.
return True
# The following methods implement the PollingFuture interface. Note that
# the methods above are from the pre-Future interface and are left for
# compatibility. The only "overloaded" method is :meth:`cancel`, which
# satisfies both interfaces.
def _set_future_result(self):
"""Set the result or exception from the job if it is complete."""
# This must be done in a lock to prevent the polling thread
# and main thread from both executing the completion logic
# at the same time.
with self._completion_lock:
# If the operation isn't complete or if the result has already been
# set, do not call set_result/set_exception again.
# Note: self._result_set is set to True in set_result and
# set_exception, in case those methods are invoked directly.
if self.state != _DONE_STATE or self._result_set:
return
if self.error_result is not None:
exception = _error_result_to_exception(self.error_result)
self.set_exception(exception)
else:
self.set_result(self)
def done(self, retry=DEFAULT_RETRY):
"""Refresh the job and checks if it is complete.
:type retry: :class:`google.api_core.retry.Retry`
:param retry: (Optional) How to retry the RPC.
:rtype: bool
:returns: True if the job is complete, False otherwise.
"""
# Do not refresh is the state is already done, as the job will not
# change once complete.
if self.state != _DONE_STATE:
self.reload(retry=retry)
return self.state == _DONE_STATE
    def result(self, timeout=None, retry=DEFAULT_RETRY):
        """Start the job and wait for it to complete and get the result.

        :type timeout: float
        :param timeout:
            How long (in seconds) to wait for job to complete before raising
            a :class:`concurrent.futures.TimeoutError`.

        :type retry: :class:`google.api_core.retry.Retry`
        :param retry: (Optional) How to retry the RPC.  Only applies to the
            job-creation request; the polling done by the parent class uses
            its own retry policy.

        :rtype: _AsyncJob
        :returns: This instance.

        :raises:
            :class:`~google.cloud.exceptions.GoogleCloudError` if the job
            failed or :class:`concurrent.futures.TimeoutError` if the job did
            not complete in the given timeout.
        """
        # state is None only before the job has been submitted to the API.
        if self.state is None:
            self._begin(retry=retry)
        # TODO: modify PollingFuture so it can pass a retry argument to done().
        return super(_AsyncJob, self).result(timeout=timeout)
def cancelled(self):
"""Check if the job has been cancelled.
This always returns False. It's not possible to check if a job was
cancelled in the API. This method is here to satisfy the interface
for :class:`google.api_core.future.Future`.
:rtype: bool
:returns: False
"""
return (
self.error_result is not None
and self.error_result.get("reason") == _STOPPED_REASON
)
class _JobConfig(object):
"""Abstract base class for job configuration objects.
Arguments:
job_type (str): The key to use for the job configuration.
"""
def __init__(self, job_type, **kwargs):
self._job_type = job_type
self._properties = {job_type: {}}
for prop, val in kwargs.items():
setattr(self, prop, val)
@property
def labels(self):
"""Dict[str, str]: Labels for the job.
This method always returns a dict. To change a job's labels,
modify the dict, then call ``Client.update_job``. To delete a
label, set its value to :data:`None` before updating.
Raises:
ValueError: If ``value`` type is invalid.
"""
return self._properties.setdefault("labels", {})
@labels.setter
def labels(self, value):
if not isinstance(value, dict):
raise ValueError("Pass a dict")
self._properties["labels"] = value
def _get_sub_prop(self, key, default=None):
"""Get a value in the ``self._properties[self._job_type]`` dictionary.
Most job properties are inside the dictionary related to the job type
(e.g. 'copy', 'extract', 'load', 'query'). Use this method to access
those properties::
self._get_sub_prop('destinationTable')
This is equivalent to using the ``_helpers._get_sub_prop`` function::
_helpers._get_sub_prop(
self._properties, ['query', 'destinationTable'])
Arguments:
key (str):
Key for the value to get in the
``self._properties[self._job_type]`` dictionary.
default (object):
(Optional) Default value to return if the key is not found.
Defaults to :data:`None`.
Returns:
object: The value if present or the default.
"""
return _helpers._get_sub_prop(
self._properties, [self._job_type, key], default=default
)
def _set_sub_prop(self, key, value):
"""Set a value in the ``self._properties[self._job_type]`` dictionary.
Most job properties are inside the dictionary related to the job type
(e.g. 'copy', 'extract', 'load', 'query'). Use this method to set
those properties::
self._set_sub_prop('useLegacySql', False)
This is equivalent to using the ``_helper._set_sub_prop`` function::
_helper._set_sub_prop(
self._properties, ['query', 'useLegacySql'], False)
Arguments:
key (str):
Key to set in the ``self._properties[self._job_type]``
dictionary.
value (object): Value to set.
"""
_helpers._set_sub_prop(self._properties, [self._job_type, key], value)
def _del_sub_prop(self, key):
"""Remove ``key`` from the ``self._properties[self._job_type]`` dict.
Most job properties are inside the dictionary related to the job type
(e.g. 'copy', 'extract', 'load', 'query'). Use this method to clear
those properties::
self._del_sub_prop('useLegacySql')
This is equivalent to using the ``_helper._del_sub_prop`` function::
_helper._del_sub_prop(
self._properties, ['query', 'useLegacySql'])
Arguments:
key (str):
Key to remove in the ``self._properties[self._job_type]``
dictionary.
"""
_helpers._del_sub_prop(self._properties, [self._job_type, key])
def to_api_repr(self):
"""Build an API representation of the job config.
:rtype: dict
:returns: A dictionary in the format used by the BigQuery API.
"""
return copy.deepcopy(self._properties)
def _fill_from_default(self, default_job_config):
"""Merge this job config with a default job config.
The keys in this object take precedence over the keys in the default
config. The merge is done at the top-level as well as for keys one
level below the job type.
Arguments:
default_job_config (google.cloud.bigquery.job._JobConfig):
The default job config that will be used to fill in self.
Returns:
google.cloud.bigquery.job._JobConfig A new (merged) job config.
"""
if self._job_type != default_job_config._job_type:
raise TypeError(
"attempted to merge two incompatible job types: "
+ repr(self._job_type)
+ ", "
+ repr(default_job_config._job_type)
)
new_job_config = self.__class__()
default_job_properties = copy.deepcopy(default_job_config._properties)
for key in self._properties:
if key != self._job_type:
default_job_properties[key] = self._properties[key]
default_job_properties[self._job_type].update(self._properties[self._job_type])
new_job_config._properties = default_job_properties
return new_job_config
@classmethod
def from_api_repr(cls, resource):
"""Factory: construct a job configuration given its API representation
:type resource: dict
:param resource:
An extract job configuration in the same representation as is
returned from the API.
:rtype: :class:`google.cloud.bigquery.job._JobConfig`
:returns: Configuration parsed from ``resource``.
"""
config = cls()
config._properties = copy.deepcopy(resource)
return config
class LoadJobConfig(_JobConfig):
    """Configuration options for load jobs.

    All properties in this class are optional.  Values which are
    :data:`None` fall back to server defaults.  Set properties on the
    constructed configuration by using the property name as the name of a
    keyword argument.
    """

    def __init__(self, **kwargs):
        super(LoadJobConfig, self).__init__("load", **kwargs)

    @property
    def allow_jagged_rows(self):
        """bool: Allow missing trailing optional columns (CSV only).

        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.allowJaggedRows
        """
        return self._get_sub_prop("allowJaggedRows")

    @allow_jagged_rows.setter
    def allow_jagged_rows(self, value):
        self._set_sub_prop("allowJaggedRows", value)

    @property
    def allow_quoted_newlines(self):
        """bool: Allow quoted data containing newline characters (CSV only).

        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.allowQuotedNewlines
        """
        return self._get_sub_prop("allowQuotedNewlines")

    @allow_quoted_newlines.setter
    def allow_quoted_newlines(self, value):
        self._set_sub_prop("allowQuotedNewlines", value)

    @property
    def autodetect(self):
        """bool: Automatically infer the schema from a sample of the data.

        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.autodetect
        """
        return self._get_sub_prop("autodetect")

    @autodetect.setter
    def autodetect(self, value):
        self._set_sub_prop("autodetect", value)

    @property
    def clustering_fields(self):
        """Union[List[str], None]: Fields defining clustering for the table
        (Defaults to :data:`None`).

        Clustering fields are immutable after table creation.

        .. note::

           As of 2018-06-29, clustering fields cannot be set on a table
           which does not also have time partioning defined.
        """
        prop = self._get_sub_prop("clustering")
        if prop is not None:
            return list(prop.get("fields", ()))

    @clustering_fields.setter
    def clustering_fields(self, value):
        """Union[List[str], None]: Fields defining clustering for the table
        (Defaults to :data:`None`).
        """
        if value is not None:
            self._set_sub_prop("clustering", {"fields": value})
        else:
            self._del_sub_prop("clustering")

    @property
    def create_disposition(self):
        """google.cloud.bigquery.job.CreateDisposition: Specifies behavior
        for creating tables.

        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.createDisposition
        """
        return self._get_sub_prop("createDisposition")

    @create_disposition.setter
    def create_disposition(self, value):
        self._set_sub_prop("createDisposition", value)

    @property
    def destination_encryption_configuration(self):
        """google.cloud.bigquery.table.EncryptionConfiguration: Custom
        encryption configuration for the destination table.

        Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None`
        if using default encryption.

        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.destinationEncryptionConfiguration
        """
        prop = self._get_sub_prop("destinationEncryptionConfiguration")
        if prop is not None:
            prop = EncryptionConfiguration.from_api_repr(prop)
        return prop

    @destination_encryption_configuration.setter
    def destination_encryption_configuration(self, value):
        api_repr = value
        if value is not None:
            api_repr = value.to_api_repr()
            self._set_sub_prop("destinationEncryptionConfiguration", api_repr)
        else:
            self._del_sub_prop("destinationEncryptionConfiguration")

    @property
    def destination_table_description(self):
        """Union[str, None]: description given to destination table.

        See:
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.destinationTableProperties.description
        """
        prop = self._get_sub_prop("destinationTableProperties")
        if prop is not None:
            # Use .get(): destinationTableProperties may be present with only
            # the friendlyName key set, which previously raised KeyError here.
            return prop.get("description")

    @destination_table_description.setter
    def destination_table_description(self, value):
        keys = [self._job_type, "destinationTableProperties", "description"]
        if value is not None:
            _helpers._set_sub_prop(self._properties, keys, value)
        else:
            _helpers._del_sub_prop(self._properties, keys)

    @property
    def destination_table_friendly_name(self):
        """Union[str, None]: name given to destination table.

        See:
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.destinationTableProperties.friendlyName
        """
        prop = self._get_sub_prop("destinationTableProperties")
        if prop is not None:
            # Use .get(): destinationTableProperties may be present with only
            # the description key set, which previously raised KeyError here.
            return prop.get("friendlyName")

    @destination_table_friendly_name.setter
    def destination_table_friendly_name(self, value):
        keys = [self._job_type, "destinationTableProperties", "friendlyName"]
        if value is not None:
            _helpers._set_sub_prop(self._properties, keys, value)
        else:
            _helpers._del_sub_prop(self._properties, keys)

    @property
    def encoding(self):
        """google.cloud.bigquery.job.Encoding: The character encoding of the
        data.

        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.encoding
        """
        return self._get_sub_prop("encoding")

    @encoding.setter
    def encoding(self, value):
        self._set_sub_prop("encoding", value)

    @property
    def field_delimiter(self):
        """str: The separator for fields in a CSV file.

        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.fieldDelimiter
        """
        return self._get_sub_prop("fieldDelimiter")

    @field_delimiter.setter
    def field_delimiter(self, value):
        self._set_sub_prop("fieldDelimiter", value)

    @property
    def ignore_unknown_values(self):
        """bool: Ignore extra values not represented in the table schema.

        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.ignoreUnknownValues
        """
        return self._get_sub_prop("ignoreUnknownValues")

    @ignore_unknown_values.setter
    def ignore_unknown_values(self, value):
        self._set_sub_prop("ignoreUnknownValues", value)

    @property
    def max_bad_records(self):
        """int: Number of invalid rows to ignore.

        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.maxBadRecords
        """
        return _helpers._int_or_none(self._get_sub_prop("maxBadRecords"))

    @max_bad_records.setter
    def max_bad_records(self, value):
        self._set_sub_prop("maxBadRecords", value)

    @property
    def null_marker(self):
        """str: Represents a null value (CSV only).

        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.nullMarker
        """
        return self._get_sub_prop("nullMarker")

    @null_marker.setter
    def null_marker(self, value):
        self._set_sub_prop("nullMarker", value)

    @property
    def quote_character(self):
        """str: Character used to quote data sections (CSV only).

        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.quote
        """
        return self._get_sub_prop("quote")

    @quote_character.setter
    def quote_character(self, value):
        self._set_sub_prop("quote", value)

    @property
    def schema(self):
        """List[google.cloud.bigquery.schema.SchemaField]: Schema of the
        destination table.

        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.schema
        """
        schema = _helpers._get_sub_prop(self._properties, ["load", "schema", "fields"])
        if schema is None:
            return
        return [SchemaField.from_api_repr(field) for field in schema]

    @schema.setter
    def schema(self, value):
        if not all(hasattr(field, "to_api_repr") for field in value):
            raise ValueError("Schema items must be fields")
        _helpers._set_sub_prop(
            self._properties,
            ["load", "schema", "fields"],
            [field.to_api_repr() for field in value],
        )

    @property
    def schema_update_options(self):
        """List[google.cloud.bigquery.job.SchemaUpdateOption]: Specifies
        updates to the destination table schema to allow as a side effect of
        the load job.
        """
        return self._get_sub_prop("schemaUpdateOptions")

    @schema_update_options.setter
    def schema_update_options(self, values):
        self._set_sub_prop("schemaUpdateOptions", values)

    @property
    def skip_leading_rows(self):
        """int: Number of rows to skip when reading data (CSV only).

        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.skipLeadingRows
        """
        return _helpers._int_or_none(self._get_sub_prop("skipLeadingRows"))

    @skip_leading_rows.setter
    def skip_leading_rows(self, value):
        # The API expects this field as a string.
        self._set_sub_prop("skipLeadingRows", str(value))

    @property
    def source_format(self):
        """google.cloud.bigquery.job.SourceFormat: File format of the data.

        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.sourceFormat
        """
        return self._get_sub_prop("sourceFormat")

    @source_format.setter
    def source_format(self, value):
        self._set_sub_prop("sourceFormat", value)

    @property
    def time_partitioning(self):
        """google.cloud.bigquery.table.TimePartitioning: Specifies time-based
        partitioning for the destination table.
        """
        prop = self._get_sub_prop("timePartitioning")
        if prop is not None:
            prop = TimePartitioning.from_api_repr(prop)
        return prop

    @time_partitioning.setter
    def time_partitioning(self, value):
        api_repr = value
        if value is not None:
            api_repr = value.to_api_repr()
            self._set_sub_prop("timePartitioning", api_repr)
        else:
            self._del_sub_prop("timePartitioning")

    @property
    def use_avro_logical_types(self):
        """bool: For loads of Avro data, governs whether Avro logical types are
        converted to their corresponding BigQuery types (e.g. TIMESTAMP) rather
        than raw types (e.g. INTEGER).
        """
        return self._get_sub_prop("useAvroLogicalTypes")

    @use_avro_logical_types.setter
    def use_avro_logical_types(self, value):
        self._set_sub_prop("useAvroLogicalTypes", bool(value))

    @property
    def write_disposition(self):
        """google.cloud.bigquery.job.WriteDisposition: Action that occurs if
        the destination table already exists.

        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.writeDisposition
        """
        return self._get_sub_prop("writeDisposition")

    @write_disposition.setter
    def write_disposition(self, value):
        self._set_sub_prop("writeDisposition", value)
class LoadJob(_AsyncJob):
    """Asynchronous job for loading data into a table.

    Can load from Google Cloud Storage URIs or from a file.

    :type job_id: str
    :param job_id: the job's ID

    :type source_uris: sequence of string or ``NoneType``
    :param source_uris:
        URIs of one or more data files to be loaded.  See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.sourceUris
        for supported URI formats. Pass None for jobs that load from a file.

    :type destination: :class:`google.cloud.bigquery.table.TableReference`
    :param destination: reference to table into which data is to be loaded.

    :type client: :class:`google.cloud.bigquery.client.Client`
    :param client: A client which holds credentials and project configuration
                   for the dataset (which requires a project).
    """

    _JOB_TYPE = "load"

    def __init__(self, job_id, source_uris, destination, client, job_config=None):
        super(LoadJob, self).__init__(job_id, client)

        if job_config is None:
            job_config = LoadJobConfig()

        self.source_uris = source_uris
        self.destination = destination
        self._configuration = job_config

    @property
    def allow_jagged_rows(self):
        """See
        :attr:`google.cloud.bigquery.job.LoadJobConfig.allow_jagged_rows`.
        """
        return self._configuration.allow_jagged_rows

    @property
    def allow_quoted_newlines(self):
        """See
        :attr:`google.cloud.bigquery.job.LoadJobConfig.allow_quoted_newlines`.
        """
        return self._configuration.allow_quoted_newlines

    @property
    def autodetect(self):
        """See
        :attr:`google.cloud.bigquery.job.LoadJobConfig.autodetect`.
        """
        return self._configuration.autodetect

    @property
    def create_disposition(self):
        """See
        :attr:`google.cloud.bigquery.job.LoadJobConfig.create_disposition`.
        """
        return self._configuration.create_disposition

    @property
    def encoding(self):
        """See
        :attr:`google.cloud.bigquery.job.LoadJobConfig.encoding`.
        """
        return self._configuration.encoding

    @property
    def field_delimiter(self):
        """See
        :attr:`google.cloud.bigquery.job.LoadJobConfig.field_delimiter`.
        """
        return self._configuration.field_delimiter

    @property
    def ignore_unknown_values(self):
        """See
        :attr:`google.cloud.bigquery.job.LoadJobConfig.ignore_unknown_values`.
        """
        return self._configuration.ignore_unknown_values

    @property
    def max_bad_records(self):
        """See
        :attr:`google.cloud.bigquery.job.LoadJobConfig.max_bad_records`.
        """
        return self._configuration.max_bad_records

    @property
    def null_marker(self):
        """See
        :attr:`google.cloud.bigquery.job.LoadJobConfig.null_marker`.
        """
        return self._configuration.null_marker

    @property
    def quote_character(self):
        """See
        :attr:`google.cloud.bigquery.job.LoadJobConfig.quote_character`.
        """
        return self._configuration.quote_character

    @property
    def skip_leading_rows(self):
        """See
        :attr:`google.cloud.bigquery.job.LoadJobConfig.skip_leading_rows`.
        """
        return self._configuration.skip_leading_rows

    @property
    def source_format(self):
        """See
        :attr:`google.cloud.bigquery.job.LoadJobConfig.source_format`.
        """
        return self._configuration.source_format

    @property
    def write_disposition(self):
        """See
        :attr:`google.cloud.bigquery.job.LoadJobConfig.write_disposition`.
        """
        return self._configuration.write_disposition

    @property
    def schema(self):
        """See
        :attr:`google.cloud.bigquery.job.LoadJobConfig.schema`.
        """
        return self._configuration.schema

    @property
    def destination_encryption_configuration(self):
        """google.cloud.bigquery.table.EncryptionConfiguration: Custom
        encryption configuration for the destination table.

        Custom encryption configuration (e.g., Cloud KMS keys)
        or :data:`None` if using default encryption.

        See
        :attr:`google.cloud.bigquery.job.LoadJobConfig.destination_encryption_configuration`.
        """
        return self._configuration.destination_encryption_configuration

    @property
    def time_partitioning(self):
        """See
        :attr:`google.cloud.bigquery.job.LoadJobConfig.time_partitioning`.
        """
        return self._configuration.time_partitioning

    @property
    def use_avro_logical_types(self):
        """See
        :attr:`google.cloud.bigquery.job.LoadJobConfig.use_avro_logical_types`.
        """
        return self._configuration.use_avro_logical_types

    @property
    def clustering_fields(self):
        """See
        :attr:`google.cloud.bigquery.job.LoadJobConfig.clustering_fields`.
        """
        return self._configuration.clustering_fields

    @property
    def schema_update_options(self):
        """See
        :attr:`google.cloud.bigquery.job.LoadJobConfig.schema_update_options`.
        """
        return self._configuration.schema_update_options

    def _load_statistic(self, name):
        """Helper: read an integer counter from ``statistics.load``.

        :type name: str
        :param name: key within the ``statistics.load`` resource.

        :rtype: int, or ``NoneType``
        :returns: the count (None until set from the server).
        """
        return _helpers._int_or_none(
            _helpers._get_sub_prop(self._properties, ["statistics", "load", name])
        )

    @property
    def input_file_bytes(self):
        """Count of bytes loaded from source files.

        :rtype: int, or ``NoneType``
        :returns: the count (None until set from the server).
        :raises: ValueError for invalid value types.
        """
        return self._load_statistic("inputFileBytes")

    @property
    def input_files(self):
        """Count of source files.

        :rtype: int, or ``NoneType``
        :returns: the count (None until set from the server).
        """
        return self._load_statistic("inputFiles")

    @property
    def output_bytes(self):
        """Count of bytes saved to destination table.

        :rtype: int, or ``NoneType``
        :returns: the count (None until set from the server).
        """
        return self._load_statistic("outputBytes")

    @property
    def output_rows(self):
        """Count of rows saved to destination table.

        :rtype: int, or ``NoneType``
        :returns: the count (None until set from the server).
        """
        return self._load_statistic("outputRows")

    def to_api_repr(self):
        """Generate a resource for :meth:`_begin`."""
        configuration = self._configuration.to_api_repr()
        if self.source_uris is not None:
            _helpers._set_sub_prop(
                configuration, ["load", "sourceUris"], self.source_uris
            )
        _helpers._set_sub_prop(
            configuration, ["load", "destinationTable"], self.destination.to_api_repr()
        )

        return {
            "jobReference": self._properties["jobReference"],
            "configuration": configuration,
        }

    def _copy_configuration_properties(self, configuration):
        """Helper: assign subclass configuration properties in cleaned."""
        self._configuration._properties = copy.deepcopy(configuration)

    @classmethod
    def from_api_repr(cls, resource, client):
        """Factory: construct a job given its API representation

        .. note:

           This method assumes that the project found in the resource matches
           the client's project.

        :type resource: dict
        :param resource: dataset job representation returned from the API

        :type client: :class:`google.cloud.bigquery.client.Client`
        :param client: Client which holds credentials and project
                       configuration for the dataset.

        :rtype: :class:`google.cloud.bigquery.job.LoadJob`
        :returns: Job parsed from ``resource``.
        """
        config_resource = resource.get("configuration", {})
        config = LoadJobConfig.from_api_repr(config_resource)
        # A load job requires a destination table.
        dest_config = config_resource["load"]["destinationTable"]
        ds_ref = DatasetReference(dest_config["projectId"], dest_config["datasetId"])
        destination = TableReference(ds_ref, dest_config["tableId"])
        # sourceUris will be absent if this is a file upload.
        source_uris = _helpers._get_sub_prop(config_resource, ["load", "sourceUris"])
        job_ref = _JobReference._from_api_repr(resource["jobReference"])
        job = cls(job_ref, source_uris, destination, client, config)
        job._set_properties(resource)
        return job
class CopyJobConfig(_JobConfig):
    """Configuration options for copy jobs.

    All properties in this class are optional.  Values which are
    :data:`None` fall back to server defaults.  Set properties on the
    constructed configuration by using the property name as the name of a
    keyword argument.
    """

    def __init__(self, **kwargs):
        super(CopyJobConfig, self).__init__("copy", **kwargs)

    @property
    def create_disposition(self):
        """google.cloud.bigquery.job.CreateDisposition: Specifies behavior
        for creating tables.

        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.copy.createDisposition
        """
        return self._get_sub_prop("createDisposition")

    @create_disposition.setter
    def create_disposition(self, value):
        self._set_sub_prop("createDisposition", value)

    @property
    def write_disposition(self):
        """google.cloud.bigquery.job.WriteDisposition: Action that occurs if
        the destination table already exists.

        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.copy.writeDisposition
        """
        return self._get_sub_prop("writeDisposition")

    @write_disposition.setter
    def write_disposition(self, value):
        self._set_sub_prop("writeDisposition", value)

    @property
    def destination_encryption_configuration(self):
        """google.cloud.bigquery.table.EncryptionConfiguration: Custom
        encryption configuration for the destination table.

        Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None`
        if using default encryption.

        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.copy.destinationEncryptionConfiguration
        """
        prop = self._get_sub_prop("destinationEncryptionConfiguration")
        if prop is not None:
            prop = EncryptionConfiguration.from_api_repr(prop)
        return prop

    @destination_encryption_configuration.setter
    def destination_encryption_configuration(self, value):
        api_repr = value
        if value is not None:
            api_repr = value.to_api_repr()
            self._set_sub_prop("destinationEncryptionConfiguration", api_repr)
        else:
            # Consistent with LoadJobConfig: clearing the value removes the
            # key entirely rather than storing an explicit null in the
            # request payload.
            self._del_sub_prop("destinationEncryptionConfiguration")
class CopyJob(_AsyncJob):
    """Asynchronous job: copy data into a table from other tables.

    :type job_id: str
    :param job_id: the job's ID, within the project belonging to ``client``.

    :type sources: list of :class:`google.cloud.bigquery.table.TableReference`
    :param sources: Table from which data is to be loaded.

    :type destination: :class:`google.cloud.bigquery.table.TableReference`
    :param destination: Table into which data is to be loaded.

    :type client: :class:`google.cloud.bigquery.client.Client`
    :param client: A client which holds credentials and project configuration
                   for the dataset (which requires a project).

    :type job_config: :class:`~google.cloud.bigquery.job.CopyJobConfig`
    :param job_config:
        (Optional) Extra configuration options for the copy job.
    """

    _JOB_TYPE = "copy"

    def __init__(self, job_id, sources, destination, client, job_config=None):
        super(CopyJob, self).__init__(job_id, client)
        self.sources = sources
        self.destination = destination
        # Fall back to an all-defaults configuration when none is supplied.
        self._configuration = (
            CopyJobConfig() if job_config is None else job_config
        )

    @property
    def create_disposition(self):
        """See
        :attr:`google.cloud.bigquery.job.CopyJobConfig.create_disposition`.
        """
        return self._configuration.create_disposition

    @property
    def write_disposition(self):
        """See
        :attr:`google.cloud.bigquery.job.CopyJobConfig.write_disposition`.
        """
        return self._configuration.write_disposition

    @property
    def destination_encryption_configuration(self):
        """google.cloud.bigquery.table.EncryptionConfiguration: Custom
        encryption configuration for the destination table (e.g., Cloud KMS
        keys), or :data:`None` when default encryption is used.

        See
        :attr:`google.cloud.bigquery.job.CopyJobConfig.destination_encryption_configuration`.
        """
        return self._configuration.destination_encryption_configuration

    def to_api_repr(self):
        """Generate a resource for :meth:`_begin`."""

        def _table_ref(table):
            # Minimal JSON reference for one table.
            return {
                "projectId": table.project,
                "datasetId": table.dataset_id,
                "tableId": table.table_id,
            }

        configuration = self._configuration.to_api_repr()
        _helpers._set_sub_prop(
            configuration,
            ["copy", "sourceTables"],
            [_table_ref(table) for table in self.sources],
        )
        _helpers._set_sub_prop(
            configuration, ["copy", "destinationTable"], _table_ref(self.destination)
        )

        return {
            "jobReference": self._properties["jobReference"],
            "configuration": configuration,
        }

    def _copy_configuration_properties(self, configuration):
        """Helper: assign subclass configuration properties in cleaned."""
        self._configuration._properties = copy.deepcopy(configuration)

    @classmethod
    def from_api_repr(cls, resource, client):
        """Factory: construct a job given its API representation

        .. note:

           This method assumes that the project found in the resource matches
           the client's project.

        :type resource: dict
        :param resource: dataset job representation returned from the API

        :type client: :class:`google.cloud.bigquery.client.Client`
        :param client: Client which holds credentials and project
                       configuration for the dataset.

        :rtype: :class:`google.cloud.bigquery.job.CopyJob`
        :returns: Job parsed from ``resource``.
        """
        job_id, config_resource = cls._get_resource_config(resource)
        config = CopyJobConfig.from_api_repr(config_resource)
        # Copy required fields to the job.
        copy_resource = config_resource["copy"]
        destination = TableReference.from_api_repr(copy_resource["destinationTable"])
        source_configs = copy_resource.get("sourceTables")
        if source_configs is None:
            # Older resources may carry a single 'sourceTable' instead.
            single = copy_resource.get("sourceTable")
            if single is None:
                raise KeyError("Resource missing 'sourceTables' / 'sourceTable'")
            source_configs = [single]
        sources = [
            TableReference.from_api_repr(source_config)
            for source_config in source_configs
        ]
        job = cls(job_id, sources, destination, client=client, job_config=config)
        job._set_properties(resource)
        return job
class ExtractJobConfig(_JobConfig):
    """Configuration options for extract jobs.

    All properties in this class are optional.  A property left as
    :data:`None` falls back to the server-side default.  Properties may be
    supplied as keyword arguments to the constructor.
    """

    def __init__(self, **kwargs):
        super(ExtractJobConfig, self).__init__("extract", **kwargs)

    @property
    def compression(self):
        """google.cloud.bigquery.job.Compression: Compression applied to the
        exported files.

        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.extract.compression
        """
        return self._get_sub_prop("compression")

    @compression.setter
    def compression(self, value):
        self._set_sub_prop("compression", value)

    @property
    def destination_format(self):
        """google.cloud.bigquery.job.DestinationFormat: File format of the
        export.

        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.extract.destinationFormat
        """
        return self._get_sub_prop("destinationFormat")

    @destination_format.setter
    def destination_format(self, value):
        self._set_sub_prop("destinationFormat", value)

    @property
    def field_delimiter(self):
        """str: Separator placed between fields in the exported data.

        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.extract.fieldDelimiter
        """
        return self._get_sub_prop("fieldDelimiter")

    @field_delimiter.setter
    def field_delimiter(self, value):
        self._set_sub_prop("fieldDelimiter", value)

    @property
    def print_header(self):
        """bool: Whether to emit a header row in the exported data.

        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.extract.printHeader
        """
        return self._get_sub_prop("printHeader")

    @print_header.setter
    def print_header(self, value):
        self._set_sub_prop("printHeader", value)
class ExtractJob(_AsyncJob):
"""Asynchronous job: extract data from a table into Cloud Storage.
:type job_id: str
:param job_id: the job's ID
:type source: :class:`google.cloud.bigquery.table.TableReference`
:param source: Table into which data is to be loaded.
:type destination_uris: list of string
:param destination_uris:
URIs describing where the extracted data will be written in Cloud
Storage, using the format ``gs://<bucket_name>/<object_name_or_glob>``.
:type client: :class:`google.cloud.bigquery.client.Client`
:param client:
A client which holds credentials and project configuration.
:type job_config: :class:`~google.cloud.bigquery.job.ExtractJobConfig`
:param job_config:
(Optional) Extra configuration options for the extract job.
"""
_JOB_TYPE = "extract"
def __init__(self, job_id, source, destination_uris, client, job_config=None):
super(ExtractJob, self).__init__(job_id, client)
if job_config is None:
job_config = ExtractJobConfig()
self.source = source
self.destination_uris = destination_uris
self._configuration = job_config
    @property
    def compression(self):
        """Compression applied to the exported files; read-only view of the
        job configuration.

        See
        :attr:`google.cloud.bigquery.job.ExtractJobConfig.compression`.
        """
        return self._configuration.compression
    @property
    def destination_format(self):
        """File format of the export; read-only view of the job
        configuration.

        See
        :attr:`google.cloud.bigquery.job.ExtractJobConfig.destination_format`.
        """
        return self._configuration.destination_format
    @property
    def field_delimiter(self):
        """Separator between fields in the exported data; read-only view of
        the job configuration.

        See
        :attr:`google.cloud.bigquery.job.ExtractJobConfig.field_delimiter`.
        """
        return self._configuration.field_delimiter
@property
def print_header(self):
"""See
:attr:`google.cloud.bigquery.job.ExtractJobConfig.print_header`.
"""
return self._configuration.print_header
@property
def destination_uri_file_counts(self):
"""Return file counts from job statistics, if present.
See:
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.extract.destinationUriFileCounts
Returns:
a list of integer counts, each representing the number of files
per destination URI or URI pattern specified in the extract
configuration. These values will be in the same order as the URIs
specified in the 'destinationUris' field. Returns None if job is
not yet complete.
"""
counts = self._job_statistics().get("destinationUriFileCounts")
if counts is not None:
return [int(count) for count in counts]
return None
def to_api_repr(self):
"""Generate a resource for :meth:`_begin`."""
source_ref = {
"projectId": self.source.project,
"datasetId": self.source.dataset_id,
"tableId": self.source.table_id,
}
configuration = self._configuration.to_api_repr()
_helpers._set_sub_prop(configuration, ["extract", "sourceTable"], source_ref)
_helpers._set_sub_prop(
configuration, ["extract", "destinationUris"], self.destination_uris
)
return {
"jobReference": self._properties["jobReference"],
"configuration": configuration,
}
def _copy_configuration_properties(self, configuration):
"""Helper: assign subclass configuration properties in cleaned."""
self._configuration._properties = copy.deepcopy(configuration)
@classmethod
def from_api_repr(cls, resource, client):
"""Factory: construct a job given its API representation
.. note:
This method assumes that the project found in the resource matches
the client's project.
:type resource: dict
:param resource: dataset job representation returned from the API
:type client: :class:`google.cloud.bigquery.client.Client`
:param client: Client which holds credentials and project
configuration for the dataset.
:rtype: :class:`google.cloud.bigquery.job.ExtractJob`
:returns: Job parsed from ``resource``.
"""
job_id, config_resource = cls._get_resource_config(resource)
config = ExtractJobConfig.from_api_repr(config_resource)
source_config = _helpers._get_sub_prop(
config_resource, ["extract", "sourceTable"]
)
dataset = DatasetReference(
source_config["projectId"], source_config["datasetId"]
)
source = dataset.table(source_config["tableId"])
destination_uris = _helpers._get_sub_prop(
config_resource, ["extract", "destinationUris"]
)
job = cls(job_id, source, destination_uris, client=client, job_config=config)
job._set_properties(resource)
return job
def _from_api_repr_query_parameters(resource):
return [_query_param_from_api_repr(mapping) for mapping in resource]
def _to_api_repr_query_parameters(value):
return [query_parameter.to_api_repr() for query_parameter in value]
def _from_api_repr_udf_resources(resource):
udf_resources = []
for udf_mapping in resource:
for udf_type, udf_value in udf_mapping.items():
udf_resources.append(UDFResource(udf_type, udf_value))
return udf_resources
def _to_api_repr_udf_resources(value):
return [{udf_resource.udf_type: udf_resource.value} for udf_resource in value]
def _from_api_repr_table_defs(resource):
return {k: ExternalConfig.from_api_repr(v) for k, v in resource.items()}
def _to_api_repr_table_defs(value):
return {k: ExternalConfig.to_api_repr(v) for k, v in value.items()}
class QueryJobConfig(_JobConfig):
    """Configuration options for query jobs.
    All properties in this class are optional. Values which are :data:`None` ->
    server defaults. Set properties on the constructed configuration by using
    the property name as the name of a keyword argument.
    """
    def __init__(self, **kwargs):
        super(QueryJobConfig, self).__init__("query", **kwargs)
    @property
    def destination_encryption_configuration(self):
        """google.cloud.bigquery.table.EncryptionConfiguration: Custom
        encryption configuration for the destination table.
        Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None`
        if using default encryption.
        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.destinationEncryptionConfiguration
        """
        prop = self._get_sub_prop("destinationEncryptionConfiguration")
        if prop is not None:
            prop = EncryptionConfiguration.from_api_repr(prop)
        return prop
    @destination_encryption_configuration.setter
    def destination_encryption_configuration(self, value):
        api_repr = value
        if value is not None:
            api_repr = value.to_api_repr()
        self._set_sub_prop("destinationEncryptionConfiguration", api_repr)
    @property
    def allow_large_results(self):
        """bool: Allow large query results tables (legacy SQL, only)
        See
        https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.allowLargeResults
        """
        return self._get_sub_prop("allowLargeResults")
    @allow_large_results.setter
    def allow_large_results(self, value):
        self._set_sub_prop("allowLargeResults", value)
    @property
    def create_disposition(self):
        """google.cloud.bigquery.job.CreateDisposition: Specifies behavior
        for creating tables.
        See
        https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.createDisposition
        """
        return self._get_sub_prop("createDisposition")
    @create_disposition.setter
    def create_disposition(self, value):
        self._set_sub_prop("createDisposition", value)
    @property
    def default_dataset(self):
        """google.cloud.bigquery.dataset.DatasetReference: the default dataset
        to use for unqualified table names in the query or :data:`None` if not
        set.
        See
        https://g.co/cloud/bigquery/docs/reference/v2/jobs#configuration.query.defaultDataset
        """
        prop = self._get_sub_prop("defaultDataset")
        if prop is not None:
            prop = DatasetReference.from_api_repr(prop)
        return prop
    @default_dataset.setter
    def default_dataset(self, value):
        resource = None
        if value is not None:
            resource = value.to_api_repr()
        self._set_sub_prop("defaultDataset", resource)
    @property
    def destination(self):
        """google.cloud.bigquery.table.TableReference: table where results are
        written or :data:`None` if not set.
        See
        https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.destinationTable
        """
        prop = self._get_sub_prop("destinationTable")
        if prop is not None:
            prop = TableReference.from_api_repr(prop)
        return prop
    @destination.setter
    def destination(self, value):
        resource = None
        if value is not None:
            resource = value.to_api_repr()
        self._set_sub_prop("destinationTable", resource)
    @property
    def dry_run(self):
        """bool: :data:`True` if this query should be a dry run to estimate
        costs.
        See
        https://g.co/cloud/bigquery/docs/reference/v2/jobs#configuration.dryRun
        """
        # Note: dryRun lives on the top-level configuration, not under
        # the "query" sub-property like the other options in this class.
        return self._properties.get("dryRun")
    @dry_run.setter
    def dry_run(self, value):
        self._properties["dryRun"] = value
    @property
    def flatten_results(self):
        """bool: Flatten nested/repeated fields in results. (Legacy SQL only)
        See
        https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.flattenResults
        """
        return self._get_sub_prop("flattenResults")
    @flatten_results.setter
    def flatten_results(self, value):
        self._set_sub_prop("flattenResults", value)
    @property
    def maximum_billing_tier(self):
        """int: Deprecated. Changes the billing tier to allow high-compute
        queries.
        See
        https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.maximumBillingTier
        """
        return self._get_sub_prop("maximumBillingTier")
    @maximum_billing_tier.setter
    def maximum_billing_tier(self, value):
        self._set_sub_prop("maximumBillingTier", value)
    @property
    def maximum_bytes_billed(self):
        """int: Maximum bytes to be billed for this job or :data:`None` if not set.
        See
        https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.maximumBytesBilled
        """
        return _helpers._int_or_none(self._get_sub_prop("maximumBytesBilled"))
    @maximum_bytes_billed.setter
    def maximum_bytes_billed(self, value):
        # The API expects the limit serialized as a string. Preserve ``None``
        # (server default) rather than storing the literal string "None".
        self._set_sub_prop(
            "maximumBytesBilled", str(value) if value is not None else None
        )
    @property
    def priority(self):
        """google.cloud.bigquery.job.QueryPriority: Priority of the query.
        See
        https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.priority
        """
        return self._get_sub_prop("priority")
    @priority.setter
    def priority(self, value):
        self._set_sub_prop("priority", value)
    @property
    def query_parameters(self):
        """List[Union[google.cloud.bigquery.query.ArrayQueryParameter, \
        google.cloud.bigquery.query.ScalarQueryParameter, \
        google.cloud.bigquery.query.StructQueryParameter]]: list of parameters
        for parameterized query (empty by default)
        See:
        https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.queryParameters
        """
        prop = self._get_sub_prop("queryParameters", default=[])
        return _from_api_repr_query_parameters(prop)
    @query_parameters.setter
    def query_parameters(self, values):
        self._set_sub_prop("queryParameters", _to_api_repr_query_parameters(values))
    @property
    def udf_resources(self):
        """List[google.cloud.bigquery.query.UDFResource]: user
        defined function resources (empty by default)
        See:
        https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.userDefinedFunctionResources
        """
        prop = self._get_sub_prop("userDefinedFunctionResources", default=[])
        return _from_api_repr_udf_resources(prop)
    @udf_resources.setter
    def udf_resources(self, values):
        self._set_sub_prop(
            "userDefinedFunctionResources", _to_api_repr_udf_resources(values)
        )
    @property
    def use_legacy_sql(self):
        """bool: Use legacy SQL syntax.
        See
        https://g.co/cloud/bigquery/docs/reference/v2/jobs#configuration.query.useLegacySql
        """
        return self._get_sub_prop("useLegacySql")
    @use_legacy_sql.setter
    def use_legacy_sql(self, value):
        self._set_sub_prop("useLegacySql", value)
    @property
    def use_query_cache(self):
        """bool: Look for the query result in the cache.
        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.useQueryCache
        """
        return self._get_sub_prop("useQueryCache")
    @use_query_cache.setter
    def use_query_cache(self, value):
        self._set_sub_prop("useQueryCache", value)
    @property
    def write_disposition(self):
        """google.cloud.bigquery.job.WriteDisposition: Action that occurs if
        the destination table already exists.
        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.writeDisposition
        """
        return self._get_sub_prop("writeDisposition")
    @write_disposition.setter
    def write_disposition(self, value):
        self._set_sub_prop("writeDisposition", value)
    @property
    def table_definitions(self):
        """Dict[str, google.cloud.bigquery.external_config.ExternalConfig]:
        Definitions for external tables or :data:`None` if not set.
        See
        https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions
        """
        prop = self._get_sub_prop("tableDefinitions")
        if prop is not None:
            prop = _from_api_repr_table_defs(prop)
        return prop
    @table_definitions.setter
    def table_definitions(self, values):
        self._set_sub_prop("tableDefinitions", _to_api_repr_table_defs(values))
    @property
    def time_partitioning(self):
        """google.cloud.bigquery.table.TimePartitioning: Specifies time-based
        partitioning for the destination table.
        """
        prop = self._get_sub_prop("timePartitioning")
        if prop is not None:
            prop = TimePartitioning.from_api_repr(prop)
        return prop
    @time_partitioning.setter
    def time_partitioning(self, value):
        api_repr = value
        if value is not None:
            api_repr = value.to_api_repr()
        self._set_sub_prop("timePartitioning", api_repr)
    @property
    def clustering_fields(self):
        """Union[List[str], None]: Fields defining clustering for the table
        (Defaults to :data:`None`).
        Clustering fields are immutable after table creation.
        .. note::
           As of 2018-06-29, clustering fields cannot be set on a table
           which does not also have time partioning defined.
        """
        prop = self._get_sub_prop("clustering")
        if prop is not None:
            return list(prop.get("fields", ()))
        # Implicitly returns None when clustering is not configured.
    @clustering_fields.setter
    def clustering_fields(self, value):
        """Union[List[str], None]: Fields defining clustering for the table
        (Defaults to :data:`None`).
        """
        if value is not None:
            self._set_sub_prop("clustering", {"fields": value})
        else:
            self._del_sub_prop("clustering")
    @property
    def schema_update_options(self):
        """List[google.cloud.bigquery.job.SchemaUpdateOption]: Specifies
        updates to the destination table schema to allow as a side effect of
        the query job.
        """
        return self._get_sub_prop("schemaUpdateOptions")
    @schema_update_options.setter
    def schema_update_options(self, values):
        self._set_sub_prop("schemaUpdateOptions", values)
    def to_api_repr(self):
        """Build an API representation of the query job config.
        Returns:
            dict: A dictionary in the format used by the BigQuery API.
        """
        resource = copy.deepcopy(self._properties)
        # Query parameters have an addition property associated with them
        # to indicate if the query is using named or positional parameters.
        query_parameters = resource["query"].get("queryParameters")
        if query_parameters:
            if query_parameters[0].get("name") is None:
                resource["query"]["parameterMode"] = "POSITIONAL"
            else:
                resource["query"]["parameterMode"] = "NAMED"
        return resource
class QueryJob(_AsyncJob):
    """Asynchronous job: query tables.
    :type job_id: str
    :param job_id: the job's ID, within the project belonging to ``client``.
    :type query: str
    :param query: SQL query string
    :type client: :class:`google.cloud.bigquery.client.Client`
    :param client: A client which holds credentials and project configuration
                   for the dataset (which requires a project).
    :type job_config: :class:`~google.cloud.bigquery.job.QueryJobConfig`
    :param job_config:
        (Optional) Extra configuration options for the query job.
    """
    _JOB_TYPE = "query"
    _UDF_KEY = "userDefinedFunctionResources"
    def __init__(self, job_id, query, client, job_config=None):
        super(QueryJob, self).__init__(job_id, client)
        if job_config is None:
            job_config = QueryJobConfig()
        # Standard SQL is the default unless the caller explicitly opted in
        # to legacy SQL via the job config.
        if job_config.use_legacy_sql is None:
            job_config.use_legacy_sql = False
        self.query = query
        self._configuration = job_config
        self._query_results = None
        self._done_timeout = None
    @property
    def allow_large_results(self):
        """See
        :attr:`google.cloud.bigquery.job.QueryJobConfig.allow_large_results`.
        """
        return self._configuration.allow_large_results
    @property
    def create_disposition(self):
        """See
        :attr:`google.cloud.bigquery.job.QueryJobConfig.create_disposition`.
        """
        return self._configuration.create_disposition
    @property
    def default_dataset(self):
        """See
        :attr:`google.cloud.bigquery.job.QueryJobConfig.default_dataset`.
        """
        return self._configuration.default_dataset
    @property
    def destination(self):
        """See
        :attr:`google.cloud.bigquery.job.QueryJobConfig.destination`.
        """
        return self._configuration.destination
    @property
    def destination_encryption_configuration(self):
        """google.cloud.bigquery.table.EncryptionConfiguration: Custom
        encryption configuration for the destination table.
        Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None`
        if using default encryption.
        See
        :attr:`google.cloud.bigquery.job.QueryJobConfig.destination_encryption_configuration`.
        """
        return self._configuration.destination_encryption_configuration
    @property
    def dry_run(self):
        """See
        :attr:`google.cloud.bigquery.job.QueryJobConfig.dry_run`.
        """
        return self._configuration.dry_run
    @property
    def flatten_results(self):
        """See
        :attr:`google.cloud.bigquery.job.QueryJobConfig.flatten_results`.
        """
        return self._configuration.flatten_results
    @property
    def priority(self):
        """See
        :attr:`google.cloud.bigquery.job.QueryJobConfig.priority`.
        """
        return self._configuration.priority
    @property
    def query_parameters(self):
        """See
        :attr:`google.cloud.bigquery.job.QueryJobConfig.query_parameters`.
        """
        return self._configuration.query_parameters
    @property
    def udf_resources(self):
        """See
        :attr:`google.cloud.bigquery.job.QueryJobConfig.udf_resources`.
        """
        return self._configuration.udf_resources
    @property
    def use_legacy_sql(self):
        """See
        :attr:`google.cloud.bigquery.job.QueryJobConfig.use_legacy_sql`.
        """
        return self._configuration.use_legacy_sql
    @property
    def use_query_cache(self):
        """See
        :attr:`google.cloud.bigquery.job.QueryJobConfig.use_query_cache`.
        """
        return self._configuration.use_query_cache
    @property
    def write_disposition(self):
        """See
        :attr:`google.cloud.bigquery.job.QueryJobConfig.write_disposition`.
        """
        return self._configuration.write_disposition
    @property
    def maximum_billing_tier(self):
        """See
        :attr:`google.cloud.bigquery.job.QueryJobConfig.maximum_billing_tier`.
        """
        return self._configuration.maximum_billing_tier
    @property
    def maximum_bytes_billed(self):
        """See
        :attr:`google.cloud.bigquery.job.QueryJobConfig.maximum_bytes_billed`.
        """
        return self._configuration.maximum_bytes_billed
    @property
    def table_definitions(self):
        """See
        :attr:`google.cloud.bigquery.job.QueryJobConfig.table_definitions`.
        """
        return self._configuration.table_definitions
    @property
    def time_partitioning(self):
        """See
        :attr:`google.cloud.bigquery.job.QueryJobConfig.time_partitioning`.
        """
        return self._configuration.time_partitioning
    @property
    def clustering_fields(self):
        """See
        :attr:`google.cloud.bigquery.job.QueryJobConfig.clustering_fields`.
        """
        return self._configuration.clustering_fields
    @property
    def schema_update_options(self):
        """See
        :attr:`google.cloud.bigquery.job.QueryJobConfig.schema_update_options`.
        """
        return self._configuration.schema_update_options
    def to_api_repr(self):
        """Generate a resource for :meth:`_begin`."""
        configuration = self._configuration.to_api_repr()
        resource = {
            "jobReference": self._properties["jobReference"],
            "configuration": configuration,
        }
        # The SQL text itself lives on the job, not the config object.
        configuration["query"]["query"] = self.query
        return resource
    def _copy_configuration_properties(self, configuration):
        """Helper: assign subclass configuration properties in cleaned."""
        self._configuration._properties = copy.deepcopy(configuration)
        self.query = _helpers._get_sub_prop(configuration, ["query", "query"])
    @classmethod
    def from_api_repr(cls, resource, client):
        """Factory: construct a job given its API representation
        :type resource: dict
        :param resource: dataset job representation returned from the API
        :type client: :class:`google.cloud.bigquery.client.Client`
        :param client: Client which holds credentials and project
                       configuration for the dataset.
        :rtype: :class:`google.cloud.bigquery.job.QueryJob`
        :returns: Job parsed from ``resource``.
        """
        job_id, config = cls._get_resource_config(resource)
        query = config["query"]["query"]
        job = cls(job_id, query, client=client)
        job._set_properties(resource)
        return job
    @property
    def query_plan(self):
        """Return query plan from job statistics, if present.
        See:
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.queryPlan
        :rtype: list of :class:`QueryPlanEntry`
        :returns: mappings describing the query plan, or an empty list
                  if the query has not yet completed.
        """
        plan_entries = self._job_statistics().get("queryPlan", ())
        return [QueryPlanEntry.from_api_repr(entry) for entry in plan_entries]
    @property
    def timeline(self):
        """List(TimelineEntry): Return the query execution timeline
        from job statistics.
        """
        raw = self._job_statistics().get("timeline", ())
        return [TimelineEntry.from_api_repr(entry) for entry in raw]
    @property
    def total_bytes_processed(self):
        """Return total bytes processed from job statistics, if present.
        See:
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.totalBytesProcessed
        :rtype: int or None
        :returns: total bytes processed by the job, or None if job is not
                  yet complete.
        """
        result = self._job_statistics().get("totalBytesProcessed")
        if result is not None:
            # API serializes 64-bit counters as strings.
            result = int(result)
        return result
    @property
    def total_bytes_billed(self):
        """Return total bytes billed from job statistics, if present.
        See:
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.totalBytesBilled
        :rtype: int or None
        :returns: total bytes processed by the job, or None if job is not
                  yet complete.
        """
        result = self._job_statistics().get("totalBytesBilled")
        if result is not None:
            result = int(result)
        return result
    @property
    def billing_tier(self):
        """Return billing tier from job statistics, if present.
        See:
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.billingTier
        :rtype: int or None
        :returns: billing tier used by the job, or None if job is not
                  yet complete.
        """
        return self._job_statistics().get("billingTier")
    @property
    def cache_hit(self):
        """Return whether or not query results were served from cache.
        See:
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.cacheHit
        :rtype: bool or None
        :returns: whether the query results were returned from cache, or None
                  if job is not yet complete.
        """
        return self._job_statistics().get("cacheHit")
    @property
    def ddl_operation_performed(self):
        """Optional[str]: Return the DDL operation performed.
        See:
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.ddlOperationPerformed
        """
        return self._job_statistics().get("ddlOperationPerformed")
    @property
    def ddl_target_table(self):
        """Optional[TableReference]: Return the DDL target table, present
        for CREATE/DROP TABLE/VIEW queries.
        See:
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.ddlTargetTable
        """
        prop = self._job_statistics().get("ddlTargetTable")
        if prop is not None:
            prop = TableReference.from_api_repr(prop)
        return prop
    @property
    def num_dml_affected_rows(self):
        """Return the number of DML rows affected by the job.
        See:
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.numDmlAffectedRows
        :rtype: int or None
        :returns: number of DML rows affected by the job, or None if job is not
                  yet complete.
        """
        result = self._job_statistics().get("numDmlAffectedRows")
        if result is not None:
            result = int(result)
        return result
    @property
    def slot_millis(self):
        """Union[int, None]: Slot-milliseconds used by this query job."""
        return _helpers._int_or_none(self._job_statistics().get("totalSlotMs"))
    @property
    def statement_type(self):
        """Return statement type from job statistics, if present.
        See:
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.statementType
        :rtype: str or None
        :returns: type of statement used by the job, or None if job is not
                  yet complete.
        """
        return self._job_statistics().get("statementType")
    @property
    def referenced_tables(self):
        """Return referenced tables from job statistics, if present.
        See:
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.referencedTables
        :rtype: list of dict
        :returns: mappings describing the query plan, or an empty list
                  if the query has not yet completed.
        """
        tables = []
        # Reuse a single DatasetReference per (project, dataset) pair so
        # tables in the same dataset share the same parent reference.
        datasets_by_project_name = {}
        for table in self._job_statistics().get("referencedTables", ()):
            t_project = table["projectId"]
            ds_id = table["datasetId"]
            t_dataset = datasets_by_project_name.get((t_project, ds_id))
            if t_dataset is None:
                t_dataset = DatasetReference(t_project, ds_id)
                datasets_by_project_name[(t_project, ds_id)] = t_dataset
            t_name = table["tableId"]
            tables.append(t_dataset.table(t_name))
        return tables
    @property
    def undeclared_query_parameters(self):
        """Return undeclared query parameters from job statistics, if present.
        See:
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.undeclaredQueryParameters
        :rtype:
            list of
            :class:`~google.cloud.bigquery.ArrayQueryParameter`,
            :class:`~google.cloud.bigquery.ScalarQueryParameter`, or
            :class:`~google.cloud.bigquery.StructQueryParameter`
        :returns: undeclared parameters, or an empty list if the query has
                  not yet completed.
        """
        parameters = []
        undeclared = self._job_statistics().get("undeclaredQueryParameters", ())
        for parameter in undeclared:
            # Dispatch on the parameter-type resource shape: arrays and
            # structs carry marker keys; everything else is scalar.
            p_type = parameter["parameterType"]
            if "arrayType" in p_type:
                klass = ArrayQueryParameter
            elif "structTypes" in p_type:
                klass = StructQueryParameter
            else:
                klass = ScalarQueryParameter
            parameters.append(klass.from_api_repr(parameter))
        return parameters
    @property
    def estimated_bytes_processed(self):
        """Return the estimated number of bytes processed by the query.
        See:
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.estimatedBytesProcessed
        :rtype: int or None
        :returns: number of DML rows affected by the job, or None if job is not
                  yet complete.
        """
        result = self._job_statistics().get("estimatedBytesProcessed")
        if result is not None:
            result = int(result)
        return result
    def done(self, retry=DEFAULT_RETRY):
        """Refresh the job and checks if it is complete.
        :rtype: bool
        :returns: True if the job is complete, False otherwise.
        """
        # Since the API to getQueryResults can hang up to the timeout value
        # (default of 10 seconds), set the timeout parameter to ensure that
        # the timeout from the futures API is respected. See:
        # https://github.com/GoogleCloudPlatform/google-cloud-python/issues/4135
        timeout_ms = None
        if self._done_timeout is not None:
            # Subtract a buffer for context switching, network latency, etc.
            timeout = self._done_timeout - _TIMEOUT_BUFFER_SECS
            # Clamp to the API's 0-10 second getQueryResults window.
            timeout = max(min(timeout, 10), 0)
            self._done_timeout -= timeout
            self._done_timeout = max(0, self._done_timeout)
            timeout_ms = int(timeout * 1000)
        # Do not refresh if the state is already done, as the job will not
        # change once complete.
        if self.state != _DONE_STATE:
            self._query_results = self._client._get_query_results(
                self.job_id,
                retry,
                project=self.project,
                timeout_ms=timeout_ms,
                location=self.location,
            )
            # Only reload the job once we know the query is complete.
            # This will ensure that fields such as the destination table are
            # correctly populated.
            if self._query_results.complete:
                self.reload(retry=retry)
        return self.state == _DONE_STATE
    def _blocking_poll(self, timeout=None):
        # Stash the caller's deadline so done() can budget each
        # getQueryResults call against it.
        self._done_timeout = timeout
        super(QueryJob, self)._blocking_poll(timeout=timeout)
    def result(self, timeout=None, retry=DEFAULT_RETRY):
        """Start the job and wait for it to complete and get the result.
        :type timeout: float
        :param timeout:
            How long (in seconds) to wait for job to complete before raising
            a :class:`concurrent.futures.TimeoutError`.
        :type retry: :class:`google.api_core.retry.Retry`
        :param retry: (Optional) How to retry the call that retrieves rows.
        :rtype: :class:`~google.cloud.bigquery.table.RowIterator`
        :returns:
            Iterator of row data :class:`~google.cloud.bigquery.table.Row`-s.
            During each page, the iterator will have the ``total_rows``
            attribute set, which counts the total number of rows **in the
            result set** (this is distinct from the total number of rows in
            the current page: ``iterator.page.num_items``).
        :raises:
            :class:`~google.cloud.exceptions.GoogleCloudError` if the job
            failed or :class:`concurrent.futures.TimeoutError` if the job did
            not complete in the given timeout.
        """
        super(QueryJob, self).result(timeout=timeout)
        # Return an iterator instead of returning the job.
        if not self._query_results:
            self._query_results = self._client._get_query_results(
                self.job_id, retry, project=self.project, location=self.location
            )
        # If the query job is complete but there are no query results, this was a
        # special job, such as a DDL query. Return an empty result set to
        # indicate success and avoid calling tabledata.list on a table which
        # can't be read (such as a view table).
        if self._query_results.total_rows is None:
            return _EmptyRowIterator()
        schema = self._query_results.schema
        dest_table_ref = self.destination
        dest_table = Table(dest_table_ref, schema=schema)
        return self._client.list_rows(dest_table, retry=retry)
    def to_dataframe(self):
        """Return a pandas DataFrame from a QueryJob
        Returns:
            A :class:`~pandas.DataFrame` populated with row data and column
            headers from the query results. The column headers are derived
            from the destination table's schema.
        Raises:
            ValueError: If the `pandas` library cannot be imported.
        """
        return self.result().to_dataframe()
    def __iter__(self):
        return iter(self.result())
class QueryPlanEntryStep(object):
    """Represent a single step within a query plan stage.
    :type kind: str
    :param kind: step type
    :type substeps:
    :param substeps: names of substeps
    """
    def __init__(self, kind, substeps):
        self.kind = kind
        self.substeps = list(substeps)
    @classmethod
    def from_api_repr(cls, resource):
        """Factory: build a step from its JSON representation.
        :type resource: dict
        :param resource: JSON representation of the entry
        :rtype: :class:`QueryPlanEntryStep`
        :return: new instance built from the resource
        """
        kind = resource.get("kind")
        substeps = resource.get("substeps", ())
        return cls(kind=kind, substeps=substeps)
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return (self.kind, self.substeps) == (other.kind, other.substeps)
class QueryPlanEntry(object):
"""QueryPlanEntry represents a single stage of a query execution plan.
See
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs
for the underlying API representation within query statistics.
"""
    def __init__(self):
        # Backing store for the raw ExplainQueryStage resource; populated
        # by from_api_repr and read by all the property accessors.
        self._properties = {}
@classmethod
def from_api_repr(cls, resource):
"""Factory: construct instance from the JSON repr.
Args:
resource(Dict[str: object]):
ExplainQueryStage representation returned from API
Returns:
google.cloud.bigquery.QueryPlanEntry:
Query plan entry parsed from ``resource``
"""
entry = cls()
entry._properties = resource
return entry
@property
def name(self):
"""Union[str, None]: Human-readable name of the stage."""
return self._properties.get("name")
@property
def entry_id(self):
"""Union[str, None]: Unique ID for the stage within the plan."""
return self._properties.get("id")
@property
def start(self):
"""Union[Datetime, None]: Datetime when the stage started."""
if self._properties.get("startMs") is None:
return None
return _helpers._datetime_from_microseconds(
int(self._properties.get("startMs")) * 1000.0
)
@property
def end(self):
"""Union[Datetime, None]: Datetime when the stage ended."""
if self._properties.get("endMs") is None:
return None
return _helpers._datetime_from_microseconds(
int(self._properties.get("endMs")) * 1000.0
)
@property
def input_stages(self):
"""List(int): Entry IDs for stages that were inputs for this stage."""
if self._properties.get("inputStages") is None:
return []
return [
_helpers._int_or_none(entry)
for entry in self._properties.get("inputStages")
]
@property
def parallel_inputs(self):
"""Union[int, None]: Number of parallel input segments within
the stage.
"""
return _helpers._int_or_none(self._properties.get("parallelInputs"))
@property
def completed_parallel_inputs(self):
"""Union[int, None]: Number of parallel input segments completed."""
return _helpers._int_or_none(self._properties.get("completedParallelInputs"))
@property
def wait_ms_avg(self):
"""Union[int, None]: Milliseconds the average worker spent waiting to
be scheduled.
"""
return _helpers._int_or_none(self._properties.get("waitMsAvg"))
@property
def wait_ms_max(self):
"""Union[int, None]: Milliseconds the slowest worker spent waiting to
be scheduled.
"""
return _helpers._int_or_none(self._properties.get("waitMsMax"))
@property
def wait_ratio_avg(self):
"""Union[float, None]: Ratio of time the average worker spent waiting
to be scheduled, relative to the longest time spent by any worker in
any stage of the overall plan.
"""
return self._properties.get("waitRatioAvg")
@property
def wait_ratio_max(self):
"""Union[float, None]: Ratio of time the slowest worker spent waiting
to be scheduled, relative to the longest time spent by any worker in
any stage of the overall plan.
"""
return self._properties.get("waitRatioMax")
@property
def read_ms_avg(self):
"""Union[int, None]: Milliseconds the average worker spent reading
input.
"""
return _helpers._int_or_none(self._properties.get("readMsAvg"))
@property
def read_ms_max(self):
"""Union[int, None]: Milliseconds the slowest worker spent reading
input.
"""
return _helpers._int_or_none(self._properties.get("readMsMax"))
@property
def read_ratio_avg(self):
"""Union[float, None]: Ratio of time the average worker spent reading
input, relative to the longest time spent by any worker in any stage
of the overall plan.
"""
return self._properties.get("readRatioAvg")
@property
def read_ratio_max(self):
    """Union[float, None]: Slowest worker's input-reading time as a
    fraction of the longest time spent by any worker in any stage of the
    overall plan.

    (Original docstring said "spent reading to be scheduled" -- a
    copy-paste slip; this field reports reading time.)
    """
    return self._properties.get("readRatioMax", None)
@property
def compute_ms_avg(self):
    """Union[int, None]: Milliseconds the average worker spent on CPU-bound
    processing."""
    raw = self._properties.get("computeMsAvg")
    return _helpers._int_or_none(raw)
@property
def compute_ms_max(self):
    """Union[int, None]: Milliseconds the slowest worker spent on CPU-bound
    processing."""
    raw = self._properties.get("computeMsMax")
    return _helpers._int_or_none(raw)
@property
def compute_ratio_avg(self):
    """Union[float, None]: Average worker CPU-bound processing time as a
    fraction of the longest time spent by any worker in any stage of the
    overall plan."""
    return self._properties.get("computeRatioAvg", None)
@property
def compute_ratio_max(self):
    """Union[float, None]: Slowest worker's CPU-bound processing time as a
    fraction of the longest time spent by any worker in any stage of the
    overall plan."""
    return self._properties.get("computeRatioMax", None)
@property
def write_ms_avg(self):
    """Union[int, None]: Milliseconds the average worker spent writing
    output data."""
    raw = self._properties.get("writeMsAvg")
    return _helpers._int_or_none(raw)
@property
def write_ms_max(self):
    """Union[int, None]: Milliseconds the slowest worker spent writing
    output data."""
    raw = self._properties.get("writeMsMax")
    return _helpers._int_or_none(raw)
@property
def write_ratio_avg(self):
    """Union[float, None]: Average worker output-writing time as a fraction
    of the longest time spent by any worker in any stage of the overall
    plan."""
    return self._properties.get("writeRatioAvg", None)
@property
def write_ratio_max(self):
    """Union[float, None]: Slowest worker's output-writing time as a
    fraction of the longest time spent by any worker in any stage of the
    overall plan."""
    return self._properties.get("writeRatioMax", None)
@property
def records_read(self):
    """Union[int, None]: Number of records read by this stage."""
    raw = self._properties.get("recordsRead")
    return _helpers._int_or_none(raw)
@property
def records_written(self):
    """Union[int, None]: Number of records written by this stage."""
    raw = self._properties.get("recordsWritten")
    return _helpers._int_or_none(raw)
@property
def status(self):
    """Union[str, None]: Execution status reported for this stage."""
    return self._properties.get("status", None)
@property
def shuffle_output_bytes(self):
    """Union[int, None]: Bytes written by this stage to intermediate
    shuffle."""
    raw = self._properties.get("shuffleOutputBytes")
    return _helpers._int_or_none(raw)
@property
def shuffle_output_bytes_spilled(self):
    """Union[int, None]: Bytes written by this stage to intermediate
    shuffle and spilled to disk."""
    raw = self._properties.get("shuffleOutputBytesSpilled")
    return _helpers._int_or_none(raw)
@property
def steps(self):
    """List(QueryPlanEntryStep): Step operations performed by each worker
    in the stage."""
    raw_steps = self._properties.get("steps", [])
    return [QueryPlanEntryStep.from_api_repr(step) for step in raw_steps]
class TimelineEntry(object):
    """Snapshot of a query job's progress at a single point in time.

    See
    https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs
    for the underlying API representation within query statistics.
    """

    def __init__(self):
        self._properties = {}

    @classmethod
    def from_api_repr(cls, resource):
        """Factory: build a TimelineEntry from its JSON representation.

        Args:
            resource(Dict[str: object]):
                QueryTimelineSample representation returned from API

        Returns:
            google.cloud.bigquery.TimelineEntry:
                Timeline sample parsed from ``resource``
        """
        entry = cls()
        entry._properties = resource
        return entry

    @property
    def elapsed_ms(self):
        """Union[int, None]: Milliseconds elapsed since query execution
        started."""
        raw = self._properties.get("elapsedMs")
        return _helpers._int_or_none(raw)

    @property
    def active_units(self):
        """Union[int, None]: Input units currently being processed by
        workers (largest value since the last sample)."""
        raw = self._properties.get("activeUnits")
        return _helpers._int_or_none(raw)

    @property
    def pending_units(self):
        """Union[int, None]: Input units still remaining for query stages
        active at this sample time."""
        raw = self._properties.get("pendingUnits")
        return _helpers._int_or_none(raw)

    @property
    def completed_units(self):
        """Union[int, None]: Input units completed by this query so far."""
        raw = self._properties.get("completedUnits")
        return _helpers._int_or_none(raw)

    @property
    def slot_millis(self):
        """Union[int, None]: Cumulative slot-milliseconds consumed by this
        query."""
        raw = self._properties.get("totalSlotMs")
        return _helpers._int_or_none(raw)
class UnknownJob(_AsyncJob):
    """A job whose type cannot be determined."""

    @classmethod
    def from_api_repr(cls, resource, client):
        """Construct an UnknownJob from the JSON representation.

        Args:
            resource (dict): JSON representation of a job.
            client (google.cloud.bigquery.client.Client):
                Client connected to BigQuery API.

        Returns:
            UnknownJob: Job corresponding to the resource.
        """
        ref_props = resource.get("jobReference", {"projectId": client.project})
        job = cls(_JobReference._from_api_repr(ref_props), client)
        # Even if the server redacted the job reference, we know it must
        # equal that of the request, so write it back before storing.
        resource["jobReference"] = ref_props
        job._properties = resource
        return job
# (dataset-export residue, not part of the module: "| 34 | 128 | 0.647908 |")
import copy
import threading
from six.moves import http_client
import google.api_core.future.polling
from google.cloud import exceptions
from google.cloud.exceptions import NotFound
from google.cloud.bigquery.dataset import DatasetReference
from google.cloud.bigquery.external_config import ExternalConfig
from google.cloud.bigquery.query import _query_param_from_api_repr
from google.cloud.bigquery.query import ArrayQueryParameter
from google.cloud.bigquery.query import ScalarQueryParameter
from google.cloud.bigquery.query import StructQueryParameter
from google.cloud.bigquery.query import UDFResource
from google.cloud.bigquery.retry import DEFAULT_RETRY
from google.cloud.bigquery.schema import SchemaField
from google.cloud.bigquery.table import _EmptyRowIterator
from google.cloud.bigquery.table import EncryptionConfiguration
from google.cloud.bigquery.table import TableReference
from google.cloud.bigquery.table import Table
from google.cloud.bigquery.table import TimePartitioning
from google.cloud.bigquery import _helpers
# Terminal job state reported by the API (see _AsyncJob.done).
_DONE_STATE = "DONE"
# Error "reason" reported for a job that was stopped/cancelled
# (see _AsyncJob.cancelled).
_STOPPED_REASON = "stopped"
# NOTE(review): not referenced within this chunk; presumably subtracted
# from a caller-supplied timeout before polling -- confirm against the
# rest of the module.
_TIMEOUT_BUFFER_SECS = 0.1
# Maps a BigQuery error "reason" string to the HTTP status code used to
# select a google.cloud exception class (see _error_result_to_exception).
_ERROR_REASON_TO_EXCEPTION = {
    "accessDenied": http_client.FORBIDDEN,
    "backendError": http_client.INTERNAL_SERVER_ERROR,
    "billingNotEnabled": http_client.FORBIDDEN,
    "billingTierLimitExceeded": http_client.BAD_REQUEST,
    "blocked": http_client.FORBIDDEN,
    "duplicate": http_client.CONFLICT,
    "internalError": http_client.INTERNAL_SERVER_ERROR,
    "invalid": http_client.BAD_REQUEST,
    "invalidQuery": http_client.BAD_REQUEST,
    "notFound": http_client.NOT_FOUND,
    "notImplemented": http_client.NOT_IMPLEMENTED,
    "quotaExceeded": http_client.FORBIDDEN,
    "rateLimitExceeded": http_client.FORBIDDEN,
    "resourceInUse": http_client.BAD_REQUEST,
    "resourcesExceeded": http_client.BAD_REQUEST,
    "responseTooLarge": http_client.FORBIDDEN,
    "stopped": http_client.OK,
    "tableUnavailable": http_client.BAD_REQUEST,
}
def _error_result_to_exception(error_result):
    """Convert a job ``errorResult`` mapping to a ``google.cloud`` exception.

    Args:
        error_result (Mapping[str, str]): error mapping with optional
            ``reason`` and ``message`` keys.

    Returns:
        google.cloud.exceptions.GoogleCloudError: exception whose class is
        chosen from the HTTP status mapped to ``reason`` (defaults to 500
        for unknown reasons).
    """
    status = _ERROR_REASON_TO_EXCEPTION.get(
        error_result.get("reason"), http_client.INTERNAL_SERVER_ERROR
    )
    message = error_result.get("message", "")
    return exceptions.from_http_status(status, message, errors=[error_result])
class Compression(object):
    """Allowed values for the ``compression`` option
    (see :attr:`ExtractJobConfig.compression`)."""

    GZIP = "GZIP"
    DEFLATE = "DEFLATE"
    SNAPPY = "SNAPPY"
    NONE = "NONE"
class CreateDisposition(object):
    """Allowed values for job ``create_disposition`` options."""

    CREATE_IF_NEEDED = "CREATE_IF_NEEDED"
    CREATE_NEVER = "CREATE_NEVER"
class DestinationFormat(object):
    """Allowed values for the ``destination_format`` option
    (see :attr:`ExtractJobConfig.destination_format`)."""

    CSV = "CSV"
    NEWLINE_DELIMITED_JSON = "NEWLINE_DELIMITED_JSON"
    AVRO = "AVRO"
class Encoding(object):
    """Allowed character-encoding values (see :attr:`LoadJobConfig.encoding`)."""

    UTF_8 = "UTF-8"
    ISO_8859_1 = "ISO-8859-1"
class QueryPriority(object):
    """Allowed priority values for query jobs."""

    INTERACTIVE = "INTERACTIVE"
    BATCH = "BATCH"
class SourceFormat(object):
    """Allowed values for the ``source_format`` option
    (see :attr:`LoadJobConfig.source_format`)."""

    CSV = "CSV"
    DATASTORE_BACKUP = "DATASTORE_BACKUP"
    NEWLINE_DELIMITED_JSON = "NEWLINE_DELIMITED_JSON"
    AVRO = "AVRO"
    PARQUET = "PARQUET"
    ORC = "ORC"
class WriteDisposition(object):
    """Allowed values for job ``write_disposition`` options."""

    WRITE_APPEND = "WRITE_APPEND"
    WRITE_TRUNCATE = "WRITE_TRUNCATE"
    WRITE_EMPTY = "WRITE_EMPTY"
class SchemaUpdateOption(object):
    """Allowed values for the ``schema_update_options`` option
    (see :attr:`LoadJobConfig.schema_update_options`)."""

    ALLOW_FIELD_ADDITION = "ALLOW_FIELD_ADDITION"
    ALLOW_FIELD_RELAXATION = "ALLOW_FIELD_RELAXATION"
class _JobReference(object):
def __init__(self, job_id, project, location):
self._properties = {"jobId": job_id, "projectId": project}
if location:
self._properties["location"] = location
@property
def job_id(self):
return self._properties.get("jobId")
@property
def project(self):
return self._properties.get("projectId")
@property
def location(self):
return self._properties.get("location")
def _to_api_repr(self):
return copy.deepcopy(self._properties)
@classmethod
def _from_api_repr(cls, resource):
job_id = resource.get("jobId")
project = resource.get("projectId")
location = resource.get("location")
job_ref = cls(job_id, project, location)
return job_ref
class _AsyncJob(google.api_core.future.polling.PollingFuture):
    """Base class for asynchronous BigQuery jobs.

    Args:
        job_id (Union[str, _JobReference]): the job's ID; a plain string is
            wrapped in a :class:`_JobReference` scoped to the client's
            project.
        client (google.cloud.bigquery.client.Client):
            client bound to the job, used for API calls unless one is
            passed explicitly to a method.
    """

    def __init__(self, job_id, client):
        super(_AsyncJob, self).__init__()
        job_ref = job_id
        if not isinstance(job_id, _JobReference):
            job_ref = _JobReference(job_id, client.project, None)
        self._properties = {"jobReference": job_ref._to_api_repr()}
        self._client = client
        self._result_set = False
        # Guards the one-time transition of the future into its final state.
        self._completion_lock = threading.Lock()

    @property
    def job_id(self):
        """str: ID of the job."""
        return _helpers._get_sub_prop(self._properties, ["jobReference", "jobId"])

    @property
    def project(self):
        """str: ID of the project in which the job runs."""
        return _helpers._get_sub_prop(self._properties, ["jobReference", "projectId"])

    @property
    def location(self):
        """Union[str, None]: location where the job runs."""
        return _helpers._get_sub_prop(self._properties, ["jobReference", "location"])

    def _require_client(self, client):
        """Return ``client`` or, when ``None``, the job's bound client."""
        if client is None:
            client = self._client
        return client

    @property
    def job_type(self):
        """str: type of the job, from the subclass's ``_JOB_TYPE``."""
        return self._JOB_TYPE

    @property
    def path(self):
        """str: URL path for the job's API resource."""
        return "/projects/%s/jobs/%s" % (self.project, self.job_id)

    @property
    def labels(self):
        """Dict[str, str]: labels for the job."""
        return self._properties.setdefault("labels", {})

    @property
    def etag(self):
        """Union[str, None]: ETag of the job resource."""
        return self._properties.get("etag")

    @property
    def self_link(self):
        """Union[str, None]: URL of the job resource."""
        return self._properties.get("selfLink")

    @property
    def user_email(self):
        """Union[str, None]: e-mail address of the user who created the job."""
        return self._properties.get("user_email")

    @property
    def created(self):
        """Union[datetime.datetime, None]: when the job resource was created."""
        statistics = self._properties.get("statistics")
        if statistics is not None:
            millis = statistics.get("creationTime")
            if millis is not None:
                return _helpers._datetime_from_microseconds(millis * 1000.0)

    @property
    def started(self):
        """Union[datetime.datetime, None]: when the job started running."""
        statistics = self._properties.get("statistics")
        if statistics is not None:
            millis = statistics.get("startTime")
            if millis is not None:
                return _helpers._datetime_from_microseconds(millis * 1000.0)

    @property
    def ended(self):
        """Union[datetime.datetime, None]: when the job finished."""
        statistics = self._properties.get("statistics")
        if statistics is not None:
            millis = statistics.get("endTime")
            if millis is not None:
                return _helpers._datetime_from_microseconds(millis * 1000.0)

    def _job_statistics(self):
        """Return the job-type-specific sub-mapping of ``statistics``."""
        statistics = self._properties.get("statistics", {})
        return statistics.get(self._JOB_TYPE, {})

    @property
    def error_result(self):
        """Union[Mapping, None]: error information, if the job errored."""
        status = self._properties.get("status")
        if status is not None:
            return status.get("errorResult")

    @property
    def errors(self):
        """Union[List[Mapping], None]: errors encountered during the job."""
        status = self._properties.get("status")
        if status is not None:
            return status.get("errors")

    @property
    def state(self):
        """Union[str, None]: state of the job, e.g. ``"DONE"``."""
        status = self._properties.get("status")
        if status is not None:
            return status.get("state")

    def _scrub_local_properties(self, cleaned):
        """Hook: subclasses may strip client-local values before merging."""
        pass

    def _copy_configuration_properties(self, configuration):
        """Hook: subclasses copy API configuration into their config object."""
        raise NotImplementedError("Abstract")

    def _set_properties(self, api_response):
        """Replace job state with values from an API response.

        Timestamps in ``statistics`` arrive as strings; they are coerced to
        floats so ``created``/``started``/``ended`` can convert them.
        """
        cleaned = api_response.copy()
        self._scrub_local_properties(cleaned)
        statistics = cleaned.get("statistics", {})
        if "creationTime" in statistics:
            statistics["creationTime"] = float(statistics["creationTime"])
        if "startTime" in statistics:
            statistics["startTime"] = float(statistics["startTime"])
        if "endTime" in statistics:
            statistics["endTime"] = float(statistics["endTime"])
        self._properties.clear()
        self._properties.update(cleaned)
        self._copy_configuration_properties(cleaned.get("configuration", {}))
        # The response may already show the job as done; resolve the future.
        self._set_future_result()

    @classmethod
    def _get_resource_config(cls, resource):
        """Validate ``resource`` and extract ``(job_id, configuration)``.

        Raises:
            KeyError: if the resource lacks the job's identity or the
                configuration for this job type.
        """
        if "jobReference" not in resource or "jobId" not in resource["jobReference"]:
            raise KeyError(
                "Resource lacks required identity information: "
                '["jobReference"]["jobId"]'
            )
        job_id = resource["jobReference"]["jobId"]
        if (
            "configuration" not in resource
            or cls._JOB_TYPE not in resource["configuration"]
        ):
            raise KeyError(
                "Resource lacks required configuration: "
                '["configuration"]["%s"]' % cls._JOB_TYPE
            )
        return job_id, resource["configuration"]

    def to_api_repr(self):
        """Build an API representation of the job (subclass responsibility)."""
        raise NotImplementedError("Abstract")

    # Backward-compatible alias.
    _build_resource = to_api_repr

    def _begin(self, client=None, retry=DEFAULT_RETRY):
        """Submit the job to the API.

        Raises:
            ValueError: if the job has already been submitted.
        """
        if self.state is not None:
            raise ValueError("Job already begun.")
        client = self._require_client(client)
        path = "/projects/%s/jobs" % (self.project,)
        api_response = client._call_api(
            retry, method="POST", path=path, data=self.to_api_repr()
        )
        self._set_properties(api_response)

    def exists(self, client=None, retry=DEFAULT_RETRY):
        """Return True if the job resource exists server-side."""
        client = self._require_client(client)
        extra_params = {"fields": "id"}
        if self.location:
            extra_params["location"] = self.location
        try:
            client._call_api(
                retry, method="GET", path=self.path, query_params=extra_params
            )
        except NotFound:
            return False
        else:
            return True

    def reload(self, client=None, retry=DEFAULT_RETRY):
        """Refresh the job's state from the API."""
        client = self._require_client(client)
        extra_params = {}
        if self.location:
            extra_params["location"] = self.location
        api_response = client._call_api(
            retry, method="GET", path=self.path, query_params=extra_params
        )
        self._set_properties(api_response)

    def cancel(self, client=None):
        """Ask the API to cancel the job; always returns True."""
        client = self._require_client(client)
        extra_params = {}
        if self.location:
            extra_params["location"] = self.location
        api_response = client._connection.api_request(
            method="POST", path="%s/cancel" % (self.path,), query_params=extra_params
        )
        self._set_properties(api_response["job"])
        return True

    def _set_future_result(self):
        """Resolve the future when the job is done and not yet resolved."""
        with self._completion_lock:
            # If already set, do not call set_result/set_exception again.
            # Note: self._result_set is set to True in set_result and
            # set_exception, in case those methods are invoked directly.
            if self.state != _DONE_STATE or self._result_set:
                return
            if self.error_result is not None:
                exception = _error_result_to_exception(self.error_result)
                self.set_exception(exception)
            else:
                self.set_result(self)

    def done(self, retry=DEFAULT_RETRY):
        """Refresh (if needed) and return True when the job is complete."""
        # Do not refresh if the state is already done, as the job will not
        # change once complete.
        if self.state != _DONE_STATE:
            self.reload(retry=retry)
        return self.state == _DONE_STATE

    def result(self, timeout=None, retry=DEFAULT_RETRY):
        """Block until the job completes; submit it first if necessary."""
        if self.state is None:
            self._begin(retry=retry)
        # TODO: modify PollingFuture so it can pass a retry argument to done().
        return super(_AsyncJob, self).result(timeout=timeout)

    def cancelled(self):
        """Return True if the job was stopped by a cancel request."""
        return (
            self.error_result is not None
            and self.error_result.get("reason") == _STOPPED_REASON
        )
class _JobConfig(object):
    """Abstract base class for job configuration objects.

    Args:
        job_type (str): key under which job-type-specific properties are
            nested (e.g. ``"load"``).
        kwargs: initial property values, applied via ``setattr``.
    """

    def __init__(self, job_type, **kwargs):
        self._job_type = job_type
        self._properties = {job_type: {}}
        for prop, val in kwargs.items():
            setattr(self, prop, val)

    @property
    def labels(self):
        """Dict[str, str]: labels for the job."""
        return self._properties.setdefault("labels", {})

    @labels.setter
    def labels(self, value):
        if not isinstance(value, dict):
            raise ValueError("Pass a dict")
        self._properties["labels"] = value

    def _get_sub_prop(self, key, default=None):
        """Read ``key`` from this job type's nested configuration."""
        return _helpers._get_sub_prop(
            self._properties, [self._job_type, key], default=default
        )

    def _set_sub_prop(self, key, value):
        """Write ``value`` under ``key`` in this job type's configuration."""
        _helpers._set_sub_prop(self._properties, [self._job_type, key], value)

    def _del_sub_prop(self, key):
        """Delete ``key`` from this job type's configuration."""
        _helpers._del_sub_prop(self._properties, [self._job_type, key])

    def to_api_repr(self):
        """Return a deep copy of the configuration's API representation."""
        return copy.deepcopy(self._properties)

    def _fill_from_default(self, default_job_config):
        """Merge this config over ``default_job_config`` into a new object.

        Values set on ``self`` take precedence; top-level keys other than
        the job type are copied wholesale.

        Raises:
            TypeError: if the two configs have different job types.
        """
        if self._job_type != default_job_config._job_type:
            raise TypeError(
                "attempted to merge two incompatible job types: "
                + repr(self._job_type)
                + ", "
                + repr(default_job_config._job_type)
            )
        new_job_config = self.__class__()
        default_job_properties = copy.deepcopy(default_job_config._properties)
        for key in self._properties:
            if key != self._job_type:
                default_job_properties[key] = self._properties[key]
        default_job_properties[self._job_type].update(self._properties[self._job_type])
        new_job_config._properties = default_job_properties
        return new_job_config

    @classmethod
    def from_api_repr(cls, resource):
        """Factory: build a configuration from its API representation."""
        config = cls()
        config._properties = copy.deepcopy(resource)
        return config
class LoadJobConfig(_JobConfig):
    """Configuration options for load jobs.

    Property names map to fields of the ``configuration.load`` resource of
    the BigQuery Jobs API; values may be set via keyword arguments or
    attribute assignment.
    """

    def __init__(self, **kwargs):
        super(LoadJobConfig, self).__init__("load", **kwargs)

    @property
    def allow_jagged_rows(self):
        """Maps to the ``allowJaggedRows`` field of the load configuration."""
        return self._get_sub_prop("allowJaggedRows")

    @allow_jagged_rows.setter
    def allow_jagged_rows(self, value):
        self._set_sub_prop("allowJaggedRows", value)

    @property
    def allow_quoted_newlines(self):
        """Maps to the ``allowQuotedNewlines`` field of the load configuration."""
        return self._get_sub_prop("allowQuotedNewlines")

    @allow_quoted_newlines.setter
    def allow_quoted_newlines(self, value):
        self._set_sub_prop("allowQuotedNewlines", value)

    @property
    def autodetect(self):
        """Maps to the ``autodetect`` field of the load configuration."""
        return self._get_sub_prop("autodetect")

    @autodetect.setter
    def autodetect(self, value):
        self._set_sub_prop("autodetect", value)

    @property
    def clustering_fields(self):
        """Union[List[str], None]: fields from the ``clustering`` sub-mapping."""
        prop = self._get_sub_prop("clustering")
        if prop is not None:
            return list(prop.get("fields", ()))

    @clustering_fields.setter
    def clustering_fields(self, value):
        # Assigning None removes any existing clustering configuration.
        if value is not None:
            self._set_sub_prop("clustering", {"fields": value})
        else:
            self._del_sub_prop("clustering")

    @property
    def create_disposition(self):
        """Maps to the ``createDisposition`` field of the load configuration."""
        return self._get_sub_prop("createDisposition")

    @create_disposition.setter
    def create_disposition(self, value):
        self._set_sub_prop("createDisposition", value)

    @property
    def destination_encryption_configuration(self):
        """Union[EncryptionConfiguration, None]: parsed from the
        ``destinationEncryptionConfiguration`` field."""
        prop = self._get_sub_prop("destinationEncryptionConfiguration")
        if prop is not None:
            prop = EncryptionConfiguration.from_api_repr(prop)
        return prop

    @destination_encryption_configuration.setter
    def destination_encryption_configuration(self, value):
        # Assigning None removes the key entirely.
        api_repr = value
        if value is not None:
            api_repr = value.to_api_repr()
            self._set_sub_prop("destinationEncryptionConfiguration", api_repr)
        else:
            self._del_sub_prop("destinationEncryptionConfiguration")

    @property
    def destination_table_description(self):
        """Union[str, None]: ``description`` from destinationTableProperties."""
        prop = self._get_sub_prop("destinationTableProperties")
        if prop is not None:
            # NOTE(review): raises KeyError when the sub-mapping exists
            # without a "description" key -- confirm that is intended.
            return prop["description"]

    @destination_table_description.setter
    def destination_table_description(self, value):
        keys = [self._job_type, "destinationTableProperties", "description"]
        if value is not None:
            _helpers._set_sub_prop(self._properties, keys, value)
        else:
            _helpers._del_sub_prop(self._properties, keys)

    @property
    def destination_table_friendly_name(self):
        """Union[str, None]: ``friendlyName`` from destinationTableProperties."""
        prop = self._get_sub_prop("destinationTableProperties")
        if prop is not None:
            return prop["friendlyName"]

    @destination_table_friendly_name.setter
    def destination_table_friendly_name(self, value):
        keys = [self._job_type, "destinationTableProperties", "friendlyName"]
        if value is not None:
            _helpers._set_sub_prop(self._properties, keys, value)
        else:
            _helpers._del_sub_prop(self._properties, keys)

    @property
    def encoding(self):
        """Maps to the ``encoding`` field of the load configuration."""
        return self._get_sub_prop("encoding")

    @encoding.setter
    def encoding(self, value):
        self._set_sub_prop("encoding", value)

    @property
    def field_delimiter(self):
        """Maps to the ``fieldDelimiter`` field of the load configuration."""
        return self._get_sub_prop("fieldDelimiter")

    @field_delimiter.setter
    def field_delimiter(self, value):
        self._set_sub_prop("fieldDelimiter", value)

    @property
    def ignore_unknown_values(self):
        """Maps to the ``ignoreUnknownValues`` field of the load configuration."""
        return self._get_sub_prop("ignoreUnknownValues")

    @ignore_unknown_values.setter
    def ignore_unknown_values(self, value):
        self._set_sub_prop("ignoreUnknownValues", value)

    @property
    def max_bad_records(self):
        """Union[int, None]: ``maxBadRecords`` field, coerced to int."""
        return _helpers._int_or_none(self._get_sub_prop("maxBadRecords"))

    @max_bad_records.setter
    def max_bad_records(self, value):
        self._set_sub_prop("maxBadRecords", value)

    @property
    def null_marker(self):
        """Maps to the ``nullMarker`` field of the load configuration."""
        return self._get_sub_prop("nullMarker")

    @null_marker.setter
    def null_marker(self, value):
        self._set_sub_prop("nullMarker", value)

    @property
    def quote_character(self):
        """Maps to the ``quote`` field of the load configuration."""
        return self._get_sub_prop("quote")

    @quote_character.setter
    def quote_character(self, value):
        self._set_sub_prop("quote", value)

    @property
    def schema(self):
        """Union[List[SchemaField], None]: parsed from ``schema.fields``."""
        schema = _helpers._get_sub_prop(self._properties, ["load", "schema", "fields"])
        if schema is None:
            return
        return [SchemaField.from_api_repr(field) for field in schema]

    @schema.setter
    def schema(self, value):
        if not all(hasattr(field, "to_api_repr") for field in value):
            raise ValueError("Schema items must be fields")
        _helpers._set_sub_prop(
            self._properties,
            ["load", "schema", "fields"],
            [field.to_api_repr() for field in value],
        )

    @property
    def schema_update_options(self):
        """Maps to the ``schemaUpdateOptions`` field of the load configuration."""
        return self._get_sub_prop("schemaUpdateOptions")

    @schema_update_options.setter
    def schema_update_options(self, values):
        self._set_sub_prop("schemaUpdateOptions", values)

    @property
    def skip_leading_rows(self):
        """Union[int, None]: ``skipLeadingRows`` field, coerced to int."""
        return _helpers._int_or_none(self._get_sub_prop("skipLeadingRows"))

    @skip_leading_rows.setter
    def skip_leading_rows(self, value):
        # The API represents this field as a string.
        self._set_sub_prop("skipLeadingRows", str(value))

    @property
    def source_format(self):
        """Maps to the ``sourceFormat`` field of the load configuration."""
        return self._get_sub_prop("sourceFormat")

    @source_format.setter
    def source_format(self, value):
        self._set_sub_prop("sourceFormat", value)

    @property
    def time_partitioning(self):
        """Union[TimePartitioning, None]: parsed from ``timePartitioning``."""
        prop = self._get_sub_prop("timePartitioning")
        if prop is not None:
            prop = TimePartitioning.from_api_repr(prop)
        return prop

    @time_partitioning.setter
    def time_partitioning(self, value):
        # Assigning None removes the key entirely.
        api_repr = value
        if value is not None:
            api_repr = value.to_api_repr()
            self._set_sub_prop("timePartitioning", api_repr)
        else:
            self._del_sub_prop("timePartitioning")

    @property
    def use_avro_logical_types(self):
        """Maps to the ``useAvroLogicalTypes`` field of the load configuration."""
        return self._get_sub_prop("useAvroLogicalTypes")

    @use_avro_logical_types.setter
    def use_avro_logical_types(self, value):
        self._set_sub_prop("useAvroLogicalTypes", bool(value))

    @property
    def write_disposition(self):
        """Maps to the ``writeDisposition`` field of the load configuration."""
        return self._get_sub_prop("writeDisposition")

    @write_disposition.setter
    def write_disposition(self, value):
        self._set_sub_prop("writeDisposition", value)
class LoadJob(_AsyncJob):
    """Asynchronous job: load data into a table.

    Args:
        job_id (Union[str, _JobReference]): the job's ID.
        source_uris (Union[Sequence[str], None]): URIs of the source data,
            or None for a file upload (see ``from_api_repr``).
        destination (google.cloud.bigquery.table.TableReference):
            reference to the table into which data is loaded.
        client (google.cloud.bigquery.client.Client): client bound to the job.
        job_config (LoadJobConfig): configuration options; a default is
            created when omitted.
    """

    _JOB_TYPE = "load"

    def __init__(self, job_id, source_uris, destination, client, job_config=None):
        super(LoadJob, self).__init__(job_id, client)
        if job_config is None:
            job_config = LoadJobConfig()
        self.source_uris = source_uris
        self.destination = destination
        self._configuration = job_config

    @property
    def allow_jagged_rows(self):
        """See :attr:`LoadJobConfig.allow_jagged_rows`."""
        return self._configuration.allow_jagged_rows

    @property
    def allow_quoted_newlines(self):
        """See :attr:`LoadJobConfig.allow_quoted_newlines`."""
        return self._configuration.allow_quoted_newlines

    @property
    def autodetect(self):
        """See :attr:`LoadJobConfig.autodetect`."""
        return self._configuration.autodetect

    @property
    def create_disposition(self):
        """See :attr:`LoadJobConfig.create_disposition`."""
        return self._configuration.create_disposition

    @property
    def encoding(self):
        """See :attr:`LoadJobConfig.encoding`."""
        return self._configuration.encoding

    @property
    def field_delimiter(self):
        """See :attr:`LoadJobConfig.field_delimiter`."""
        return self._configuration.field_delimiter

    @property
    def ignore_unknown_values(self):
        """See :attr:`LoadJobConfig.ignore_unknown_values`."""
        return self._configuration.ignore_unknown_values

    @property
    def max_bad_records(self):
        """See :attr:`LoadJobConfig.max_bad_records`."""
        return self._configuration.max_bad_records

    @property
    def null_marker(self):
        """See :attr:`LoadJobConfig.null_marker`."""
        return self._configuration.null_marker

    @property
    def quote_character(self):
        """See :attr:`LoadJobConfig.quote_character`."""
        return self._configuration.quote_character

    @property
    def skip_leading_rows(self):
        """See :attr:`LoadJobConfig.skip_leading_rows`."""
        return self._configuration.skip_leading_rows

    @property
    def source_format(self):
        """See :attr:`LoadJobConfig.source_format`."""
        return self._configuration.source_format

    @property
    def write_disposition(self):
        """See :attr:`LoadJobConfig.write_disposition`."""
        return self._configuration.write_disposition

    @property
    def schema(self):
        """See :attr:`LoadJobConfig.schema`."""
        return self._configuration.schema

    @property
    def destination_encryption_configuration(self):
        """See :attr:`LoadJobConfig.destination_encryption_configuration`."""
        return self._configuration.destination_encryption_configuration

    @property
    def time_partitioning(self):
        """See :attr:`LoadJobConfig.time_partitioning`."""
        return self._configuration.time_partitioning

    @property
    def use_avro_logical_types(self):
        """See :attr:`LoadJobConfig.use_avro_logical_types`."""
        return self._configuration.use_avro_logical_types

    @property
    def clustering_fields(self):
        """See :attr:`LoadJobConfig.clustering_fields`."""
        return self._configuration.clustering_fields

    @property
    def schema_update_options(self):
        """See :attr:`LoadJobConfig.schema_update_options`."""
        return self._configuration.schema_update_options

    @property
    def input_file_bytes(self):
        """Union[int, None]: ``inputFileBytes`` from the job statistics."""
        return _helpers._int_or_none(
            _helpers._get_sub_prop(
                self._properties, ["statistics", "load", "inputFileBytes"]
            )
        )

    @property
    def input_files(self):
        """Union[int, None]: ``inputFiles`` from the job statistics."""
        return _helpers._int_or_none(
            _helpers._get_sub_prop(
                self._properties, ["statistics", "load", "inputFiles"]
            )
        )

    @property
    def output_bytes(self):
        """Union[int, None]: ``outputBytes`` from the job statistics."""
        return _helpers._int_or_none(
            _helpers._get_sub_prop(
                self._properties, ["statistics", "load", "outputBytes"]
            )
        )

    @property
    def output_rows(self):
        """Union[int, None]: ``outputRows`` from the job statistics."""
        return _helpers._int_or_none(
            _helpers._get_sub_prop(
                self._properties, ["statistics", "load", "outputRows"]
            )
        )

    def to_api_repr(self):
        """Generate an API resource for the job, merging in the source URIs
        and destination table."""
        configuration = self._configuration.to_api_repr()
        if self.source_uris is not None:
            _helpers._set_sub_prop(
                configuration, ["load", "sourceUris"], self.source_uris
            )
        _helpers._set_sub_prop(
            configuration, ["load", "destinationTable"], self.destination.to_api_repr()
        )
        return {
            "jobReference": self._properties["jobReference"],
            "configuration": configuration,
        }

    def _copy_configuration_properties(self, configuration):
        """Store a deep copy of the API configuration on the config object."""
        self._configuration._properties = copy.deepcopy(configuration)

    @classmethod
    def from_api_repr(cls, resource, client):
        """Factory: construct a LoadJob from its API representation.

        Args:
            resource (dict): JSON representation of a load job.
            client (google.cloud.bigquery.client.Client):
                Client connected to BigQuery API.

        Returns:
            LoadJob: Job parsed from ``resource``.
        """
        config_resource = resource.get("configuration", {})
        config = LoadJobConfig.from_api_repr(config_resource)
        # A load job requires a destination table.
        dest_config = config_resource["load"]["destinationTable"]
        ds_ref = DatasetReference(dest_config["projectId"], dest_config["datasetId"])
        destination = TableReference(ds_ref, dest_config["tableId"])
        # sourceUris will be absent if this is a file upload.
        source_uris = _helpers._get_sub_prop(config_resource, ["load", "sourceUris"])
        job_ref = _JobReference._from_api_repr(resource["jobReference"])
        job = cls(job_ref, source_uris, destination, client, config)
        job._set_properties(resource)
        return job
class CopyJobConfig(_JobConfig):
    """Configuration options for copy jobs; property names map to fields of
    the ``configuration.copy`` resource."""

    def __init__(self, **kwargs):
        super(CopyJobConfig, self).__init__("copy", **kwargs)

    @property
    def create_disposition(self):
        """Maps to the ``createDisposition`` field of the copy configuration."""
        return self._get_sub_prop("createDisposition")

    @create_disposition.setter
    def create_disposition(self, value):
        self._set_sub_prop("createDisposition", value)

    @property
    def write_disposition(self):
        """Maps to the ``writeDisposition`` field of the copy configuration."""
        return self._get_sub_prop("writeDisposition")

    @write_disposition.setter
    def write_disposition(self, value):
        self._set_sub_prop("writeDisposition", value)

    @property
    def destination_encryption_configuration(self):
        """Union[EncryptionConfiguration, None]: parsed from the
        ``destinationEncryptionConfiguration`` field."""
        prop = self._get_sub_prop("destinationEncryptionConfiguration")
        if prop is not None:
            prop = EncryptionConfiguration.from_api_repr(prop)
        return prop

    @destination_encryption_configuration.setter
    def destination_encryption_configuration(self, value):
        # NOTE(review): unlike LoadJobConfig, assigning None stores an
        # explicit null rather than deleting the key -- confirm intended.
        api_repr = value
        if value is not None:
            api_repr = value.to_api_repr()
        self._set_sub_prop("destinationEncryptionConfiguration", api_repr)
class CopyJob(_AsyncJob):
    """Asynchronous job: copy data into a table from other tables.

    Args:
        job_id (Union[str, _JobReference]): the job's ID.
        sources (List[google.cloud.bigquery.table.TableReference]):
            tables to be copied.
        destination (google.cloud.bigquery.table.TableReference):
            table into which data is copied.
        client (google.cloud.bigquery.client.Client): client bound to the job.
        job_config (CopyJobConfig): configuration options; a default is
            created when omitted.
    """

    _JOB_TYPE = "copy"

    def __init__(self, job_id, sources, destination, client, job_config=None):
        super(CopyJob, self).__init__(job_id, client)
        if job_config is None:
            job_config = CopyJobConfig()
        self.destination = destination
        self.sources = sources
        self._configuration = job_config

    @property
    def create_disposition(self):
        """See :attr:`CopyJobConfig.create_disposition`."""
        return self._configuration.create_disposition

    @property
    def write_disposition(self):
        """See :attr:`CopyJobConfig.write_disposition`."""
        return self._configuration.write_disposition

    @property
    def destination_encryption_configuration(self):
        """See :attr:`CopyJobConfig.destination_encryption_configuration`."""
        return self._configuration.destination_encryption_configuration

    def to_api_repr(self):
        """Generate an API resource for the job, merging in the source and
        destination table references."""
        source_refs = [
            {
                "projectId": table.project,
                "datasetId": table.dataset_id,
                "tableId": table.table_id,
            }
            for table in self.sources
        ]
        configuration = self._configuration.to_api_repr()
        _helpers._set_sub_prop(configuration, ["copy", "sourceTables"], source_refs)
        _helpers._set_sub_prop(
            configuration,
            ["copy", "destinationTable"],
            {
                "projectId": self.destination.project,
                "datasetId": self.destination.dataset_id,
                "tableId": self.destination.table_id,
            },
        )
        return {
            "jobReference": self._properties["jobReference"],
            "configuration": configuration,
        }

    def _copy_configuration_properties(self, configuration):
        """Store a deep copy of the API configuration on the config object."""
        self._configuration._properties = copy.deepcopy(configuration)

    @classmethod
    def from_api_repr(cls, resource, client):
        """Factory: construct a CopyJob from its API representation.

        Raises:
            KeyError: if the copy configuration has neither ``sourceTables``
                nor the single-table ``sourceTable`` form.
        """
        job_id, config_resource = cls._get_resource_config(resource)
        config = CopyJobConfig.from_api_repr(config_resource)
        # Copy required fields to the job.
        copy_resource = config_resource["copy"]
        destination = TableReference.from_api_repr(copy_resource["destinationTable"])
        sources = []
        source_configs = copy_resource.get("sourceTables")
        if source_configs is None:
            # Fall back to the single-table form of the configuration.
            single = copy_resource.get("sourceTable")
            if single is None:
                raise KeyError("Resource missing 'sourceTables' / 'sourceTable'")
            source_configs = [single]
        for source_config in source_configs:
            table_ref = TableReference.from_api_repr(source_config)
            sources.append(table_ref)
        job = cls(job_id, sources, destination, client=client, job_config=config)
        job._set_properties(resource)
        return job
class ExtractJobConfig(_JobConfig):
    """Configuration options for extract jobs; property names map to fields
    of the ``configuration.extract`` resource."""

    def __init__(self, **kwargs):
        super(ExtractJobConfig, self).__init__("extract", **kwargs)

    @property
    def compression(self):
        """Maps to the ``compression`` field of the extract configuration."""
        return self._get_sub_prop("compression")

    @compression.setter
    def compression(self, value):
        self._set_sub_prop("compression", value)

    @property
    def destination_format(self):
        """Maps to the ``destinationFormat`` field of the extract configuration."""
        return self._get_sub_prop("destinationFormat")

    @destination_format.setter
    def destination_format(self, value):
        self._set_sub_prop("destinationFormat", value)

    @property
    def field_delimiter(self):
        """Maps to the ``fieldDelimiter`` field of the extract configuration."""
        return self._get_sub_prop("fieldDelimiter")

    @field_delimiter.setter
    def field_delimiter(self, value):
        self._set_sub_prop("fieldDelimiter", value)

    @property
    def print_header(self):
        """Maps to the ``printHeader`` field of the extract configuration."""
        return self._get_sub_prop("printHeader")

    @print_header.setter
    def print_header(self, value):
        self._set_sub_prop("printHeader", value)
class ExtractJob(_AsyncJob):
    """Asynchronous job: extract data from a table into storage URIs.

    Args:
        job_id (Union[str, _JobReference]): the job's ID.
        source (google.cloud.bigquery.table.TableReference):
            table from which data is extracted.
        destination_uris (List[str]): URIs describing where the extracted
            data will be written.
        client (google.cloud.bigquery.client.Client): client bound to the job.
        job_config (ExtractJobConfig): configuration options; a default is
            created when omitted.
    """

    _JOB_TYPE = "extract"

    def __init__(self, job_id, source, destination_uris, client, job_config=None):
        super(ExtractJob, self).__init__(job_id, client)
        if job_config is None:
            job_config = ExtractJobConfig()
        self.source = source
        self.destination_uris = destination_uris
        self._configuration = job_config

    @property
    def compression(self):
        """See :attr:`ExtractJobConfig.compression`."""
        return self._configuration.compression

    @property
    def destination_format(self):
        """See :attr:`ExtractJobConfig.destination_format`."""
        return self._configuration.destination_format

    @property
    def field_delimiter(self):
        """See :attr:`ExtractJobConfig.field_delimiter`."""
        return self._configuration.field_delimiter

    @property
    def print_header(self):
        """See :attr:`ExtractJobConfig.print_header`."""
        return self._configuration.print_header

    @property
    def destination_uri_file_counts(self):
        """Union[List[int], None]: ``destinationUriFileCounts`` from the job
        statistics, coerced to ints; None until available."""
        counts = self._job_statistics().get("destinationUriFileCounts")
        if counts is not None:
            return [int(count) for count in counts]
        return None

    def to_api_repr(self):
        """Generate an API resource for the job, merging in the source table
        and destination URIs."""
        source_ref = {
            "projectId": self.source.project,
            "datasetId": self.source.dataset_id,
            "tableId": self.source.table_id,
        }
        configuration = self._configuration.to_api_repr()
        _helpers._set_sub_prop(configuration, ["extract", "sourceTable"], source_ref)
        _helpers._set_sub_prop(
            configuration, ["extract", "destinationUris"], self.destination_uris
        )
        return {
            "jobReference": self._properties["jobReference"],
            "configuration": configuration,
        }

    def _copy_configuration_properties(self, configuration):
        """Store a deep copy of the API configuration on the config object."""
        self._configuration._properties = copy.deepcopy(configuration)

    @classmethod
    def from_api_repr(cls, resource, client):
        """Factory: construct an ExtractJob from its API representation."""
        job_id, config_resource = cls._get_resource_config(resource)
        config = ExtractJobConfig.from_api_repr(config_resource)
        source_config = _helpers._get_sub_prop(
            config_resource, ["extract", "sourceTable"]
        )
        dataset = DatasetReference(
            source_config["projectId"], source_config["datasetId"]
        )
        source = dataset.table(source_config["tableId"])
        destination_uris = _helpers._get_sub_prop(
            config_resource, ["extract", "destinationUris"]
        )
        job = cls(job_id, source, destination_uris, client=client, job_config=config)
        job._set_properties(resource)
        return job
def _from_api_repr_query_parameters(resource):
return [_query_param_from_api_repr(mapping) for mapping in resource]
def _to_api_repr_query_parameters(value):
return [query_parameter.to_api_repr() for query_parameter in value]
def _from_api_repr_udf_resources(resource):
    """Deserialize one-key UDF mappings into ``UDFResource`` objects."""
    return [
        UDFResource(udf_type, udf_value)
        for udf_mapping in resource
        for udf_type, udf_value in udf_mapping.items()
    ]
def _to_api_repr_udf_resources(value):
return [{udf_resource.udf_type: udf_resource.value} for udf_resource in value]
def _from_api_repr_table_defs(resource):
    """Deserialize a table-definitions mapping into ExternalConfig objects."""
    return {k: ExternalConfig.from_api_repr(v) for k, v in resource.items()}
def _to_api_repr_table_defs(value):
    """Serialize ExternalConfig values back to their API representation."""
    # Unbound-style call: each v is expected to be an ExternalConfig instance.
    return {k: ExternalConfig.to_api_repr(v) for k, v in value.items()}
class QueryJobConfig(_JobConfig):
    """Configuration options for query jobs.

    The properties below are thin wrappers over the ``query`` sub-object of
    the job's REST resource, read and written through the ``_JobConfig``
    helpers (``_get_sub_prop`` / ``_set_sub_prop`` / ``_del_sub_prop``).
    """
    def __init__(self, **kwargs):
        super(QueryJobConfig, self).__init__("query", **kwargs)
    @property
    def destination_encryption_configuration(self):
        # Wrapped in an EncryptionConfiguration object when present.
        prop = self._get_sub_prop("destinationEncryptionConfiguration")
        if prop is not None:
            prop = EncryptionConfiguration.from_api_repr(prop)
        return prop
    @destination_encryption_configuration.setter
    def destination_encryption_configuration(self, value):
        api_repr = value
        if value is not None:
            api_repr = value.to_api_repr()
        self._set_sub_prop("destinationEncryptionConfiguration", api_repr)
    @property
    def allow_large_results(self):
        return self._get_sub_prop("allowLargeResults")
    @allow_large_results.setter
    def allow_large_results(self, value):
        self._set_sub_prop("allowLargeResults", value)
    @property
    def create_disposition(self):
        return self._get_sub_prop("createDisposition")
    @create_disposition.setter
    def create_disposition(self, value):
        self._set_sub_prop("createDisposition", value)
    @property
    def default_dataset(self):
        prop = self._get_sub_prop("defaultDataset")
        if prop is not None:
            prop = DatasetReference.from_api_repr(prop)
        return prop
    @default_dataset.setter
    def default_dataset(self, value):
        resource = None
        if value is not None:
            resource = value.to_api_repr()
        self._set_sub_prop("defaultDataset", resource)
    @property
    def destination(self):
        prop = self._get_sub_prop("destinationTable")
        if prop is not None:
            prop = TableReference.from_api_repr(prop)
        return prop
    @destination.setter
    def destination(self, value):
        resource = None
        if value is not None:
            resource = value.to_api_repr()
        self._set_sub_prop("destinationTable", resource)
    @property
    def dry_run(self):
        # NOTE: dryRun lives at the top level of the job configuration, not
        # under the "query" sub-object, hence the direct _properties access.
        return self._properties.get("dryRun")
    @dry_run.setter
    def dry_run(self, value):
        self._properties["dryRun"] = value
    @property
    def flatten_results(self):
        return self._get_sub_prop("flattenResults")
    @flatten_results.setter
    def flatten_results(self, value):
        self._set_sub_prop("flattenResults", value)
    @property
    def maximum_billing_tier(self):
        return self._get_sub_prop("maximumBillingTier")
    @maximum_billing_tier.setter
    def maximum_billing_tier(self, value):
        self._set_sub_prop("maximumBillingTier", value)
    @property
    def maximum_bytes_billed(self):
        # The API transports 64-bit ints as strings; convert on the way out...
        return _helpers._int_or_none(self._get_sub_prop("maximumBytesBilled"))
    @maximum_bytes_billed.setter
    def maximum_bytes_billed(self, value):
        # ...and stringify on the way in.
        self._set_sub_prop("maximumBytesBilled", str(value))
    @property
    def priority(self):
        return self._get_sub_prop("priority")
    @priority.setter
    def priority(self, value):
        self._set_sub_prop("priority", value)
    @property
    def query_parameters(self):
        prop = self._get_sub_prop("queryParameters", default=[])
        return _from_api_repr_query_parameters(prop)
    @query_parameters.setter
    def query_parameters(self, values):
        self._set_sub_prop("queryParameters", _to_api_repr_query_parameters(values))
    @property
    def udf_resources(self):
        prop = self._get_sub_prop("userDefinedFunctionResources", default=[])
        return _from_api_repr_udf_resources(prop)
    @udf_resources.setter
    def udf_resources(self, values):
        self._set_sub_prop(
            "userDefinedFunctionResources", _to_api_repr_udf_resources(values)
        )
    @property
    def use_legacy_sql(self):
        return self._get_sub_prop("useLegacySql")
    @use_legacy_sql.setter
    def use_legacy_sql(self, value):
        self._set_sub_prop("useLegacySql", value)
    @property
    def use_query_cache(self):
        return self._get_sub_prop("useQueryCache")
    @use_query_cache.setter
    def use_query_cache(self, value):
        self._set_sub_prop("useQueryCache", value)
    @property
    def write_disposition(self):
        return self._get_sub_prop("writeDisposition")
    @write_disposition.setter
    def write_disposition(self, value):
        self._set_sub_prop("writeDisposition", value)
    @property
    def table_definitions(self):
        prop = self._get_sub_prop("tableDefinitions")
        if prop is not None:
            prop = _from_api_repr_table_defs(prop)
        return prop
    @table_definitions.setter
    def table_definitions(self, values):
        self._set_sub_prop("tableDefinitions", _to_api_repr_table_defs(values))
    @property
    def time_partitioning(self):
        prop = self._get_sub_prop("timePartitioning")
        if prop is not None:
            prop = TimePartitioning.from_api_repr(prop)
        return prop
    @time_partitioning.setter
    def time_partitioning(self, value):
        api_repr = value
        if value is not None:
            api_repr = value.to_api_repr()
        self._set_sub_prop("timePartitioning", api_repr)
    @property
    def clustering_fields(self):
        # Implicitly returns None when no clustering is configured.
        prop = self._get_sub_prop("clustering")
        if prop is not None:
            return list(prop.get("fields", ()))
    @clustering_fields.setter
    def clustering_fields(self, value):
        # Setting to None deletes the clustering configuration entirely.
        if value is not None:
            self._set_sub_prop("clustering", {"fields": value})
        else:
            self._del_sub_prop("clustering")
    @property
    def schema_update_options(self):
        return self._get_sub_prop("schemaUpdateOptions")
    @schema_update_options.setter
    def schema_update_options(self, values):
        self._set_sub_prop("schemaUpdateOptions", values)
    def to_api_repr(self):
        """Build an API representation of the query job config."""
        resource = copy.deepcopy(self._properties)
        # Query parameters have an addition property associated with them
        # to indicate if the query is using named or positional parameters.
        query_parameters = resource["query"].get("queryParameters")
        if query_parameters:
            if query_parameters[0].get("name") is None:
                resource["query"]["parameterMode"] = "POSITIONAL"
            else:
                resource["query"]["parameterMode"] = "NAMED"
        return resource
class QueryJob(_AsyncJob):
    """Asynchronous job: run a query, with results stored in a table.

    The read-only properties near the top proxy this job's QueryJobConfig;
    the statistics properties read the server-populated ``statistics``
    section of the job resource.
    """
    _JOB_TYPE = "query"
    _UDF_KEY = "userDefinedFunctionResources"
    def __init__(self, job_id, query, client, job_config=None):
        super(QueryJob, self).__init__(job_id, client)
        if job_config is None:
            job_config = QueryJobConfig()
        if job_config.use_legacy_sql is None:
            # Standard SQL is the default dialect unless explicitly set.
            job_config.use_legacy_sql = False
        self.query = query
        self._configuration = job_config
        self._query_results = None
        self._done_timeout = None
    @property
    def allow_large_results(self):
        return self._configuration.allow_large_results
    @property
    def create_disposition(self):
        return self._configuration.create_disposition
    @property
    def default_dataset(self):
        return self._configuration.default_dataset
    @property
    def destination(self):
        return self._configuration.destination
    @property
    def destination_encryption_configuration(self):
        return self._configuration.destination_encryption_configuration
    @property
    def dry_run(self):
        return self._configuration.dry_run
    @property
    def flatten_results(self):
        return self._configuration.flatten_results
    @property
    def priority(self):
        return self._configuration.priority
    @property
    def query_parameters(self):
        return self._configuration.query_parameters
    @property
    def udf_resources(self):
        return self._configuration.udf_resources
    @property
    def use_legacy_sql(self):
        return self._configuration.use_legacy_sql
    @property
    def use_query_cache(self):
        return self._configuration.use_query_cache
    @property
    def write_disposition(self):
        return self._configuration.write_disposition
    @property
    def maximum_billing_tier(self):
        return self._configuration.maximum_billing_tier
    @property
    def maximum_bytes_billed(self):
        return self._configuration.maximum_bytes_billed
    @property
    def table_definitions(self):
        return self._configuration.table_definitions
    @property
    def time_partitioning(self):
        return self._configuration.time_partitioning
    @property
    def clustering_fields(self):
        return self._configuration.clustering_fields
    @property
    def schema_update_options(self):
        return self._configuration.schema_update_options
    def to_api_repr(self):
        """Build the REST resource used to start this query job."""
        configuration = self._configuration.to_api_repr()
        resource = {
            "jobReference": self._properties["jobReference"],
            "configuration": configuration,
        }
        configuration["query"]["query"] = self.query
        return resource
    def _copy_configuration_properties(self, configuration):
        """Adopt *configuration*, keeping ``self.query`` in sync with it."""
        self._configuration._properties = copy.deepcopy(configuration)
        self.query = _helpers._get_sub_prop(configuration, ["query", "query"])
    @classmethod
    def from_api_repr(cls, resource, client):
        """Factory: construct a query job from its API representation."""
        job_id, config = cls._get_resource_config(resource)
        query = config["query"]["query"]
        job = cls(job_id, query, client=client)
        job._set_properties(resource)
        return job
    @property
    def query_plan(self):
        # One QueryPlanEntry per execution stage reported by the server.
        plan_entries = self._job_statistics().get("queryPlan", ())
        return [QueryPlanEntry.from_api_repr(entry) for entry in plan_entries]
    @property
    def timeline(self):
        raw = self._job_statistics().get("timeline", ())
        return [TimelineEntry.from_api_repr(entry) for entry in raw]
    @property
    def total_bytes_processed(self):
        # int64 statistics arrive as strings from the API.
        result = self._job_statistics().get("totalBytesProcessed")
        if result is not None:
            result = int(result)
        return result
    @property
    def total_bytes_billed(self):
        result = self._job_statistics().get("totalBytesBilled")
        if result is not None:
            result = int(result)
        return result
    @property
    def billing_tier(self):
        return self._job_statistics().get("billingTier")
    @property
    def cache_hit(self):
        return self._job_statistics().get("cacheHit")
    @property
    def ddl_operation_performed(self):
        return self._job_statistics().get("ddlOperationPerformed")
    @property
    def ddl_target_table(self):
        prop = self._job_statistics().get("ddlTargetTable")
        if prop is not None:
            prop = TableReference.from_api_repr(prop)
        return prop
    @property
    def num_dml_affected_rows(self):
        result = self._job_statistics().get("numDmlAffectedRows")
        if result is not None:
            result = int(result)
        return result
    @property
    def slot_millis(self):
        return _helpers._int_or_none(self._job_statistics().get("totalSlotMs"))
    @property
    def statement_type(self):
        return self._job_statistics().get("statementType")
    @property
    def referenced_tables(self):
        """Tables referenced by the query, as TableReference-like objects."""
        tables = []
        # Cache DatasetReferences so tables in the same dataset share one.
        datasets_by_project_name = {}
        for table in self._job_statistics().get("referencedTables", ()):
            t_project = table["projectId"]
            ds_id = table["datasetId"]
            t_dataset = datasets_by_project_name.get((t_project, ds_id))
            if t_dataset is None:
                t_dataset = DatasetReference(t_project, ds_id)
                datasets_by_project_name[(t_project, ds_id)] = t_dataset
            t_name = table["tableId"]
            tables.append(t_dataset.table(t_name))
        return tables
    @property
    def undeclared_query_parameters(self):
        """Parameters the query referenced but the caller did not declare."""
        parameters = []
        undeclared = self._job_statistics().get("undeclaredQueryParameters", ())
        for parameter in undeclared:
            p_type = parameter["parameterType"]
            # Dispatch on the parameter type shape: array, struct, or scalar.
            if "arrayType" in p_type:
                klass = ArrayQueryParameter
            elif "structTypes" in p_type:
                klass = StructQueryParameter
            else:
                klass = ScalarQueryParameter
            parameters.append(klass.from_api_repr(parameter))
        return parameters
    @property
    def estimated_bytes_processed(self):
        result = self._job_statistics().get("estimatedBytesProcessed")
        if result is not None:
            result = int(result)
        return result
    def done(self, retry=DEFAULT_RETRY):
        """Refresh the job and return True when it has finished."""
        # Since the API to getQueryResults can hang up to the timeout value
        # (default of 10 seconds), set the timeout parameter to ensure that
        # the timeout from the futures API is respected. See:
        # https://github.com/GoogleCloudPlatform/google-cloud-python/issues/4135
        timeout_ms = None
        if self._done_timeout is not None:
            # Subtract a buffer for context switching, network latency, etc.
            timeout = self._done_timeout - _TIMEOUT_BUFFER_SECS
            timeout = max(min(timeout, 10), 0)
            self._done_timeout -= timeout
            self._done_timeout = max(0, self._done_timeout)
            timeout_ms = int(timeout * 1000)
        # Do not refresh if the state is already done, as the job will not
        # change once complete.
        if self.state != _DONE_STATE:
            self._query_results = self._client._get_query_results(
                self.job_id,
                retry,
                project=self.project,
                timeout_ms=timeout_ms,
                location=self.location,
            )
            # Only reload the job once we know the query is complete.
            # This will ensure that fields such as the destination table are
            # correctly populated.
            if self._query_results.complete:
                self.reload(retry=retry)
        return self.state == _DONE_STATE
    def _blocking_poll(self, timeout=None):
        # Remember the caller's budget so done() can derive per-call timeouts.
        self._done_timeout = timeout
        super(QueryJob, self)._blocking_poll(timeout=timeout)
    def result(self, timeout=None, retry=DEFAULT_RETRY):
        """Wait for the query to finish and return an iterator over the rows."""
        super(QueryJob, self).result(timeout=timeout)
        # Return an iterator instead of returning the job.
        if not self._query_results:
            self._query_results = self._client._get_query_results(
                self.job_id, retry, project=self.project, location=self.location
            )
        # If the query job is complete but there are no query results, this was
        # a special job, such as a DDL query. Return an empty result set to
        # indicate success and avoid calling tabledata.list on a table which
        # can't be read (such as a view table).
        if self._query_results.total_rows is None:
            return _EmptyRowIterator()
        schema = self._query_results.schema
        dest_table_ref = self.destination
        dest_table = Table(dest_table_ref, schema=schema)
        return self._client.list_rows(dest_table, retry=retry)
    def to_dataframe(self):
        """Fetch all rows and return them as a pandas DataFrame."""
        return self.result().to_dataframe()
    def __iter__(self):
        return iter(self.result())
class QueryPlanEntryStep(object):
    """One step within a query plan stage.

    Args:
        kind: step type (e.g. a READ or WRITE phase).
        substeps: human-readable sub-step descriptions.
    """
    def __init__(self, kind, substeps):
        self.kind = kind
        self.substeps = list(substeps)
    @classmethod
    def from_api_repr(cls, resource):
        """Build a step from its API representation mapping."""
        return cls(
            kind=resource.get("kind"),
            substeps=resource.get("substeps", ()),
        )
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return (self.kind, self.substeps) == (other.kind, other.substeps)
class QueryPlanEntry(object):
    """A single stage of a query execution plan.

    Wraps the raw ``queryPlan`` stage mapping from job statistics; int64
    statistics arrive as strings and are converted with
    ``_helpers._int_or_none``, while ratio fields are returned as-is.
    """
    def __init__(self):
        # Raw API mapping; populated by from_api_repr.
        self._properties = {}
    @classmethod
    def from_api_repr(cls, resource):
        """Factory: keep a reference to the raw API mapping."""
        entry = cls()
        entry._properties = resource
        return entry
    @property
    def name(self):
        return self._properties.get("name")
    @property
    def entry_id(self):
        return self._properties.get("id")
    @property
    def start(self):
        # startMs is epoch milliseconds; convert to a datetime (or None).
        if self._properties.get("startMs") is None:
            return None
        return _helpers._datetime_from_microseconds(
            int(self._properties.get("startMs")) * 1000.0
        )
    @property
    def end(self):
        if self._properties.get("endMs") is None:
            return None
        return _helpers._datetime_from_microseconds(
            int(self._properties.get("endMs")) * 1000.0
        )
    @property
    def input_stages(self):
        # IDs of stages feeding this one; empty list when absent.
        if self._properties.get("inputStages") is None:
            return []
        return [
            _helpers._int_or_none(entry)
            for entry in self._properties.get("inputStages")
        ]
    @property
    def parallel_inputs(self):
        return _helpers._int_or_none(self._properties.get("parallelInputs"))
    @property
    def completed_parallel_inputs(self):
        return _helpers._int_or_none(self._properties.get("completedParallelInputs"))
    @property
    def wait_ms_avg(self):
        return _helpers._int_or_none(self._properties.get("waitMsAvg"))
    @property
    def wait_ms_max(self):
        return _helpers._int_or_none(self._properties.get("waitMsMax"))
    @property
    def wait_ratio_avg(self):
        return self._properties.get("waitRatioAvg")
    @property
    def wait_ratio_max(self):
        return self._properties.get("waitRatioMax")
    @property
    def read_ms_avg(self):
        return _helpers._int_or_none(self._properties.get("readMsAvg"))
    @property
    def read_ms_max(self):
        return _helpers._int_or_none(self._properties.get("readMsMax"))
    @property
    def read_ratio_avg(self):
        return self._properties.get("readRatioAvg")
    @property
    def read_ratio_max(self):
        return self._properties.get("readRatioMax")
    @property
    def compute_ms_avg(self):
        return _helpers._int_or_none(self._properties.get("computeMsAvg"))
    @property
    def compute_ms_max(self):
        return _helpers._int_or_none(self._properties.get("computeMsMax"))
    @property
    def compute_ratio_avg(self):
        return self._properties.get("computeRatioAvg")
    @property
    def compute_ratio_max(self):
        return self._properties.get("computeRatioMax")
    @property
    def write_ms_avg(self):
        return _helpers._int_or_none(self._properties.get("writeMsAvg"))
    @property
    def write_ms_max(self):
        return _helpers._int_or_none(self._properties.get("writeMsMax"))
    @property
    def write_ratio_avg(self):
        return self._properties.get("writeRatioAvg")
    @property
    def write_ratio_max(self):
        return self._properties.get("writeRatioMax")
    @property
    def records_read(self):
        return _helpers._int_or_none(self._properties.get("recordsRead"))
    @property
    def records_written(self):
        return _helpers._int_or_none(self._properties.get("recordsWritten"))
    @property
    def status(self):
        return self._properties.get("status")
    @property
    def shuffle_output_bytes(self):
        return _helpers._int_or_none(self._properties.get("shuffleOutputBytes"))
    @property
    def shuffle_output_bytes_spilled(self):
        return _helpers._int_or_none(self._properties.get("shuffleOutputBytesSpilled"))
    @property
    def steps(self):
        return [
            QueryPlanEntryStep.from_api_repr(step)
            for step in self._properties.get("steps", [])
        ]
class TimelineEntry(object):
    """One sample point in a query's execution timeline.

    Wraps an element of the job statistics ``timeline`` array; int64 fields
    arrive as strings and are converted with ``_helpers._int_or_none``.
    """
    def __init__(self):
        # Raw API mapping; populated by from_api_repr.
        self._properties = {}
    @classmethod
    def from_api_repr(cls, resource):
        """Factory: keep a reference to the raw API mapping."""
        entry = cls()
        entry._properties = resource
        return entry
    @property
    def elapsed_ms(self):
        return _helpers._int_or_none(self._properties.get("elapsedMs"))
    @property
    def active_units(self):
        return _helpers._int_or_none(self._properties.get("activeUnits"))
    @property
    def pending_units(self):
        return _helpers._int_or_none(self._properties.get("pendingUnits"))
    @property
    def completed_units(self):
        return _helpers._int_or_none(self._properties.get("completedUnits"))
    @property
    def slot_millis(self):
        return _helpers._int_or_none(self._properties.get("totalSlotMs"))
class UnknownJob(_AsyncJob):
    """A job whose type cannot be determined from its resource."""
    @classmethod
    def from_api_repr(cls, resource, client):
        """Construct an UnknownJob from its API representation.

        Falls back to a reference containing only the client's project when
        the resource carries no ``jobReference``.
        """
        job_ref_properties = resource.get("jobReference", {"projectId": client.project})
        job_ref = _JobReference._from_api_repr(job_ref_properties)
        job = cls(job_ref, client)
        # Write the (possibly synthesized) reference back into the resource.
        resource["jobReference"] = job_ref_properties
        job._properties = resource
        return job
| true | true |
f72bb4debf4ce516c6fa1d5f7294e9cce53c4b79 | 4,826 | py | Python | cccp/__init__.py | sloev/cccp | 829c359a8607d3f3fb2e1e82f2114bb1c8404ce3 | [
"MIT"
] | 1 | 2020-03-14T11:56:09.000Z | 2020-03-14T11:56:09.000Z | cccp/__init__.py | sloev/cccp | 829c359a8607d3f3fb2e1e82f2114bb1c8404ce3 | [
"MIT"
] | null | null | null | cccp/__init__.py | sloev/cccp | 829c359a8607d3f3fb2e1e82f2114bb1c8404ce3 | [
"MIT"
] | 3 | 2020-03-08T13:23:37.000Z | 2021-12-06T19:46:27.000Z | from string import Template
from dominate.tags import script, link, style
from dominate.util import raw
import json
REQUIRED = [
script(
src="https://unpkg.com/axios@0.19.0/dist/axios.min.js", crossorigin="anonymous"
),
script(
src="https://code.jquery.com/jquery-3.3.1.slim.min.js",
integrity="sha384-q8i/X+965DzO0rT7abK41JStQIAqVgRVzpbzo5smXKp4YfRvH+8abtTE1Pi6jizo",
crossorigin="anonymous",
),
]
# Optional Bootstrap 4 CSS/JS bundle (plus popper.js, which Bootstrap needs).
BOOTSTRAP = [
    link(
        rel="stylesheet",
        href="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css",
        integrity="sha384-ggOyR0iXCbMQv3Xipma34MD+dH/1fQ784/j6cY/iJTQUOhcWr7x9JvoRxT2MZw1T",
        crossorigin="anonymous",
    ),
    script(
        src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.14.7/umd/popper.min.js",
        integrity="sha384-UO2eT0CpHqdSJQ6hJty5KVphtPhzWj9WO1clHTMGa3JDZwrnQq4sF86dIHNDz0W1",
        crossorigin="anonymous",
    ),
    script(
        src="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/js/bootstrap.min.js",
        integrity="sha384-JjSmVgyd0p3pXB1rRibZUAYoIIy6OrQ6VrjIEaFf/nJGzIxFDsf4x0xIM+B07jRM",
        crossorigin="anonymous",
    ),
]
# Optional Chart.js bundle, required by LineChart / line_chart below.
CHARTJS = script(
    src="https://cdnjs.cloudflare.com/ajax/libs/Chart.js/2.8.0/Chart.bundle.min.js"
)
def render(x):
    """Render a dominate node — or a list of nodes — to compact HTML."""
    if not isinstance(x, list):
        return x.render(pretty=False)
    return "".join(element.render(pretty=False) for element in x)
class CustomTemplate(Template):
    # Use "$$placeholder" instead of "$placeholder" so that literal "$" in
    # JavaScript snippets (e.g. jQuery's "$") needs no escaping in js_source.
    delimiter = "$$"
class JavaScript:
    """Base class for small JavaScript snippet factories.

    Subclasses set ``js_source`` (a template using the ``$$`` delimiter) and
    optionally ``defaults``. Instantiating a subclass does NOT yield an
    instance: ``__new__`` substitutes ``defaults`` (overridden by keyword
    arguments) into the template and returns the rendered markup, wrapped in
    a ``<script>`` tag unless ``with_script_tag=False``.
    """
    defaults = None
    js_source = ""
    def render(self, values, with_script_tag=True):
        """Substitute *values* into ``js_source`` and wrap as requested."""
        template = CustomTemplate(self.js_source)
        rendered = raw(template.substitute(values).strip())
        if with_script_tag:
            return script(rendered, type="text/javascript")
        return rendered
    def __new__(cls, with_script_tag=True, **kwargs):
        # BUGFIX: copy the class-level defaults before updating. The original
        # did ``values = cls.defaults or {}`` and then mutated ``values`` in
        # place, so keyword overrides leaked into ``cls.defaults`` and
        # persisted across every later instantiation of the same subclass.
        values = dict(cls.defaults or {})
        values.update(kwargs)
        inst = super(JavaScript, cls).__new__(cls)
        return inst.render(values, with_script_tag)
class CreateReplaceOuterHtmlFunc(JavaScript):
    # Emits the ReplaceOuterHtml(url, id, params) JS helper: GETs `url` with
    # `params` and replaces the target element's outerHTML with the response.
    js_source = """
    function ReplaceOuterHtml(url, id, params){
        axios.get(url, {params: params === undefined ? {} : params})
        .then(function (response) {
            document.getElementById(id).outerHTML = response.data;
        });
    };
    """
def replaceOuterHtml(url, id, **kwargs):
    """Return a JS call that replaces the element's outerHTML from *url*."""
    encoded = json.dumps(kwargs)
    return "ReplaceOuterHtml('{0}', '{1}', {2})".format(url, id, encoded)
class CreateReplaceInnerHtmlFunc(JavaScript):
    # Emits the ReplaceInnerHtml(url, id, params) JS helper: GETs `url` with
    # `params` and replaces the target element's innerHTML with the response.
    js_source = """
    function ReplaceInnerHtml(url, id, params){
        axios.get(url, {params: params === undefined ? {} : params})
        .then(function (response) {
            document.getElementById(id).innerHTML = response.data;
        });
    };
    """
def replaceInnerHtml(url, id, **kwargs):
    """Return a JS call that replaces the element's innerHTML from *url*."""
    encoded = json.dumps(kwargs)
    return "ReplaceInnerHtml('{0}', '{1}', {2})".format(url, id, encoded)
# backwards compatibility
# ``replaceHtml`` originally meant "replace inner HTML"; keep the old name as
# an alias so existing callers continue to work.
replaceHtml = replaceInnerHtml
class CreateAppendHtmlFunc(JavaScript):
    # Emits the AppendHtml(url, id, params) JS helper: GETs `url` with
    # `params` and appends the response inside the target element (jQuery).
    js_source = """
    function AppendHtml(url, id, params){
        axios.get(url, {params: params === undefined ? {} : params})
        .then( function (response) {
            $("#"+id).append(response.data);
        });
    };
    """
def appendHtml(url, id, **kwargs):
    """Return a JS call that appends content fetched from *url* into *id*."""
    encoded = json.dumps(kwargs)
    return "AppendHtml('{0}', '{1}', {2})".format(url, id, encoded)
class CreatePrependHtmlFunc(JavaScript):
    # Emits the PrependHtml(url, id, params) JS helper: GETs `url` with
    # `params` and prepends the response inside the target element (jQuery).
    js_source = """
    function PrependHtml(url, id, params){
        axios.get(url, {params: params === undefined ? {} : params})
        .then( function(response) {
            $("#"+id).prepend(response.data);
        });
    };
    """
def prependHtml(url, id, **kwargs):
    """Return a JS call that prepends content fetched from *url* into *id*."""
    encoded = json.dumps(kwargs)
    return "PrependHtml('{0}', '{1}', {2})".format(url, id, encoded)
class CreateRemoveHtmlFunc(JavaScript):
    # Emits the RemoveHtml(id) JS helper: removes the element with `id`.
    js_source = """
    function RemoveHtml(id){
        $("#"+id).remove();
    };
    """
class CreateSetAttributeFunction(JavaScript):
    # Emits the SetAttribute(id, attribute, value) JS helper (jQuery attr).
    js_source = """
    function SetAttribute(id, attribute, value){
        $("#"+id).attr(attribute, value)
    };
    """
def removeHtml(id):
    """Return a JS call that removes the element with *id* from the DOM."""
    return "RemoveHtml('{0}')".format(id)
def chain_functions(*function_strings):
    """Join JS statements with '; ' and terminate the chain with ';'."""
    joined = "; ".join(function_strings)
    return joined + ";"
def style_tag_with_css(css):
    """Wrap raw *css* text in a dominate <style> tag (no escaping applied)."""
    return style(raw(css))
class LineChart(JavaScript):
    # Emits a Chart.js line-chart initialisation bound to the canvas `$$id`.
    # Placeholders: $$id, $$xlabels, $$datasets, $$options (JSON strings).
    js_source = """
    const canvas$$id = $('#$$id')
    const lineChart$$id = new Chart(canvas$$id, {
        type: "line",
        data: {
            labels: $$xlabels,
            datasets: $$datasets
        },
        options: $$options
    });
    """
def line_chart(id, xlabels, datasets, options):
    """Render a Chart.js line chart bound to the canvas element *id*.

    *xlabels*, *datasets* and *options* are JSON-serialized into the
    generated script; a falsy *options* becomes ``{}``.
    """
    return LineChart(id=id, xlabels=json.dumps(xlabels), datasets=json.dumps(datasets), options=json.dumps(options or {}))
| 26.371585 | 122 | 0.623291 | from string import Template
from dominate.tags import script, link, style
from dominate.util import raw
import json
REQUIRED = [
script(
src="https://unpkg.com/axios@0.19.0/dist/axios.min.js", crossorigin="anonymous"
),
script(
src="https://code.jquery.com/jquery-3.3.1.slim.min.js",
integrity="sha384-q8i/X+965DzO0rT7abK41JStQIAqVgRVzpbzo5smXKp4YfRvH+8abtTE1Pi6jizo",
crossorigin="anonymous",
),
]
BOOTSTRAP = [
link(
rel="stylesheet",
href="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css",
integrity="sha384-ggOyR0iXCbMQv3Xipma34MD+dH/1fQ784/j6cY/iJTQUOhcWr7x9JvoRxT2MZw1T",
crossorigin="anonymous",
),
script(
src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.14.7/umd/popper.min.js",
integrity="sha384-UO2eT0CpHqdSJQ6hJty5KVphtPhzWj9WO1clHTMGa3JDZwrnQq4sF86dIHNDz0W1",
crossorigin="anonymous",
),
script(
src="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/js/bootstrap.min.js",
integrity="sha384-JjSmVgyd0p3pXB1rRibZUAYoIIy6OrQ6VrjIEaFf/nJGzIxFDsf4x0xIM+B07jRM",
crossorigin="anonymous",
),
]
CHARTJS = script(
src="https://cdnjs.cloudflare.com/ajax/libs/Chart.js/2.8.0/Chart.bundle.min.js"
)
def render(x):
if isinstance(x, list):
return "".join(e.render(pretty=False) for e in x)
return x.render(pretty=False)
class CustomTemplate(Template):
delimiter = "$$"
class JavaScript:
defaults = None
js_source = ""
def render(self, values, with_script_tag=True):
template = CustomTemplate(self.js_source)
rendered = raw(template.substitute(values).strip())
if with_script_tag:
return script(rendered, type="text/javascript")
else:
return rendered
def __new__(cls, with_script_tag=True, **kwargs):
values = cls.defaults or {}
values.update(kwargs)
inst = super(JavaScript, cls).__new__(cls)
return inst.render(values, with_script_tag)
class CreateReplaceOuterHtmlFunc(JavaScript):
js_source = """
function ReplaceOuterHtml(url, id, params){
axios.get(url, {params: params === undefined ? {} : params})
.then(function (response) {
document.getElementById(id).outerHTML = response.data;
});
};
"""
def replaceOuterHtml(url, id, **kwargs):
params = json.dumps(kwargs)
return f"ReplaceOuterHtml('{url}', '{id}', {params})"
class CreateReplaceInnerHtmlFunc(JavaScript):
js_source = """
function ReplaceInnerHtml(url, id, params){
axios.get(url, {params: params === undefined ? {} : params})
.then(function (response) {
document.getElementById(id).innerHTML = response.data;
});
};
"""
def replaceInnerHtml(url, id, **kwargs):
params = json.dumps(kwargs)
return f"ReplaceInnerHtml('{url}', '{id}', {params})"
replaceHtml = replaceInnerHtml
class CreateAppendHtmlFunc(JavaScript):
js_source = """
function AppendHtml(url, id, params){
axios.get(url, {params: params === undefined ? {} : params})
.then( function (response) {
$("#"+id).append(response.data);
});
};
"""
def appendHtml(url, id, **kwargs):
params = json.dumps(kwargs)
return f"AppendHtml('{url}', '{id}', {params})"
class CreatePrependHtmlFunc(JavaScript):
js_source = """
function PrependHtml(url, id, params){
axios.get(url, {params: params === undefined ? {} : params})
.then( function(response) {
$("#"+id).prepend(response.data);
});
};
"""
def prependHtml(url, id, **kwargs):
params = json.dumps(kwargs)
return f"PrependHtml('{url}', '{id}', {params})"
class CreateRemoveHtmlFunc(JavaScript):
js_source = """
function RemoveHtml(id){
$("#"+id).remove();
};
"""
class CreateSetAttributeFunction(JavaScript):
js_source = """
function SetAttribute(id, attribute, value){
$("#"+id).attr(attribute, value)
};
"""
def removeHtml(id):
return f"RemoveHtml('{id}')"
def chain_functions(*function_strings):
return "; ".join(function_strings) + ";"
def style_tag_with_css(css):
return style(raw(css))
class LineChart(JavaScript):
js_source = """
const canvas$$id = $('#$$id')
const lineChart$$id = new Chart(canvas$$id, {
type: "line",
data: {
labels: $$xlabels,
datasets: $$datasets
},
options: $$options
});
"""
def line_chart(id, xlabels, datasets, options):
return LineChart(id=id, xlabels=json.dumps(xlabels), datasets=json.dumps(datasets), options=json.dumps(options or {}))
| true | true |
f72bb5154ac3efc57f3e05c96a98460ac4436137 | 4,661 | py | Python | lbrynet/core/PaymentRateManager.py | anon4040/lbry | 1f1b34863805f4954fbef3f163ef65268a66771a | [
"MIT"
] | 1 | 2018-12-08T04:42:11.000Z | 2018-12-08T04:42:11.000Z | lbrynet/core/PaymentRateManager.py | mrlucky9/lbry | bf6bc02828ed55e98a3002f487041acbd7841883 | [
"MIT"
] | null | null | null | lbrynet/core/PaymentRateManager.py | mrlucky9/lbry | bf6bc02828ed55e98a3002f487041acbd7841883 | [
"MIT"
] | 1 | 2018-05-01T09:28:52.000Z | 2018-05-01T09:28:52.000Z | from lbrynet.core.Strategy import get_default_strategy, OnlyFreeStrategy
from lbrynet import conf
from decimal import Decimal
class BasePaymentRateManager(object):
    """Holds the node-wide minimum payment rates, falling back to config."""
    def __init__(self, rate=None, info_rate=None):
        if rate is None:
            rate = conf.settings['data_rate']
        if info_rate is None:
            info_rate = conf.settings['min_info_rate']
        self.min_blob_data_payment_rate = rate
        self.min_blob_info_payment_rate = info_rate
class PaymentRateManager(object):
    """Per-session rate manager that may override the base data rate."""
    def __init__(self, base, rate=None):
        """
        @param base: a BasePaymentRateManager
        @param rate: the min blob data payment rate
        """
        self.base = base
        self.min_blob_data_payment_rate = rate
        self.points_paid = 0.0
    def get_rate_blob_data(self, peer):
        """Rate offered to *peer*: always the effective minimum."""
        return self.get_effective_min_blob_data_payment_rate()
    def accept_rate_blob_data(self, peer, payment_rate):
        """Accept any offer at or above the effective minimum rate."""
        minimum = self.get_effective_min_blob_data_payment_rate()
        return payment_rate >= minimum
    def get_effective_min_blob_data_payment_rate(self):
        """Session override when set, otherwise the base manager's rate."""
        if self.min_blob_data_payment_rate is not None:
            return self.min_blob_data_payment_rate
        return self.base.min_blob_data_payment_rate
    def record_points_paid(self, amount):
        """Accumulate the running total of points paid this session."""
        self.points_paid += amount
class NegotiatedPaymentRateManager(object):
    """Rate manager that negotiates per-peer prices through a Strategy."""
    def __init__(self, base, availability_tracker, generous=None):
        """
        @param base: a BasePaymentRateManager
        @param availability_tracker: a BlobAvailabilityTracker
        @param rate: the min blob data payment rate
        """
        self.base = base
        self.min_blob_data_payment_rate = base.min_blob_data_payment_rate
        self.points_paid = 0.0
        self.blob_tracker = availability_tracker
        # Fall back to the configured generosity setting when unspecified.
        self.generous = generous if generous is not None else conf.settings['is_generous_host']
        # NOTE(review): the strategy receives the raw ``generous`` argument,
        # not ``self.generous`` — when generous is None the strategy uses its
        # own default instead of the config value; confirm this is intended.
        self.strategy = get_default_strategy(self.blob_tracker,
                                             base_price=self.base.min_blob_data_payment_rate,
                                             is_generous=generous)
    def get_rate_blob_data(self, peer, blobs):
        """Ask the strategy for an offer rate for *blobs* to *peer*."""
        response = self.strategy.make_offer(peer, blobs)
        return response.rate
    def accept_rate_blob_data(self, peer, blobs, offer):
        """Evaluate *offer* from *peer*, record the reply, report acceptance."""
        offer = self.strategy.respond_to_offer(offer, peer, blobs)
        self.strategy.update_accepted_offers(peer, offer)
        return offer.is_accepted
    def reply_to_offer(self, peer, blobs, offer):
        """Like accept_rate_blob_data, but return the full reply offer."""
        reply = self.strategy.respond_to_offer(offer, peer, blobs)
        self.strategy.update_accepted_offers(peer, reply)
        return reply
    def get_rate_for_peer(self, peer):
        """Return the accepted offer for *peer*, or False if there is none."""
        return self.strategy.accepted_offers.get(peer, False)
    def record_points_paid(self, amount):
        self.points_paid += amount
    def record_offer_reply(self, peer, offer):
        self.strategy.update_accepted_offers(peer, offer)
    def price_limit_reached(self, peer):
        """True when *peer* rejected our offer and we are already at max rate."""
        if peer in self.strategy.pending_sent_offers:
            offer = self.strategy.pending_sent_offers[peer]
            # Round both sides to 5 decimals to avoid float-noise mismatches.
            return (offer.is_too_low and
                    round(Decimal.from_float(offer.rate), 5) >= round(self.strategy.max_rate, 5))
        return False
class OnlyFreePaymentsManager(object):
    """Rate manager that only ever offers/accepts a rate of 0.0 (testing)."""
    def __init__(self, **kwargs):
        """
        A payment rate manager that will only ever accept and offer a rate of 0.0,
        Used for testing
        """
        self.base = BasePaymentRateManager(0.0, 0.0)
        self.points_paid = 0.0
        self.generous = True
        self.strategy = OnlyFreeStrategy()
    def get_rate_blob_data(self, peer, blobs):
        """Offer rate from the free-only strategy."""
        response = self.strategy.make_offer(peer, blobs)
        return response.rate
    def accept_rate_blob_data(self, peer, blobs, offer):
        """Evaluate *offer*, record the reply, and report acceptance."""
        offer = self.strategy.respond_to_offer(offer, peer, blobs)
        self.strategy.update_accepted_offers(peer, offer)
        return offer.is_accepted
    def reply_to_offer(self, peer, blobs, offer):
        """Like accept_rate_blob_data, but return the full reply offer."""
        reply = self.strategy.respond_to_offer(offer, peer, blobs)
        self.strategy.update_accepted_offers(peer, reply)
        return reply
    def get_rate_for_peer(self, peer):
        """Return the accepted offer for *peer*, or False if there is none."""
        return self.strategy.accepted_offers.get(peer, False)
    def record_points_paid(self, amount):
        self.points_paid += amount
    def record_offer_reply(self, peer, offer):
        self.strategy.update_accepted_offers(peer, offer)
    def price_limit_reached(self, peer):
        # Any nonzero pending rate means the only allowed price (0.0) is exceeded.
        if peer in self.strategy.pending_sent_offers:
            offer = self.strategy.pending_sent_offers[peer]
            if offer.rate > 0.0:
                return True
        return False
| 36.414063 | 98 | 0.679897 | from lbrynet.core.Strategy import get_default_strategy, OnlyFreeStrategy
from lbrynet import conf
from decimal import Decimal
class BasePaymentRateManager(object):
def __init__(self, rate=None, info_rate=None):
self.min_blob_data_payment_rate = rate if rate is not None else conf.settings['data_rate']
self.min_blob_info_payment_rate = (
info_rate if info_rate is not None else conf.settings['min_info_rate'])
class PaymentRateManager(object):
def __init__(self, base, rate=None):
self.base = base
self.min_blob_data_payment_rate = rate
self.points_paid = 0.0
def get_rate_blob_data(self, peer):
return self.get_effective_min_blob_data_payment_rate()
def accept_rate_blob_data(self, peer, payment_rate):
return payment_rate >= self.get_effective_min_blob_data_payment_rate()
def get_effective_min_blob_data_payment_rate(self):
if self.min_blob_data_payment_rate is None:
return self.base.min_blob_data_payment_rate
return self.min_blob_data_payment_rate
def record_points_paid(self, amount):
self.points_paid += amount
class NegotiatedPaymentRateManager(object):
    """Rate manager that negotiates per-peer prices through a strategy object."""

    def __init__(self, base, availability_tracker, generous=None):
        self.base = base
        self.min_blob_data_payment_rate = base.min_blob_data_payment_rate
        self.points_paid = 0.0
        self.blob_tracker = availability_tracker
        if generous is None:
            self.generous = conf.settings['is_generous_host']
        else:
            self.generous = generous
        # NOTE(review): the raw argument (possibly None) is forwarded here,
        # not self.generous -- preserved as in the original.
        self.strategy = get_default_strategy(self.blob_tracker,
                                             base_price=self.base.min_blob_data_payment_rate,
                                             is_generous=generous)

    def get_rate_blob_data(self, peer, blobs):
        """Rate the strategy offers *peer* for *blobs*."""
        return self.strategy.make_offer(peer, blobs).rate

    def accept_rate_blob_data(self, peer, blobs, offer):
        """Evaluate an incoming offer, record the decision, report acceptance."""
        decision = self.strategy.respond_to_offer(offer, peer, blobs)
        self.strategy.update_accepted_offers(peer, decision)
        return decision.is_accepted

    def reply_to_offer(self, peer, blobs, offer):
        """Build, record, and return the strategy's reply to an incoming offer."""
        reply = self.strategy.respond_to_offer(offer, peer, blobs)
        self.strategy.update_accepted_offers(peer, reply)
        return reply

    def get_rate_for_peer(self, peer):
        """Entry recorded for *peer* in accepted offers, or False when absent."""
        return self.strategy.accepted_offers.get(peer, False)

    def record_points_paid(self, amount):
        """Accumulate *amount* onto the running total of points paid."""
        self.points_paid += amount

    def record_offer_reply(self, peer, offer):
        """Record *offer* as the accepted-offer state for *peer*."""
        self.strategy.update_accepted_offers(peer, offer)

    def price_limit_reached(self, peer):
        """True when the pending offer to *peer* was refused at the strategy's max rate."""
        if peer not in self.strategy.pending_sent_offers:
            return False
        pending = self.strategy.pending_sent_offers[peer]
        return pending.is_too_low and \
            round(Decimal.from_float(pending.rate), 5) >= round(self.strategy.max_rate, 5)
class OnlyFreePaymentsManager(object):
    """Degenerate rate manager: every rate is zero and offers are always free."""

    def __init__(self, **kwargs):
        self.base = BasePaymentRateManager(0.0, 0.0)
        self.points_paid = 0.0
        self.generous = True
        self.strategy = OnlyFreeStrategy()

    def get_rate_blob_data(self, peer, blobs):
        """Rate the strategy offers *peer* for *blobs*."""
        return self.strategy.make_offer(peer, blobs).rate

    def accept_rate_blob_data(self, peer, blobs, offer):
        """Evaluate an incoming offer, record the decision, report acceptance."""
        decision = self.strategy.respond_to_offer(offer, peer, blobs)
        self.strategy.update_accepted_offers(peer, decision)
        return decision.is_accepted

    def reply_to_offer(self, peer, blobs, offer):
        """Build, record, and return the strategy's reply to an incoming offer."""
        reply = self.strategy.respond_to_offer(offer, peer, blobs)
        self.strategy.update_accepted_offers(peer, reply)
        return reply

    def get_rate_for_peer(self, peer):
        """Entry recorded for *peer* in accepted offers, or False when absent."""
        return self.strategy.accepted_offers.get(peer, False)

    def record_points_paid(self, amount):
        """Accumulate *amount* onto the running total of points paid."""
        self.points_paid += amount

    def record_offer_reply(self, peer, offer):
        """Record *offer* as the accepted-offer state for *peer*."""
        self.strategy.update_accepted_offers(peer, offer)

    def price_limit_reached(self, peer):
        """True when a pending offer to *peer* somehow carries a non-zero rate."""
        pending = self.strategy.pending_sent_offers
        return peer in pending and pending[peer].rate > 0.0
| true | true |
f72bb56ed6f1866f992171ab177c9a2074f78328 | 1,005 | py | Python | cea/examples/extract_reference_case.py | AlexJew/CityEnergyAnalyst | 6eb372c79e5100a2d0abce78561ae368fb409cd1 | [
"MIT"
] | null | null | null | cea/examples/extract_reference_case.py | AlexJew/CityEnergyAnalyst | 6eb372c79e5100a2d0abce78561ae368fb409cd1 | [
"MIT"
] | null | null | null | cea/examples/extract_reference_case.py | AlexJew/CityEnergyAnalyst | 6eb372c79e5100a2d0abce78561ae368fb409cd1 | [
"MIT"
] | null | null | null | """
Extract the reference case (``cea/examples/reference-case-open.zip``).
"""
from __future__ import division
import os
import zipfile
import cea.examples
import cea.config
import cea.inputlocator
# List the sections in the configuration file that are used by this script;
# this value is used to generate the help menu for the command-line interface.
CEA_CONFIG_SECTIONS = ['extract-reference-case']
def main(config):
    """
    Extract the bundled reference case into the destination folder.

    :param config: Contains ``config.extract_reference_case.case`` (which
        bundled reference case to unpack) and
        ``config.extract_reference_case.destination`` (target folder).
    :type config: cea.config.Configuration
    :return: None
    """
    reference_case = 'reference-case-{case}.zip'.format(case=config.extract_reference_case.case)
    archive_path = os.path.join(os.path.dirname(cea.examples.__file__), reference_case)
    # Use a context manager so the zip handle is closed deterministically
    # instead of being leaked (the original never closed it).
    with zipfile.ZipFile(archive_path) as archive:
        archive.extractall(config.extract_reference_case.destination)
# Command-line entry point: run the extraction with the default CEA configuration.
if __name__ == '__main__':
    main(cea.config.Configuration())
| 31.40625 | 99 | 0.758209 | from __future__ import division
import os
import zipfile
import cea.examples
import cea.config
import cea.inputlocator
# Configuration-file sections used by this script (drives the CLI help menu).
CEA_CONFIG_SECTIONS = ['extract-reference-case']
def main(config):
    """
    Extract the bundled reference case into the destination folder.

    :param config: Contains ``config.extract_reference_case.case`` (which
        bundled reference case to unpack) and
        ``config.extract_reference_case.destination`` (target folder).
    :type config: cea.config.Configuration
    :return: None
    """
    reference_case = 'reference-case-{case}.zip'.format(case=config.extract_reference_case.case)
    archive_path = os.path.join(os.path.dirname(cea.examples.__file__), reference_case)
    # Use a context manager so the zip handle is closed deterministically
    # instead of being leaked (the original never closed it).
    with zipfile.ZipFile(archive_path) as archive:
        archive.extractall(config.extract_reference_case.destination)
# Command-line entry point: run the extraction with the default CEA configuration.
if __name__ == '__main__':
    main(cea.config.Configuration())
| true | true |
f72bb6f0058e6022c5654db644c4f3a8d1350c2a | 18,976 | py | Python | ssd/modeling/backbone/basic.py | Sethan/deeplearning-graphics | ce164847a323d3f07cfe241f4bbed6029777c58d | [
"MIT"
] | null | null | null | ssd/modeling/backbone/basic.py | Sethan/deeplearning-graphics | ce164847a323d3f07cfe241f4bbed6029777c58d | [
"MIT"
] | null | null | null | ssd/modeling/backbone/basic.py | Sethan/deeplearning-graphics | ce164847a323d3f07cfe241f4bbed6029777c58d | [
"MIT"
] | null | null | null | import torch
class BasicModel(torch.nn.Module):
    """
    Basic SSD backbone that emits six progressively smaller feature maps.

    For a 300x300 input the forward pass returns a tuple of tensors shaped
    (-1, output_channels[i], s, s) for s in [38, 19, 10, 5, 3, 1], where
    output_channels is cfg.MODEL.BACKBONE.OUT_CHANNELS.

    The stages are built programmatically but reproduce the original layer
    sequence exactly (same Sequential indices, hence identical state_dict keys).
    """
    def __init__(self, cfg):
        super().__init__()
        image_size = cfg.INPUT.IMAGE_SIZE
        output_channels = cfg.MODEL.BACKBONE.OUT_CHANNELS
        self.output_channels = output_channels
        image_channels = cfg.MODEL.BACKBONE.INPUT_CHANNELS
        self.output_feature_size = cfg.MODEL.PRIORS.FEATURE_MAPS
        self.num_filters = [32, 64]

        def conv3(in_ch, out_ch, stride=1, padding=1):
            # Every convolution in this backbone uses a 3x3 kernel.
            return torch.nn.Conv2d(in_channels=in_ch, out_channels=out_ch,
                                   kernel_size=3, stride=stride, padding=padding)

        # --- stage producing the 38x38 map --------------------------------
        # Unit layout: Conv -> BatchNorm -> [MaxPool] -> ELU -> Dropout2d,
        # closed by a stride-2 conv into output_channels[0].
        nf0, nf1 = self.num_filters
        stem = [
            conv3(image_channels, nf0),
            torch.nn.BatchNorm2d(nf0),
            torch.nn.MaxPool2d(2, stride=2),
            torch.nn.ELU(),
            torch.nn.Dropout2d(0.05),
        ]
        # NOTE(review): 0.01 breaks the otherwise increasing 0.05..0.14
        # schedule (likely a typo for 0.10); kept as-is to preserve behavior.
        stem_drops = [0.06, 0.07, 0.08, 0.09, 0.01, 0.11, 0.12, 0.13, 0.14]
        for i, p in enumerate(stem_drops):
            stem.append(conv3(nf0 if i == 0 else nf1, nf1))
            stem.append(torch.nn.BatchNorm2d(nf1))
            if i == 5:
                # The second spatial downsampling happens inside this unit.
                stem.append(torch.nn.MaxPool2d(2, stride=2))
            stem.append(torch.nn.ELU())
            stem.append(torch.nn.Dropout2d(p))
        stem.append(conv3(nf1, output_channels[0], stride=2))
        self.feature_extractor38 = torch.nn.Sequential(*stem)

        def stage(in_ch, out_ch, drops, last_stride, last_padding=1, skip_bn=()):
            # Repeated BatchNorm -> ELU -> Dropout2d -> Conv units; only the
            # final conv changes channel count / stride / padding.  Unit
            # indices listed in skip_bn omit the BatchNorm.
            layers = []
            final = len(drops) - 1
            for i, p in enumerate(drops):
                if i not in skip_bn:
                    layers.append(torch.nn.BatchNorm2d(in_ch))
                layers.append(torch.nn.ELU())
                layers.append(torch.nn.Dropout2d(p))
                if i == final:
                    layers.append(conv3(in_ch, out_ch, stride=last_stride,
                                        padding=last_padding))
                else:
                    layers.append(conv3(in_ch, in_ch))
            return torch.nn.Sequential(*layers)

        oc = output_channels
        self.feature_extractor19 = stage(
            oc[0], oc[1], [0.15, 0.16, 0.17, 0.18, 0.19, 0.2, 0.21, 0.22], 2)
        self.feature_extractor9 = stage(
            oc[1], oc[2], [0.23, 0.24, 0.25, 0.26, 0.27, 0.28, 0.29, 0.30], 2)
        self.feature_extractor5 = stage(
            oc[2], oc[3], [0.31, 0.32, 0.33, 0.34, 0.35, 0.36, 0.37, 0.38], 2)
        # NOTE(review): the unit with Dropout2d(0.41) has no BatchNorm in the
        # original network; skip_bn=(2,) reproduces that omission verbatim.
        self.feature_extractor3 = stage(
            oc[3], oc[4], [0.39, 0.40, 0.41, 0.42, 0.43, 0.44, 0.45, 0.46], 2,
            skip_bn=(2,))
        # Final stage maps 3x3 -> 1x1 via a stride-1, padding-0 conv.  The
        # dropout schedule skips 0.47, matching the original.
        self.feature_extractor1 = stage(
            oc[4], oc[5], [0.48, 0.49, 0.50, 0.51, 0.52, 0.53, 0.54, 0.55], 1,
            last_padding=0)

    def forward(self, x):
        """
        Run all six stages and return their outputs as a tuple.

        Spatial sizes 38, 19, 10, 5, 3 and 1 are asserted per output, with
        channel counts taken from self.output_channels.
        """
        extractors = (
            self.feature_extractor38,
            self.feature_extractor19,
            self.feature_extractor9,
            self.feature_extractor5,
            self.feature_extractor3,
            self.feature_extractor1,
        )
        out_features = []
        out = x
        for extractor in extractors:
            out = extractor(out)
            out_features.append(out)
        sides = [38, 19, 10, 5, 3, 1]
        for idx, feature in enumerate(out_features):
            expected_shape = (self.output_channels[idx], sides[idx], sides[idx])
            assert feature.shape[1:] == expected_shape, \
                f"Expected shape: {expected_shape}, got: {feature.shape[1:]} at output IDX: {idx}"
        return tuple(out_features)
| 32.108291 | 98 | 0.539155 | import torch
class BasicModel(torch.nn.Module):
    """
    Basic SSD backbone that emits six progressively smaller feature maps.

    For a 300x300 input the forward pass returns a tuple of tensors shaped
    (-1, output_channels[i], s, s) for s in [38, 19, 10, 5, 3, 1], where
    output_channels is cfg.MODEL.BACKBONE.OUT_CHANNELS.

    The stages are built programmatically but reproduce the original layer
    sequence exactly (same Sequential indices, hence identical state_dict keys).
    """
    def __init__(self, cfg):
        super().__init__()
        image_size = cfg.INPUT.IMAGE_SIZE
        output_channels = cfg.MODEL.BACKBONE.OUT_CHANNELS
        self.output_channels = output_channels
        image_channels = cfg.MODEL.BACKBONE.INPUT_CHANNELS
        self.output_feature_size = cfg.MODEL.PRIORS.FEATURE_MAPS
        self.num_filters = [32, 64]

        def conv3(in_ch, out_ch, stride=1, padding=1):
            # Every convolution in this backbone uses a 3x3 kernel.
            return torch.nn.Conv2d(in_channels=in_ch, out_channels=out_ch,
                                   kernel_size=3, stride=stride, padding=padding)

        # --- stage producing the 38x38 map --------------------------------
        # Unit layout: Conv -> BatchNorm -> [MaxPool] -> ELU -> Dropout2d,
        # closed by a stride-2 conv into output_channels[0].
        nf0, nf1 = self.num_filters
        stem = [
            conv3(image_channels, nf0),
            torch.nn.BatchNorm2d(nf0),
            torch.nn.MaxPool2d(2, stride=2),
            torch.nn.ELU(),
            torch.nn.Dropout2d(0.05),
        ]
        # NOTE(review): 0.01 breaks the otherwise increasing 0.05..0.14
        # schedule (likely a typo for 0.10); kept as-is to preserve behavior.
        stem_drops = [0.06, 0.07, 0.08, 0.09, 0.01, 0.11, 0.12, 0.13, 0.14]
        for i, p in enumerate(stem_drops):
            stem.append(conv3(nf0 if i == 0 else nf1, nf1))
            stem.append(torch.nn.BatchNorm2d(nf1))
            if i == 5:
                # The second spatial downsampling happens inside this unit.
                stem.append(torch.nn.MaxPool2d(2, stride=2))
            stem.append(torch.nn.ELU())
            stem.append(torch.nn.Dropout2d(p))
        stem.append(conv3(nf1, output_channels[0], stride=2))
        self.feature_extractor38 = torch.nn.Sequential(*stem)

        def stage(in_ch, out_ch, drops, last_stride, last_padding=1, skip_bn=()):
            # Repeated BatchNorm -> ELU -> Dropout2d -> Conv units; only the
            # final conv changes channel count / stride / padding.  Unit
            # indices listed in skip_bn omit the BatchNorm.
            layers = []
            final = len(drops) - 1
            for i, p in enumerate(drops):
                if i not in skip_bn:
                    layers.append(torch.nn.BatchNorm2d(in_ch))
                layers.append(torch.nn.ELU())
                layers.append(torch.nn.Dropout2d(p))
                if i == final:
                    layers.append(conv3(in_ch, out_ch, stride=last_stride,
                                        padding=last_padding))
                else:
                    layers.append(conv3(in_ch, in_ch))
            return torch.nn.Sequential(*layers)

        oc = output_channels
        self.feature_extractor19 = stage(
            oc[0], oc[1], [0.15, 0.16, 0.17, 0.18, 0.19, 0.2, 0.21, 0.22], 2)
        self.feature_extractor9 = stage(
            oc[1], oc[2], [0.23, 0.24, 0.25, 0.26, 0.27, 0.28, 0.29, 0.30], 2)
        self.feature_extractor5 = stage(
            oc[2], oc[3], [0.31, 0.32, 0.33, 0.34, 0.35, 0.36, 0.37, 0.38], 2)
        # NOTE(review): the unit with Dropout2d(0.41) has no BatchNorm in the
        # original network; skip_bn=(2,) reproduces that omission verbatim.
        self.feature_extractor3 = stage(
            oc[3], oc[4], [0.39, 0.40, 0.41, 0.42, 0.43, 0.44, 0.45, 0.46], 2,
            skip_bn=(2,))
        # Final stage maps 3x3 -> 1x1 via a stride-1, padding-0 conv.  The
        # dropout schedule skips 0.47, matching the original.
        self.feature_extractor1 = stage(
            oc[4], oc[5], [0.48, 0.49, 0.50, 0.51, 0.52, 0.53, 0.54, 0.55], 1,
            last_padding=0)

    def forward(self, x):
        """
        Run all six stages and return their outputs as a tuple.

        Spatial sizes 38, 19, 10, 5, 3 and 1 are asserted per output, with
        channel counts taken from self.output_channels.
        """
        extractors = (
            self.feature_extractor38,
            self.feature_extractor19,
            self.feature_extractor9,
            self.feature_extractor5,
            self.feature_extractor3,
            self.feature_extractor1,
        )
        out_features = []
        out = x
        for extractor in extractors:
            out = extractor(out)
            out_features.append(out)
        sides = [38, 19, 10, 5, 3, 1]
        for idx, feature in enumerate(out_features):
            expected_shape = (self.output_channels[idx], sides[idx], sides[idx])
            assert feature.shape[1:] == expected_shape, \
                f"Expected shape: {expected_shape}, got: {feature.shape[1:]} at output IDX: {idx}"
        return tuple(out_features)
| true | true |
f72bb74728607daee649873c96466e293323f858 | 400 | py | Python | packages/pytea/pytest/unit_tests/passes/pass_argmax_dim01.py | lego0901/pytea | 8ede650def2e68f4610ba816451d8b9e28f09f76 | [
"MIT"
] | null | null | null | packages/pytea/pytest/unit_tests/passes/pass_argmax_dim01.py | lego0901/pytea | 8ede650def2e68f4610ba816451d8b9e28f09f76 | [
"MIT"
] | null | null | null | packages/pytea/pytest/unit_tests/passes/pass_argmax_dim01.py | lego0901/pytea | 8ede650def2e68f4610ba816451d8b9e28f09f76 | [
"MIT"
] | null | null | null | '''
pass_argmax_dim01.py
Copyright (c) Seoul National University
Licensed under the MIT license.
Author: Woo Sung Song
torch.Tensor.argmax with dim parameter.
! This is not available since a maximum-stack-size-exceeded error occurred
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
# Random 2x3 tensor of uniform samples for the disabled argmax check below.
a = torch.rand(2, 3)
#m = a.argmax(dim=1)
# shape assertion
#m + torch.rand(2, 4, 5) | 21.052632 | 81 | 0.75 |
import torch
import torch.nn as nn
import torch.nn.functional as F
# Random 2x3 tensor of uniform samples.
a = torch.rand(2, 3)
| true | true |
f72bb761694ffb3a80f6774d7de879a210505611 | 5,192 | py | Python | backend/game.py | KatrichKostiantin/CheckersGame | 3a552335d3ce344b203129a230d64db303491bb2 | [
"MIT"
] | null | null | null | backend/game.py | KatrichKostiantin/CheckersGame | 3a552335d3ce344b203129a230d64db303491bb2 | [
"MIT"
] | null | null | null | backend/game.py | KatrichKostiantin/CheckersGame | 3a552335d3ce344b203129a230d64db303491bb2 | [
"MIT"
] | null | null | null | import asyncio
import datetime
import logging
import secrets
from main import game
class GameError(Exception):
    """Base class for errors raised by the checkers game logic."""
class ForbiddenMoveError(GameError):
    """Raised when a player attempts a move the rules forbid."""
class MoveIsNotPossible(GameError):
    """Raised when a requested move is not possible."""
class Game:
    """Checkers match wrapper around the shared ``game`` engine.

    Handles player registration, a per-move countdown clock, move
    forwarding/validation, and a JSON-serializable snapshot of the
    match state for the API layer.
    """

    def __init__(self):
        self._game = game  # engine instance imported from main (module-level singleton)
        self._is_started = False
        self._is_finished = False
        self._available_move_time = 2.2  # 2 s move budget + 200 ms slack for network latency
        self._available_current_move_time = self._available_move_time
        self._players = {}  # player number (1 or 2) -> {'token': ..., 'team_name': ...}
        self._lost_time_player = None  # set to 1 or 2 when that player exceeds the clock
        self._last_move = None  # {'player': color, 'last_moves': [...]} for the current turn streak
        self._colors_table = {  # engine player number -> display color
            1: 'RED',
            2: 'BLACK',
            None: 'None'
        }

    def _whose_turn(self):
        # Display color of the player to move.
        return self._colors_table[self._game.whose_turn()]

    def _status(self):
        # Human-readable status string for the JSON snapshot.
        if not self._is_started:
            return 'Not yet started'
        if self._lost_time_player:
            return f'Player {self._colors_table[self._lost_time_player]} reached time limit'
        return 'Game is over' if self._game.is_over() else 'Game is playing'

    def _winner(self):
        # Winner's color, or None while the game is undecided.
        if self._lost_time_player:
            # The opponent of the timed-out player wins by forfeit.
            return self._colors_table[1] \
                if self._lost_time_player == 2 \
                else self._colors_table[2]
        return self._colors_table[self._game.get_winner()] if self._game.get_winner() else None

    def _board(self):
        # Serialize every piece still on the board.
        output = []
        for piece in self._game.board.pieces:
            if not piece.captured:
                output.append({
                    'color': 'RED' if piece.player == 1 else 'BLACK',
                    'row': piece.get_row(),
                    'column': piece.get_column(),
                    'king': piece.king,
                    'position': piece.position
                })
        return output

    def add_player(self, team_name):
        """Register a team; returns its color and secret move token.

        First caller becomes player 1 (RED), second player 2 (BLACK).
        Once both seats are filled the clock loop is scheduled.  Returns
        None if the game has already started.
        """
        if self._is_started:
            return
        player_num = 2 if 1 in self._players else 1
        token = secrets.token_hex(16)  # bearer token the player must present on each move
        self._players[player_num] = {
            'token': token,
            'team_name': team_name
        }
        if 1 in self._players and 2 in self._players:
            # Both seats taken: kick off the background clock coroutine.
            asyncio.ensure_future(self.start())
        return {
            'color': self._colors_table[player_num],
            'token': token
        }

    async def start(self):
        """Run the match clock until the game ends or a player times out."""
        logging.info(f'...GAME IS STARTED at {datetime.datetime.now().isoformat()}...')
        logging.info(
            f'1 player, color: {self._colors_table[1]}, team name: {self._players[1]["team_name"]}'
        )
        logging.info(
            f'2 player, color: {self._colors_table[2]}, team name: {self._players[2]["team_name"]}'
        )
        self._is_started = True
        while True:
            logging.info(
                f'Available time for player "{self._colors_table[self._game.whose_turn()]}" '
                f'move: {self._available_current_move_time}'
            )
            # Poll every 50 ms; move() refills the countdown on a legal move.
            await asyncio.sleep(0.05)
            self._available_current_move_time -= 0.05
            if self._available_current_move_time < 0:
                self._lost_time_player = self._game.whose_turn()
                self._is_finished = True
                break
            if self._game.is_over():
                self._is_finished = True
                break
        # Decide the winner: a timeout forfeits, otherwise ask the engine.
        if self._lost_time_player == 1:
            winner = 2
        elif self._lost_time_player == 2:
            winner = 1
        else:
            winner = self._game.get_winner()
        self._game.set_winner({
            'color': self._colors_table[winner],
            'team_name': self._players[winner]['team_name']
        })
        logging.info(
            f'...GAME WAS FINISHED at {datetime.datetime.now().isoformat()}, winner: {self._game.get_board_winner()}'
        )

    def move(self, token, move):
        """Apply *move* for the player owning *token*.

        Raises ForbiddenMoveError when the token is not the current
        player's, and MoveIsNotPossible when the engine rejects the move.
        """
        player = self._players[self._game.whose_turn()]
        if player['token'] != token:
            raise ForbiddenMoveError
        try:
            # Group consecutive moves by the same player (multi-jump turns).
            if self._last_move is not None and self._last_move['player'] == self._whose_turn():
                self._last_move['last_moves'].append(move)
            else:
                self._last_move = {
                    'player': self._whose_turn(),
                    'last_moves': [move]
                }
            self._game.move(move)
            logging.info(
                f'{player["team_name"]} made move ({move}) at {datetime.datetime.now().isoformat()}'
            )
            # A legal move refills the countdown clock.
            self._available_current_move_time = self._available_move_time
        except ValueError as e:
            # The engine signals illegal moves with ValueError.
            raise MoveIsNotPossible(str(e))

    def is_started(self):
        # True once both players registered and the clock loop began.
        return self._is_started

    def is_finished(self):
        # True after the game ended (win or timeout).
        return self._is_finished

    @property
    def json(self):
        """Full serializable snapshot of the match state."""
        return {
            'status': self._status(),
            'whose_turn': self._whose_turn(),
            'winner': self._winner(),
            'board': self._board(),
            'available_time': self._available_current_move_time,
            'last_move': self._last_move,
            'is_started': self.is_started(),
            'is_finished': self.is_finished()
        }
| 30.011561 | 117 | 0.55624 | import asyncio
import datetime
import logging
import secrets
from main import game
class GameError(Exception):
pass
class ForbiddenMoveError(GameError):
pass
class MoveIsNotPossible(GameError):
pass
class Game:
def __init__(self):
self._game = game
self._is_started = False
self._is_finished = False
self._available_move_time = 2.2
self._available_current_move_time = self._available_move_time
self._players = {}
self._lost_time_player = None
self._last_move = None
self._colors_table = {
1: 'RED',
2: 'BLACK',
None: 'None'
}
def _whose_turn(self):
return self._colors_table[self._game.whose_turn()]
def _status(self):
if not self._is_started:
return 'Not yet started'
if self._lost_time_player:
return f'Player {self._colors_table[self._lost_time_player]} reached time limit'
return 'Game is over' if self._game.is_over() else 'Game is playing'
def _winner(self):
if self._lost_time_player:
return self._colors_table[1] \
if self._lost_time_player == 2 \
else self._colors_table[2]
return self._colors_table[self._game.get_winner()] if self._game.get_winner() else None
def _board(self):
output = []
for piece in self._game.board.pieces:
if not piece.captured:
output.append({
'color': 'RED' if piece.player == 1 else 'BLACK',
'row': piece.get_row(),
'column': piece.get_column(),
'king': piece.king,
'position': piece.position
})
return output
def add_player(self, team_name):
if self._is_started:
return
player_num = 2 if 1 in self._players else 1
token = secrets.token_hex(16)
self._players[player_num] = {
'token': token,
'team_name': team_name
}
if 1 in self._players and 2 in self._players:
asyncio.ensure_future(self.start())
return {
'color': self._colors_table[player_num],
'token': token
}
async def start(self):
logging.info(f'...GAME IS STARTED at {datetime.datetime.now().isoformat()}...')
logging.info(
f'1 player, color: {self._colors_table[1]}, team name: {self._players[1]["team_name"]}'
)
logging.info(
f'2 player, color: {self._colors_table[2]}, team name: {self._players[2]["team_name"]}'
)
self._is_started = True
while True:
logging.info(
f'Available time for player "{self._colors_table[self._game.whose_turn()]}" '
f'move: {self._available_current_move_time}'
)
await asyncio.sleep(0.05)
self._available_current_move_time -= 0.05
if self._available_current_move_time < 0:
self._lost_time_player = self._game.whose_turn()
self._is_finished = True
break
if self._game.is_over():
self._is_finished = True
break
if self._lost_time_player == 1:
winner = 2
elif self._lost_time_player == 2:
winner = 1
else:
winner = self._game.get_winner()
self._game.set_winner({
'color': self._colors_table[winner],
'team_name': self._players[winner]['team_name']
})
logging.info(
f'...GAME WAS FINISHED at {datetime.datetime.now().isoformat()}, winner: {self._game.get_board_winner()}'
)
def move(self, token, move):
player = self._players[self._game.whose_turn()]
if player['token'] != token:
raise ForbiddenMoveError
try:
if self._last_move is not None and self._last_move['player'] == self._whose_turn():
self._last_move['last_moves'].append(move)
else:
self._last_move = {
'player': self._whose_turn(),
'last_moves': [move]
}
self._game.move(move)
logging.info(
f'{player["team_name"]} made move ({move}) at {datetime.datetime.now().isoformat()}'
)
self._available_current_move_time = self._available_move_time
except ValueError as e:
raise MoveIsNotPossible(str(e))
def is_started(self):
return self._is_started
def is_finished(self):
return self._is_finished
@property
def json(self):
return {
'status': self._status(),
'whose_turn': self._whose_turn(),
'winner': self._winner(),
'board': self._board(),
'available_time': self._available_current_move_time,
'last_move': self._last_move,
'is_started': self.is_started(),
'is_finished': self.is_finished()
}
| true | true |
f72bb7c009ac666bf6b4d3d4ca6e8865981ed9cf | 3,079 | py | Python | model/app/LearnTfidfCNB.py | jgharris7/DocClass | 9ef62e655272cca8374187040eb3dd73f3f82b72 | [
"MIT"
] | null | null | null | model/app/LearnTfidfCNB.py | jgharris7/DocClass | 9ef62e655272cca8374187040eb3dd73f3f82b72 | [
"MIT"
] | null | null | null | model/app/LearnTfidfCNB.py | jgharris7/DocClass | 9ef62e655272cca8374187040eb3dd73f3f82b72 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Mon Mar 22 22:43:22 2021
@author: jgharris
"""
# -*- coding: utf-8 -*-
"""
Created on Mon Mar 22 21:09:34 2021
@author: jgharris
"""
root='C:/Users/jgharris/DocClass/'
dataFile='/data/shuffled-full-set-hashed.csv'
import statistics as stat
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import pickle
from DocClfTfidfCNB import DocClfTfidfCNB
from Documents import Documents
#dataFile='/test/testshort.csv'
modelName="nbtfidfv0"
maxlines=80000000
testsize=.3
random_state=45
MAXSTRINGLENGH=4000
FIRSTSTRINGLENGTH=80
conf_mat=[]
def main():
    """Train the TF-IDF + complement-naive-Bayes document classifier.

    Reads the hashed corpus from disk, fits the model on a train split,
    reports train/test/shuffled-control accuracy and a confusion matrix,
    then pickles the fitted model and writes a small smoke-test file.
    """
    # Set up corpus for training (reads at most `maxlines` records).
    corpus=Documents()
    corpus.readFromFile(root+dataFile,maxline=maxlines)
    '''
    model1=DocClfComplNB(maxStringLength=MAXSTRINGLENGH, \
                         firstStringLength=FIRSTSTRINGLENGTH)
    '''
    model1=DocClfTfidfCNB(maxStringLength=MAXSTRINGLENGH, \
                          firstStringLength=FIRSTSTRINGLENGTH)
    print()
    # Split into test and training sets.
    xtrain,xtest,ytrain,ytest=\
        train_test_split(corpus.words,corpus.y,test_size=testsize, \
                         random_state=random_state)
    ytrainpred=model1.fit(xtrain,ytrain)
    ytestpred=model1.predict(xtest)
    trainAccuracy=accuracy_score(ytrain,ytrainpred)
    testAccuracy=accuracy_score(ytest,ytestpred)
    # Control: accuracy against a shuffled test set (chance baseline).
    controlAccuracy=accuracy_score(np.random.permutation(ytest),ytestpred)
    global conf_mat
    # NOTE(review): `model1.confidence` is called here but iterated as a
    # mapping below — presumably a callable that also caches per-class
    # scores; confirm against DocClfTfidfCNB.
    conf_mat =model1.confidence(ytest, ytestpred)
    print(model1.confidence)
    print()
    print( np.unique(ytestpred,return_counts=True))
    print()
    # Per-class confidence table.
    [print("%-20s" % key +" %5.3f" % value) for key,value in model1.confidence.items()]
    # Raw confusion matrix, one printed row per true class.
    for row in range(0,conf_mat.shape[0]):
        print( [" %4d" % conf_mat[row,col] for col in range(0,conf_mat.shape[1])])
    rowsum=conf_mat.sum(axis=0)
    colsum=conf_mat.sum(axis=1)
    labels=[]
    [labels.append(key) for key in model1.confidence.keys()]
    print("item rowsum colsum")
    for ic in range(0,conf_mat.shape[0]):
        print("%-25s" % labels[ic] + " %5d" % rowsum[ic]+ " %5d" % colsum[ic])
    print("")
    print('train=%6.2f test=%6.2f control=%6.2f' %
          (trainAccuracy,testAccuracy,controlAccuracy))
    # Persist the fitted model for later serving.
    pickle.dump(model1,open(root+modelName+".pckmdl","wb"))
    print(ytestpred[0])
    print(xtest[0][0:20])
    # Write two sample predictions + inputs for manual smoke testing.
    testfile=open(root+modelName+"testdata.txt","wt")
    testfile.write(ytestpred[0])
    testfile.write("\n")
    testfile.write(xtest[0])
    testfile.write("\n")
    testfile.write(ytestpred[10])
    testfile.write("\n")
    testfile.write(xtest[10])
    testfile.write("\n")
    testfile.close()
    print( model1.message)
if __name__=='__main__':
main()
| 27.008772 | 88 | 0.642416 |
root='C:/Users/jgharris/DocClass/'
dataFile='/data/shuffled-full-set-hashed.csv'
import statistics as stat
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import pickle
from DocClfTfidfCNB import DocClfTfidfCNB
from Documents import Documents
modelName="nbtfidfv0"
maxlines=80000000
testsize=.3
random_state=45
MAXSTRINGLENGH=4000
FIRSTSTRINGLENGTH=80
conf_mat=[]
def main():
corpus=Documents()
corpus.readFromFile(root+dataFile,maxline=maxlines)
model1=DocClfTfidfCNB(maxStringLength=MAXSTRINGLENGH, \
firstStringLength=FIRSTSTRINGLENGTH)
print()
xtrain,xtest,ytrain,ytest=\
train_test_split(corpus.words,corpus.y,test_size=testsize, \
random_state=random_state)
ytrainpred=model1.fit(xtrain,ytrain)
ytestpred=model1.predict(xtest)
trainAccuracy=accuracy_score(ytrain,ytrainpred)
testAccuracy=accuracy_score(ytest,ytestpred)
controlAccuracy=accuracy_score(np.random.permutation(ytest),ytestpred)
global conf_mat
conf_mat =model1.confidence(ytest, ytestpred)
print(model1.confidence)
print()
print( np.unique(ytestpred,return_counts=True))
print()
[print("%-20s" % key +" %5.3f" % value) for key,value in model1.confidence.items()]
for row in range(0,conf_mat.shape[0]):
print( [" %4d" % conf_mat[row,col] for col in range(0,conf_mat.shape[1])])
rowsum=conf_mat.sum(axis=0)
colsum=conf_mat.sum(axis=1)
labels=[]
[labels.append(key) for key in model1.confidence.keys()]
print("item rowsum colsum")
for ic in range(0,conf_mat.shape[0]):
print("%-25s" % labels[ic] + " %5d" % rowsum[ic]+ " %5d" % colsum[ic])
print("")
print('train=%6.2f test=%6.2f control=%6.2f' %
(trainAccuracy,testAccuracy,controlAccuracy))
pickle.dump(model1,open(root+modelName+".pckmdl","wb"))
print(ytestpred[0])
print(xtest[0][0:20])
testfile=open(root+modelName+"testdata.txt","wt")
testfile.write(ytestpred[0])
testfile.write("\n")
testfile.write(xtest[0])
testfile.write("\n")
testfile.write(ytestpred[10])
testfile.write("\n")
testfile.write(xtest[10])
testfile.write("\n")
testfile.close()
print( model1.message)
if __name__=='__main__':
main()
| true | true |
f72bb7e51487a16740709aa74b453a9e4b4dfec5 | 1,107 | py | Python | sortByWeather.py | LeahGabrielle/Clothes | 72a829358ad6a60aef26b7fce80d854451124a32 | [
"Apache-2.0"
] | null | null | null | sortByWeather.py | LeahGabrielle/Clothes | 72a829358ad6a60aef26b7fce80d854451124a32 | [
"Apache-2.0"
] | null | null | null | sortByWeather.py | LeahGabrielle/Clothes | 72a829358ad6a60aef26b7fce80d854451124a32 | [
"Apache-2.0"
] | null | null | null | #clothes by weather
import random
def pickTop(clothesList):
    """Pick a random top; tops are stored in clothesList[0]."""
    tops = clothesList[0]
    return random.choice(tops)
def pickBottoms(clothesList):
    """Pick a random bottom; bottoms are stored in clothesList[1]."""
    bottoms = clothesList[1]
    return random.choice(bottoms)
# Sorts clothes by weather type and returns the wardrobe restricted to it.
def sortWeather(clothesList, weather):
    """Keep only the clothes tagged with *weather*.

    clothesList[0] holds tops and clothesList[1] holds bottoms; each item
    is a sequence whose index 2 is its weather tag.  Both sublists are
    replaced in place and the (same) list is returned.

    Bug fix: the previous version popped items from each sublist while
    iterating over it AND incremented the index on every iteration, so it
    skipped elements and could remove the wrong ones.
    """
    clothesList[0] = [top for top in clothesList[0] if top[2] == weather]
    clothesList[1] = [bottom for bottom in clothesList[1] if bottom[2] == weather]
    return clothesList
# Asks the user for their weather choice and builds an outfit.
def requestWeather(clothesList):
    """Prompt for the weather, filter the wardrobe, and pick one outfit.

    Returns a two-element list [top, bottom] chosen at random from the
    items matching the entered weather ("hot" or "cold").
    """
    weather = input("Is the weather hot or cold?\n")
    clothesList = sortWeather(clothesList, weather)
    finalChoice = []
    finalChoice.append(pickTop(clothesList))
    finalChoice.append(pickBottoms(clothesList))
    return finalChoice
| 27 | 85 | 0.65673 |
import random
def pickTop(clothesList):
return random.choice(clothesList[0])
def pickBottoms(clothesList):
return random.choice(clothesList[1])
def sortWeather(clothesList, weather):
i=0
for top in clothesList[0]:
if top[2] != weather:
clothesList[0].pop(i)
i+=1
i=0
for bottom in clothesList[1]:
if bottom[2] != weather:
clothesList[1].pop(i)
i+=1
return clothesList
def requestWeather(clothesList):
weather = input("Is the weather hot or cold?\n")
clothesList = sortWeather(clothesList, weather)
finalChoice = []
finalChoice.append(pickTop(clothesList))
finalChoice.append(pickBottoms(clothesList))
return finalChoice
| true | true |
f72bb87a2474ba7c90c9d75c1d943aae0014a860 | 403 | py | Python | core/mixon_core.py | onuratakan/MIXON | 74d8b1fc7ec2e84dbe4e29f411ae09d701838579 | [
"MIT"
] | 14 | 2021-01-22T20:39:43.000Z | 2022-02-20T00:30:41.000Z | core/mixon_core.py | onuratakan/MIXON | 74d8b1fc7ec2e84dbe4e29f411ae09d701838579 | [
"MIT"
] | null | null | null | core/mixon_core.py | onuratakan/MIXON | 74d8b1fc7ec2e84dbe4e29f411ae09d701838579 | [
"MIT"
] | 3 | 2021-03-20T00:02:24.000Z | 2021-03-22T07:36:49.000Z |
import sys
import os
def start_tojas():
    # Launch the Tojas tool from its subdirectory (blocking os.system call).
    os.system("cd tojas && python3 tojas.py -nb && cd ..")
def start_tojas_gui():
    # Launch the Tojas GUI helper.
    os.system("python3 lib/tojas_gui.py -nb")
def start_scanizen():
    # Launch the Scanizen scanner from its subdirectory.
    os.system("cd scanizen && python3 scanizen.py -nb && cd ..")
def start_doser():
    # Launch the Doser tool from its subdirectory.
    os.system("cd doser && python3 doser.py -nb && cd ..")
def start_routersploit():
    # Launch the RouterSploit framework console.
    os.system("python3 routersploit/rsf.py")
| 16.791667 | 62 | 0.667494 |
import sys
import os
def start_tojas():
os.system("cd tojas && python3 tojas.py -nb && cd ..")
def start_tojas_gui():
os.system("python3 lib/tojas_gui.py -nb")
def start_scanizen():
os.system("cd scanizen && python3 scanizen.py -nb && cd ..")
def start_doser():
os.system("cd doser && python3 doser.py -nb && cd ..")
def start_routersploit():
os.system("python3 routersploit/rsf.py")
| true | true |
f72bb9cc461f8b012834228510d2ae37ee6fc7a0 | 2,184 | py | Python | p130_surrounded_regions.py | feigaochn/leetcode | abf0877fae02aa9c2549051f0b68df0ace952512 | [
"MIT"
] | null | null | null | p130_surrounded_regions.py | feigaochn/leetcode | abf0877fae02aa9c2549051f0b68df0ace952512 | [
"MIT"
] | null | null | null | p130_surrounded_regions.py | feigaochn/leetcode | abf0877fae02aa9c2549051f0b68df0ace952512 | [
"MIT"
] | null | null | null | # Surrounded Regions
# Total Accepted: 7716 Total Submissions: 56446 My Submissions
#
# Given a 2D board containing 'X' and 'O', capture all regions surrounded by
# 'X'. A region is captured by flipping all 'O's into 'X's in that surrounded
# region.
#
# For example,
# X X X X
# X O O X
# X X O X
# X O X X
#
# After running your function, the board should be:
# X X X X
# X X X X
# X X X X
# X O X X
class Solution:
    """LeetCode 130 — capture all 'O' regions fully surrounded by 'X'."""

    # @param board, a 2D array
    # Capture all regions by modifying the input board in-place.
    # Do not return any value.
    def solve(self, board):
        """Flip every surrounded 'O' region to 'X', in place.

        Flood-fills from every border 'O' and marks reachable cells as
        safe ('o'); whatever is still 'O' afterwards is surrounded.

        Fixes: the original compared strings with ``is`` (works only via
        CPython interning) and used ``list.pop(0)`` — an O(n) dequeue.
        A stack with O(1) pops is used instead (visit order does not
        matter for flood fill).
        """
        if not board:
            return
        rows = len(board)
        stack = []
        # Seed the flood fill with every 'O' on the border (rows may be ragged).
        for i in range(rows):
            cols = len(board[i])
            for j in range(cols):
                on_border = i in (0, rows - 1) or j in (0, cols - 1)
                if on_border and board[i][j] == 'O':
                    board[i][j] = 'o'
                    stack.append((i, j))
        # Depth-first flood fill: mark every 'O' connected to the border.
        while stack:
            i, j = stack.pop()
            for di, dj in ((1, 0), (-1, 0), (0, 1), (0, -1)):
                ni, nj = i + di, j + dj
                if 0 <= ni < rows and 0 <= nj < len(board[ni]) and board[ni][nj] == 'O':
                    board[ni][nj] = 'o'
                    stack.append((ni, nj))
        # Safe cells return to 'O'; all remaining 'O's are captured.
        for row in board:
            for j in range(len(row)):
                if row[j] == 'o':
                    row[j] = 'O'
                elif row[j] == 'O':
                    row[j] = 'X'
if __name__ == '__main__':
board = [['X', 'X', 'X', 'X'],
['X', 'O', 'O', 'X'],
['X', 'X', 'O', 'X'],
['X', 'O', 'X', 'X']]
Solution().solve(board)
print(board)
board = []
Solution().solve(board)
print(board)
| 28.736842 | 77 | 0.422619 |
class Solution:
def solve(self, board):
if len(board) is 0:
return
q = []
for j in range(len(board[0])):
if board[0][j] is 'O':
q.append((0, j))
board[0][j] = 'o'
for j in range(len(board[-1])):
if board[len(board)-1][j] is 'O':
q.append((len(board)-1, j))
board[len(board)-1][j] = 'o'
for i in range(len(board)):
n = len(board[i])
if n > 0:
if board[i][0] is 'O':
board[i][0] = 'o'
q.append((i, 0))
if board[i][n-1] is 'O':
board[i][n-1] = 'o'
q.append((i, n-1))
while len(q) > 0:
i, j = q.pop(0)
for di, dj in [(1, 0), (-1, 0), (0, 1), (0, -1)]:
if 0 <= i+di < len(board) and 0 <= j+dj < len(board[i+di]) \
and board[i+di][j+dj] is 'O':
board[i+di][j+dj] = 'o'
q.append((i+di, j+dj))
for row in board:
for j in range(len(row)):
if row[j] is 'o':
row[j] = 'O'
elif row[j] is 'O':
row[j] = 'X'
if __name__ == '__main__':
board = [['X', 'X', 'X', 'X'],
['X', 'O', 'O', 'X'],
['X', 'X', 'O', 'X'],
['X', 'O', 'X', 'X']]
Solution().solve(board)
print(board)
board = []
Solution().solve(board)
print(board)
| true | true |
f72bb9ed94ed86bf9e276452065cfabb93084887 | 647 | py | Python | 4.py | Michanix/Math-Stat-Homework | 18f4f915ea0f2dd7fa14ebaeb0d357510aef4808 | [
"MIT"
] | null | null | null | 4.py | Michanix/Math-Stat-Homework | 18f4f915ea0f2dd7fa14ebaeb0d357510aef4808 | [
"MIT"
] | null | null | null | 4.py | Michanix/Math-Stat-Homework | 18f4f915ea0f2dd7fa14ebaeb0d357510aef4808 | [
"MIT"
] | null | null | null | from math import sqrt
from random import randrange
arr1 = [i for i in range(1, 11)]
arr2 = [i for i in range(1, 11)]
arr3 = [randrange(i) for i in range(1, 11)]
arr4 = [randrange(i) for i in range(1, 11)]
def avg(data):
    """Arithmetic mean of the values in *data*."""
    total = sum(data)
    return total / len(data)
def std(data):
    """Population standard deviation of *data*."""
    mu = avg(data)
    variance = sum((value - mu) ** 2 for value in data) / len(data)
    return variance ** 0.5
def koef_pirsona(x, y):
    """Pearson correlation coefficient of the paired samples *x* and *y*."""
    mean_x, mean_y = avg(x), avg(y)
    mean_xy = avg([xi * yi for xi, yi in zip(x, y)])
    covariance = mean_xy - mean_x * mean_y
    return covariance / (std(x) * std(y))
print(koef_pirsona(arr3, arr4))
| 22.310345 | 61 | 0.591963 | from math import sqrt
from random import randrange
arr1 = [i for i in range(1, 11)]
arr2 = [i for i in range(1, 11)]
arr3 = [randrange(i) for i in range(1, 11)]
arr4 = [randrange(i) for i in range(1, 11)]
def avg(data):
return sum(data) / len(data)
def std(data):
mu = avg(data)
std = (sum([(i - mu)**2 for i in data]) / len(data))**0.5
return std
def koef_pirsona(x, y):
avg_x = avg(x)
avg_y = avg(y)
std_x = std(x)
std_y = std(y)
avg_xy = avg([i*j for i,j in zip(x,y)])
result = (avg_xy - avg_x * avg_y) / (std_x * std_y)
return result
print(koef_pirsona(arr3, arr4))
| true | true |
f72bba260dbdc0d48d601f070334e34f0095da0b | 6,016 | py | Python | scripts/gdc_req_legacy.py | dampierch/herv | 9f1ce0e676977b6c8d25fdf446c0807826b80bea | [
"MIT"
] | null | null | null | scripts/gdc_req_legacy.py | dampierch/herv | 9f1ce0e676977b6c8d25fdf446c0807826b80bea | [
"MIT"
] | null | null | null | scripts/gdc_req_legacy.py | dampierch/herv | 9f1ce0e676977b6c8d25fdf446c0807826b80bea | [
"MIT"
] | null | null | null | '''
this script queries the gdc legacy archive via the search and retrieve api and
returns msi_status object (from files endpoint on legacy)
-- get uuids of xml files with the msi annotations from legacy server
-- download each xml file
-- parse xml files to extract msi annotations for each subject
script should be called from within gdc_ann_make, which itself should be called
as part of snakemake pipeline
-- usage: snakemake setup_tcga
'''
import io
import json
import os
import pandas as pd
import requests
import re
import subprocess
import glob
import xml.etree.ElementTree as ET
modname = 'gdc_req_legacy'
def set_filters():
    """Build the GDC legacy /files query filter as a JSON string.

    Selects open-access files of data_category 'Other' and data_type
    'Auxiliary test' (which carry the MSI annotations) belonging to the
    TCGA-COAD or TCGA-READ projects.
    """
    def _in(field, value):
        # Shorthand for a single {'op': 'in'} leaf clause.
        return {'op': 'in', 'content': {'field': field, 'value': value}}

    project_clause = {
        'op': 'or',
        'content': [
            _in('cases.project.project_id', 'TCGA-COAD'),
            _in('cases.project.project_id', 'TCGA-READ'),
        ],
    }
    file_clause = {
        'op': 'and',
        'content': [
            _in('files.data_category', 'Other'),
            _in('files.data_type', 'Auxiliary test'),
            _in('files.access', 'open'),
        ],
    }
    filters = {'op': 'and', 'content': [project_clause, file_clause]}
    return json.dumps(filters)
def set_fields():
    """Comma-separated list of file attributes to request from the endpoint."""
    return ','.join((
        'file_name',
        'file_id',
        'md5sum',
        'file_size',
        'state',
    ))
def set_params(filters, fields):
    """Assemble the GET parameters for the files-endpoint query.

    The size (1500) is deliberately larger than the expected number of
    matching records so a single request retrieves everything.
    """
    return {
        'filters': filters,
        'fields': fields,
        'format': 'TSV',
        'size': '1500',
    }
def get_results(endpoint, params):
    """GET *endpoint* with *params* and parse the TSV response.

    Returns a pandas DataFrame built from the decoded response body.
    """
    response = requests.get(endpoint, params=params)
    buffer = io.StringIO(response.content.decode('utf-8'))
    return pd.read_table(buffer)
def download_xml_uuid(files_res, dest):
    """Download the XML files listed in *files_res* one at a time.

    Shells out to gdc-client once per UUID and reports progress on stdout.
    """
    downloaded = 0
    for uuid in files_res.id:
        subprocess.call('gdc-client download {} -d {}'.format(uuid, dest), shell=True)
        print('{} downloaded'.format(uuid))
        downloaded += 1
    print('{} files downloaded'.format(downloaded))
def download_xml_manifest(files_res, dest):
    """Write a gdc-client manifest for *files_res* and bulk-download it.

    The manifest is a TSV with the column names gdc-client expects and is
    left at <dest>/manifest.tsv for reference.
    """
    columns = ['file_id', 'file_name', 'md5sum', 'file_size', 'state']
    manifest = files_res[columns]
    manifest.columns = ['id', 'filename', 'md5', 'size', 'state']
    manifest = manifest.sort_values(by=['id'])
    manifest_path = dest + 'manifest.tsv'
    manifest.to_csv(manifest_path, sep='\t', index=False)
    subprocess.call('gdc-client download -m {} -d {}'.format(manifest_path, dest), shell=True)
    print('manifest downloaded')
def parse_xml(files_res, dest):
    """Extract MSI status from the downloaded XML files.

    For every UUID in *files_res*, parses <dest>/<uuid>/*.xml and pulls the
    subject barcode plus the marker-panel MSI call.  Returns a DataFrame
    with 'subject_id' and 'msi_status' columns, one row per file.

    Fixes: ElementTree's ``getiterator()`` was removed in Python 3.9 —
    ``iter()`` is used instead.  Both fields are also reset per file so a
    file missing the tags can no longer inherit stale values from the
    previous file (it yields None instead).
    """
    msi_dict = {'subject_id': [], 'msi_status': []}
    tag1 = 'mononucleotide_and_dinucleotide_marker_panel_analysis_status'
    tag2 = 'mononucleotide_marker_panel_analysis_status'
    file_count = 0
    for uuid in files_res.id:
        pattern = dest + uuid + '/*.xml'
        fn = glob.glob(pattern)[0]
        tree = ET.parse(fn)
        subject_id = None
        msi_status = None
        for elem in tree.iter():
            if 'bcr_patient_barcode' in elem.tag:
                subject_id = elem.text
            if tag1 in elem.tag and elem.text is not None:
                msi_status = elem.text
            elif tag2 in elem.tag and elem.text is not None:
                msi_status = elem.text
        msi_dict['subject_id'].append(subject_id)
        msi_dict['msi_status'].append(msi_status)
        file_count = file_count + 1
    print(' '.join([str(file_count), 'files parsed']))
    return pd.DataFrame.from_dict(msi_dict)
def check_outpath(out_path):
    """Ensure every directory along *out_path* exists, creating missing ones.

    Walks the path segment by segment, printing each directory's state.
    Mirrors the original behavior, including treating the path as rooted
    at '/'.
    """
    partial = ''
    for segment in out_path.strip('/').split('/'):
        partial = partial + '/' + segment
        if os.path.exists(partial):
            print(partial, 'present')
        else:
            print(partial, 'absent')
            print('making', partial, 'now')
            os.mkdir(partial)
def main():
    """Query the GDC legacy archive and return per-subject MSI status.

    Pipeline: build the query, fetch the matching file records, bulk
    download the XML files into $ann_dir/tcga/msi/ via a manifest, then
    parse out the MSI calls.  Requires the 'ann_dir' environment variable
    and the gdc-client binary on PATH.
    """
    endpoint = 'https://api.gdc.cancer.gov/legacy/files/'
    filters = set_filters()
    fields = set_fields()
    params = set_params(filters, fields)
    files_res = get_results(endpoint, params)
    dest = os.environ['ann_dir'] + 'tcga/msi/'
    check_outpath(dest)
    download_xml_manifest(files_res, dest)
    msi_res = parse_xml(files_res, dest)
    return msi_res
if __name__ == '__main__':
    print('This script is not meant to be run as main. See usage statment:')
    print('usage: snakemake setup_tcga')
else:
    # NOTE: importing this module runs the whole query/download/parse
    # pipeline immediately and binds the result to ``msi_res``.
    msi_res = main()
| 27.723502 | 79 | 0.562001 |
import io
import json
import os
import pandas as pd
import requests
import re
import subprocess
import glob
import xml.etree.ElementTree as ET
modname = 'gdc_req_legacy'
def set_filters():
filters = {
'op':'and',
'content':[
{'op':'or',
'content':[
{'op':'in',
'content':{
'field':'cases.project.project_id',
'value':'TCGA-COAD'
}
},
{'op':'in',
'content':{
'field':'cases.project.project_id',
'value':'TCGA-READ'
}
}
]
},
{'op':'and',
'content':[
{'op':'in',
'content':{
'field':'files.data_category',
'value':'Other'
}
},
{'op':'in',
'content':{
'field':'files.data_type',
'value':'Auxiliary test'
}
},
{'op':'in',
'content':{
'field':'files.access',
'value':'open'
}
}
]
}
]
}
filters = json.dumps(filters)
return filters
def set_fields():
fields = [
'file_name',
'file_id',
'md5sum',
'file_size',
'state'
]
fields = ','.join(fields)
return fields
def set_params(filters,fields):
params = {
'filters': filters,
'fields': fields,
'format': 'TSV',
'size': '1500'
}
return params
def get_results(endpoint,params):
response = requests.get(endpoint, params=params)
object = io.StringIO(response.content.decode('utf-8'))
results = pd.read_table(object)
return results
def download_xml_uuid(files_res,dest):
file_count = 0
for uuid in files_res.id:
cmd = ' '.join(['gdc-client download',uuid,'-d',dest])
subprocess.call(cmd, shell=True)
print(' '.join([uuid,'downloaded']))
file_count = file_count + 1
print(' '.join([str(file_count),'files downloaded']))
def download_xml_manifest(files_res,dest):
select = ['file_id', 'file_name', 'md5sum', 'file_size', 'state']
manifest = files_res[select]
manifest.columns = ['id', 'filename', 'md5', 'size', 'state']
manifest = manifest.sort_values(by=['id'])
out_file = dest + 'manifest.tsv'
manifest.to_csv(out_file, sep='\t', index=False)
cmd = ' '.join(['gdc-client download','-m',out_file,'-d',dest])
subprocess.call(cmd, shell=True)
print('manifest downloaded')
def parse_xml(files_res,dest):
msi_dict = {}
msi_dict['subject_id'] = []
msi_dict['msi_status'] = []
tag1 = 'mononucleotide_and_dinucleotide_marker_panel_analysis_status'
tag2 = 'mononucleotide_marker_panel_analysis_status'
file_count = 0
for uuid in files_res.id:
pattern = dest + uuid + '/*.xml'
fn = glob.glob(pattern)[0]
tree = ET.parse(fn)
for elem in tree.getiterator():
if 'bcr_patient_barcode' in elem.tag:
subject_id = elem.text
if tag1 in elem.tag and elem.text != None:
msi_status = elem.text
elif tag2 in elem.tag and elem.text != None:
msi_status = elem.text
msi_dict['subject_id'].append(subject_id)
msi_dict['msi_status'].append(msi_status)
file_count = file_count + 1
print(' '.join([str(file_count),'files parsed']))
msi_res = pd.DataFrame.from_dict(msi_dict)
return msi_res
def check_outpath(out_path):
l = out_path.strip('/').split('/')
d = ''
for e in l:
d = d + '/' + e
if os.path.exists(d):
print(d,'present')
else:
print(d,'absent')
print('making',d,'now')
os.mkdir(d)
def main():
endpoint = 'https://api.gdc.cancer.gov/legacy/files/'
filters = set_filters()
fields = set_fields()
params = set_params(filters, fields)
files_res = get_results(endpoint, params)
dest = os.environ['ann_dir'] + 'tcga/msi/'
check_outpath(dest)
download_xml_manifest(files_res, dest)
msi_res = parse_xml(files_res, dest)
return msi_res
if __name__ == '__main__':
print('This script is not meant to be run as main. See usage statment:')
print('usage: snakemake setup_tcga')
else:
msi_res = main()
| true | true |
f72bba363e60dcda05c1e3254d05eaf5e83fbdac | 10,188 | py | Python | esse3api/esse3api.py | hpsc-smartlab/esse3api | 416c52149f28c886cab72671b20b209b40857edf | [
"MIT"
] | 2 | 2018-04-04T15:56:40.000Z | 2018-05-23T11:46:06.000Z | esse3api/esse3api.py | hpsc-smartlab/esse3api | 416c52149f28c886cab72671b20b209b40857edf | [
"MIT"
] | null | null | null | esse3api/esse3api.py | hpsc-smartlab/esse3api | 416c52149f28c886cab72671b20b209b40857edf | [
"MIT"
] | null | null | null |
import json, sys, re, urllib, urllib2, socket, json, pydoc, cgi, os, time, inspect
from hashlib import md5
from datetime import datetime
import time
import csv
from scraper import Scraper
from flask import Flask
from flask import Response
from flask import request
from flask import jsonify
from flask import current_app
from flask import make_response
from flask import session
from flask import url_for
from flask import redirect
from flask import render_template
from flask import abort
from flask import g
from flask import flash
from flask import _app_ctx_stack
from flask_restplus import Resource, Api
from flask_restplus import fields
from functools import wraps
from functools import update_wrapper
import logging
import traceback
log = logging.getLogger(__name__)
app = Flask(__name__)
api = Api(app)
app.config.from_object(__name__) # load config from this file , esse3api.py
# Load default config and override config from an environment variable
app.config.update(dict(
SECRET_KEY='development key',
USERNAME='admin',
PASSWORD='default'
))
app.config.from_envvar('ESSE3API_SETTINGS', silent=True)
@api.errorhandler
def default_error_handler(e):
    """Catch-all API error handler: log the exception, hide details.

    Fix: the original tested ``settings.FLASK_DEBUG`` but no ``settings``
    module is imported anywhere in this file, so any unhandled exception
    raised a NameError inside the handler.  The debug flag is read from
    the Flask app config instead.
    """
    message = 'An unhandled exception occurred.'
    log.exception(message)
    if not app.config.get('DEBUG'):
        return {'message': message}, 500
#### CROSSDOMAIN DECORATOR ####
def crossdomain(origin=None, methods=None, headers=None, max_age=21600, attach_to_all=True, automatic_options=True):
    """Decorator factory adding CORS headers to a Flask view.

    Fix: ``timedelta`` was referenced without being imported (the module
    only imports ``datetime.datetime``), so passing a timedelta ``max_age``
    raised NameError; it is now imported locally.

    NOTE(review): ``basestring`` keeps this Python-2-only, consistent with
    the module's ``urllib2`` import.
    """
    from datetime import timedelta  # module-level import only brings in datetime.datetime

    if methods is not None:
        methods = ', '.join(sorted(x.upper() for x in methods))
    if headers is not None and not isinstance(headers, basestring):
        headers = ', '.join(x.upper() for x in headers)
    if not isinstance(origin, basestring):
        origin = ', '.join(origin)
    if isinstance(max_age, timedelta):
        max_age = max_age.total_seconds()

    def get_methods():
        # Honor an explicit methods list, else mirror Flask's OPTIONS 'allow'.
        if methods is not None:
            return methods
        options_resp = current_app.make_default_options_response()
        return options_resp.headers['allow']

    def decorator(f):
        def wrapped_function(*args, **kwargs):
            if automatic_options and request.method == 'OPTIONS':
                resp = current_app.make_default_options_response()
            else:
                resp = make_response(f(*args, **kwargs))
            if not attach_to_all and request.method != 'OPTIONS':
                return resp
            h = resp.headers
            h['Access-Control-Allow-Origin'] = origin
            h['Access-Control-Allow-Methods'] = get_methods()
            h['Access-Control-Max-Age'] = str(max_age)
            h['Access-Control-Allow-Credentials'] = 'true'
            h['Access-Control-Allow-Headers'] = "Origin, X-Requested-With, Content-Type, Accept, Authorization"
            if headers is not None:
                h['Access-Control-Allow-Headers'] = headers
            return resp

        f.provide_automatic_options = False
        return update_wrapper(wrapped_function, f)
    return decorator
#### JSONP DECORATOR ####
def jsonp(func):
    """Decorator: serve a JSON view as JSONP when the request carries a
    ``callback`` query parameter, otherwise return the response untouched.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        callback = request.args.get('callback', False)
        if not callback:
            return func(*args, **kwargs)
        payload = str(func(*args, **kwargs).data)
        body = str(callback) + '(' + payload + ')'
        return current_app.response_class(body, mimetype='application/javascript')
    return wrapper
# Shared request parser: every endpoint authenticates with these two
# form fields.
parser = api.parser()
parser.add_argument('username', help='The username', location='form')
parser.add_argument('password', help='The password', location='form')  # fixed "passowrd" typo in the help text
@api.route('/dati_anagrafici')
class DatiAnagrafici(Resource):
    @api.doc(parser=parser)
    def post(self):
        """Return the student's personal data as JSON.

        Form params: ``username``, ``password`` (esse3 credentials).
        """
        credentials = parser.parse_args(strict=True)
        log.info(credentials['username'])
        scraper = Scraper(credentials['username'], credentials['password'])
        return jsonify(scraper.dati_anagrafici())
@api.route('/login')
class Login(Resource):
    @api.doc(parser=parser)
    def post(self):
        """Log in to the esse3 portal and return the login result as JSON.

        Form params: ``username``, ``password`` (esse3 credentials).
        """
        credentials = parser.parse_args(strict=True)
        scraper = Scraper(credentials['username'], credentials['password'])
        return jsonify(scraper.login())
@api.route('/riepilogo_esami')
class RiepilogoEsami(Resource):
    @api.doc(parser=parser)
    def post(self):
        """Return the summary of exams taken by the student as JSON.

        Form params: ``username``, ``password`` (esse3 credentials).
        """
        credentials = parser.parse_args(strict=True)
        scraper = Scraper(credentials['username'], credentials['password'])
        return jsonify(scraper.riepilogo_esami())
@api.route('/residenza')
class Residenza(Resource):
    @api.doc(parser=parser)
    def post(self):
        """Return the student's residence as JSON.

        Form params: ``username``, ``password`` (esse3 credentials).
        """
        credentials = parser.parse_args(strict=True)
        scraper = Scraper(credentials['username'], credentials['password'])
        return jsonify(scraper.residenza())
@api.route('/domicilio')
class Domicilio(Resource):
    @api.doc(parser=parser)
    def post(self):
        """Return the student's domicile as JSON.

        Form params: ``username``, ``password`` (esse3 credentials).
        """
        credentials = parser.parse_args(strict=True)
        scraper = Scraper(credentials['username'], credentials['password'])
        return jsonify(scraper.domicilio())
@api.route('/libretto')
class Libretto(Resource):
    @api.doc(parser=parser)
    def post(self):
        """Return the student's university record book as JSON.

        Form params: ``username``, ``password`` (esse3 credentials).
        """
        credentials = parser.parse_args(strict=True)
        scraper = Scraper(credentials['username'], credentials['password'])
        return jsonify(scraper.libretto())
@api.route('/pagamenti')
class Pagamenti(Resource):
    @api.doc(parser=parser)
    def post(self):
        """Return the payments made by the student as JSON.

        Form params: ``username``, ``password`` (esse3 credentials).
        """
        credentials = parser.parse_args(strict=True)
        scraper = Scraper(credentials['username'], credentials['password'])
        return jsonify(scraper.pagamenti())
@api.route('/prenotazioni_effettuate')
class PrenotazioniEffettuate(Resource):
    @api.doc(parser=parser)
    def post(self):
        """Return the student's exam-session bookings as JSON.

        Form params: ``username``, ``password`` (esse3 credentials).
        """
        credentials = parser.parse_args(strict=True)
        scraper = Scraper(credentials['username'], credentials['password'])
        return jsonify(scraper.prenotazioni_effettuate())
@api.route('/piano')
class Piano(Resource):
    @api.doc(parser=parser)
    def post(self):
        """Return the student's study plan as JSON.

        Form params: ``username``, ``password`` (esse3 credentials).
        """
        credentials = parser.parse_args(strict=True)
        scraper = Scraper(credentials['username'], credentials['password'])
        return jsonify(scraper.piano())
@api.route('/pannello')
class Pannello(Resource):
    @api.doc(parser=parser)
    def post(self):
        """Return the student's control panel as JSON.

        Form params: ``username``, ``password`` (esse3 credentials).
        """
        credentials = parser.parse_args(strict=True)
        scraper = Scraper(credentials['username'], credentials['password'])
        return jsonify(scraper.pannello_di_controllo())
if __name__ == '__main__':
    # Development entry point: built-in server with the debugger enabled.
    app.run(debug=True)
| 31.060976 | 116 | 0.575775 |
import json, sys, re, urllib, urllib2, socket, json, pydoc, cgi, os, time, inspect
from hashlib import md5
from datetime import datetime
import time
import csv
from scraper import Scraper
from flask import Flask
from flask import Response
from flask import request
from flask import jsonify
from flask import current_app
from flask import make_response
from flask import session
from flask import url_for
from flask import redirect
from flask import render_template
from flask import abort
from flask import g
from flask import flash
from flask import _app_ctx_stack
from flask_restplus import Resource, Api
from flask_restplus import fields
from functools import wraps
from functools import update_wrapper
import logging
import traceback
log = logging.getLogger(__name__)
app = Flask(__name__)
api = Api(app)
app.config.from_object(__name__)
app.config.update(dict(
SECRET_KEY='development key',
USERNAME='admin',
PASSWORD='default'
))
app.config.from_envvar('ESSE3API_SETTINGS', silent=True)
@api.errorhandler
def default_error_handler(e):
message = 'An unhandled exception occurred.'
log.exception(message)
if not settings.FLASK_DEBUG:
return {'message': message}, 500
matic_options=True):
if methods is not None:
methods = ', '.join(sorted(x.upper() for x in methods))
if headers is not None and not isinstance(headers, basestring):
headers = ', '.join(x.upper() for x in headers)
if not isinstance(origin, basestring):
origin = ', '.join(origin)
if isinstance(max_age, timedelta):
max_age = max_age.total_seconds()
def get_methods():
if methods is not None:
return methods
options_resp = current_app.make_default_options_response()
return options_resp.headers['allow']
def decorator(f):
def wrapped_function(*args, **kwargs):
if automatic_options and request.method == 'OPTIONS':
resp = current_app.make_default_options_response()
else:
resp = make_response(f(*args, **kwargs))
if not attach_to_all and request.method != 'OPTIONS':
return resp
h = resp.headers
h['Access-Control-Allow-Origin'] = origin
h['Access-Control-Allow-Methods'] = get_methods()
h['Access-Control-Max-Age'] = str(max_age)
h['Access-Control-Allow-Credentials'] = 'true'
h['Access-Control-Allow-Headers'] = "Origin, X-Requested-With, Content-Type, Accept, Authorization"
if headers is not None:
h['Access-Control-Allow-Headers'] = headers
return resp
f.provide_automatic_options = False
return update_wrapper(wrapped_function, f)
return decorator
callback = request.args.get('callback', False)
if callback:
data = str(func(*args, **kwargs).data)
content = str(callback) + '(' + data + ')'
mimetype = 'application/javascript'
return current_app.response_class(content, mimetype=mimetype)
else:
return func(*args, **kwargs)
return decorated_function
parser = api.parser()
parser.add_argument('username', help='The username', location='form')
parser.add_argument('password', help='The passowrd', location='form')
@api.route('/dati_anagrafici')
class DatiAnagrafici(Resource):
@api.doc(parser=parser)
def post(self):
args = parser.parse_args(strict=True)
username = args['username']
password = args['password']
log.info(username)
s = Scraper(username, password)
return jsonify(s.dati_anagrafici())
@api.route('/login')
class Login(Resource):
@api.doc(parser=parser)
def post(self):
args = parser.parse_args(strict=True)
username = args['username']
password = args['password']
s = Scraper(username, password)
return jsonify(s.login())
@api.route('/riepilogo_esami')
class RiepilogoEsami(Resource):
@api.doc(parser=parser)
def post(self) :
args = parser.parse_args(strict=True)
username = args['username']
password = args['password']
s = Scraper(username, password)
return jsonify(s.riepilogo_esami())
@api.route('/residenza')
class Residenza(Resource):
@api.doc(parser=parser)
def post(self) :
args = parser.parse_args(strict=True)
username = args['username']
password = args['password']
s = Scraper(username, password)
return jsonify(s.residenza())
@api.route('/domicilio')
class Domicilio(Resource):
@api.doc(parser=parser)
def post(self) :
args = parser.parse_args(strict=True)
username = args['username']
password = args['password']
s = Scraper(username, password)
return jsonify(s.domicilio())
@api.route('/libretto')
class Libretto(Resource):
@api.doc(parser=parser)
def post(self) :
args = parser.parse_args(strict=True)
username = args['username']
password = args['password']
s = Scraper(username, password)
return jsonify(s.libretto())
@api.route('/pagamenti')
class Pagamenti(Resource):
@api.doc(parser=parser)
def post(self) :
args = parser.parse_args(strict=True)
username = args['username']
password = args['password']
s = Scraper(username, password)
return jsonify(s.pagamenti())
@api.route('/prenotazioni_effettuate')
class PrenotazioniEffettuate(Resource):
@api.doc(parser=parser)
def post(self) :
args = parser.parse_args(strict=True)
username = args['username']
password = args['password']
s = Scraper(username, password)
return jsonify(s.prenotazioni_effettuate())
@api.route('/piano')
class Piano(Resource):
@api.doc(parser=parser)
def post(self) :
args = parser.parse_args(strict=True)
username = args['username']
password = args['password']
s = Scraper(username, password)
return jsonify(s.piano())
@api.route('/pannello')
class Pannello(Resource):
@api.doc(parser=parser)
def post(self) :
args = parser.parse_args(strict=True)
username = args['username']
password = args['password']
s = Scraper(username, password)
return jsonify(s.pannello_di_controllo())
if __name__ == '__main__':
app.debug = True
app.run()
| true | true |
f72bba8e3526c1ea6ffcb838cd2c3598c2a72e30 | 3,094 | py | Python | app/forms.py | credwood/bitplayers | 4ca6b6c6a21bb21d7cd963c64028415559c3dcc4 | [
"MIT"
] | 1 | 2020-06-26T21:49:14.000Z | 2020-06-26T21:49:14.000Z | app/forms.py | credwood/bitplayers | 4ca6b6c6a21bb21d7cd963c64028415559c3dcc4 | [
"MIT"
] | 2 | 2020-03-31T11:11:04.000Z | 2021-12-13T20:38:48.000Z | app/forms.py | credwood/bitplayers | 4ca6b6c6a21bb21d7cd963c64028415559c3dcc4 | [
"MIT"
] | null | null | null | from flask_wtf import FlaskForm, RecaptchaField
from wtforms import BooleanField, TextAreaField
from wtforms import PasswordField
from wtforms import StringField
from wtforms import SubmitField, TextField
from wtforms import Form, BooleanField, validators
from wtforms.validators import DataRequired, InputRequired, EqualTo, Length, Email, ValidationError
from wtforms.fields.html5 import EmailField
from wtf_tinymce.forms.fields import TinyMceField
from .models import Blog, User
class NewPost(FlaskForm):
    """Form for authoring a new blog post."""
    blog_title = StringField('Title', validators=[DataRequired(message="All posts must have a title")])
    blog_slug = StringField('Slug', validators=[DataRequired()])
    blog_author = StringField('By', validators=[DataRequired()])
    blog_content = TextAreaField(validators=[DataRequired()])
    submit = SubmitField('submit', validators=[DataRequired()])
class LoginForm(FlaskForm):
    """Form for logging an existing user in."""
    username = StringField('Username', validators=[DataRequired()])
    password = PasswordField('Password', validators=[DataRequired()])
    submit = SubmitField('Login')
    remember_me = BooleanField('Remember Me')
class RequestResetForm(FlaskForm):
    """Form for requesting a password-reset email."""
    email = EmailField('Email address', [validators.DataRequired(), validators.Email()])
    submit = SubmitField('Request Password Reset')
    def validate_email(self, email):
        """Reject addresses that have no registered account."""
        user = User.query.filter_by(email=email.data).first()
        if not user:
            raise ValidationError("No account registered with that email. ")
class RegistrationForm(FlaskForm):
    """Form for creating a new account."""
    username = StringField('Username', validators=[DataRequired()])
    password = PasswordField('Password', validators=[DataRequired()])
    confirm = PasswordField('Confirm Password', validators=[DataRequired(),InputRequired(), EqualTo('password', message='Passwords must match')])
    email = EmailField('Email address', [validators.DataRequired(), validators.Email()])
    submit = SubmitField('Register')
    def validate_username(self, username):
        """Reject usernames that are already taken."""
        user = User.query.filter_by(username=username.data).first()
        if user:
            raise ValidationError("That username is already taken. Please try another")
    def validate_email(self, email):
        """Reject email addresses that already have an account."""
        user = User.query.filter_by(email=email.data).first()
        if user is not None:
            raise ValidationError("There is an account associated with this email address already.")
class ResetPassword(FlaskForm):
    """Form for choosing a new password after a reset request."""
    password = PasswordField('Password', validators=[DataRequired()])
    confirm = PasswordField('Confirm Password', validators=[DataRequired(),InputRequired(), EqualTo('password', message='Passwords must match')])
    submit = SubmitField('Reset Password')
class ContactForm(FlaskForm):
    """Contact form."""
    name = StringField('Name', [
        DataRequired()])
    email = StringField('Email', [
        Email(message=('Not a valid email address.')),
        DataRequired()])
    body = TextField('Message', [
        DataRequired(),
        Length(min=4, message=('Your message is too short.'))])
    # Recaptcha is currently disabled; restore the field below to re-enable it.
    #recaptcha = RecaptchaField()
    submit = SubmitField('Submit')
| 44.84058 | 145 | 0.72075 | from flask_wtf import FlaskForm, RecaptchaField
from wtforms import BooleanField, TextAreaField
from wtforms import PasswordField
from wtforms import StringField
from wtforms import SubmitField, TextField
from wtforms import Form, BooleanField, validators
from wtforms.validators import DataRequired, InputRequired, EqualTo, Length, Email, ValidationError
from wtforms.fields.html5 import EmailField
from wtf_tinymce.forms.fields import TinyMceField
from .models import Blog, User
class NewPost(FlaskForm):
blog_title = StringField('Title', validators=[DataRequired(message="All posts must have a title")])
blog_slug = StringField('Slug', validators=[DataRequired()])
blog_author = StringField('By', validators=[DataRequired()])
blog_content = TextAreaField(validators=[DataRequired()])
submit = SubmitField('submit', validators=[DataRequired()])
class LoginForm(FlaskForm):
username = StringField('Username', validators=[DataRequired()])
password = PasswordField('Password', validators=[DataRequired()])
submit = SubmitField('Login')
remember_me = BooleanField('Remember Me')
class RequestResetForm(FlaskForm):
email = EmailField('Email address', [validators.DataRequired(), validators.Email()])
submit = SubmitField('Request Password Reset')
def validate_email(self, email):
user = User.query.filter_by(email=email.data).first()
if not user:
raise ValidationError("No account registered with that email. ")
class RegistrationForm(FlaskForm):
username = StringField('Username', validators=[DataRequired()])
password = PasswordField('Password', validators=[DataRequired()])
confirm = PasswordField('Confirm Password', validators=[DataRequired(),InputRequired(), EqualTo('password', message='Passwords must match')])
email = EmailField('Email address', [validators.DataRequired(), validators.Email()])
submit = SubmitField('Register')
def validate_username(self, username):
user = User.query.filter_by(username=username.data).first()
if user:
raise ValidationError("That username is already taken. Please try another")
def validate_email(self, email):
user = User.query.filter_by(email=email.data).first()
if user is not None:
raise ValidationError("There is an account associated with this email address already.")
class ResetPassword(FlaskForm):
password = PasswordField('Password', validators=[DataRequired()])
confirm = PasswordField('Confirm Password', validators=[DataRequired(),InputRequired(), EqualTo('password', message='Passwords must match')])
submit = SubmitField('Reset Password')
class ContactForm(FlaskForm):
name = StringField('Name', [
DataRequired()])
email = StringField('Email', [
Email(message=('Not a valid email address.')),
DataRequired()])
body = TextField('Message', [
DataRequired(),
Length(min=4, message=('Your message is too short.'))])
submit = SubmitField('Submit')
| true | true |
f72bba9d57d9b9c86ad129995eaf5cd82c8bbdc8 | 5,764 | py | Python | test/integration/ggrc/proposal/test_proposal_email.py | MikalaiMikalalai/ggrc-core | f0f83b3638574bb64de474f3b70ed27436ca812a | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2019-01-12T23:46:00.000Z | 2019-01-12T23:46:00.000Z | test/integration/ggrc/proposal/test_proposal_email.py | MikalaiMikalalai/ggrc-core | f0f83b3638574bb64de474f3b70ed27436ca812a | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | test/integration/ggrc/proposal/test_proposal_email.py | MikalaiMikalalai/ggrc-core | f0f83b3638574bb64de474f3b70ed27436ca812a | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # Copyright (C) 2020 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""This module contains test about sending emails for proposals."""
import ddt
import mock
from ggrc.notifications import fast_digest
from integration.ggrc import TestCase
from integration.ggrc.api_helper import Api
from integration.ggrc.models import factories
@ddt.ddt
class TestProposalEmail(TestCase):
  """Test case about email sending and email presenting for proposals."""
  def setUp(self):
    super(TestProposalEmail, self).setUp()
    self.api = Api()
    # Authenticate the test client session before each test.
    self.client.get("/login")
  @ddt.data(True, False)
  def test_email_presentation(self, is_admin):
    """Test presentation of proposal digest email if is_admin is {0}."""
    person = factories.PersonFactory()
    self.api.set_user(person=person)
    # Only admins may view the fast-digest preview page.
    with mock.patch("ggrc.rbac.permissions.is_admin", return_value=is_admin):
      resp = self.client.get("/_notifications/show_fast_digest")
    if is_admin:
      self.assert200(resp)
    else:
      self.assert403(resp)
  def test_email_sending(self):
    """Test sending emails about proposals."""
    # Two roles that trigger proposal notifications and one that does not.
    role_1 = factories.AccessControlRoleFactory(object_type="Program",
                                                notify_about_proposal=True)
    role_2 = factories.AccessControlRoleFactory(object_type="Program",
                                                notify_about_proposal=True)
    role_3 = factories.AccessControlRoleFactory(object_type="Program",
                                                notify_about_proposal=False)
    with factories.single_commit():
      program = factories.ProgramFactory()
      person_1 = factories.PersonFactory()  # has 1 role
      person_2 = factories.PersonFactory()  # has no roles
      person_3 = factories.PersonFactory()  # not related to program at all
      factories.PersonFactory()  # not related to program at all
      factories.AccessControlPersonFactory(
          ac_list=program.acr_acl_map[role_1],
          person=person_1
      )
      factories.AccessControlPersonFactory(
          ac_list=program.acr_acl_map[role_1],
          person=person_3
      )
      factories.AccessControlPersonFactory(
          ac_list=program.acr_acl_map[role_2],
          person=person_3
      )
      factories.AccessControlPersonFactory(
          ac_list=program.acr_acl_map[role_3],
          person=person_2
      )
    proposal_1 = factories.ProposalFactory(
        instance=program,
        content={
            "fields": {"title": "a"},
            "access_control_list": {},
            "custom_attribute_values": {},
            "mapping_fields": {},
            "mapping_list_fields": {},
        },
        agenda="agenda 1")
    proposal_2 = factories.ProposalFactory(
        instance=program,
        content={
            "fields": {"title": "b"},
            "access_control_list": {},
            "custom_attribute_values": {},
            "mapping_fields": {},
            "mapping_list_fields": {},
        },
        agenda="agenda 2")
    self.assertIsNone(proposal_1.proposed_notified_datetime)
    self.assertIsNone(proposal_2.proposed_notified_datetime)
    with mock.patch("ggrc.notifications.common.send_email") as send_email_mock:
      with mock.patch.object(fast_digest.DIGEST_TMPL,
                             "render") as bodybuilder_mock:
        fast_digest.send_notification()
    self.assertIsNotNone(proposal_1.proposed_notified_datetime)
    self.assertIsNotNone(proposal_2.proposed_notified_datetime)
    self.assertEqual(2, len(bodybuilder_mock.call_args_list))
    self.assertEqual(2, len(send_email_mock.call_args_list))
    # email to each required person
    self.assertListEqual(
        sorted([person_1.email, person_3.email]),
        sorted([a[1]["user_email"] for a in send_email_mock.call_args_list]))
    # no matter how many roles, each proposal should be notified
    # only once for that person
    self.assertListEqual(
        [2] * 2,
        [len(a[1]["proposals"]) for a in bodybuilder_mock.call_args_list])
  @ddt.data(
      'Program Managers',
      'Program Editors',
      'Primary Contacts'
  )
  def test_email_proposal_program(self, role_name):
    """Test sending email to Program manager/Editor/Primary Contacts"""
    from ggrc.models import all_models
    role_1 = all_models.AccessControlRole.query.filter(
        all_models.AccessControlRole.name == role_name,
        all_models.AccessControlRole.object_type == 'Program',
    ).one()
    with factories.single_commit():
      program = factories.ProgramFactory()
      person_1 = factories.PersonFactory()  # has 1 role
      factories.AccessControlPersonFactory(
          ac_list=program.acr_acl_map[role_1],
          person=person_1
      )
    proposal_1 = factories.ProposalFactory(
        instance=program,
        content={
            "fields": {"title": "a"},
            "access_control_list": {},
            "custom_attribute_values": {},
            "mapping_fields": {},
            "mapping_list_fields": {},
        },
        agenda="agenda 1")
    self.assertIsNone(proposal_1.proposed_notified_datetime)
    with mock.patch("ggrc.notifications.common.send_email") as send_email_mock:
      with mock.patch.object(fast_digest.DIGEST_TMPL,
                             "render") as bodybuilder_mock:
        fast_digest.send_notification()
    self.assertIsNotNone(proposal_1.proposed_notified_datetime)
    self.assertEqual(1, len(bodybuilder_mock.call_args_list))
    self.assertEqual(1, len(send_email_mock.call_args_list))
    # email to each required person
    self.assertEqual(
        [person_1.email],
        [a[1]["user_email"] for a in send_email_mock.call_args_list])
| 39.479452 | 79 | 0.653539 |
import ddt
import mock
from ggrc.notifications import fast_digest
from integration.ggrc import TestCase
from integration.ggrc.api_helper import Api
from integration.ggrc.models import factories
@ddt.ddt
class TestProposalEmail(TestCase):
def setUp(self):
super(TestProposalEmail, self).setUp()
self.api = Api()
self.client.get("/login")
@ddt.data(True, False)
def test_email_presentation(self, is_admin):
person = factories.PersonFactory()
self.api.set_user(person=person)
with mock.patch("ggrc.rbac.permissions.is_admin", return_value=is_admin):
resp = self.client.get("/_notifications/show_fast_digest")
if is_admin:
self.assert200(resp)
else:
self.assert403(resp)
def test_email_sending(self):
role_1 = factories.AccessControlRoleFactory(object_type="Program",
notify_about_proposal=True)
role_2 = factories.AccessControlRoleFactory(object_type="Program",
notify_about_proposal=True)
role_3 = factories.AccessControlRoleFactory(object_type="Program",
notify_about_proposal=False)
with factories.single_commit():
program = factories.ProgramFactory()
person_1 = factories.PersonFactory()
person_2 = factories.PersonFactory()
person_3 = factories.PersonFactory()
factories.PersonFactory()
factories.AccessControlPersonFactory(
ac_list=program.acr_acl_map[role_1],
person=person_1
)
factories.AccessControlPersonFactory(
ac_list=program.acr_acl_map[role_1],
person=person_3
)
factories.AccessControlPersonFactory(
ac_list=program.acr_acl_map[role_2],
person=person_3
)
factories.AccessControlPersonFactory(
ac_list=program.acr_acl_map[role_3],
person=person_2
)
proposal_1 = factories.ProposalFactory(
instance=program,
content={
"fields": {"title": "a"},
"access_control_list": {},
"custom_attribute_values": {},
"mapping_fields": {},
"mapping_list_fields": {},
},
agenda="agenda 1")
proposal_2 = factories.ProposalFactory(
instance=program,
content={
"fields": {"title": "b"},
"access_control_list": {},
"custom_attribute_values": {},
"mapping_fields": {},
"mapping_list_fields": {},
},
agenda="agenda 2")
self.assertIsNone(proposal_1.proposed_notified_datetime)
self.assertIsNone(proposal_2.proposed_notified_datetime)
with mock.patch("ggrc.notifications.common.send_email") as send_email_mock:
with mock.patch.object(fast_digest.DIGEST_TMPL,
"render") as bodybuilder_mock:
fast_digest.send_notification()
self.assertIsNotNone(proposal_1.proposed_notified_datetime)
self.assertIsNotNone(proposal_2.proposed_notified_datetime)
self.assertEqual(2, len(bodybuilder_mock.call_args_list))
self.assertEqual(2, len(send_email_mock.call_args_list))
self.assertListEqual(
sorted([person_1.email, person_3.email]),
sorted([a[1]["user_email"] for a in send_email_mock.call_args_list]))
self.assertListEqual(
[2] * 2,
[len(a[1]["proposals"]) for a in bodybuilder_mock.call_args_list])
@ddt.data(
'Program Managers',
'Program Editors',
'Primary Contacts'
)
def test_email_proposal_program(self, role_name):
from ggrc.models import all_models
role_1 = all_models.AccessControlRole.query.filter(
all_models.AccessControlRole.name == role_name,
all_models.AccessControlRole.object_type == 'Program',
).one()
with factories.single_commit():
program = factories.ProgramFactory()
person_1 = factories.PersonFactory()
factories.AccessControlPersonFactory(
ac_list=program.acr_acl_map[role_1],
person=person_1
)
proposal_1 = factories.ProposalFactory(
instance=program,
content={
"fields": {"title": "a"},
"access_control_list": {},
"custom_attribute_values": {},
"mapping_fields": {},
"mapping_list_fields": {},
},
agenda="agenda 1")
self.assertIsNone(proposal_1.proposed_notified_datetime)
with mock.patch("ggrc.notifications.common.send_email") as send_email_mock:
with mock.patch.object(fast_digest.DIGEST_TMPL,
"render") as bodybuilder_mock:
fast_digest.send_notification()
self.assertIsNotNone(proposal_1.proposed_notified_datetime)
self.assertEqual(1, len(bodybuilder_mock.call_args_list))
self.assertEqual(1, len(send_email_mock.call_args_list))
self.assertEqual(
[person_1.email],
[a[1]["user_email"] for a in send_email_mock.call_args_list])
| true | true |
f72bbb6109eb7cd126564a721c3685fff15dd47b | 1,919 | py | Python | pyretrommo/gen/player_stats.py | snwhd/pyretrommo | fabb523d9b4385ed8a1ff0b2ac787cc5d88a23b7 | [
"blessing"
] | 1 | 2021-11-25T09:33:30.000Z | 2021-11-25T09:33:30.000Z | pyretrommo/gen/player_stats.py | snwhd/pyretrommo | fabb523d9b4385ed8a1ff0b2ac787cc5d88a23b7 | [
"blessing"
] | null | null | null | pyretrommo/gen/player_stats.py | snwhd/pyretrommo | fabb523d9b4385ed8a1ff0b2ac787cc5d88a23b7 | [
"blessing"
] | null | null | null | #!/usr/bin/env python3
# this file is auto-generated by gen_from_wiki.py
from __future__ import annotations
from .player_class import PlayerClass
from ..stats import Stats
# Per-class stat progression tables indexed by character level.  Index 0 is
# an all-zero placeholder so that ``table[level]`` works with 1-based levels.
# NOTE(review): the three class tables are currently identical; presumably the
# source wiki data had not diverged per class when this was generated --
# confirm against the wiki before relying on class differences.
STATS_BY_PLAYER_CLASS = {
    PlayerClass.Cleric: [
        Stats(*(0, 0, 0, 0, 0, 0, 0, 0)),
        Stats(*(17, 11, 8, 9, 12, 12, 10, 11)),
        Stats(*(23, 15, 9, 11, 15, 14, 12, 13)),
        Stats(*(29, 19, 11, 12, 17, 16, 14, 16)),
        Stats(*(35, 23, 12, 14, 20, 19, 16, 18)),
        Stats(*(40, 26, 14, 16, 22, 21, 18, 20)),
        Stats(*(46, 30, 16, 18, 25, 23, 20, 22)),
        Stats(*(52, 34, 17, 20, 27, 26, 22, 24)),
        Stats(*(58, 38, 19, 21, 30, 28, 23, 27)),
        Stats(*(63, 41, 20, 23, 32, 30, 25, 29)),
        Stats(*(69, 45, 22, 25, 35, 33, 27, 31)),
    ],
    PlayerClass.Warrior: [
        Stats(*(0, 0, 0, 0, 0, 0, 0, 0)),
        Stats(*(17, 11, 8, 9, 12, 12, 10, 11)),
        Stats(*(23, 15, 9, 11, 15, 14, 12, 13)),
        Stats(*(29, 19, 11, 12, 17, 16, 14, 16)),
        Stats(*(35, 23, 12, 14, 20, 19, 16, 18)),
        Stats(*(40, 26, 14, 16, 22, 21, 18, 20)),
        Stats(*(46, 30, 16, 18, 25, 23, 20, 22)),
        Stats(*(52, 34, 17, 20, 27, 26, 22, 24)),
        Stats(*(58, 38, 19, 21, 30, 28, 23, 27)),
        Stats(*(63, 41, 20, 23, 32, 30, 25, 29)),
        Stats(*(69, 45, 22, 25, 35, 33, 27, 31)),
    ],
    PlayerClass.Wizard: [
        Stats(*(0, 0, 0, 0, 0, 0, 0, 0)),
        Stats(*(17, 11, 8, 9, 12, 12, 10, 11)),
        Stats(*(23, 15, 9, 11, 15, 14, 12, 13)),
        Stats(*(29, 19, 11, 12, 17, 16, 14, 16)),
        Stats(*(35, 23, 12, 14, 20, 19, 16, 18)),
        Stats(*(40, 26, 14, 16, 22, 21, 18, 20)),
        Stats(*(46, 30, 16, 18, 25, 23, 20, 22)),
        Stats(*(52, 34, 17, 20, 27, 26, 22, 24)),
        Stats(*(58, 38, 19, 21, 30, 28, 23, 27)),
        Stats(*(63, 41, 20, 23, 32, 30, 25, 29)),
        Stats(*(69, 45, 22, 25, 35, 33, 27, 31)),
    ],
}
| 39.163265 | 49 | 0.454924 |
from __future__ import annotations
from .player_class import PlayerClass
from ..stats import Stats
STATS_BY_PLAYER_CLASS = {
PlayerClass.Cleric: [
Stats(*(0, 0, 0, 0, 0, 0, 0, 0)),
Stats(*(17, 11, 8, 9, 12, 12, 10, 11)),
Stats(*(23, 15, 9, 11, 15, 14, 12, 13)),
Stats(*(29, 19, 11, 12, 17, 16, 14, 16)),
Stats(*(35, 23, 12, 14, 20, 19, 16, 18)),
Stats(*(40, 26, 14, 16, 22, 21, 18, 20)),
Stats(*(46, 30, 16, 18, 25, 23, 20, 22)),
Stats(*(52, 34, 17, 20, 27, 26, 22, 24)),
Stats(*(58, 38, 19, 21, 30, 28, 23, 27)),
Stats(*(63, 41, 20, 23, 32, 30, 25, 29)),
Stats(*(69, 45, 22, 25, 35, 33, 27, 31)),
],
PlayerClass.Warrior: [
Stats(*(0, 0, 0, 0, 0, 0, 0, 0)),
Stats(*(17, 11, 8, 9, 12, 12, 10, 11)),
Stats(*(23, 15, 9, 11, 15, 14, 12, 13)),
Stats(*(29, 19, 11, 12, 17, 16, 14, 16)),
Stats(*(35, 23, 12, 14, 20, 19, 16, 18)),
Stats(*(40, 26, 14, 16, 22, 21, 18, 20)),
Stats(*(46, 30, 16, 18, 25, 23, 20, 22)),
Stats(*(52, 34, 17, 20, 27, 26, 22, 24)),
Stats(*(58, 38, 19, 21, 30, 28, 23, 27)),
Stats(*(63, 41, 20, 23, 32, 30, 25, 29)),
Stats(*(69, 45, 22, 25, 35, 33, 27, 31)),
],
PlayerClass.Wizard: [
Stats(*(0, 0, 0, 0, 0, 0, 0, 0)),
Stats(*(17, 11, 8, 9, 12, 12, 10, 11)),
Stats(*(23, 15, 9, 11, 15, 14, 12, 13)),
Stats(*(29, 19, 11, 12, 17, 16, 14, 16)),
Stats(*(35, 23, 12, 14, 20, 19, 16, 18)),
Stats(*(40, 26, 14, 16, 22, 21, 18, 20)),
Stats(*(46, 30, 16, 18, 25, 23, 20, 22)),
Stats(*(52, 34, 17, 20, 27, 26, 22, 24)),
Stats(*(58, 38, 19, 21, 30, 28, 23, 27)),
Stats(*(63, 41, 20, 23, 32, 30, 25, 29)),
Stats(*(69, 45, 22, 25, 35, 33, 27, 31)),
],
}
| true | true |
f72bbba8b340d6dadd4bd602a0a79cc67d8d633b | 236 | py | Python | src/silicium/scene.py | PH-KDX/silicium | 813e8719a4ba381691d3d1b11ea5738bb2ee2d36 | [
"MIT"
] | 2 | 2021-12-12T12:06:46.000Z | 2021-12-12T12:21:18.000Z | src/silicium/scene.py | PH-KDX/silicium | 813e8719a4ba381691d3d1b11ea5738bb2ee2d36 | [
"MIT"
] | 1 | 2021-12-12T12:21:43.000Z | 2021-12-12T22:49:46.000Z | src/silicium/scene.py | PH-KDX/silicium | 813e8719a4ba381691d3d1b11ea5738bb2ee2d36 | [
"MIT"
] | 2 | 2021-12-12T15:13:54.000Z | 2021-12-21T09:08:42.000Z | from dataclasses import dataclass
from abc import ABC, abstractmethod
from .builder import AbstractBuilder
@dataclass
class AbstractScene(ABC):
    """Abstract base for scenes; concrete subclasses implement :meth:`run`."""
    # The builder this scene uses to construct whatever it presents.
    builder: AbstractBuilder
    @abstractmethod
    def run(self) -> None:
        """Run the scene; must be provided by concrete subclasses."""
        ...
| 16.857143 | 36 | 0.728814 | from dataclasses import dataclass
from abc import ABC, abstractmethod
from .builder import AbstractBuilder
@dataclass
class AbstractScene(ABC):
builder: AbstractBuilder
@abstractmethod
def run(self) -> None:
...
| true | true |
f72bbc0a917dfbe976b673ecf7b7fb9a01ec6ce3 | 1,391 | py | Python | app/user/serializers.py | faridos/my_delivery_app_django | 87b487a064b190bfb8e71ba40e6edb9ebc9bd817 | [
"MIT"
] | null | null | null | app/user/serializers.py | faridos/my_delivery_app_django | 87b487a064b190bfb8e71ba40e6edb9ebc9bd817 | [
"MIT"
] | null | null | null | app/user/serializers.py | faridos/my_delivery_app_django | 87b487a064b190bfb8e71ba40e6edb9ebc9bd817 | [
"MIT"
] | null | null | null | from django.contrib.auth import get_user_model, authenticate
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
class UserSerializer(serializers.ModelSerializer):
    """Serializer for the users object"""

    class Meta:
        model = get_user_model()
        fields = ('email', 'password', 'name')
        # write_only keeps the (hashed) password out of API responses;
        # min_length enforces a minimal password size on input.
        extra_kwargs = {'password': {'write_only': True, 'min_length': 5}}

    def create(self, validated_data):
        """Create a new user with encrypted password and return it"""
        # create_user() hashes the password rather than storing it verbatim.
        return get_user_model().objects.create_user(**validated_data)
class AuthTokenSerializer(serializers.Serializer):
    """Serializer for the user authentication object"""
    email = serializers.CharField()
    password = serializers.CharField(
        # Render as a password input in the browsable API.
        style={'input_type': 'password'},
        # Keep surrounding whitespace -- passwords may legitimately contain it.
        trim_whitespace=False
    )

    def validate(self, attrs):
        """Validate and authenticate the user"""
        email = attrs.get('email')
        password = attrs.get('password')

        # authenticate() uses the email as the username, per the custom
        # user model configured above.
        user = authenticate(
            request=self.context.get('request'),
            username=email,
            password=password
        )
        if not user:
            msg = _('Unable to authenticate with provided credentials')
            raise serializers.ValidationError(msg, code='authorization')

        # Expose the authenticated user to the view via validated_data.
        attrs['user'] = user
        return attrs
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
class UserSerializer(serializers.ModelSerializer):
    """Serializer for the users object."""

    class Meta:
        model = get_user_model()
        fields = ('email', 'password', 'name')
        # write_only keeps the (hashed) password out of API responses;
        # min_length enforces a minimal password size on input.
        extra_kwargs = {'password': {'write_only': True, 'min_length': 5}}

    def create(self, validated_data):
        """Create a new user with encrypted password and return it."""
        # create_user() hashes the password rather than storing it verbatim.
        return get_user_model().objects.create_user(**validated_data)
class AuthTokenSerializer(serializers.Serializer):
    """Serializer for the user authentication object."""
    email = serializers.CharField()
    password = serializers.CharField(
        # Render as a password input in the browsable API.
        style={'input_type': 'password'},
        # Keep surrounding whitespace -- passwords may legitimately contain it.
        trim_whitespace=False
    )

    def validate(self, attrs):
        """Validate and authenticate the user."""
        email = attrs.get('email')
        password = attrs.get('password')

        # authenticate() uses the email as the username.
        user = authenticate(
            request=self.context.get('request'),
            username=email,
            password=password
        )
        if not user:
            msg = _('Unable to authenticate with provided credentials')
            raise serializers.ValidationError(msg, code='authorization')

        # Expose the authenticated user to the view via validated_data.
        attrs['user'] = user
        return attrs
f72bbc4cec7a7d7ee13488d5aad10549c5740f87 | 171 | py | Python | tests/model_control/detailed/transf_Anscombe/model_control_one_enabled_Anscombe_MovingAverage_Seasonal_MonthOfYear_ARX.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | [
"BSD-3-Clause"
] | null | null | null | tests/model_control/detailed/transf_Anscombe/model_control_one_enabled_Anscombe_MovingAverage_Seasonal_MonthOfYear_ARX.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | [
"BSD-3-Clause"
] | 1 | 2019-11-30T23:39:38.000Z | 2019-12-01T04:34:35.000Z | tests/model_control/detailed/transf_Anscombe/model_control_one_enabled_Anscombe_MovingAverage_Seasonal_MonthOfYear_ARX.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | [
"BSD-3-Clause"
] | null | null | null | import pyaf.tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['Anscombe'] , ['MovingAverage'] , ['Seasonal_MonthOfYear'] , ['ARX'] ); | 42.75 | 93 | 0.766082 | import pyaf.tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['Anscombe'] , ['MovingAverage'] , ['Seasonal_MonthOfYear'] , ['ARX'] ); | true | true |
f72bbca3b2e8a2f4ec524e851e80d4cf38f9ab9d | 4,759 | py | Python | David and Pooja/++Validating Linked Mods/Python-3.0/Lib/lib2to3/pgen2/driver.py | LinkedModernismProject/web_code | 4cf6bf53d5c3249e52a75f0a3f57d106e31daf9e | [
"Apache-2.0"
] | 1 | 2015-05-21T23:47:54.000Z | 2015-05-21T23:47:54.000Z | front-end/testsuite-python-lib/Python-3.0/Lib/lib2to3/pgen2/driver.py | MalloyPower/parsing-python | b2bca5eed07ea2af7a2001cd4f63becdfb0570be | [
"MIT"
] | 1 | 2015-10-29T20:51:31.000Z | 2015-10-29T20:51:31.000Z | front-end/testsuite-python-lib/Python-3.0/Lib/lib2to3/pgen2/driver.py | MalloyPower/parsing-python | b2bca5eed07ea2af7a2001cd4f63becdfb0570be | [
"MIT"
] | 1 | 2019-04-11T11:27:01.000Z | 2019-04-11T11:27:01.000Z | # Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
# Modifications:
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Parser driver.
This provides a high-level interface to parse a file into a syntax tree.
"""
__author__ = "Guido van Rossum <guido@python.org>"
__all__ = ["Driver", "load_grammar"]
# Python imports
import os
import logging
import sys
# Pgen imports
from . import grammar, parse, token, tokenize, pgen
class Driver(object):
    """Drives a pgen2 parser: turns token streams, files or strings into a
    syntax tree using the supplied grammar and optional convert callback."""

    def __init__(self, grammar, convert=None, logger=None):
        self.grammar = grammar
        if logger is None:
            logger = logging.getLogger()
        self.logger = logger
        self.convert = convert

    def parse_tokens(self, tokens, debug=False):
        """Parse a series of tokens and return the syntax tree."""
        # XXX Move the prefix computation into a wrapper around tokenize.
        p = parse.Parser(self.grammar, self.convert)
        p.setup()
        lineno = 1
        column = 0
        type = value = start = end = line_text = None
        prefix = ""
        for quintuple in tokens:
            type, value, start, end, line_text = quintuple
            if start != (lineno, column):
                assert (lineno, column) <= start, ((lineno, column), start)
                s_lineno, s_column = start
                if lineno < s_lineno:
                    prefix += "\n" * (s_lineno - lineno)
                    lineno = s_lineno
                    column = 0
                if column < s_column:
                    prefix += line_text[column:s_column]
                    column = s_column
            if type in (tokenize.COMMENT, tokenize.NL):
                # Comments and non-logical newlines become prefix text
                # attached to the next significant token.
                prefix += value
                lineno, column = end
                if value.endswith("\n"):
                    lineno += 1
                    column = 0
                continue
            if type == token.OP:
                type = grammar.opmap[value]
            if debug:
                self.logger.debug("%s %r (prefix=%r)",
                                  token.tok_name[type], value, prefix)
            if p.addtoken(type, value, (prefix, start)):
                if debug:
                    self.logger.debug("Stop.")
                break
            prefix = ""
            lineno, column = end
            if value.endswith("\n"):
                lineno += 1
                column = 0
        else:
            # We never broke out -- EOF is too soon (how can this happen???)
            # BUG FIX: the original raised with undefined names t, v, x,
            # producing a NameError instead of a ParseError on incomplete
            # input; pass the last-seen token data instead.
            raise parse.ParseError("incomplete input",
                                   type, value, (prefix, start))
        return p.rootnode

    def parse_stream_raw(self, stream, debug=False):
        """Parse a stream and return the syntax tree."""
        tokens = tokenize.generate_tokens(stream.readline)
        return self.parse_tokens(tokens, debug)

    def parse_stream(self, stream, debug=False):
        """Parse a stream and return the syntax tree."""
        return self.parse_stream_raw(stream, debug)

    def parse_file(self, filename, debug=False):
        """Parse a file and return the syntax tree."""
        stream = open(filename)
        try:
            return self.parse_stream(stream, debug)
        finally:
            stream.close()

    def parse_string(self, text, debug=False):
        """Parse a string and return the syntax tree."""
        tokens = tokenize.generate_tokens(generate_lines(text).__next__)
        return self.parse_tokens(tokens, debug)
def generate_lines(text):
    """Generator that behaves like readline without using StringIO."""
    # Emit each real line (keeping its line ending), then pad forever with
    # empty strings, mimicking readline() at end-of-file.
    yield from text.splitlines(True)
    while True:
        yield ""
def load_grammar(gt="Grammar.txt", gp=None,
                 save=True, force=False, logger=None):
    """Load the grammar (maybe from a pickle).

    gt: path to the grammar text file.
    gp: path to the pickled grammar tables; derived from gt and the
        interpreter version when None.
    save: write freshly generated tables back to gp.
    force: regenerate even if the pickle is newer than the text file.
    """
    if logger is None:
        logger = logging.getLogger()
    if gp is None:
        head, tail = os.path.splitext(gt)
        if tail == ".txt":
            tail = ""
        # Embed the Python version so pickles from different interpreters
        # do not clash.
        gp = head + tail + ".".join(map(str, sys.version_info)) + ".pickle"
    if force or not _newer(gp, gt):
        logger.info("Generating grammar tables from %s", gt)
        g = pgen.generate_grammar(gt)
        if save:
            logger.info("Writing grammar tables to %s", gp)
            try:
                g.dump(gp)
            except IOError as e:
                # Best effort: a failed cache write is logged, not fatal.
                logger.info("Writing failed:"+str(e))
    else:
        g = grammar.Grammar()
        g.load(gp)
    return g
def _newer(a, b):
"""Inquire whether file a was written since file b."""
if not os.path.exists(a):
return False
if not os.path.exists(b):
return True
return os.path.getmtime(a) >= os.path.getmtime(b)
| 32.59589 | 76 | 0.562093 |
__author__ = "Guido van Rossum <guido@python.org>"
__all__ = ["Driver", "load_grammar"]
import os
import logging
import sys
from . import grammar, parse, token, tokenize, pgen
class Driver(object):
    """Drives a pgen2 parser: turns token streams, files or strings into a
    syntax tree using the supplied grammar and optional convert callback."""

    def __init__(self, grammar, convert=None, logger=None):
        self.grammar = grammar
        if logger is None:
            logger = logging.getLogger()
        self.logger = logger
        self.convert = convert

    def parse_tokens(self, tokens, debug=False):
        """Parse a series of tokens and return the syntax tree."""
        p = parse.Parser(self.grammar, self.convert)
        p.setup()
        lineno = 1
        column = 0
        type = value = start = end = line_text = None
        prefix = ""
        for quintuple in tokens:
            type, value, start, end, line_text = quintuple
            if start != (lineno, column):
                assert (lineno, column) <= start, ((lineno, column), start)
                s_lineno, s_column = start
                if lineno < s_lineno:
                    prefix += "\n" * (s_lineno - lineno)
                    lineno = s_lineno
                    column = 0
                if column < s_column:
                    prefix += line_text[column:s_column]
                    column = s_column
            if type in (tokenize.COMMENT, tokenize.NL):
                # Comments and non-logical newlines become prefix text
                # attached to the next significant token.
                prefix += value
                lineno, column = end
                if value.endswith("\n"):
                    lineno += 1
                    column = 0
                continue
            if type == token.OP:
                type = grammar.opmap[value]
            if debug:
                self.logger.debug("%s %r (prefix=%r)",
                                  token.tok_name[type], value, prefix)
            if p.addtoken(type, value, (prefix, start)):
                if debug:
                    self.logger.debug("Stop.")
                break
            prefix = ""
            lineno, column = end
            if value.endswith("\n"):
                lineno += 1
                column = 0
        else:
            # EOF reached without the parser accepting the input.
            # BUG FIX: the original raised with undefined names t, v, x,
            # producing a NameError instead of a ParseError on incomplete
            # input; pass the last-seen token data instead.
            raise parse.ParseError("incomplete input",
                                   type, value, (prefix, start))
        return p.rootnode

    def parse_stream_raw(self, stream, debug=False):
        """Parse a stream and return the syntax tree."""
        tokens = tokenize.generate_tokens(stream.readline)
        return self.parse_tokens(tokens, debug)

    def parse_stream(self, stream, debug=False):
        """Parse a stream and return the syntax tree."""
        return self.parse_stream_raw(stream, debug)

    def parse_file(self, filename, debug=False):
        """Parse a file and return the syntax tree."""
        stream = open(filename)
        try:
            return self.parse_stream(stream, debug)
        finally:
            stream.close()

    def parse_string(self, text, debug=False):
        """Parse a string and return the syntax tree."""
        tokens = tokenize.generate_tokens(generate_lines(text).__next__)
        return self.parse_tokens(tokens, debug)
def generate_lines(text):
    """Generator that behaves like readline without using StringIO."""
    # Emit each real line (keeping its line ending), then pad forever with
    # empty strings, mimicking readline() at end-of-file.
    yield from text.splitlines(True)
    while True:
        yield ""
def load_grammar(gt="Grammar.txt", gp=None,
                 save=True, force=False, logger=None):
    """Load the grammar, regenerating it from the text file or loading a
    cached pickle when it is up to date.

    gt: path to the grammar text file.
    gp: path to the pickled grammar tables; derived from gt and the
        interpreter version when None.
    save: write freshly generated tables back to gp.
    force: regenerate even if the pickle is newer than the text file.
    """
    if logger is None:
        logger = logging.getLogger()
    if gp is None:
        head, tail = os.path.splitext(gt)
        if tail == ".txt":
            tail = ""
        # Embed the Python version so pickles from different interpreters
        # do not clash.
        gp = head + tail + ".".join(map(str, sys.version_info)) + ".pickle"
    if force or not _newer(gp, gt):
        logger.info("Generating grammar tables from %s", gt)
        g = pgen.generate_grammar(gt)
        if save:
            logger.info("Writing grammar tables to %s", gp)
            try:
                g.dump(gp)
            except IOError as e:
                # Best effort: a failed cache write is logged, not fatal.
                logger.info("Writing failed:"+str(e))
    else:
        g = grammar.Grammar()
        g.load(gp)
    return g
def _newer(a, b):
if not os.path.exists(a):
return False
if not os.path.exists(b):
return True
return os.path.getmtime(a) >= os.path.getmtime(b)
| true | true |
f72bbd8a0db5fae809ca9c1b2443d5383f6ae83e | 5,635 | py | Python | sdk/python/pulumi_azure_native/devtestlab/v20180915/list_service_fabric_applicable_schedules.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/devtestlab/v20180915/list_service_fabric_applicable_schedules.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/devtestlab/v20180915/list_service_fabric_applicable_schedules.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'ListServiceFabricApplicableSchedulesResult',
'AwaitableListServiceFabricApplicableSchedulesResult',
'list_service_fabric_applicable_schedules',
]
@pulumi.output_type
class ListServiceFabricApplicableSchedulesResult:
    """
    Schedules applicable to a virtual machine. The schedules may have been defined on a VM or on lab level.
    """
    def __init__(__self__, id=None, lab_vms_shutdown=None, lab_vms_startup=None, location=None, name=None, tags=None, type=None):
        # Defensive type checks: this result type is populated by the Pulumi
        # engine, so a wrong type here indicates an SDK/provider mismatch.
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if lab_vms_shutdown and not isinstance(lab_vms_shutdown, dict):
            raise TypeError("Expected argument 'lab_vms_shutdown' to be a dict")
        pulumi.set(__self__, "lab_vms_shutdown", lab_vms_shutdown)
        if lab_vms_startup and not isinstance(lab_vms_startup, dict):
            raise TypeError("Expected argument 'lab_vms_startup' to be a dict")
        pulumi.set(__self__, "lab_vms_startup", lab_vms_startup)
        if location and not isinstance(location, str):
            raise TypeError("Expected argument 'location' to be a str")
        pulumi.set(__self__, "location", location)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if tags and not isinstance(tags, dict):
            raise TypeError("Expected argument 'tags' to be a dict")
        pulumi.set(__self__, "tags", tags)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The identifier of the resource.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="labVmsShutdown")
    def lab_vms_shutdown(self) -> Optional['outputs.ScheduleResponse']:
        """
        The auto-shutdown schedule, if one has been set at the lab or lab resource level.
        """
        return pulumi.get(self, "lab_vms_shutdown")

    @property
    @pulumi.getter(name="labVmsStartup")
    def lab_vms_startup(self) -> Optional['outputs.ScheduleResponse']:
        """
        The auto-startup schedule, if one has been set at the lab or lab resource level.
        """
        return pulumi.get(self, "lab_vms_startup")

    @property
    @pulumi.getter
    def location(self) -> Optional[str]:
        """
        The location of the resource.
        """
        return pulumi.get(self, "location")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the resource.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def tags(self) -> Optional[Mapping[str, str]]:
        """
        The tags of the resource.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The type of the resource.
        """
        return pulumi.get(self, "type")
class AwaitableListServiceFabricApplicableSchedulesResult(ListServiceFabricApplicableSchedulesResult):
    """Awaitable variant of the result: ``__await__`` is a generator (the
    ``if False: yield`` makes it one without ever yielding) whose return
    value is a plain result object carrying the same fields."""
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return ListServiceFabricApplicableSchedulesResult(
            id=self.id,
            lab_vms_shutdown=self.lab_vms_shutdown,
            lab_vms_startup=self.lab_vms_startup,
            location=self.location,
            name=self.name,
            tags=self.tags,
            type=self.type)
def list_service_fabric_applicable_schedules(lab_name: Optional[str] = None,
                                             name: Optional[str] = None,
                                             resource_group_name: Optional[str] = None,
                                             user_name: Optional[str] = None,
                                             opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableListServiceFabricApplicableSchedulesResult:
    """
    Schedules applicable to a virtual machine. The schedules may have been defined on a VM or on lab level.


    :param str lab_name: The name of the lab.
    :param str name: The name of the service fabric.
    :param str resource_group_name: The name of the resource group.
    :param str user_name: The name of the user profile.
    :return: An awaitable wrapper around the typed invoke result.
    """
    __args__ = dict()
    __args__['labName'] = lab_name
    __args__['name'] = name
    __args__['resourceGroupName'] = resource_group_name
    __args__['userName'] = user_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    # Invoke the provider function; the raw result is mapped onto the typed
    # output class declared above.
    __ret__ = pulumi.runtime.invoke('azure-native:devtestlab/v20180915:listServiceFabricApplicableSchedules', __args__, opts=opts, typ=ListServiceFabricApplicableSchedulesResult).value

    return AwaitableListServiceFabricApplicableSchedulesResult(
        id=__ret__.id,
        lab_vms_shutdown=__ret__.lab_vms_shutdown,
        lab_vms_startup=__ret__.lab_vms_startup,
        location=__ret__.location,
        name=__ret__.name,
        tags=__ret__.tags,
        type=__ret__.type)
| 37.317881 | 184 | 0.644898 |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'ListServiceFabricApplicableSchedulesResult',
'AwaitableListServiceFabricApplicableSchedulesResult',
'list_service_fabric_applicable_schedules',
]
@pulumi.output_type
class ListServiceFabricApplicableSchedulesResult:
    """
    Schedules applicable to a virtual machine. The schedules may have been
    defined on a VM or on lab level.
    """
    def __init__(__self__, id=None, lab_vms_shutdown=None, lab_vms_startup=None, location=None, name=None, tags=None, type=None):
        # Defensive type checks: this result type is populated by the Pulumi
        # engine, so a wrong type here indicates an SDK/provider mismatch.
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if lab_vms_shutdown and not isinstance(lab_vms_shutdown, dict):
            raise TypeError("Expected argument 'lab_vms_shutdown' to be a dict")
        pulumi.set(__self__, "lab_vms_shutdown", lab_vms_shutdown)
        if lab_vms_startup and not isinstance(lab_vms_startup, dict):
            raise TypeError("Expected argument 'lab_vms_startup' to be a dict")
        pulumi.set(__self__, "lab_vms_startup", lab_vms_startup)
        if location and not isinstance(location, str):
            raise TypeError("Expected argument 'location' to be a str")
        pulumi.set(__self__, "location", location)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if tags and not isinstance(tags, dict):
            raise TypeError("Expected argument 'tags' to be a dict")
        pulumi.set(__self__, "tags", tags)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter
    def id(self) -> str:
        """The identifier of the resource."""
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="labVmsShutdown")
    def lab_vms_shutdown(self) -> Optional['outputs.ScheduleResponse']:
        """The auto-shutdown schedule, if one has been set at the lab or lab resource level."""
        return pulumi.get(self, "lab_vms_shutdown")

    @property
    @pulumi.getter(name="labVmsStartup")
    def lab_vms_startup(self) -> Optional['outputs.ScheduleResponse']:
        """The auto-startup schedule, if one has been set at the lab or lab resource level."""
        return pulumi.get(self, "lab_vms_startup")

    @property
    @pulumi.getter
    def location(self) -> Optional[str]:
        """The location of the resource."""
        return pulumi.get(self, "location")

    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the resource."""
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def tags(self) -> Optional[Mapping[str, str]]:
        """The tags of the resource."""
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter
    def type(self) -> str:
        """The type of the resource."""
        return pulumi.get(self, "type")
class AwaitableListServiceFabricApplicableSchedulesResult(ListServiceFabricApplicableSchedulesResult):
    """Awaitable variant of the result: ``__await__`` is a generator (the
    ``if False: yield`` makes it one without ever yielding) whose return
    value is a plain result object carrying the same fields."""
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return ListServiceFabricApplicableSchedulesResult(
            id=self.id,
            lab_vms_shutdown=self.lab_vms_shutdown,
            lab_vms_startup=self.lab_vms_startup,
            location=self.location,
            name=self.name,
            tags=self.tags,
            type=self.type)
def list_service_fabric_applicable_schedules(lab_name: Optional[str] = None,
                                             name: Optional[str] = None,
                                             resource_group_name: Optional[str] = None,
                                             user_name: Optional[str] = None,
                                             opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableListServiceFabricApplicableSchedulesResult:
    """Return the schedules applicable to a virtual machine.

    :param str lab_name: The name of the lab.
    :param str name: The name of the service fabric.
    :param str resource_group_name: The name of the resource group.
    :param str user_name: The name of the user profile.
    :return: An awaitable wrapper around the typed invoke result.
    """
    __args__ = dict()
    __args__['labName'] = lab_name
    __args__['name'] = name
    __args__['resourceGroupName'] = resource_group_name
    __args__['userName'] = user_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    # Invoke the provider function; the raw result is mapped onto the typed
    # output class declared above.
    __ret__ = pulumi.runtime.invoke('azure-native:devtestlab/v20180915:listServiceFabricApplicableSchedules', __args__, opts=opts, typ=ListServiceFabricApplicableSchedulesResult).value

    return AwaitableListServiceFabricApplicableSchedulesResult(
        id=__ret__.id,
        lab_vms_shutdown=__ret__.lab_vms_shutdown,
        lab_vms_startup=__ret__.lab_vms_startup,
        location=__ret__.location,
        name=__ret__.name,
        tags=__ret__.tags,
        type=__ret__.type)
| true | true |
f72bbedeed16b94339a910792bc18059766dde94 | 5,337 | py | Python | alacrity/core.py | OLeoghain/alacrity | d49453f5f998dbaf36006fe7ec6c6753cd5ef4c1 | [
"MIT"
] | null | null | null | alacrity/core.py | OLeoghain/alacrity | d49453f5f998dbaf36006fe7ec6c6753cd5ef4c1 | [
"MIT"
] | null | null | null | alacrity/core.py | OLeoghain/alacrity | d49453f5f998dbaf36006fe7ec6c6753cd5ef4c1 | [
"MIT"
] | null | null | null | #!/bin/python
import importlib
import logging
import os
import sys
import argparse
import shutil
from clint.textui import colored
try:
from alacrity import lib
except ImportError:
lib = importlib.import_module('lib', '../alacrity')
def main():
    """
    Entry point for the package, alacrity.exe in win and alacrity in linux
    :return: None
    """
    # Start the process
    try:
        from alacrity import version
    except ImportError:
        version = importlib.import_module('version', '../alacrity')

    # Get version information from version.py
    v = version.version()

    parser = argparse.ArgumentParser(description="Alacrity : "
                                                 "Quickstart your Python "
                                                 "package from a terminal")
    parser.add_argument('--make', action='store_true', help="Rebuild "
                                                            "persistence")
    parser.add_argument('--debug', action='store_true', help="Display verbose "
                                                             "debug messages")
    parser.add_argument('--version', action="version", version=v)
    parser.add_argument('package_name')
    args = parser.parse_args()

    if args.make:
        lib.rebuild_persistence()

    # NOTE(review): argparse makes package_name a required positional, so
    # this guard should be unreachable; kept for safety.
    if not args.package_name:
        logging.error(" package_name is a required argument")
        sys.exit()

    # Initialize logging depending on debug mode
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.CRITICAL)

    # Initialise status dictionary
    status = {
        'structure_created': False,
        'gitignore_created': False,
        'setup_created': False,
        'license_created': False,
        'manifest_created': False,
        'readme_created': False,
        'requirements_created': False,
        'tests_created': False,
        'git_initialized': False,
        'venv_created': False,
        'sphinx_created': False
    }

    try:
        try:
            package_name = args.package_name

            # Check if the package already exists
            logging.debug("[-] Checking if the package already exists")
            check_is_file = os.path.isfile(
                "{0}/{0}/__init__.py".format(package_name))

            # Check for clean_make
            if os.path.isdir(package_name) or check_is_file:
                logging.debug("[-] Package already exists, "
                              "launching clean make prompt")
                print(colored.red("[!] A package by that name already exists, "
                                  "destroy and clean make? (y/n) : "), end="")
                choice = input()
                logging.debug("[-] Choice prompt input : {}".format(choice))

                if choice == 'y':
                    logging.debug("[-] Removing existing package")
                    lib.remove_package(package_name)
                elif choice == 'n':
                    logging.debug("[-] Clean make cancelled")
                    print(colored.red("[!] Please pick a different package "
                                      "name, aborting."))
                    sys.exit()
                else:
                    logging.error(colored.red(" Invalid choice"))
                    print(colored.red("[!] Invalid choice, aborting"))
                    sys.exit()

            # Create the initial structure
            logging.debug("[-] Creating package structure")
            lib.create_package_structure(package_name, status)

            # Create starter files.  FIX: the original unpacked into a local
            # named ``version``, shadowing the ``version`` module imported
            # above; renamed to ``pkg_version`` to avoid the shadowing.
            logging.debug("[-] Creating starter files in package")
            author, pkg_version = lib.create_starter_files(package_name, status)

            # Create tests directory
            logging.debug("[-] Creating tests package in structure")
            lib.create_tests_package(package_name, status)

            # Initialize git if required and available
            logging.debug("[-] Launching git init submodule")
            lib.git_init(package_name, status)

            # Initialize venv if required and available
            logging.debug("[-] Launching venv init submodule")
            lib.venv_init(package_name, status)

            # Initialize sphinx docs if required and available
            logging.debug("[-] Launching sphinx init submodule")
            lib.sphinx_init(package_name, author, pkg_version, status)

            logging.debug("[-] Launching status reporter submodule")
            lib.report_status(status)

            print(colored.green("[|]"))
            print(colored.green("[*] Package {} was created "
                                "successfully.".format(package_name)))
        except EOFError:
            # Catch error thrown by clint.main
            print(colored.yellow("\n[!] Ctrl+C : Aborting package creation."))
            sys.exit()
    except KeyboardInterrupt:
        print(colored.yellow("\n[!] Ctrl+C : Aborting package creation."))

        # Rollback changes
        if os.path.isdir(args.package_name):
            logging.debug("[-] Rolling back committed changes, deleting files")
            shutil.rmtree(args.package_name)

        logging.debug("[-] alacrity:ROOT :: quiting")
        sys.exit()
# Script entry point: run the CLI only when executed directly.
if __name__ == '__main__':
    main()
| 36.554795 | 79 | 0.564737 |
import importlib
import logging
import os
import sys
import argparse
import shutil
from clint.textui import colored
try:
from alacrity import lib
except ImportError:
lib = importlib.import_module('lib', '../alacrity')
def main():
    """Entry point for the package (alacrity.exe on Windows, alacrity on
    Linux). Parses CLI arguments and scaffolds a new Python package."""
    try:
        from alacrity import version
    except ImportError:
        version = importlib.import_module('version', '../alacrity')

    # Version string for the --version flag.
    v = version.version()

    parser = argparse.ArgumentParser(description="Alacrity : "
                                                 "Quickstart your Python "
                                                 "package from a terminal")
    parser.add_argument('--make', action='store_true', help="Rebuild "
                                                            "persistence")
    parser.add_argument('--debug', action='store_true', help="Display verbose "
                                                             "debug messages")
    parser.add_argument('--version', action="version", version=v)
    parser.add_argument('package_name')
    args = parser.parse_args()

    if args.make:
        lib.rebuild_persistence()

    # NOTE(review): argparse makes package_name a required positional, so
    # this guard should be unreachable; kept for safety.
    if not args.package_name:
        logging.error(" package_name is a required argument")
        sys.exit()

    # Logging verbosity follows the --debug flag.
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.CRITICAL)

    # Tracks which scaffolding steps completed, for the final report.
    status = {
        'structure_created': False,
        'gitignore_created': False,
        'setup_created': False,
        'license_created': False,
        'manifest_created': False,
        'readme_created': False,
        'requirements_created': False,
        'tests_created': False,
        'git_initialized': False,
        'venv_created': False,
        'sphinx_created': False
    }

    try:
        try:
            package_name = args.package_name

            logging.debug("[-] Checking if the package already exists")
            check_is_file = os.path.isfile(
                "{0}/{0}/__init__.py".format(package_name))

            # Existing package: offer a destroy-and-recreate ("clean make").
            if os.path.isdir(package_name) or check_is_file:
                logging.debug("[-] Package already exists, "
                              "launching clean make prompt")
                print(colored.red("[!] A package by that name already exists, "
                                  "destroy and clean make? (y/n) : "), end="")
                choice = input()
                logging.debug("[-] Choice prompt input : {}".format(choice))

                if choice == 'y':
                    logging.debug("[-] Removing existing package")
                    lib.remove_package(package_name)
                elif choice == 'n':
                    logging.debug("[-] Clean make cancelled")
                    print(colored.red("[!] Please pick a different package "
                                      "name, aborting."))
                    sys.exit()
                else:
                    logging.error(colored.red(" Invalid choice"))
                    print(colored.red("[!] Invalid choice, aborting"))
                    sys.exit()

            logging.debug("[-] Creating package structure")
            lib.create_package_structure(package_name, status)

            # NOTE(review): this rebinds ``version``, shadowing the module
            # imported above; works because the module is no longer used,
            # but a distinct local name would be clearer.
            logging.debug("[-] Creating starter files in package")
            author, version = lib.create_starter_files(package_name, status)

            logging.debug("[-] Creating tests package in structure")
            lib.create_tests_package(package_name, status)

            logging.debug("[-] Launching git init submodule")
            lib.git_init(package_name, status)

            logging.debug("[-] Launching venv init submodule")
            lib.venv_init(package_name, status)

            logging.debug("[-] Launching sphinx init submodule")
            lib.sphinx_init(package_name, author, version, status)

            logging.debug("[-] Launching status reporter submodule")
            lib.report_status(status)

            print(colored.green("[|]"))
            print(colored.green("[*] Package {} was created "
                                "successfully.".format(package_name)))
        except EOFError:
            # stdin closed during a prompt.
            print(colored.yellow("\n[!] Ctrl+C : Aborting package creation."))
            sys.exit()
    except KeyboardInterrupt:
        print(colored.yellow("\n[!] Ctrl+C : Aborting package creation."))

        # Roll back any partially created package directory.
        if os.path.isdir(args.package_name):
            logging.debug("[-] Rolling back committed changes, deleting files")
            shutil.rmtree(args.package_name)

        logging.debug("[-] alacrity:ROOT :: quiting")
        sys.exit()
# Script entry point: run the CLI only when executed directly.
if __name__ == '__main__':
    main()
| true | true |
f72bbf8d0fa245c9c5a1214dc4f10dd0bf1f40bc | 56,360 | py | Python | release/stubs.min/System/Windows/Forms/__init___parts/NumericUpDown.py | htlcnn/ironpython-stubs | 780d829e2104b2789d5f4d6f32b0ec9f2930ca03 | [
"MIT"
] | 182 | 2017-06-27T02:26:15.000Z | 2022-03-30T18:53:43.000Z | release/stubs.min/System/Windows/Forms/__init___parts/NumericUpDown.py | htlcnn/ironpython-stubs | 780d829e2104b2789d5f4d6f32b0ec9f2930ca03 | [
"MIT"
] | 28 | 2017-06-27T13:38:23.000Z | 2022-03-15T11:19:44.000Z | release/stubs.min/System/Windows/Forms/__init___parts/NumericUpDown.py | htlcnn/ironpython-stubs | 780d829e2104b2789d5f4d6f32b0ec9f2930ca03 | [
"MIT"
] | 67 | 2017-06-28T09:43:59.000Z | 2022-03-20T21:17:10.000Z | class NumericUpDown(UpDownBase,IComponent,IDisposable,IOleControl,IOleObject,IOleInPlaceObject,IOleInPlaceActiveObject,IOleWindow,IViewObject,IViewObject2,IPersist,IPersistStreamInit,IPersistPropertyBag,IPersistStorage,IQuickActivate,ISupportOleDropSource,IDropTarget,ISynchronizeInvoke,IWin32Window,IArrangedElement,IBindableComponent,IContainerControl,ISupportInitialize):
"""
Represents a Windows spin box (also known as an up-down control) that displays numeric values.
NumericUpDown()
"""
def AccessibilityNotifyClients(self,*args):
"""
AccessibilityNotifyClients(self: Control,accEvent: AccessibleEvents,objectID: int,childID: int)
Notifies the accessibility client applications of the specified
System.Windows.Forms.AccessibleEvents for the specified child control .
accEvent: The System.Windows.Forms.AccessibleEvents to notify the accessibility client applications of.
objectID: The identifier of the System.Windows.Forms.AccessibleObject.
childID: The child System.Windows.Forms.Control to notify of the accessible event.
AccessibilityNotifyClients(self: Control,accEvent: AccessibleEvents,childID: int)
Notifies the accessibility client applications of the specified
System.Windows.Forms.AccessibleEvents for the specified child control.
accEvent: The System.Windows.Forms.AccessibleEvents to notify the accessibility client applications of.
childID: The child System.Windows.Forms.Control to notify of the accessible event.
"""
pass
def AdjustFormScrollbars(self,*args):
"""
AdjustFormScrollbars(self: ContainerControl,displayScrollbars: bool)
displayScrollbars: true to show the scroll bars; otherwise,false.
"""
pass
def BeginInit(self):
"""
BeginInit(self: NumericUpDown)
Begins the initialization of a System.Windows.Forms.NumericUpDown control that is used on a form
or used by another component. The initialization occurs at run time.
"""
pass
def CreateAccessibilityInstance(self,*args):
"""
CreateAccessibilityInstance(self: NumericUpDown) -> AccessibleObject
Returns: A new System.Windows.Forms.AccessibleObject for the control.
"""
pass
def CreateControlsInstance(self,*args):
"""
CreateControlsInstance(self: Control) -> ControlCollection
Creates a new instance of the control collection for the control.
Returns: A new instance of System.Windows.Forms.Control.ControlCollection assigned to the control.
"""
pass
def CreateHandle(self,*args):
"""
CreateHandle(self: Control)
Creates a handle for the control.
"""
pass
def DefWndProc(self,*args):
"""
DefWndProc(self: Control,m: Message) -> Message
Sends the specified message to the default window procedure.
m: The Windows System.Windows.Forms.Message to process.
"""
pass
def DestroyHandle(self,*args):
"""
DestroyHandle(self: Control)
Destroys the handle associated with the control.
"""
pass
def Dispose(self):
"""
Dispose(self: ContainerControl,disposing: bool)
disposing: true to release both managed and unmanaged resources; false to release only unmanaged resources.
"""
pass
def DownButton(self):
"""
DownButton(self: NumericUpDown)
Decrements the value of the spin box (also known as an up-down control).
"""
pass
def EndInit(self):
"""
EndInit(self: NumericUpDown)
Ends the initialization of a System.Windows.Forms.NumericUpDown control that is used on a form
or used by another component. The initialization occurs at run time.
"""
pass
def GetAccessibilityObjectById(self,*args):
"""
GetAccessibilityObjectById(self: Control,objectId: int) -> AccessibleObject
Retrieves the specified System.Windows.Forms.AccessibleObject.
objectId: An Int32 that identifies the System.Windows.Forms.AccessibleObject to retrieve.
Returns: An System.Windows.Forms.AccessibleObject.
"""
pass
def GetAutoSizeMode(self,*args):
"""
GetAutoSizeMode(self: Control) -> AutoSizeMode
Retrieves a value indicating how a control will behave when its
System.Windows.Forms.Control.AutoSize property is enabled.
Returns: One of the System.Windows.Forms.AutoSizeMode values.
"""
pass
def GetScaledBounds(self,*args):
"""
GetScaledBounds(self: Control,bounds: Rectangle,factor: SizeF,specified: BoundsSpecified) -> Rectangle
Retrieves the bounds within which the control is scaled.
bounds: A System.Drawing.Rectangle that specifies the area for which to retrieve the display bounds.
factor: The height and width of the control's bounds.
specified: One of the values of System.Windows.Forms.BoundsSpecified that specifies the bounds of the
control to use when defining its size and position.
Returns: A System.Drawing.Rectangle representing the bounds within which the control is scaled.
"""
pass
def GetScrollState(self,*args):
"""
GetScrollState(self: ScrollableControl,bit: int) -> bool
Determines whether the specified flag has been set.
bit: The flag to check.
Returns: true if the specified flag has been set; otherwise,false.
"""
pass
def GetService(self,*args):
"""
GetService(self: Component,service: Type) -> object
Returns an object that represents a service provided by the System.ComponentModel.Component or
by its System.ComponentModel.Container.
service: A service provided by the System.ComponentModel.Component.
Returns: An System.Object that represents a service provided by the System.ComponentModel.Component,or
null if the System.ComponentModel.Component does not provide the specified service.
"""
pass
def GetStyle(self,*args):
"""
GetStyle(self: Control,flag: ControlStyles) -> bool
Retrieves the value of the specified control style bit for the control.
flag: The System.Windows.Forms.ControlStyles bit to return the value from.
Returns: true if the specified control style bit is set to true; otherwise,false.
"""
pass
def GetTopLevel(self,*args):
"""
GetTopLevel(self: Control) -> bool
Determines if the control is a top-level control.
Returns: true if the control is a top-level control; otherwise,false.
"""
pass
def InitLayout(self,*args):
"""
InitLayout(self: Control)
Called after the control has been added to another container.
"""
pass
def InvokeGotFocus(self,*args):
"""
InvokeGotFocus(self: Control,toInvoke: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.GotFocus event for the specified control.
toInvoke: The System.Windows.Forms.Control to assign the event to.
e: An System.EventArgs that contains the event data.
"""
pass
def InvokeLostFocus(self,*args):
"""
InvokeLostFocus(self: Control,toInvoke: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.LostFocus event for the specified control.
toInvoke: The System.Windows.Forms.Control to assign the event to.
e: An System.EventArgs that contains the event data.
"""
pass
def InvokeOnClick(self,*args):
"""
InvokeOnClick(self: Control,toInvoke: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.Click event for the specified control.
toInvoke: The System.Windows.Forms.Control to assign the System.Windows.Forms.Control.Click event to.
e: An System.EventArgs that contains the event data.
"""
pass
def InvokePaint(self,*args):
"""
InvokePaint(self: Control,c: Control,e: PaintEventArgs)
Raises the System.Windows.Forms.Control.Paint event for the specified control.
c: The System.Windows.Forms.Control to assign the System.Windows.Forms.Control.Paint event to.
e: An System.Windows.Forms.PaintEventArgs that contains the event data.
"""
pass
def InvokePaintBackground(self,*args):
"""
InvokePaintBackground(self: Control,c: Control,e: PaintEventArgs)
Raises the PaintBackground event for the specified control.
c: The System.Windows.Forms.Control to assign the System.Windows.Forms.Control.Paint event to.
e: An System.Windows.Forms.PaintEventArgs that contains the event data.
"""
pass
def IsInputChar(self,*args):
"""
IsInputChar(self: Control,charCode: Char) -> bool
Determines if a character is an input character that the control recognizes.
charCode: The character to test.
Returns: true if the character should be sent directly to the control and not preprocessed; otherwise,
false.
"""
pass
def IsInputKey(self,*args):
"""
IsInputKey(self: Control,keyData: Keys) -> bool
Determines whether the specified key is a regular input key or a special key that requires
preprocessing.
keyData: One of the System.Windows.Forms.Keys values.
Returns: true if the specified key is a regular input key; otherwise,false.
"""
pass
 def MemberwiseClone(self,*args):
  """
  MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject
  Creates a shallow copy of the current System.MarshalByRefObject object.
  cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the
   object to be assigned a new identity when it is marshaled across a remoting boundary. A value of
   false is usually appropriate. true to copy the current System.MarshalByRefObject object's
   identity to its clone,which will cause remoting client calls to be routed to the remote server
   object.
  Returns: A shallow copy of the current System.MarshalByRefObject object.
  MemberwiseClone(self: object) -> object
  Creates a shallow copy of the current System.Object.
  Returns: A shallow copy of the current System.Object.
  """
  # Two CLR overloads are documented above (MarshalByRefObject and Object forms);
  # overload resolution happens in the .NET runtime, not in this generated stub.
  pass
def NotifyInvalidate(self,*args):
"""
NotifyInvalidate(self: Control,invalidatedArea: Rectangle)
Raises the System.Windows.Forms.Control.Invalidated event with a specified region of the control
to invalidate.
invalidatedArea: A System.Drawing.Rectangle representing the area to invalidate.
"""
pass
def OnAutoSizeChanged(self,*args):
"""
OnAutoSizeChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.AutoSizeChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnAutoValidateChanged(self,*args):
"""
OnAutoValidateChanged(self: ContainerControl,e: EventArgs)
Raises the System.Windows.Forms.ContainerControl.AutoValidateChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnBackColorChanged(self,*args):
"""
OnBackColorChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.BackColorChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnBackgroundImageChanged(self,*args):
"""
OnBackgroundImageChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.BackgroundImageChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnBackgroundImageLayoutChanged(self,*args):
"""
OnBackgroundImageLayoutChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.BackgroundImageLayoutChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnBindingContextChanged(self,*args):
"""
OnBindingContextChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.BindingContextChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnCausesValidationChanged(self,*args):
"""
OnCausesValidationChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.CausesValidationChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnChanged(self,*args):
"""
OnChanged(self: UpDownBase,source: object,e: EventArgs)
When overridden in a derived class,raises the Changed event.
source: The source of the event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnChangeUICues(self,*args):
"""
OnChangeUICues(self: Control,e: UICuesEventArgs)
Raises the System.Windows.Forms.Control.ChangeUICues event.
e: A System.Windows.Forms.UICuesEventArgs that contains the event data.
"""
pass
def OnClick(self,*args):
"""
OnClick(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.Click event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnClientSizeChanged(self,*args):
"""
OnClientSizeChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.ClientSizeChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnContextMenuChanged(self,*args):
"""
OnContextMenuChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.ContextMenuChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnContextMenuStripChanged(self,*args):
"""
OnContextMenuStripChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.ContextMenuStripChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnControlAdded(self,*args):
"""
OnControlAdded(self: Control,e: ControlEventArgs)
Raises the System.Windows.Forms.Control.ControlAdded event.
e: A System.Windows.Forms.ControlEventArgs that contains the event data.
"""
pass
def OnControlRemoved(self,*args):
"""
OnControlRemoved(self: Control,e: ControlEventArgs)
Raises the System.Windows.Forms.Control.ControlRemoved event.
e: A System.Windows.Forms.ControlEventArgs that contains the event data.
"""
pass
 def OnCreateControl(self,*args):
  """ OnCreateControl(self: ContainerControl) """
  # NOTE(review): the generated docstring carries no description. Presumably this is
  # the standard Control.OnCreateControl hook (called when the control is first
  # created) -- confirm against the .NET reference before relying on that.
  pass
def OnCursorChanged(self,*args):
"""
OnCursorChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.CursorChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnDockChanged(self,*args):
"""
OnDockChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.DockChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnDoubleClick(self,*args):
"""
OnDoubleClick(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.DoubleClick event.
e: An System.EventArgs that contains the event data.
"""
pass
 def OnDpiChangedAfterParent(self,*args):
  """ OnDpiChangedAfterParent(self: Control,e: EventArgs) """
  # NOTE(review): undocumented in the generated metadata. Presumably raises the
  # Control.DpiChangedAfterParent event (per-monitor DPI support) -- verify against
  # the .NET reference.
  pass
 def OnDpiChangedBeforeParent(self,*args):
  """ OnDpiChangedBeforeParent(self: Control,e: EventArgs) """
  # NOTE(review): undocumented in the generated metadata. Presumably raises the
  # Control.DpiChangedBeforeParent event (per-monitor DPI support) -- verify against
  # the .NET reference.
  pass
def OnDragDrop(self,*args):
"""
OnDragDrop(self: Control,drgevent: DragEventArgs)
Raises the System.Windows.Forms.Control.DragDrop event.
drgevent: A System.Windows.Forms.DragEventArgs that contains the event data.
"""
pass
def OnDragEnter(self,*args):
"""
OnDragEnter(self: Control,drgevent: DragEventArgs)
Raises the System.Windows.Forms.Control.DragEnter event.
drgevent: A System.Windows.Forms.DragEventArgs that contains the event data.
"""
pass
def OnDragLeave(self,*args):
"""
OnDragLeave(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.DragLeave event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnDragOver(self,*args):
"""
OnDragOver(self: Control,drgevent: DragEventArgs)
Raises the System.Windows.Forms.Control.DragOver event.
drgevent: A System.Windows.Forms.DragEventArgs that contains the event data.
"""
pass
def OnEnabledChanged(self,*args):
"""
OnEnabledChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.EnabledChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnEnter(self,*args):
"""
OnEnter(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.Enter event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnFontChanged(self,*args):
"""
OnFontChanged(self: UpDownBase,e: EventArgs)
Raises the System.Windows.Forms.Control.FontChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnForeColorChanged(self,*args):
"""
OnForeColorChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.ForeColorChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnGiveFeedback(self,*args):
"""
OnGiveFeedback(self: Control,gfbevent: GiveFeedbackEventArgs)
Raises the System.Windows.Forms.Control.GiveFeedback event.
gfbevent: A System.Windows.Forms.GiveFeedbackEventArgs that contains the event data.
"""
pass
def OnGotFocus(self,*args):
"""
OnGotFocus(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.GotFocus event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnHandleCreated(self,*args):
"""
OnHandleCreated(self: UpDownBase,e: EventArgs)
Raises the System.Windows.Forms.Control.HandleCreated event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnHandleDestroyed(self,*args):
"""
OnHandleDestroyed(self: UpDownBase,e: EventArgs)
Raises the System.Windows.Forms.Control.HandleDestroyed event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnHelpRequested(self,*args):
"""
OnHelpRequested(self: Control,hevent: HelpEventArgs)
Raises the System.Windows.Forms.Control.HelpRequested event.
hevent: A System.Windows.Forms.HelpEventArgs that contains the event data.
"""
pass
def OnImeModeChanged(self,*args):
"""
OnImeModeChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.ImeModeChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnInvalidated(self,*args):
"""
OnInvalidated(self: Control,e: InvalidateEventArgs)
Raises the System.Windows.Forms.Control.Invalidated event.
e: An System.Windows.Forms.InvalidateEventArgs that contains the event data.
"""
pass
def OnKeyDown(self,*args):
"""
OnKeyDown(self: NumericUpDown,e: KeyEventArgs)
Raises the System.Windows.Forms.Control.KeyDown event.
e: A System.Windows.Forms.KeyEventArgs that contains the event data.
"""
pass
def OnKeyPress(self,*args):
"""
OnKeyPress(self: Control,e: KeyPressEventArgs)
Raises the System.Windows.Forms.Control.KeyPress event.
e: A System.Windows.Forms.KeyPressEventArgs that contains the event data.
"""
pass
def OnKeyUp(self,*args):
"""
OnKeyUp(self: NumericUpDown,e: KeyEventArgs)
Raises the System.Windows.Forms.Control.KeyUp event.
e: A System.Windows.Forms.KeyEventArgs that contains the event data.
"""
pass
def OnLayout(self,*args):
"""
OnLayout(self: UpDownBase,e: LayoutEventArgs)
Raises the System.Windows.Forms.Control.Layout event.
e: A System.Windows.Forms.LayoutEventArgs that contains the event data.
"""
pass
def OnLeave(self,*args):
"""
OnLeave(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.Leave event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnLocationChanged(self,*args):
"""
OnLocationChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.LocationChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnLostFocus(self,*args):
"""
OnLostFocus(self: NumericUpDown,e: EventArgs)
Raises the System.Windows.Forms.Control.LostFocus event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnMarginChanged(self,*args):
"""
OnMarginChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.MarginChanged event.
e: A System.EventArgs that contains the event data.
"""
pass
def OnMouseCaptureChanged(self,*args):
"""
OnMouseCaptureChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.MouseCaptureChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnMouseClick(self,*args):
"""
OnMouseClick(self: Control,e: MouseEventArgs)
Raises the System.Windows.Forms.Control.MouseClick event.
e: An System.Windows.Forms.MouseEventArgs that contains the event data.
"""
pass
def OnMouseDoubleClick(self,*args):
"""
OnMouseDoubleClick(self: Control,e: MouseEventArgs)
Raises the System.Windows.Forms.Control.MouseDoubleClick event.
e: An System.Windows.Forms.MouseEventArgs that contains the event data.
"""
pass
def OnMouseDown(self,*args):
"""
OnMouseDown(self: UpDownBase,e: MouseEventArgs)
Raises the System.Windows.Forms.Control.MouseDown event.
e: A System.Windows.Forms.MouseEventArgs that contains the event data.
"""
pass
def OnMouseEnter(self,*args):
"""
OnMouseEnter(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.MouseEnter event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnMouseHover(self,*args):
"""
OnMouseHover(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.MouseHover event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnMouseLeave(self,*args):
"""
OnMouseLeave(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.MouseLeave event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnMouseMove(self,*args):
"""
OnMouseMove(self: Control,e: MouseEventArgs)
Raises the System.Windows.Forms.Control.MouseMove event.
e: A System.Windows.Forms.MouseEventArgs that contains the event data.
"""
pass
def OnMouseUp(self,*args):
"""
OnMouseUp(self: UpDownBase,mevent: MouseEventArgs)
Raises the System.Windows.Forms.Control.MouseUp event.
mevent: A System.Windows.Forms.MouseEventArgs that contains the event data.
"""
pass
def OnMouseWheel(self,*args):
"""
OnMouseWheel(self: UpDownBase,e: MouseEventArgs)
Raises the System.Windows.Forms.Control.MouseWheel event.
e: A System.Windows.Forms.MouseEventArgs that contains the event data.
"""
pass
def OnMove(self,*args):
"""
OnMove(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.Move event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnNotifyMessage(self,*args):
"""
OnNotifyMessage(self: Control,m: Message)
Notifies the control of Windows messages.
m: A System.Windows.Forms.Message that represents the Windows message.
"""
pass
def OnPaddingChanged(self,*args):
"""
OnPaddingChanged(self: ScrollableControl,e: EventArgs)
Raises the System.Windows.Forms.Control.PaddingChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnPaint(self,*args):
"""
OnPaint(self: UpDownBase,e: PaintEventArgs)
Raises the System.Windows.Forms.Control.Paint event.
e: A System.Windows.Forms.PaintEventArgs that contains the event data.
"""
pass
def OnPaintBackground(self,*args):
"""
OnPaintBackground(self: ScrollableControl,e: PaintEventArgs)
Paints the background of the control.
e: A System.Windows.Forms.PaintEventArgs that contains the event data.
"""
pass
def OnParentBackColorChanged(self,*args):
"""
OnParentBackColorChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.BackColorChanged event when the
System.Windows.Forms.Control.BackColor property value of the control's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentBackgroundImageChanged(self,*args):
"""
OnParentBackgroundImageChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.BackgroundImageChanged event when the
System.Windows.Forms.Control.BackgroundImage property value of the control's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentBindingContextChanged(self,*args):
"""
OnParentBindingContextChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.BindingContextChanged event when the
System.Windows.Forms.Control.BindingContext property value of the control's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentChanged(self,*args):
"""
OnParentChanged(self: ContainerControl,e: EventArgs)
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentCursorChanged(self,*args):
"""
OnParentCursorChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.CursorChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentEnabledChanged(self,*args):
"""
OnParentEnabledChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.EnabledChanged event when the
System.Windows.Forms.Control.Enabled property value of the control's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentFontChanged(self,*args):
"""
OnParentFontChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.FontChanged event when the
System.Windows.Forms.Control.Font property value of the control's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentForeColorChanged(self,*args):
"""
OnParentForeColorChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.ForeColorChanged event when the
System.Windows.Forms.Control.ForeColor property value of the control's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentRightToLeftChanged(self,*args):
"""
OnParentRightToLeftChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.RightToLeftChanged event when the
System.Windows.Forms.Control.RightToLeft property value of the control's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentVisibleChanged(self,*args):
"""
OnParentVisibleChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.VisibleChanged event when the
System.Windows.Forms.Control.Visible property value of the control's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnPreviewKeyDown(self,*args):
"""
OnPreviewKeyDown(self: Control,e: PreviewKeyDownEventArgs)
Raises the System.Windows.Forms.Control.PreviewKeyDown event.
e: A System.Windows.Forms.PreviewKeyDownEventArgs that contains the event data.
"""
pass
def OnPrint(self,*args):
"""
OnPrint(self: Control,e: PaintEventArgs)
Raises the System.Windows.Forms.Control.Paint event.
e: A System.Windows.Forms.PaintEventArgs that contains the event data.
"""
pass
def OnQueryContinueDrag(self,*args):
"""
OnQueryContinueDrag(self: Control,qcdevent: QueryContinueDragEventArgs)
Raises the System.Windows.Forms.Control.QueryContinueDrag event.
qcdevent: A System.Windows.Forms.QueryContinueDragEventArgs that contains the event data.
"""
pass
def OnRegionChanged(self,*args):
"""
OnRegionChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.RegionChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnResize(self,*args):
"""
OnResize(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.Resize event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnRightToLeftChanged(self,*args):
"""
OnRightToLeftChanged(self: ScrollableControl,e: EventArgs)
e: An System.EventArgs that contains the event data.
"""
pass
def OnScroll(self,*args):
"""
OnScroll(self: ScrollableControl,se: ScrollEventArgs)
Raises the System.Windows.Forms.ScrollableControl.Scroll event.
se: A System.Windows.Forms.ScrollEventArgs that contains the event data.
"""
pass
def OnSizeChanged(self,*args):
"""
OnSizeChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.SizeChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnStyleChanged(self,*args):
"""
OnStyleChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.StyleChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnSystemColorsChanged(self,*args):
"""
OnSystemColorsChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.SystemColorsChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnTabIndexChanged(self,*args):
"""
OnTabIndexChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.TabIndexChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnTabStopChanged(self,*args):
"""
OnTabStopChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.TabStopChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnTextBoxKeyDown(self,*args):
"""
OnTextBoxKeyDown(self: UpDownBase,source: object,e: KeyEventArgs)
Raises the System.Windows.Forms.Control.KeyDown event.
source: The source of the event.
e: A System.Windows.Forms.KeyEventArgs that contains the event data.
"""
pass
def OnTextBoxKeyPress(self,*args):
"""
OnTextBoxKeyPress(self: NumericUpDown,source: object,e: KeyPressEventArgs)
Raises the System.Windows.Forms.Control.KeyPress event.
source: The source of the event.
e: A System.Windows.Forms.KeyPressEventArgs that contains the event data.
"""
pass
def OnTextBoxLostFocus(self,*args):
"""
OnTextBoxLostFocus(self: UpDownBase,source: object,e: EventArgs)
Raises the System.Windows.Forms.Control.LostFocus event.
source: The source of the event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnTextBoxResize(self,*args):
"""
OnTextBoxResize(self: UpDownBase,source: object,e: EventArgs)
Raises the System.Windows.Forms.Control.Resize event.
source: The source of the event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnTextBoxTextChanged(self,*args):
"""
OnTextBoxTextChanged(self: UpDownBase,source: object,e: EventArgs)
Raises the System.Windows.Forms.Control.TextChanged event.
source: The source of the event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnTextChanged(self,*args):
"""
OnTextChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.TextChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnValidated(self,*args):
"""
OnValidated(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.Validated event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnValidating(self,*args):
"""
OnValidating(self: Control,e: CancelEventArgs)
Raises the System.Windows.Forms.Control.Validating event.
e: A System.ComponentModel.CancelEventArgs that contains the event data.
"""
pass
def OnValueChanged(self,*args):
"""
OnValueChanged(self: NumericUpDown,e: EventArgs)
Raises the System.Windows.Forms.NumericUpDown.ValueChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnVisibleChanged(self,*args):
"""
OnVisibleChanged(self: ScrollableControl,e: EventArgs)
e: An System.EventArgs that contains the event data.
"""
pass
def ParseEditText(self,*args):
"""
ParseEditText(self: NumericUpDown)
Converts the text displayed in the spin box (also known as an up-down control) to a numeric
value and evaluates it.
"""
pass
 def ProcessCmdKey(self,*args):
  """
  ProcessCmdKey(self: ContainerControl,msg: Message,keyData: Keys) -> (bool,Message)
  msg: A System.Windows.Forms.Message,passed by reference,that represents the window message to
   process.
  keyData: One of the System.Windows.Forms.Keys values that represents the key to process.
  Returns: true if the character was processed by the control; otherwise,false.
  """
  # The (bool,Message) tuple in the documented signature is IronPython's convention
  # for CLR by-ref parameters: the possibly-modified 'msg' is returned alongside the
  # bool result instead of being mutated in place.
  pass
def ProcessDialogChar(self,*args):
"""
ProcessDialogChar(self: ContainerControl,charCode: Char) -> bool
charCode: The character to process.
Returns: true if the character was processed by the control; otherwise,false.
"""
pass
def ProcessDialogKey(self,*args):
"""
ProcessDialogKey(self: ContainerControl,keyData: Keys) -> bool
keyData: One of the System.Windows.Forms.Keys values that represents the key to process.
Returns: true if the key was processed by the control; otherwise,false.
"""
pass
def ProcessKeyEventArgs(self,*args):
"""
ProcessKeyEventArgs(self: Control,m: Message) -> (bool,Message)
Processes a key message and generates the appropriate control events.
m: A System.Windows.Forms.Message,passed by reference,that represents the window message to
process.
Returns: true if the message was processed by the control; otherwise,false.
"""
pass
def ProcessKeyMessage(self,*args):
"""
ProcessKeyMessage(self: Control,m: Message) -> (bool,Message)
Processes a keyboard message.
m: A System.Windows.Forms.Message,passed by reference,that represents the window message to
process.
Returns: true if the message was processed by the control; otherwise,false.
"""
pass
def ProcessKeyPreview(self,*args):
"""
ProcessKeyPreview(self: Control,m: Message) -> (bool,Message)
Previews a keyboard message.
m: A System.Windows.Forms.Message,passed by reference,that represents the window message to
process.
Returns: true if the message was processed by the control; otherwise,false.
"""
pass
def ProcessMnemonic(self,*args):
"""
ProcessMnemonic(self: ContainerControl,charCode: Char) -> bool
charCode: The character to process.
Returns: true if the character was processed as a mnemonic by the control; otherwise,false.
"""
pass
def ProcessTabKey(self,*args):
"""
ProcessTabKey(self: ContainerControl,forward: bool) -> bool
Selects the next available control and makes it the active control.
forward: true to cycle forward through the controls in the System.Windows.Forms.ContainerControl;
otherwise,false.
Returns: true if a control is selected; otherwise,false.
"""
pass
def RaiseDragEvent(self,*args):
"""
RaiseDragEvent(self: Control,key: object,e: DragEventArgs)
Raises the appropriate drag event.
key: The event to raise.
e: A System.Windows.Forms.DragEventArgs that contains the event data.
"""
pass
def RaiseKeyEvent(self,*args):
"""
RaiseKeyEvent(self: Control,key: object,e: KeyEventArgs)
Raises the appropriate key event.
key: The event to raise.
e: A System.Windows.Forms.KeyEventArgs that contains the event data.
"""
pass
def RaiseMouseEvent(self,*args):
"""
RaiseMouseEvent(self: Control,key: object,e: MouseEventArgs)
Raises the appropriate mouse event.
key: The event to raise.
e: A System.Windows.Forms.MouseEventArgs that contains the event data.
"""
pass
def RaisePaintEvent(self,*args):
"""
RaisePaintEvent(self: Control,key: object,e: PaintEventArgs)
Raises the appropriate paint event.
key: The event to raise.
e: A System.Windows.Forms.PaintEventArgs that contains the event data.
"""
pass
def RecreateHandle(self,*args):
"""
RecreateHandle(self: Control)
Forces the re-creation of the handle for the control.
"""
pass
 def RescaleConstantsForDpi(self,*args):
  """ RescaleConstantsForDpi(self: UpDownBase,deviceDpiOld: int,deviceDpiNew: int) """
  # NOTE(review): undocumented in the generated metadata. The old/new device-DPI
  # parameters suggest it rescales internal layout constants on a DPI change --
  # confirm against the .NET UpDownBase reference.
  pass
def ResetMouseEventArgs(self,*args):
"""
ResetMouseEventArgs(self: Control)
Resets the control to handle the System.Windows.Forms.Control.MouseLeave event.
"""
pass
 def RtlTranslateAlignment(self,*args):
  """
  RtlTranslateAlignment(self: Control,align: ContentAlignment) -> ContentAlignment
  Converts the specified System.Drawing.ContentAlignment to the appropriate
   System.Drawing.ContentAlignment to support right-to-left text.
  align: One of the System.Drawing.ContentAlignment values.
  Returns: One of the System.Drawing.ContentAlignment values.
  RtlTranslateAlignment(self: Control,align: LeftRightAlignment) -> LeftRightAlignment
  Converts the specified System.Windows.Forms.LeftRightAlignment to the appropriate
   System.Windows.Forms.LeftRightAlignment to support right-to-left text.
  align: One of the System.Windows.Forms.LeftRightAlignment values.
  Returns: One of the System.Windows.Forms.LeftRightAlignment values.
  RtlTranslateAlignment(self: Control,align: HorizontalAlignment) -> HorizontalAlignment
  Converts the specified System.Windows.Forms.HorizontalAlignment to the appropriate
   System.Windows.Forms.HorizontalAlignment to support right-to-left text.
  align: One of the System.Windows.Forms.HorizontalAlignment values.
  Returns: One of the System.Windows.Forms.HorizontalAlignment values.
  """
  # Three CLR overloads are documented above (ContentAlignment, LeftRightAlignment,
  # HorizontalAlignment); the runtime selects one by the argument's CLR type.
  pass
def RtlTranslateContent(self,*args):
"""
RtlTranslateContent(self: Control,align: ContentAlignment) -> ContentAlignment
Converts the specified System.Drawing.ContentAlignment to the appropriate
System.Drawing.ContentAlignment to support right-to-left text.
align: One of the System.Drawing.ContentAlignment values.
Returns: One of the System.Drawing.ContentAlignment values.
"""
pass
def RtlTranslateHorizontal(self,*args):
"""
RtlTranslateHorizontal(self: Control,align: HorizontalAlignment) -> HorizontalAlignment
Converts the specified System.Windows.Forms.HorizontalAlignment to the appropriate
System.Windows.Forms.HorizontalAlignment to support right-to-left text.
align: One of the System.Windows.Forms.HorizontalAlignment values.
Returns: One of the System.Windows.Forms.HorizontalAlignment values.
"""
pass
def RtlTranslateLeftRight(self,*args):
"""
RtlTranslateLeftRight(self: Control,align: LeftRightAlignment) -> LeftRightAlignment
Converts the specified System.Windows.Forms.LeftRightAlignment to the appropriate
System.Windows.Forms.LeftRightAlignment to support right-to-left text.
align: One of the System.Windows.Forms.LeftRightAlignment values.
Returns: One of the System.Windows.Forms.LeftRightAlignment values.
"""
pass
def ScaleControl(self,*args):
"""
ScaleControl(self: ScrollableControl,factor: SizeF,specified: BoundsSpecified)
factor: The factor by which the height and width of the control will be scaled.
specified: A System.Windows.Forms.BoundsSpecified value that specifies the bounds of the control to use
when defining its size and position.
"""
pass
def ScaleCore(self,*args):
"""
ScaleCore(self: ScrollableControl,dx: Single,dy: Single)
dx: The horizontal scaling factor.
dy: The vertical scaling factor.
"""
pass
def ScrollToControl(self,*args):
"""
ScrollToControl(self: ScrollableControl,activeControl: Control) -> Point
Calculates the scroll offset to the specified child control.
activeControl: The child control to scroll into view.
Returns: The upper-left hand System.Drawing.Point of the display area relative to the client area
required to scroll the control into view.
"""
pass
def Select(self,start=None,length=None):
"""
Select(self: ContainerControl,directed: bool,forward: bool)
directed: true to specify the direction of the control to select; otherwise,false.
forward: true to move forward in the tab order; false to move backward in the tab order.
"""
pass
def SetAutoSizeMode(self,*args):
"""
SetAutoSizeMode(self: Control,mode: AutoSizeMode)
Sets a value indicating how a control will behave when its System.Windows.Forms.Control.AutoSize
property is enabled.
mode: One of the System.Windows.Forms.AutoSizeMode values.
"""
pass
def SetBoundsCore(self,*args):
"""
SetBoundsCore(self: Control,x: int,y: int,width: int,height: int,specified: BoundsSpecified)
Performs the work of setting the specified bounds of this control.
x: The new System.Windows.Forms.Control.Left property value of the control.
y: The new System.Windows.Forms.Control.Top property value of the control.
width: The new System.Windows.Forms.Control.Width property value of the control.
height: The new System.Windows.Forms.Control.Height property value of the control.
specified: A bitwise combination of the System.Windows.Forms.BoundsSpecified values.
"""
pass
def SetClientSizeCore(self,*args):
"""
SetClientSizeCore(self: Control,x: int,y: int)
Sets the size of the client area of the control.
x: The client area width,in pixels.
y: The client area height,in pixels.
"""
pass
def SetDisplayRectLocation(self,*args):
"""
SetDisplayRectLocation(self: ScrollableControl,x: int,y: int)
Positions the display window to the specified value.
x: The horizontal offset at which to position the System.Windows.Forms.ScrollableControl.
y: The vertical offset at which to position the System.Windows.Forms.ScrollableControl.
"""
pass
def SetScrollState(self,*args):
"""
SetScrollState(self: ScrollableControl,bit: int,value: bool)
Sets the specified scroll state flag.
bit: The scroll state flag to set.
value: The value to set the flag.
"""
pass
def SetStyle(self,*args):
"""
SetStyle(self: Control,flag: ControlStyles,value: bool)
Sets a specified System.Windows.Forms.ControlStyles flag to either true or false.
flag: The System.Windows.Forms.ControlStyles bit to set.
value: true to apply the specified style to the control; otherwise,false.
"""
pass
def SetTopLevel(self,*args):
"""
SetTopLevel(self: Control,value: bool)
Sets the control as the top-level control.
value: true to set the control as the top-level control; otherwise,false.
"""
pass
def SetVisibleCore(self,*args):
"""
SetVisibleCore(self: Control,value: bool)
Sets the control to the specified visible state.
value: true to make the control visible; otherwise,false.
"""
pass
def SizeFromClientSize(self,*args):
"""
SizeFromClientSize(self: Control,clientSize: Size) -> Size
Determines the size of the entire control from the height and width of its client area.
clientSize: A System.Drawing.Size value representing the height and width of the control's client area.
Returns: A System.Drawing.Size value representing the height and width of the entire control.
"""
pass
def ToString(self):
"""
ToString(self: NumericUpDown) -> str
Returns a string that represents the System.Windows.Forms.NumericUpDown control.
Returns: A string that represents the current System.Windows.Forms.NumericUpDown.
"""
pass
def UpButton(self):
"""
UpButton(self: NumericUpDown)
Increments the value of the spin box (also known as an up-down control).
"""
pass
def UpdateBounds(self,*args):
"""
UpdateBounds(self: Control,x: int,y: int,width: int,height: int,clientWidth: int,clientHeight: int)
Updates the bounds of the control with the specified size,location,and client size.
x: The System.Drawing.Point.X coordinate of the control.
y: The System.Drawing.Point.Y coordinate of the control.
width: The System.Drawing.Size.Width of the control.
height: The System.Drawing.Size.Height of the control.
clientWidth: The client System.Drawing.Size.Width of the control.
clientHeight: The client System.Drawing.Size.Height of the control.
UpdateBounds(self: Control,x: int,y: int,width: int,height: int)
Updates the bounds of the control with the specified size and location.
x: The System.Drawing.Point.X coordinate of the control.
y: The System.Drawing.Point.Y coordinate of the control.
width: The System.Drawing.Size.Width of the control.
height: The System.Drawing.Size.Height of the control.
UpdateBounds(self: Control)
Updates the bounds of the control with the current size and location.
"""
pass
def UpdateDefaultButton(self,*args):
"""
UpdateDefaultButton(self: ContainerControl)
When overridden by a derived class,updates which button is the default button.
"""
pass
def UpdateEditText(self,*args):
"""
UpdateEditText(self: NumericUpDown)
Displays the current value of the spin box (also known as an up-down control) in the appropriate
format.
"""
pass
def UpdateStyles(self,*args):
"""
UpdateStyles(self: Control)
Forces the assigned styles to be reapplied to the control.
"""
pass
def UpdateZOrder(self,*args):
"""
UpdateZOrder(self: Control)
Updates the control in its parent's z-order.
"""
pass
def ValidateEditText(self,*args):
"""
ValidateEditText(self: NumericUpDown)
Validates and updates the text displayed in the spin box (also known as an up-down control).
"""
pass
def WndProc(self,*args):
"""
WndProc(self: UpDownBase,m: Message) -> Message
Processes Windows messages.
m: The Windows System.Windows.Forms.Message to process.
"""
pass
def __enter__(self,*args):
"""
__enter__(self: IDisposable) -> object
Provides the implementation of __enter__ for objects which implement IDisposable.
"""
pass
def __exit__(self,*args):
"""
__exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object)
Provides the implementation of __exit__ for objects which implement IDisposable.
"""
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __str__(self,*args):
pass
Accelerations=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a collection of sorted acceleration objects for the System.Windows.Forms.NumericUpDown control.
Get: Accelerations(self: NumericUpDown) -> NumericUpDownAccelerationCollection
"""
AutoScaleFactor=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the scaling factor between the current and design-time automatic scaling dimensions.
"""
CanEnableIme=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value indicating whether the System.Windows.Forms.Control.ImeMode property can be set to an active value,to enable IME support.
"""
CanRaiseEvents=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Determines if events can be raised on the control.
"""
ChangingText=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether the text property is being changed internally by its parent class.
"""
CreateParams=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the required creation parameters when the control handle is created.
"""
DecimalPlaces=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the number of decimal places to display in the spin box (also known as an up-down control).
Get: DecimalPlaces(self: NumericUpDown) -> int
Set: DecimalPlaces(self: NumericUpDown)=value
"""
DefaultCursor=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the default cursor for the control.
"""
DefaultImeMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the default Input Method Editor (IME) mode supported by the control.
"""
DefaultMargin=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the space,in pixels,that is specified by default between controls.
"""
DefaultMaximumSize=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the length and height,in pixels,that is specified as the default maximum size of a control.
"""
DefaultMinimumSize=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the length and height,in pixels,that is specified as the default minimum size of a control.
"""
DefaultPadding=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the internal spacing,in pixels,of the contents of a control.
"""
DefaultSize=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the default size of the control.
"""
DesignMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value that indicates whether the System.ComponentModel.Component is currently in design mode.
"""
DoubleBuffered=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether this control should redraw its surface using a secondary buffer to reduce or prevent flicker.
"""
Events=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the list of event handlers that are attached to this System.ComponentModel.Component.
"""
FontHeight=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the height of the font of the control.
"""
Hexadecimal=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether the spin box (also known as an up-down control) should display the value it contains in hexadecimal format.
Get: Hexadecimal(self: NumericUpDown) -> bool
Set: Hexadecimal(self: NumericUpDown)=value
"""
HScroll=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether the horizontal scroll bar is visible.
"""
ImeModeBase=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the IME mode of a control.
"""
Increment=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the value to increment or decrement the spin box (also known as an up-down control) when the up or down buttons are clicked.
Get: Increment(self: NumericUpDown) -> Decimal
Set: Increment(self: NumericUpDown)=value
"""
Maximum=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the maximum value for the spin box (also known as an up-down control).
Get: Maximum(self: NumericUpDown) -> Decimal
Set: Maximum(self: NumericUpDown)=value
"""
Minimum=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the minimum allowed value for the spin box (also known as an up-down control).
Get: Minimum(self: NumericUpDown) -> Decimal
Set: Minimum(self: NumericUpDown)=value
"""
Padding=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the space between the edges of a System.Windows.Forms.NumericUpDown control and its contents.
Get: Padding(self: NumericUpDown) -> Padding
Set: Padding(self: NumericUpDown)=value
"""
RenderRightToLeft=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""This property is now obsolete.
"""
ResizeRedraw=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether the control redraws itself when resized.
"""
ScaleChildren=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value that determines the scaling of child controls.
"""
ShowFocusCues=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value indicating whether the control should display focus rectangles.
"""
ShowKeyboardCues=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value indicating whether the user interface is in the appropriate state to show or hide keyboard accelerators.
"""
Text=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the text to be displayed in the System.Windows.Forms.NumericUpDown control.
Get: Text(self: NumericUpDown) -> str
Set: Text(self: NumericUpDown)=value
"""
ThousandsSeparator=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether a thousands separator is displayed in the spin box (also known as an up-down control) when appropriate.
Get: ThousandsSeparator(self: NumericUpDown) -> bool
Set: ThousandsSeparator(self: NumericUpDown)=value
"""
UserEdit=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether a value has been entered by the user.
"""
Value=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the value assigned to the spin box (also known as an up-down control).
Get: Value(self: NumericUpDown) -> Decimal
Set: Value(self: NumericUpDown)=value
"""
VScroll=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value indicating whether the vertical scroll bar is visible.
"""
PaddingChanged=None
TextChanged=None
ValueChanged=None
| 24.261731 | 375 | 0.700053 | class NumericUpDown(UpDownBase,IComponent,IDisposable,IOleControl,IOleObject,IOleInPlaceObject,IOleInPlaceActiveObject,IOleWindow,IViewObject,IViewObject2,IPersist,IPersistStreamInit,IPersistPropertyBag,IPersistStorage,IQuickActivate,ISupportOleDropSource,IDropTarget,ISynchronizeInvoke,IWin32Window,IArrangedElement,IBindableComponent,IContainerControl,ISupportInitialize):
def AccessibilityNotifyClients(self,*args):
pass
def AdjustFormScrollbars(self,*args):
pass
def BeginInit(self):
pass
def CreateAccessibilityInstance(self,*args):
pass
def CreateControlsInstance(self,*args):
pass
def CreateHandle(self,*args):
pass
def DefWndProc(self,*args):
pass
def DestroyHandle(self,*args):
pass
def Dispose(self):
pass
def DownButton(self):
pass
def EndInit(self):
pass
def GetAccessibilityObjectById(self,*args):
pass
def GetAutoSizeMode(self,*args):
pass
def GetScaledBounds(self,*args):
pass
def GetScrollState(self,*args):
pass
def GetService(self,*args):
pass
def GetStyle(self,*args):
pass
def GetTopLevel(self,*args):
pass
def InitLayout(self,*args):
pass
def InvokeGotFocus(self,*args):
pass
def InvokeLostFocus(self,*args):
pass
def InvokeOnClick(self,*args):
pass
def InvokePaint(self,*args):
pass
def InvokePaintBackground(self,*args):
pass
def IsInputChar(self,*args):
pass
def IsInputKey(self,*args):
pass
def MemberwiseClone(self,*args):
pass
def NotifyInvalidate(self,*args):
pass
def OnAutoSizeChanged(self,*args):
pass
def OnAutoValidateChanged(self,*args):
pass
def OnBackColorChanged(self,*args):
pass
def OnBackgroundImageChanged(self,*args):
pass
def OnBackgroundImageLayoutChanged(self,*args):
pass
def OnBindingContextChanged(self,*args):
pass
def OnCausesValidationChanged(self,*args):
pass
def OnChanged(self,*args):
pass
def OnChangeUICues(self,*args):
pass
def OnClick(self,*args):
pass
def OnClientSizeChanged(self,*args):
pass
def OnContextMenuChanged(self,*args):
pass
def OnContextMenuStripChanged(self,*args):
pass
def OnControlAdded(self,*args):
pass
def OnControlRemoved(self,*args):
pass
def OnCreateControl(self,*args):
pass
def OnCursorChanged(self,*args):
pass
def OnDockChanged(self,*args):
pass
def OnDoubleClick(self,*args):
pass
def OnDpiChangedAfterParent(self,*args):
pass
def OnDpiChangedBeforeParent(self,*args):
pass
def OnDragDrop(self,*args):
pass
def OnDragEnter(self,*args):
pass
def OnDragLeave(self,*args):
pass
def OnDragOver(self,*args):
pass
def OnEnabledChanged(self,*args):
pass
def OnEnter(self,*args):
pass
def OnFontChanged(self,*args):
pass
def OnForeColorChanged(self,*args):
pass
def OnGiveFeedback(self,*args):
pass
def OnGotFocus(self,*args):
pass
def OnHandleCreated(self,*args):
pass
def OnHandleDestroyed(self,*args):
pass
def OnHelpRequested(self,*args):
pass
def OnImeModeChanged(self,*args):
pass
def OnInvalidated(self,*args):
pass
def OnKeyDown(self,*args):
pass
def OnKeyPress(self,*args):
pass
def OnKeyUp(self,*args):
pass
def OnLayout(self,*args):
pass
def OnLeave(self,*args):
pass
def OnLocationChanged(self,*args):
pass
def OnLostFocus(self,*args):
pass
def OnMarginChanged(self,*args):
pass
def OnMouseCaptureChanged(self,*args):
pass
def OnMouseClick(self,*args):
pass
def OnMouseDoubleClick(self,*args):
pass
def OnMouseDown(self,*args):
pass
def OnMouseEnter(self,*args):
pass
def OnMouseHover(self,*args):
pass
def OnMouseLeave(self,*args):
pass
def OnMouseMove(self,*args):
pass
def OnMouseUp(self,*args):
pass
def OnMouseWheel(self,*args):
pass
def OnMove(self,*args):
pass
def OnNotifyMessage(self,*args):
pass
def OnPaddingChanged(self,*args):
pass
def OnPaint(self,*args):
pass
def OnPaintBackground(self,*args):
pass
def OnParentBackColorChanged(self,*args):
pass
def OnParentBackgroundImageChanged(self,*args):
pass
def OnParentBindingContextChanged(self,*args):
pass
def OnParentChanged(self,*args):
pass
def OnParentCursorChanged(self,*args):
pass
def OnParentEnabledChanged(self,*args):
pass
def OnParentFontChanged(self,*args):
pass
def OnParentForeColorChanged(self,*args):
pass
def OnParentRightToLeftChanged(self,*args):
pass
def OnParentVisibleChanged(self,*args):
pass
def OnPreviewKeyDown(self,*args):
pass
def OnPrint(self,*args):
pass
def OnQueryContinueDrag(self,*args):
pass
def OnRegionChanged(self,*args):
pass
def OnResize(self,*args):
pass
def OnRightToLeftChanged(self,*args):
pass
def OnScroll(self,*args):
pass
def OnSizeChanged(self,*args):
pass
def OnStyleChanged(self,*args):
pass
def OnSystemColorsChanged(self,*args):
pass
def OnTabIndexChanged(self,*args):
pass
def OnTabStopChanged(self,*args):
pass
def OnTextBoxKeyDown(self,*args):
pass
def OnTextBoxKeyPress(self,*args):
pass
def OnTextBoxLostFocus(self,*args):
pass
def OnTextBoxResize(self,*args):
pass
def OnTextBoxTextChanged(self,*args):
pass
def OnTextChanged(self,*args):
pass
def OnValidated(self,*args):
pass
def OnValidating(self,*args):
pass
def OnValueChanged(self,*args):
pass
def OnVisibleChanged(self,*args):
pass
def ParseEditText(self,*args):
pass
def ProcessCmdKey(self,*args):
pass
def ProcessDialogChar(self,*args):
pass
def ProcessDialogKey(self,*args):
pass
def ProcessKeyEventArgs(self,*args):
pass
def ProcessKeyMessage(self,*args):
pass
def ProcessKeyPreview(self,*args):
pass
def ProcessMnemonic(self,*args):
pass
def ProcessTabKey(self,*args):
pass
def RaiseDragEvent(self,*args):
pass
def RaiseKeyEvent(self,*args):
pass
def RaiseMouseEvent(self,*args):
pass
def RaisePaintEvent(self,*args):
pass
def RecreateHandle(self,*args):
pass
def RescaleConstantsForDpi(self,*args):
pass
def ResetMouseEventArgs(self,*args):
pass
def RtlTranslateAlignment(self,*args):
pass
def RtlTranslateContent(self,*args):
pass
def RtlTranslateHorizontal(self,*args):
pass
def RtlTranslateLeftRight(self,*args):
pass
def ScaleControl(self,*args):
pass
def ScaleCore(self,*args):
pass
def ScrollToControl(self,*args):
pass
def Select(self,start=None,length=None):
pass
def SetAutoSizeMode(self,*args):
pass
def SetBoundsCore(self,*args):
pass
def SetClientSizeCore(self,*args):
pass
def SetDisplayRectLocation(self,*args):
pass
def SetScrollState(self,*args):
pass
def SetStyle(self,*args):
pass
def SetTopLevel(self,*args):
pass
def SetVisibleCore(self,*args):
pass
def SizeFromClientSize(self,*args):
pass
def ToString(self):
pass
def UpButton(self):
pass
def UpdateBounds(self,*args):
pass
def UpdateDefaultButton(self,*args):
pass
def UpdateEditText(self,*args):
pass
def UpdateStyles(self,*args):
pass
def UpdateZOrder(self,*args):
pass
def ValidateEditText(self,*args):
pass
def WndProc(self,*args):
pass
def __enter__(self,*args):
pass
def __exit__(self,*args):
pass
def __init__(self,*args):
pass
def __str__(self,*args):
pass
Accelerations=property(lambda self: object(),lambda self,v: None,lambda self: None)
AutoScaleFactor=property(lambda self: object(),lambda self,v: None,lambda self: None)
CanEnableIme=property(lambda self: object(),lambda self,v: None,lambda self: None)
CanRaiseEvents=property(lambda self: object(),lambda self,v: None,lambda self: None)
ChangingText=property(lambda self: object(),lambda self,v: None,lambda self: None)
CreateParams=property(lambda self: object(),lambda self,v: None,lambda self: None)
DecimalPlaces=property(lambda self: object(),lambda self,v: None,lambda self: None)
DefaultCursor=property(lambda self: object(),lambda self,v: None,lambda self: None)
DefaultImeMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
DefaultMargin=property(lambda self: object(),lambda self,v: None,lambda self: None)
DefaultMaximumSize=property(lambda self: object(),lambda self,v: None,lambda self: None)
DefaultMinimumSize=property(lambda self: object(),lambda self,v: None,lambda self: None)
DefaultPadding=property(lambda self: object(),lambda self,v: None,lambda self: None)
DefaultSize=property(lambda self: object(),lambda self,v: None,lambda self: None)
DesignMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
DoubleBuffered=property(lambda self: object(),lambda self,v: None,lambda self: None)
Events=property(lambda self: object(),lambda self,v: None,lambda self: None)
FontHeight=property(lambda self: object(),lambda self,v: None,lambda self: None)
Hexadecimal=property(lambda self: object(),lambda self,v: None,lambda self: None)
HScroll=property(lambda self: object(),lambda self,v: None,lambda self: None)
ImeModeBase=property(lambda self: object(),lambda self,v: None,lambda self: None)
Increment=property(lambda self: object(),lambda self,v: None,lambda self: None)
Maximum=property(lambda self: object(),lambda self,v: None,lambda self: None)
Minimum=property(lambda self: object(),lambda self,v: None,lambda self: None)
Padding=property(lambda self: object(),lambda self,v: None,lambda self: None)
RenderRightToLeft=property(lambda self: object(),lambda self,v: None,lambda self: None)
ResizeRedraw=property(lambda self: object(),lambda self,v: None,lambda self: None)
ScaleChildren=property(lambda self: object(),lambda self,v: None,lambda self: None)
ShowFocusCues=property(lambda self: object(),lambda self,v: None,lambda self: None)
ShowKeyboardCues=property(lambda self: object(),lambda self,v: None,lambda self: None)
Text=property(lambda self: object(),lambda self,v: None,lambda self: None)
ThousandsSeparator=property(lambda self: object(),lambda self,v: None,lambda self: None)
UserEdit=property(lambda self: object(),lambda self,v: None,lambda self: None)
Value=property(lambda self: object(),lambda self,v: None,lambda self: None)
VScroll=property(lambda self: object(),lambda self,v: None,lambda self: None)
PaddingChanged=None
TextChanged=None
ValueChanged=None
| true | true |
f72bc0bbb4aca05776043b6a2bbf09555f2323ed | 498 | py | Python | barberShopControl/core/migrations/0017_reserva_res_servicos.py | YohaneSilva/Barbearia-Amizades | 8550cc5de7a0055be78f9e539de9e5c72a8c3a61 | [
"Apache-2.0"
] | 1 | 2020-03-19T21:09:24.000Z | 2020-03-19T21:09:24.000Z | barberShopControl/core/migrations/0017_reserva_res_servicos.py | YohaneSilva/Barbearia-Amizades | 8550cc5de7a0055be78f9e539de9e5c72a8c3a61 | [
"Apache-2.0"
] | null | null | null | barberShopControl/core/migrations/0017_reserva_res_servicos.py | YohaneSilva/Barbearia-Amizades | 8550cc5de7a0055be78f9e539de9e5c72a8c3a61 | [
"Apache-2.0"
] | null | null | null | # Generated by Django 3.0.5 on 2020-05-18 04:14
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('core', '0016_auto_20200516_0338'),
]
operations = [
migrations.AddField(
model_name='reserva',
name='res_servicos',
field=models.TextField(default=django.utils.timezone.now, verbose_name='Servicos'),
preserve_default=False,
),
]
| 23.714286 | 95 | 0.634538 |
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('core', '0016_auto_20200516_0338'),
]
operations = [
migrations.AddField(
model_name='reserva',
name='res_servicos',
field=models.TextField(default=django.utils.timezone.now, verbose_name='Servicos'),
preserve_default=False,
),
]
| true | true |
f72bc16d4a055439bb4d5c75416bba366abd7fa1 | 21,675 | py | Python | generated/nidcpower/nidcpower/_converters.py | AlexHearnNI/nimi-python | 39d7c4d9aa66d826c60a5d700982eff173e051e5 | [
"MIT"
] | null | null | null | generated/nidcpower/nidcpower/_converters.py | AlexHearnNI/nimi-python | 39d7c4d9aa66d826c60a5d700982eff173e051e5 | [
"MIT"
] | null | null | null | generated/nidcpower/nidcpower/_converters.py | AlexHearnNI/nimi-python | 39d7c4d9aa66d826c60a5d700982eff173e051e5 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# This file was generated
import nidcpower._visatype as _visatype
import nidcpower.errors as errors
import array
import datetime
import numbers
from functools import singledispatch
@singledispatch
def _convert_repeated_capabilities(arg, prefix):  # noqa: F811
    '''Fallback overload: reject repeated-capability types we cannot expand.

    The singledispatch family converts a repeated-capability description of
    any supported type into a list of suffix strings (no prefix attached).
    Registered overloads cover str, integers, list, tuple, range and slice:

    - '0'                                 --> ['0']
    - 0                                   --> ['0']
    - '0, 1'                              --> ['0', '1']
    - 'ScriptTrigger0, ScriptTrigger1'    --> ['0', '1']
    - '0-1'                               --> ['0', '1']
    - '0:1'                               --> ['0', '1']
    - '0-1,4'                             --> ['0', '1', '4']
    - range(0, 2)                         --> ['0', '1']
    - slice(0, 2)                         --> ['0', '1']
    - (0, 1, 4)                           --> ['0', '1', '4']
    - ('0-1', 4)                          --> ['0', '1', '4']
    - (slice(0, 1), '2', [4, '5-6'], '7-9', '11:14', '16, 17') -->
        ['0', '2', '4', '5', '6', '7', '8', '9', '11', '12', '13', '14', '16', '17']

    Any other type lands on this base implementation and is rejected.
    '''
    raise errors.InvalidRepeatedCapabilityError('Invalid type', type(arg))
@_convert_repeated_capabilities.register(numbers.Integral)  # noqa: F811
def _(repeated_capability, prefix):
    '''Integral overload: a single index becomes a one-element list.'''
    return ['{0}'.format(repeated_capability)]
# This parsing duplicates the parsing done inside the driver, so if the
# allowed format ever changes there, the change must be replicated here.
@_convert_repeated_capabilities.register(str)  # noqa: F811
def _(repeated_capability, prefix):
    '''String overload — the most general form.

    Handles a comma-delimited list ('0,1,2,3'), a range using '-' or ':'
    ('0-3', '0:3', descending allowed), or a single item; any occurrence of
    *prefix* is stripped from the result.
    '''
    # A comma-delimited list is split and re-dispatched through the
    # iterable overload.
    items = repeated_capability.split(',')
    if len(items) > 1:
        return _convert_repeated_capabilities(items, prefix)

    # Normalize a potential range: drop the prefix and unify ':' to '-'.
    normalized = repeated_capability.strip().replace(prefix, '').replace(':', '-')
    endpoints = normalized.split('-')
    if len(endpoints) == 1:
        # Simple single item: strip the prefix and return it.
        return [repeated_capability.replace(prefix, '').strip()]
    if len(endpoints) > 2:
        raise errors.InvalidRepeatedCapabilityError("Multiple '-' or ':'", repeated_capability)

    first = int(endpoints[0])
    last = int(endpoints[1])
    step = -1 if last < first else 1
    # Expand the (possibly descending) inclusive range via the range overload.
    return _convert_repeated_capabilities(range(first, last + step, step), prefix)
# NOTE: registering collections.abc.Iterable would also capture str (strings
# are iterable), stealing dispatch from the dedicated string overload — so
# the concrete container types are registered individually instead.
@_convert_repeated_capabilities.register(list)  # noqa: F811
@_convert_repeated_capabilities.register(range)  # noqa: F811
@_convert_repeated_capabilities.register(tuple)  # noqa: F811
def _(repeated_capability, prefix):
    '''Iterable overload (list/range/tuple): expand each element in order and concatenate.'''
    return [
        expanded
        for element in repeated_capability
        for expanded in _convert_repeated_capabilities(element, prefix)
    ]
@_convert_repeated_capabilities.register(slice)  # noqa: F811
def _(repeated_capability, prefix):
    '''Slice overload: convert to the equivalent range and re-dispatch.

    Missing ``start``/``step`` default to 0 and 1, matching slice semantics.
    '''
    start = 0 if repeated_capability.start is None else repeated_capability.start
    step = 1 if repeated_capability.step is None else repeated_capability.step
    return _convert_repeated_capabilities(range(start, repeated_capability.stop, step), prefix)
def convert_repeated_capabilities(repeated_capability, prefix=''):
    '''Expand a repeated-capability description into a list of prefixed strings.

    Args:
        repeated_capability (str, int, list, tuple, slice, range, None): item(s) to expand
        prefix (str): common prefix prepended to every expanded item

    Returns:
        list of str: each item with ranges unrolled and *prefix* attached
    '''
    # None means "no repeated capabilities"; everything else is handed to the
    # singledispatch overloads of _convert_repeated_capabilities.
    if repeated_capability is None:
        return []
    expanded = _convert_repeated_capabilities(repeated_capability, prefix)
    return [prefix + item for item in expanded]
def convert_repeated_capabilities_from_init(repeated_capability):
    '''Render a repeated-capability description as a single comma-delimited string.

    Called from the code-generated __init__(), which always deals with
    channels, so no prefix is applied.

    Args:
        repeated_capability (str, int, list, tuple, slice, range, None): item(s) to expand

    Returns:
        str: comma-delimited expansion with ranges unrolled
    '''
    expanded = convert_repeated_capabilities(repeated_capability, '')
    return ','.join(expanded)
def _convert_timedelta(value, library_type, scaling):
    '''Scale *value* (a timedelta or plain number of seconds) and wrap it in *library_type*.

    Args:
        value: datetime.timedelta, or anything float() accepts (seconds)
        library_type: _visatype ctypes wrapper to construct
        scaling: multiplier applied to the seconds value (e.g. 1000 for milliseconds)
    '''
    try:
        # EAFP: prefer the datetime.timedelta interface if it is available.
        seconds = value.total_seconds()
    except AttributeError:
        # Treat the value as a raw number of seconds; float() keeps plain
        # ints such as `timeout=10` working as expected.
        seconds = float(value)
    scaled_value = seconds * scaling
    # ctypes integer types will not accept a float, so truncate explicitly.
    integer_types = (_visatype.ViInt64, _visatype.ViInt32, _visatype.ViUInt32, _visatype.ViInt16, _visatype.ViUInt16, _visatype.ViInt8)
    if library_type in integer_types:
        scaled_value = int(scaled_value)
    return library_type(scaled_value)
def convert_timedelta_to_seconds_real64(value):
    '''Convert a timedelta (or plain seconds count) to a ViReal64 number of seconds.'''
    return _convert_timedelta(value, library_type=_visatype.ViReal64, scaling=1)
def convert_timedelta_to_milliseconds_int32(value):
    '''Convert a timedelta (or plain seconds count) to a ViInt32 number of milliseconds.'''
    return _convert_timedelta(value, library_type=_visatype.ViInt32, scaling=1000)
def convert_timedeltas_to_seconds_real64(values):
    '''Convert each element of *values* to a ViReal64 number of seconds.'''
    return [convert_timedelta_to_seconds_real64(value) for value in values]
def convert_seconds_real64_to_timedeltas(seconds):
    '''Wrap each seconds value in *seconds* in a datetime.timedelta.'''
    return [datetime.timedelta(seconds=value) for value in seconds]
def convert_month_to_timedelta(months):
    '''Approximate *months* as a datetime.timedelta (30.4167 days per month).'''
    days_per_month = 30.4167  # average Gregorian month length in days
    return datetime.timedelta(days=days_per_month * months)
# This converter is not called from the normal codegen path for functions.
# Instead it is called from init and is a special case.
def convert_init_with_options_dictionary(values):
    '''Build the option string passed to the driver's InitWithOptions.

    Args:
        values (str or dict): an already-formatted option string (returned
            unchanged), or a dictionary mapping option names to values.
            Recognized names (in driver spelling or snake_case, any case) are
            normalized to the canonical driver spelling; unrecognized names
            are passed through as-is. A boolean option renders as ``=1`` only
            when the value is exactly ``True``, otherwise ``=0``.
            ``driver_setup``/``DriverSetup`` must map to a dictionary and
            renders as ``DriverSetup=key:value;...`` with keys sorted.

    Returns:
        str: comma-delimited option string, options sorted by key.

    Raises:
        TypeError: if the DriverSetup value is not a dictionary.
    '''
    # isinstance (rather than `type(values) is str`) so str subclasses work too.
    if isinstance(values, str):
        return values

    good_keys = {
        'rangecheck': 'RangeCheck',
        'queryinstrstatus': 'QueryInstrStatus',
        'cache': 'Cache',
        'simulate': 'Simulate',
        'recordcoercions': 'RecordCoercions',
        'interchangecheck': 'InterchangeCheck',
        'driversetup': 'DriverSetup',
        'range_check': 'RangeCheck',
        'query_instr_status': 'QueryInstrStatus',
        'record_coercions': 'RecordCoercions',
        'interchange_check': 'InterchangeCheck',
        'driver_setup': 'DriverSetup',
    }
    init_with_options = []
    for k in sorted(values.keys()):
        canonical = good_keys.get(k.lower())
        if canonical == 'DriverSetup':
            if not isinstance(values[k], dict):
                raise TypeError('DriverSetup must be a dictionary')
            value = 'DriverSetup=' + ';'.join(key + ':' + values[k][key] for key in sorted(values[k]))
        else:
            # Unrecognized keys keep the caller's original spelling.
            name = canonical if canonical is not None else k
            # Only the exact value True renders as =1; any other value is =0.
            value = name + ('=1' if values[k] is True else '=0')
        init_with_options.append(value)
    return ','.join(init_with_options)
# convert value to bytes
@singledispatch
def _convert_to_bytes(value):  # noqa: F811
    '''Fallback overload: reject unsupported types with a clear message.

    Previously this returned None, so convert_to_bytes failed later with an
    opaque `bytes(None)` TypeError; raising here keeps the same exception
    class but names the offending type.
    '''
    raise TypeError('Cannot convert {0} to bytes'.format(type(value)))


@_convert_to_bytes.register(list)  # noqa: F811
@_convert_to_bytes.register(bytes)  # noqa: F811
@_convert_to_bytes.register(bytearray)  # noqa: F811
@_convert_to_bytes.register(array.array)  # noqa: F811
def _(value):
    '''Bytes-like / integer-sequence overload: the bytes() constructor accepts these directly.'''
    return value


@_convert_to_bytes.register(str)  # noqa: F811
def _(value):
    '''String overload: encode to bytes (str.encode default encoding, UTF-8).'''
    return value.encode()


def convert_to_bytes(value):  # noqa: F811
    '''Return *value* as an immutable bytes object.

    Accepts str (UTF-8 encoded), bytes, bytearray, array.array, or a list of
    integers in range(256).

    Raises:
        TypeError: if *value* is of an unsupported type.
    '''
    return bytes(_convert_to_bytes(value))
# Let's run some tests
def test_convert_init_with_options_dictionary():
assert convert_init_with_options_dictionary('') == ''
assert convert_init_with_options_dictionary('Simulate=1') == 'Simulate=1'
assert convert_init_with_options_dictionary({'Simulate': True, }) == 'Simulate=1'
assert convert_init_with_options_dictionary({'Simulate': False, }) == 'Simulate=0'
assert convert_init_with_options_dictionary({'Simulate': True, 'Cache': False}) == 'Cache=0,Simulate=1'
assert convert_init_with_options_dictionary({'DriverSetup': {'Model': '5162 (4CH)', 'Bitfile': 'CustomProcessing'}}) == 'DriverSetup=Bitfile:CustomProcessing;Model:5162 (4CH)'
assert convert_init_with_options_dictionary({'Simulate': True, 'DriverSetup': {'Model': '5162 (4CH)', 'Bitfile': 'CustomProcessing'}}) == 'DriverSetup=Bitfile:CustomProcessing;Model:5162 (4CH),Simulate=1'
assert convert_init_with_options_dictionary({'simulate': True, 'cache': False}) == 'Cache=0,Simulate=1'
assert convert_init_with_options_dictionary({'driver_setup': {'Model': '5162 (4CH)', 'Bitfile': 'CustomProcessing'}}) == 'DriverSetup=Bitfile:CustomProcessing;Model:5162 (4CH)'
assert convert_init_with_options_dictionary({'simulate': True, 'driver_setup': {'Model': '5162 (4CH)', 'Bitfile': 'CustomProcessing'}}) == 'DriverSetup=Bitfile:CustomProcessing;Model:5162 (4CH),Simulate=1'
# Tests - time
def test_convert_timedelta_to_seconds_double():
test_result = convert_timedelta_to_seconds_real64(datetime.timedelta(seconds=10))
assert test_result.value == 10.0
assert isinstance(test_result, _visatype.ViReal64)
test_result = convert_timedelta_to_seconds_real64(datetime.timedelta(seconds=-1))
assert test_result.value == -1
assert isinstance(test_result, _visatype.ViReal64)
test_result = convert_timedelta_to_seconds_real64(10.5)
assert test_result.value == 10.5
assert isinstance(test_result, _visatype.ViReal64)
test_result = convert_timedelta_to_seconds_real64(-1)
assert test_result.value == -1
assert isinstance(test_result, _visatype.ViReal64)
def test_convert_timedelta_to_milliseconds_int32():
test_result = convert_timedelta_to_milliseconds_int32(datetime.timedelta(seconds=10))
assert test_result.value == 10000
assert isinstance(test_result, _visatype.ViInt32)
test_result = convert_timedelta_to_milliseconds_int32(datetime.timedelta(seconds=-1))
assert test_result.value == -1000
assert isinstance(test_result, _visatype.ViInt32)
test_result = convert_timedelta_to_milliseconds_int32(10.5)
assert test_result.value == 10500
assert isinstance(test_result, _visatype.ViInt32)
test_result = convert_timedelta_to_milliseconds_int32(-1)
assert test_result.value == -1000
assert isinstance(test_result, _visatype.ViInt32)
def test_convert_timedeltas_to_seconds_real64():
time_values = [10.5, -1]
test_result = convert_timedeltas_to_seconds_real64(time_values)
assert all([actual.value == expected for actual, expected in zip(test_result, time_values)])
assert all([isinstance(i, _visatype.ViReal64) for i in test_result])
timedeltas = [datetime.timedelta(seconds=s, milliseconds=ms) for s, ms in zip([10, -1], [500, 0])]
test_result = convert_timedeltas_to_seconds_real64(timedeltas)
assert all([actual.value == expected for actual, expected in zip(test_result, time_values)])
assert all([isinstance(i, _visatype.ViReal64) for i in test_result])
def test_convert_seconds_real64_to_timedeltas():
time_values = [10.5, -1]
timedeltas = convert_seconds_real64_to_timedeltas(time_values)
assert all([actual.total_seconds() == expected for actual, expected in zip(timedeltas, time_values)])
# Tests - repeated capabilities
def test_repeated_capabilies_string_channel():
test_result_list = convert_repeated_capabilities('0')
assert test_result_list == ['0']
test_result_list = convert_repeated_capabilities('r0')
assert test_result_list == ['r0']
test_result_list = convert_repeated_capabilities('0,1')
assert test_result_list == ['0', '1']
def test_repeated_capabilies_string_prefix():
test_result_list = convert_repeated_capabilities('0', prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0']
def test_repeated_capabilies_list_channel():
test_result_list = convert_repeated_capabilities(['0'])
assert test_result_list == ['0']
test_result_list = convert_repeated_capabilities(['r0'])
assert test_result_list == ['r0']
test_result_list = convert_repeated_capabilities(['0', '1'])
assert test_result_list == ['0', '1']
test_result_list = convert_repeated_capabilities([0, 1])
assert test_result_list == ['0', '1']
test_result_list = convert_repeated_capabilities([0, 1, '3'])
assert test_result_list == ['0', '1', '3']
def test_repeated_capabilies_list_prefix():
test_result_list = convert_repeated_capabilities(['ScriptTrigger0', 'ScriptTrigger1'], prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities(['0'], prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0']
test_result_list = convert_repeated_capabilities(['0', '1'], prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities([0, 1], prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
def test_repeated_capabilies_tuple_channel():
test_result_list = convert_repeated_capabilities(('0'))
assert test_result_list == ['0']
test_result_list = convert_repeated_capabilities(('0,1'))
assert test_result_list == ['0', '1']
test_result_list = convert_repeated_capabilities(('0', '1'))
assert test_result_list == ['0', '1']
test_result_list = convert_repeated_capabilities((0, 1))
assert test_result_list == ['0', '1']
test_result_list = convert_repeated_capabilities((0, 1, '3'))
assert test_result_list == ['0', '1', '3']
def test_repeated_capabilies_tuple_prefix():
test_result_list = convert_repeated_capabilities(('ScriptTrigger0,ScriptTrigger1'), prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities(('0'), prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0']
test_result_list = convert_repeated_capabilities(('0', '1'), prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities((0, 1), prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
def test_repeated_capabilies_unicode():
test_result_list = convert_repeated_capabilities(u'ScriptTrigger0,ScriptTrigger1', prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities(u'ScriptTrigger0,ScriptTrigger1', prefix=u'ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities('ScriptTrigger0,ScriptTrigger1', prefix=u'ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
def test_repeated_capabilies_raw():
test_result_list = convert_repeated_capabilities(r'ScriptTrigger0,ScriptTrigger1', prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities(r'ScriptTrigger0,ScriptTrigger1', prefix=r'ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities('ScriptTrigger0,ScriptTrigger1', prefix=r'ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities(r'ScriptTrigger0,ScriptTrigger1', prefix=u'ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities(r'ScriptTrigger0,ScriptTrigger1', prefix=r'ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities(u'ScriptTrigger0,ScriptTrigger1', prefix=r'ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
def test_repeated_capabilies_slice_channel():
test_result_list = convert_repeated_capabilities(slice(0, 1))
assert test_result_list == ['0']
test_result_list = convert_repeated_capabilities(slice(0, 2))
assert test_result_list == ['0', '1']
test_result_list = convert_repeated_capabilities(slice(None, 2))
assert test_result_list == ['0', '1']
def test_repeated_capabilies_mixed_channel():
test_result_list = convert_repeated_capabilities((slice(0, 1), '2', [4, '5-6'], '7-9', '11:14', '16, 17'))
assert test_result_list == ['0', '2', '4', '5', '6', '7', '8', '9', '11', '12', '13', '14', '16', '17']
test_result_list = convert_repeated_capabilities([slice(0, 1), '2', [4, '5-6'], '7-9', '11:14', '16, 17'])
assert test_result_list == ['0', '2', '4', '5', '6', '7', '8', '9', '11', '12', '13', '14', '16', '17']
def test_repeated_capabilies_mixed_prefix():
test_result_list = convert_repeated_capabilities((slice(0, 1), '2', [4, '5-6'], '7-9', '11:14', '16, 17'), prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger2', 'ScriptTrigger4', 'ScriptTrigger5', 'ScriptTrigger6', 'ScriptTrigger7', 'ScriptTrigger8', 'ScriptTrigger9', 'ScriptTrigger11', 'ScriptTrigger12', 'ScriptTrigger13', 'ScriptTrigger14', 'ScriptTrigger16', 'ScriptTrigger17']
test_result_list = convert_repeated_capabilities([slice(0, 1), '2', [4, '5-6'], '7-9', '11:14', '16, 17'], prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger2', 'ScriptTrigger4', 'ScriptTrigger5', 'ScriptTrigger6', 'ScriptTrigger7', 'ScriptTrigger8', 'ScriptTrigger9', 'ScriptTrigger11', 'ScriptTrigger12', 'ScriptTrigger13', 'ScriptTrigger14', 'ScriptTrigger16', 'ScriptTrigger17']
def test_invalid_repeated_capabilies():
try:
convert_repeated_capabilities('6-8-10')
assert False
except errors.InvalidRepeatedCapabilityError:
pass
try:
convert_repeated_capabilities(['5', '6-8-10'])
assert False
except errors.InvalidRepeatedCapabilityError:
pass
try:
convert_repeated_capabilities(('5', '6-8-10'))
assert False
except errors.InvalidRepeatedCapabilityError:
pass
try:
convert_repeated_capabilities('5,6-8-10')
assert False
except errors.InvalidRepeatedCapabilityError:
pass
try:
convert_repeated_capabilities(5.0)
assert False
except errors.InvalidRepeatedCapabilityError:
pass
try:
convert_repeated_capabilities([5.0, '0'])
assert False
except errors.InvalidRepeatedCapabilityError:
pass
try:
convert_repeated_capabilities((5.0, '0'))
assert False
except errors.InvalidRepeatedCapabilityError:
pass
def test_repeated_capabilies_slice_prefix():
test_result_list = convert_repeated_capabilities(slice(0, 1), prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0']
test_result_list = convert_repeated_capabilities(slice(0, 2), prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities(slice(None, 2), prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
def test_repeated_capabilies_from_init():
test_result = convert_repeated_capabilities_from_init((slice(0, 1), '2', [4, '5-6'], '7-9', '11:14', '16, 17'))
assert test_result == '0,2,4,5,6,7,8,9,11,12,13,14,16,17'
def test_string_to_list_channel():
test_result = _convert_repeated_capabilities('r0', '')
assert test_result == ['r0']
test_result = _convert_repeated_capabilities(['0-2'], '')
assert test_result == ['0', '1', '2']
test_result = _convert_repeated_capabilities(['3:7'], '')
assert test_result == ['3', '4', '5', '6', '7']
test_result = _convert_repeated_capabilities(['2-0'], '')
assert test_result == ['2', '1', '0']
test_result = _convert_repeated_capabilities(['2:0'], '')
assert test_result == ['2', '1', '0']
def test_string_to_list_prefix():
test_result = _convert_repeated_capabilities(['ScriptTrigger3-ScriptTrigger7'], 'ScriptTrigger')
assert test_result == ['3', '4', '5', '6', '7']
test_result = _convert_repeated_capabilities(['ScriptTrigger3:ScriptTrigger7'], 'ScriptTrigger')
assert test_result == ['3', '4', '5', '6', '7']
test_result = _convert_repeated_capabilities(['ScriptTrigger2-ScriptTrigger0'], 'ScriptTrigger')
assert test_result == ['2', '1', '0']
test_result = _convert_repeated_capabilities(['ScriptTrigger2:ScriptTrigger0'], 'ScriptTrigger')
assert test_result == ['2', '1', '0']
| 44.875776 | 289 | 0.705421 |
import nidcpower._visatype as _visatype
import nidcpower.errors as errors
import array
import datetime
import numbers
from functools import singledispatch
@singledispatch
def _convert_repeated_capabilities(arg, prefix):
raise errors.InvalidRepeatedCapabilityError('Invalid type', type(arg))
@_convert_repeated_capabilities.register(numbers.Integral)
def _(repeated_capability, prefix):
return [str(repeated_capability)]
@_convert_repeated_capabilities.register(str)
def _(repeated_capability, prefix):
rep_cap_list = repeated_capability.split(',')
if len(rep_cap_list) > 1:
return _convert_repeated_capabilities(rep_cap_list, prefix)
r = repeated_capability.strip().replace(prefix, '').replace(':', '-')
rc = r.split('-')
if len(rc) > 1:
if len(rc) > 2:
raise errors.InvalidRepeatedCapabilityError("Multiple '-' or ':'", repeated_capability)
start = int(rc[0])
end = int(rc[1])
if end < start:
rng = range(start, end - 1, -1)
else:
rng = range(start, end + 1)
return _convert_repeated_capabilities(rng, prefix)
return [repeated_capability.replace(prefix, '').strip()]
@_convert_repeated_capabilities.register(list)
@_convert_repeated_capabilities.register(range)
@_convert_repeated_capabilities.register(tuple)
def _(repeated_capability, prefix):
rep_cap_list = []
for r in repeated_capability:
rep_cap_list += _convert_repeated_capabilities(r, prefix)
return rep_cap_list
@_convert_repeated_capabilities.register(slice)
def _(repeated_capability, prefix):
def ifnone(a, b):
return b if a is None else a
rng = range(ifnone(repeated_capability.start, 0), repeated_capability.stop, ifnone(repeated_capability.step, 1))
return _convert_repeated_capabilities(rng, prefix)
def convert_repeated_capabilities(repeated_capability, prefix=''):
if repeated_capability is None:
return []
return [prefix + r for r in _convert_repeated_capabilities(repeated_capability, prefix)]
def convert_repeated_capabilities_from_init(repeated_capability):
return ','.join(convert_repeated_capabilities(repeated_capability, ''))
def _convert_timedelta(value, library_type, scaling):
try:
scaled_value = value.total_seconds() * scaling
except AttributeError:
# cast to float so scaled_value is always a float. This allows `timeout=10` to work as expected
scaled_value = float(value) * scaling
# ctype integer types don't convert to int from float so we need to
if library_type in [_visatype.ViInt64, _visatype.ViInt32, _visatype.ViUInt32, _visatype.ViInt16, _visatype.ViUInt16, _visatype.ViInt8]:
scaled_value = int(scaled_value)
return library_type(scaled_value)
def convert_timedelta_to_seconds_real64(value):
return _convert_timedelta(value, _visatype.ViReal64, 1)
def convert_timedelta_to_milliseconds_int32(value):
return _convert_timedelta(value, _visatype.ViInt32, 1000)
def convert_timedeltas_to_seconds_real64(values):
return [convert_timedelta_to_seconds_real64(i) for i in values]
def convert_seconds_real64_to_timedeltas(seconds):
return [datetime.timedelta(seconds=i) for i in seconds]
def convert_month_to_timedelta(months):
return datetime.timedelta(days=(30.4167 * months))
def convert_init_with_options_dictionary(values):
if type(values) is str:
init_with_options_string = values
else:
good_keys = {
'rangecheck': 'RangeCheck',
'queryinstrstatus': 'QueryInstrStatus',
'cache': 'Cache',
'simulate': 'Simulate',
'recordcoercions': 'RecordCoercions',
'interchangecheck': 'InterchangeCheck',
'driversetup': 'DriverSetup',
'range_check': 'RangeCheck',
'query_instr_status': 'QueryInstrStatus',
'record_coercions': 'RecordCoercions',
'interchange_check': 'InterchangeCheck',
'driver_setup': 'DriverSetup',
}
init_with_options = []
for k in sorted(values.keys()):
value = None
if k.lower() in good_keys and not good_keys[k.lower()] == 'DriverSetup':
value = good_keys[k.lower()] + ('=1' if values[k] is True else '=0')
elif k.lower() in good_keys and good_keys[k.lower()] == 'DriverSetup':
if not isinstance(values[k], dict):
raise TypeError('DriverSetup must be a dictionary')
value = 'DriverSetup=' + (';'.join([key + ':' + values[k][key] for key in sorted(values[k])]))
else:
value = k + ('=1' if values[k] is True else '=0')
init_with_options.append(value)
init_with_options_string = ','.join(init_with_options)
return init_with_options_string
@singledispatch
def _convert_to_bytes(value):
pass
@_convert_to_bytes.register(list)
@_convert_to_bytes.register(bytes)
@_convert_to_bytes.register(bytearray)
@_convert_to_bytes.register(array.array)
def _(value):
return value
@_convert_to_bytes.register(str)
def _(value):
return value.encode()
def convert_to_bytes(value):
return bytes(_convert_to_bytes(value))
def test_convert_init_with_options_dictionary():
assert convert_init_with_options_dictionary('') == ''
assert convert_init_with_options_dictionary('Simulate=1') == 'Simulate=1'
assert convert_init_with_options_dictionary({'Simulate': True, }) == 'Simulate=1'
assert convert_init_with_options_dictionary({'Simulate': False, }) == 'Simulate=0'
assert convert_init_with_options_dictionary({'Simulate': True, 'Cache': False}) == 'Cache=0,Simulate=1'
assert convert_init_with_options_dictionary({'DriverSetup': {'Model': '5162 (4CH)', 'Bitfile': 'CustomProcessing'}}) == 'DriverSetup=Bitfile:CustomProcessing;Model:5162 (4CH)'
assert convert_init_with_options_dictionary({'Simulate': True, 'DriverSetup': {'Model': '5162 (4CH)', 'Bitfile': 'CustomProcessing'}}) == 'DriverSetup=Bitfile:CustomProcessing;Model:5162 (4CH),Simulate=1'
assert convert_init_with_options_dictionary({'simulate': True, 'cache': False}) == 'Cache=0,Simulate=1'
assert convert_init_with_options_dictionary({'driver_setup': {'Model': '5162 (4CH)', 'Bitfile': 'CustomProcessing'}}) == 'DriverSetup=Bitfile:CustomProcessing;Model:5162 (4CH)'
assert convert_init_with_options_dictionary({'simulate': True, 'driver_setup': {'Model': '5162 (4CH)', 'Bitfile': 'CustomProcessing'}}) == 'DriverSetup=Bitfile:CustomProcessing;Model:5162 (4CH),Simulate=1'
# Tests - time
def test_convert_timedelta_to_seconds_double():
test_result = convert_timedelta_to_seconds_real64(datetime.timedelta(seconds=10))
assert test_result.value == 10.0
assert isinstance(test_result, _visatype.ViReal64)
test_result = convert_timedelta_to_seconds_real64(datetime.timedelta(seconds=-1))
assert test_result.value == -1
assert isinstance(test_result, _visatype.ViReal64)
test_result = convert_timedelta_to_seconds_real64(10.5)
assert test_result.value == 10.5
assert isinstance(test_result, _visatype.ViReal64)
test_result = convert_timedelta_to_seconds_real64(-1)
assert test_result.value == -1
assert isinstance(test_result, _visatype.ViReal64)
def test_convert_timedelta_to_milliseconds_int32():
test_result = convert_timedelta_to_milliseconds_int32(datetime.timedelta(seconds=10))
assert test_result.value == 10000
assert isinstance(test_result, _visatype.ViInt32)
test_result = convert_timedelta_to_milliseconds_int32(datetime.timedelta(seconds=-1))
assert test_result.value == -1000
assert isinstance(test_result, _visatype.ViInt32)
test_result = convert_timedelta_to_milliseconds_int32(10.5)
assert test_result.value == 10500
assert isinstance(test_result, _visatype.ViInt32)
test_result = convert_timedelta_to_milliseconds_int32(-1)
assert test_result.value == -1000
assert isinstance(test_result, _visatype.ViInt32)
def test_convert_timedeltas_to_seconds_real64():
time_values = [10.5, -1]
test_result = convert_timedeltas_to_seconds_real64(time_values)
assert all([actual.value == expected for actual, expected in zip(test_result, time_values)])
assert all([isinstance(i, _visatype.ViReal64) for i in test_result])
timedeltas = [datetime.timedelta(seconds=s, milliseconds=ms) for s, ms in zip([10, -1], [500, 0])]
test_result = convert_timedeltas_to_seconds_real64(timedeltas)
assert all([actual.value == expected for actual, expected in zip(test_result, time_values)])
assert all([isinstance(i, _visatype.ViReal64) for i in test_result])
def test_convert_seconds_real64_to_timedeltas():
time_values = [10.5, -1]
timedeltas = convert_seconds_real64_to_timedeltas(time_values)
assert all([actual.total_seconds() == expected for actual, expected in zip(timedeltas, time_values)])
# Tests - repeated capabilities
def test_repeated_capabilies_string_channel():
test_result_list = convert_repeated_capabilities('0')
assert test_result_list == ['0']
test_result_list = convert_repeated_capabilities('r0')
assert test_result_list == ['r0']
test_result_list = convert_repeated_capabilities('0,1')
assert test_result_list == ['0', '1']
def test_repeated_capabilies_string_prefix():
test_result_list = convert_repeated_capabilities('0', prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0']
def test_repeated_capabilies_list_channel():
test_result_list = convert_repeated_capabilities(['0'])
assert test_result_list == ['0']
test_result_list = convert_repeated_capabilities(['r0'])
assert test_result_list == ['r0']
test_result_list = convert_repeated_capabilities(['0', '1'])
assert test_result_list == ['0', '1']
test_result_list = convert_repeated_capabilities([0, 1])
assert test_result_list == ['0', '1']
test_result_list = convert_repeated_capabilities([0, 1, '3'])
assert test_result_list == ['0', '1', '3']
def test_repeated_capabilies_list_prefix():
test_result_list = convert_repeated_capabilities(['ScriptTrigger0', 'ScriptTrigger1'], prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities(['0'], prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0']
test_result_list = convert_repeated_capabilities(['0', '1'], prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities([0, 1], prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
def test_repeated_capabilies_tuple_channel():
test_result_list = convert_repeated_capabilities(('0'))
assert test_result_list == ['0']
test_result_list = convert_repeated_capabilities(('0,1'))
assert test_result_list == ['0', '1']
test_result_list = convert_repeated_capabilities(('0', '1'))
assert test_result_list == ['0', '1']
test_result_list = convert_repeated_capabilities((0, 1))
assert test_result_list == ['0', '1']
test_result_list = convert_repeated_capabilities((0, 1, '3'))
assert test_result_list == ['0', '1', '3']
def test_repeated_capabilies_tuple_prefix():
test_result_list = convert_repeated_capabilities(('ScriptTrigger0,ScriptTrigger1'), prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities(('0'), prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0']
test_result_list = convert_repeated_capabilities(('0', '1'), prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities((0, 1), prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
def test_repeated_capabilies_unicode():
test_result_list = convert_repeated_capabilities(u'ScriptTrigger0,ScriptTrigger1', prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities(u'ScriptTrigger0,ScriptTrigger1', prefix=u'ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities('ScriptTrigger0,ScriptTrigger1', prefix=u'ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
def test_repeated_capabilies_raw():
test_result_list = convert_repeated_capabilities(r'ScriptTrigger0,ScriptTrigger1', prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities(r'ScriptTrigger0,ScriptTrigger1', prefix=r'ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities('ScriptTrigger0,ScriptTrigger1', prefix=r'ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities(r'ScriptTrigger0,ScriptTrigger1', prefix=u'ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities(r'ScriptTrigger0,ScriptTrigger1', prefix=r'ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities(u'ScriptTrigger0,ScriptTrigger1', prefix=r'ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
def test_repeated_capabilies_slice_channel():
test_result_list = convert_repeated_capabilities(slice(0, 1))
assert test_result_list == ['0']
test_result_list = convert_repeated_capabilities(slice(0, 2))
assert test_result_list == ['0', '1']
test_result_list = convert_repeated_capabilities(slice(None, 2))
assert test_result_list == ['0', '1']
def test_repeated_capabilies_mixed_channel():
test_result_list = convert_repeated_capabilities((slice(0, 1), '2', [4, '5-6'], '7-9', '11:14', '16, 17'))
assert test_result_list == ['0', '2', '4', '5', '6', '7', '8', '9', '11', '12', '13', '14', '16', '17']
test_result_list = convert_repeated_capabilities([slice(0, 1), '2', [4, '5-6'], '7-9', '11:14', '16, 17'])
assert test_result_list == ['0', '2', '4', '5', '6', '7', '8', '9', '11', '12', '13', '14', '16', '17']
def test_repeated_capabilies_mixed_prefix():
test_result_list = convert_repeated_capabilities((slice(0, 1), '2', [4, '5-6'], '7-9', '11:14', '16, 17'), prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger2', 'ScriptTrigger4', 'ScriptTrigger5', 'ScriptTrigger6', 'ScriptTrigger7', 'ScriptTrigger8', 'ScriptTrigger9', 'ScriptTrigger11', 'ScriptTrigger12', 'ScriptTrigger13', 'ScriptTrigger14', 'ScriptTrigger16', 'ScriptTrigger17']
test_result_list = convert_repeated_capabilities([slice(0, 1), '2', [4, '5-6'], '7-9', '11:14', '16, 17'], prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger2', 'ScriptTrigger4', 'ScriptTrigger5', 'ScriptTrigger6', 'ScriptTrigger7', 'ScriptTrigger8', 'ScriptTrigger9', 'ScriptTrigger11', 'ScriptTrigger12', 'ScriptTrigger13', 'ScriptTrigger14', 'ScriptTrigger16', 'ScriptTrigger17']
def test_invalid_repeated_capabilies():
try:
convert_repeated_capabilities('6-8-10')
assert False
except errors.InvalidRepeatedCapabilityError:
pass
try:
convert_repeated_capabilities(['5', '6-8-10'])
assert False
except errors.InvalidRepeatedCapabilityError:
pass
try:
convert_repeated_capabilities(('5', '6-8-10'))
assert False
except errors.InvalidRepeatedCapabilityError:
pass
try:
convert_repeated_capabilities('5,6-8-10')
assert False
except errors.InvalidRepeatedCapabilityError:
pass
try:
convert_repeated_capabilities(5.0)
assert False
except errors.InvalidRepeatedCapabilityError:
pass
try:
convert_repeated_capabilities([5.0, '0'])
assert False
except errors.InvalidRepeatedCapabilityError:
pass
try:
convert_repeated_capabilities((5.0, '0'))
assert False
except errors.InvalidRepeatedCapabilityError:
pass
def test_repeated_capabilies_slice_prefix():
test_result_list = convert_repeated_capabilities(slice(0, 1), prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0']
test_result_list = convert_repeated_capabilities(slice(0, 2), prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
test_result_list = convert_repeated_capabilities(slice(None, 2), prefix='ScriptTrigger')
assert test_result_list == ['ScriptTrigger0', 'ScriptTrigger1']
def test_repeated_capabilies_from_init():
test_result = convert_repeated_capabilities_from_init((slice(0, 1), '2', [4, '5-6'], '7-9', '11:14', '16, 17'))
assert test_result == '0,2,4,5,6,7,8,9,11,12,13,14,16,17'
def test_string_to_list_channel():
test_result = _convert_repeated_capabilities('r0', '')
assert test_result == ['r0']
test_result = _convert_repeated_capabilities(['0-2'], '')
assert test_result == ['0', '1', '2']
test_result = _convert_repeated_capabilities(['3:7'], '')
assert test_result == ['3', '4', '5', '6', '7']
test_result = _convert_repeated_capabilities(['2-0'], '')
assert test_result == ['2', '1', '0']
test_result = _convert_repeated_capabilities(['2:0'], '')
assert test_result == ['2', '1', '0']
def test_string_to_list_prefix():
test_result = _convert_repeated_capabilities(['ScriptTrigger3-ScriptTrigger7'], 'ScriptTrigger')
assert test_result == ['3', '4', '5', '6', '7']
test_result = _convert_repeated_capabilities(['ScriptTrigger3:ScriptTrigger7'], 'ScriptTrigger')
assert test_result == ['3', '4', '5', '6', '7']
test_result = _convert_repeated_capabilities(['ScriptTrigger2-ScriptTrigger0'], 'ScriptTrigger')
assert test_result == ['2', '1', '0']
test_result = _convert_repeated_capabilities(['ScriptTrigger2:ScriptTrigger0'], 'ScriptTrigger')
assert test_result == ['2', '1', '0']
| true | true |
f72bc1d5aefbd73272c909b63fc43b86465f5c78 | 29,314 | py | Python | tensor2tensor/layers/discretization.py | spacegoing/t2t_caps | ded708b738fa8966eb7544708c4a785479da4c3c | [
"Apache-2.0"
] | null | null | null | tensor2tensor/layers/discretization.py | spacegoing/t2t_caps | ded708b738fa8966eb7544708c4a785479da4c3c | [
"Apache-2.0"
] | null | null | null | tensor2tensor/layers/discretization.py | spacegoing/t2t_caps | ded708b738fa8966eb7544708c4a785479da4c3c | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2018 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Discretization bottlenecks used to train discrete latent variables."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from functools import partial
# Dependency imports
from tensor2tensor.layers import common_layers
import tensorflow as tf
from tensorflow.python.training import moving_averages
def project_hidden(x, projection_tensors, hidden_size, num_blocks):
"""Project encoder hidden state into block_dim using projection tensors.
Args:
x: Encoder hidden state of shape [-1, hidden_size].
projection_tensors: Projection tensors used to project the hidden state.
hidden_size: Dimension of the latent space.
num_blocks: Number of blocks in DVQ.
Returns:
Projected states of shape [-1, num_blocks, block_dim].
"""
x = tf.reshape(x, shape=[1, -1, hidden_size])
x_tiled = tf.reshape(
tf.tile(x, multiples=[num_blocks, 1, 1]),
shape=[num_blocks, -1, hidden_size])
x_projected = tf.matmul(x_tiled, projection_tensors)
x_projected = tf.transpose(x_projected, perm=[1, 0, 2])
return x_projected
def slice_hidden(x, hidden_size, num_blocks):
  """Slice encoder hidden state into block_dim.

  Args:
    x: Encoder hidden state of shape [-1, hidden_size].
    hidden_size: Dimension of the latent space.
    num_blocks: Number of blocks in DVQ.

  Returns:
    Sliced states of shape [-1, num_blocks, block_dim].
  """
  # Each block simply gets a contiguous hidden_size // num_blocks slice.
  return tf.reshape(x, shape=[-1, num_blocks, int(hidden_size // num_blocks)])
def nearest_neighbor(x,
                     means,
                     block_v_size,
                     random_top_k=1,
                     soft_em=False,
                     num_samples=1):
  """Find the nearest element in means to elements in x.

  Args:
    x: Batch of encoder continuous latent states sliced/projected into shape
      [-1, num_blocks, block_dim].
    means: Embedding table of shape [num_blocks, block_v_size, block_dim].
    block_v_size: Number of table entries per block.
    random_top_k: Noisy top-k if this is bigger than 1 (Default: 1).
    soft_em: If True then use soft EM rather than hard EM (Default: False).
    num_samples: Number of samples to take in soft EM (Default: 1).

  Returns:
    One-hot assignments of shape [-1, num_blocks, block_v_size]; for soft EM
    this is the mean over num_samples sampled one-hot vectors (soft counts).
  """
  # Squared distance ||x - m||^2 = ||x||^2 + ||m||^2 - 2<x, m>, computed
  # per block with a single batched matmul for the cross term.
  x_norm_sq = tf.reduce_sum(tf.square(x), axis=-1, keep_dims=True)
  means_norm_sq = tf.reduce_sum(tf.square(means), axis=-1, keep_dims=True)
  scalar_prod = tf.matmul(
      tf.transpose(x, perm=[1, 0, 2]), tf.transpose(means, perm=[0, 2, 1]))
  scalar_prod = tf.transpose(scalar_prod, perm=[1, 0, 2])
  dist = x_norm_sq + tf.transpose(
      means_norm_sq, perm=[2, 0, 1]) - 2 * scalar_prod
  # computing cluster probabilities
  if soft_em:
    num_blocks = common_layers.shape_list(dist)[1]
    # Sample table entries per block with logits -dist (i.e. probability
    # proportional to softmax of negative distance).
    nearest_idx = tf.stack(
        [
            tf.multinomial(-dist[:, i, :], num_samples=num_samples)
            for i in range(num_blocks)
        ],
        axis=1)
    nearest_hot = tf.one_hot(nearest_idx, depth=block_v_size)
    # Average the sampled one-hots into a soft assignment.
    nearest_hot = tf.reduce_mean(nearest_hot, axis=-2)
  else:
    if random_top_k > 1:
      # NOTE(review): tf.random_uniform's maxval is exclusive, so index
      # random_top_k - 1 (the k-th nearest) is never drawn — confirm whether
      # maxval should be random_top_k.
      _, top_k_idx = tf.nn.top_k(-dist, k=random_top_k)
      nearest_idx = tf.gather(
          top_k_idx,
          tf.random_uniform(
              [1], minval=0, maxval=random_top_k - 1, dtype=tf.int32),
          axis=-1)
    else:
      nearest_idx = tf.argmax(-dist, axis=-1)
    nearest_hot = tf.one_hot(nearest_idx, block_v_size)
  return nearest_hot
def embedding_lookup(x,
                     means,
                     num_blocks,
                     block_v_size,
                     random_top_k=1,
                     soft_em=False,
                     num_samples=1):
  """Compute nearest neighbors and loss for training the embeddings via DVQ.

  Args:
    x: Batch of encoder continuous latent states sliced/projected into shape
      [-1, num_blocks, block_dim].
    means: Embedding table of shape [num_blocks, block_v_size, block_dim].
    num_blocks: Number of blocks in DVQ.
    block_v_size: Number of table entries per block.
    random_top_k: Noisy top-k if this is bigger than 1 (Default: 1).
    soft_em: If True then use soft EM rather than hard EM (Default: False).
    num_samples: Number of samples to use for soft EM (Default: 1).

  Returns:
    The nearest neighbor in one hot form, the quantized latent, the codebook
    (q) loss and the commitment (e) loss.
  """
  assignments = nearest_neighbor(
      x,
      means,
      block_v_size,
      random_top_k,
      soft_em=soft_em,
      num_samples=num_samples)
  assignments_flat = tf.reshape(assignments, [-1, num_blocks, block_v_size])
  # Look up the (possibly soft) codebook vectors per block, then restore
  # batch-major layout.
  quantized = tf.matmul(tf.transpose(assignments_flat, perm=[1, 0, 2]), means)
  quantized = tf.transpose(quantized, [1, 0, 2])
  # q_loss moves the codebook towards the encoder output; e_loss (commitment)
  # moves the encoder output towards the codebook.
  q_loss = tf.reduce_mean(tf.square((tf.stop_gradient(x) - quantized)))
  e_loss = tf.reduce_mean(tf.square(x - tf.stop_gradient(quantized)))
  return assignments, quantized, q_loss, e_loss
def bit_to_int(x_bit, num_bits, base=2):
  """Turn x_bit representing numbers bitwise (lower-endian) to int tensor.

  Args:
    x_bit: Tensor containing numbers in a particular base to be converted to
      int.
    num_bits: Number of bits in the representation.
    base: Base of the representation.

  Returns:
    Integer representation of this number.
  """
  digits = tf.stop_gradient(tf.to_int32(tf.reshape(x_bit, [-1, num_bits])))
  base_t = tf.to_int32(base)
  # Lower-endian: digit i carries weight base**i.
  terms = [digits[:, i] * base_t**tf.to_int32(i) for i in range(num_bits)]
  total = sum(terms)
  return tf.to_int32(tf.reshape(total, common_layers.shape_list(x_bit)[:-1]))
def int_to_bit(x_int, num_bits, base=2):
  """Turn x_int representing numbers into a bitwise (lower-endian) tensor.

  Args:
    x_int: Tensor containing integer to be converted into base notation.
    num_bits: Number of bits in the representation.
    base: Base of the representation.

  Returns:
    Corresponding number expressed in base.
  """
  values = tf.to_int32(tf.expand_dims(x_int, axis=-1))
  base_t = tf.to_int32(base)
  # Lower-endian digit extraction: digit i is (x // base**i) % base.
  digits = [
      tf.floormod(tf.floordiv(tf.to_int32(values), base_t**i), base_t)
      for i in range(num_bits)
  ]
  return tf.to_float(tf.concat(digits, axis=-1))
def int_to_bit_embed(x_int, num_bits, embedding_size, base=2):
  """Turn x_int into a bitwise (lower-endian) tensor and embed densely.

  Args:
    x_int: Integer tensor to be converted into base notation.
    num_bits: Number of digits produced per element by int_to_bit.
    embedding_size: Output dimension of the dense embedding layer.
    base: Base of the representation (Default: 2).

  Returns:
    Dense embedding of the bitwise representation, last dim embedding_size.
  """
  shape = common_layers.shape_list(x_int)
  inputs = int_to_bit(x_int, num_bits, base=base)
  # int_to_bit emits num_bits digits per element, so fold that factor into
  # the last dimension (the previous hard-coded * 8 was only correct for
  # num_bits == 8).
  inputs = tf.reshape(inputs, shape[:-1] + [shape[-1] * num_bits])
  inputs = 2.0 * tf.to_float(inputs) - 1.0  # Move from 0/1 to -1/1.
  return tf.layers.dense(inputs, embedding_size, name="int_to_bit_embed")
def embed(x,
          hidden_size,
          z_size,
          filter_size,
          name,
          bottleneck_kind="dvq",
          soft_em=False,
          num_blocks=2,
          num_residuals=1,
          block_v_size=None,
          means=None):
  """Embedding function that takes discrete latent and returns embedding.

  Args:
    x: Input to the discretization bottleneck.
    hidden_size: Dimension of the latent state.
    z_size: Number of bits used to produce discrete code; discrete codes range
      from 1 to 2**z_size.
    filter_size: Filter size to be used for the embedding function.
    name: Name for the bottleneck scope.
    bottleneck_kind: Kind of discretization bottleneck to use; one of dvq,
      semhash, gumbel-softmax (Default: dvq).
    soft_em: If True then it uses a multi-sample version of EM (Default: False).
    num_blocks: Number of blocks in DVQ (Default: 2).
    num_residuals: Number of residuals (Default: 1).
    block_v_size: Number of embedding entries per block (Default: None).
    means: The embedding table for dvq (Default: None).

  Returns:
    Continuous embedding to be passed on to the decoder.

  Raises:
    ValueError: For unknown or missing arguments.
  """
  with tf.variable_scope(name, reuse=tf.AUTO_REUSE):
    if bottleneck_kind == "semhash":
      # Recover the bit pattern and feed both it and its complement through
      # dense layers (mirrors the semhash branch of discrete_bottleneck).
      c = int_to_bit(x, z_size)
      h1a = tf.layers.dense(c, filter_size, name="vch1a")
      h1b = tf.layers.dense(1.0 - c, filter_size, name="vch1b")
      h1 = h1a + h1b
    elif bottleneck_kind == "gumbel-softmax":
      # One-hot over the full 2**z_size vocabulary, then a dense embedding.
      hot = tf.one_hot(x, 2**z_size)
      h1 = tf.layers.dense(hot, hidden_size, name="dae_dense")
    elif bottleneck_kind == "dvq":
      if block_v_size is None:
        raise ValueError("Bottleneck kind is dvq but block_v_size is None.")
      if soft_em:
        # Soft EM path only supports a single residual table.
        assert num_residuals == 1
        # x here is the soft assignment; matmul with the table averages the
        # codebook vectors under those assignment weights.
        x_hot_flat = tf.reshape(x, shape=[-1, num_blocks, block_v_size])
        h1 = tf.matmul(tf.transpose(x_hot_flat, perm=[1, 0, 2]), means[0])
        h1 = tf.transpose(h1, perm=[1, 0, 2])
        new_shape = common_layers.shape_list(x)
        new_shape[-1] = hidden_size
        h1 = tf.reshape(h1, shape=new_shape)
      else:
        # Hard path: x is an integer code. Unpack it into per-residual,
        # per-block sub-codes and sum the residual lookups.
        shape_x = common_layers.shape_list(x)
        x_flat = tf.reshape(x, [-1, 1])
        c = int_to_bit(x_flat, num_bits=z_size, base=2)
        shape = common_layers.shape_list(c)
        new_shape = shape
        new_shape[-1] = num_residuals
        new_shape.append(num_blocks)
        # Bits per (residual, block) sub-code.
        new_shape.append(int(z_size / (num_residuals * num_blocks)))
        c = tf.to_int32(tf.reshape(c, shape=new_shape))
        h1_shape = shape_x
        h1_shape.append(hidden_size)
        h1 = tf.zeros(dtype=tf.float32, shape=h1_shape)
        for i in range(num_residuals):
          c_residual = bit_to_int(
              c[:, :, i, :, :],
              num_bits=int(z_size / (num_residuals * num_blocks)),
              base=2)
          c_hot = tf.one_hot(c_residual, depth=block_v_size, axis=-1)
          c_hot_flat = tf.reshape(c_hot, shape=[-1, num_blocks, block_v_size])
          h1_residual = tf.matmul(
              tf.transpose(c_hot_flat, perm=[1, 0, 2]), means[i])
          h1_residual = tf.transpose(h1_residual, perm=[1, 0, 2])
          h1_residual = tf.reshape(h1_residual, shape=h1_shape)
          h1 += h1_residual
    elif bottleneck_kind == "rounding":
      # Rounding bottleneck: the latent is already continuous-valued.
      h1 = x
    else:
      raise ValueError("Unknown bottleneck kind.")
    # Shared two-layer projection back to hidden_size.
    h2 = tf.layers.dense(tf.nn.relu(h1), filter_size, name="vch2")
    return tf.layers.dense(tf.nn.relu(h2), hidden_size, name="vcfin")
def vae(x, name, z_size):
  """Simple variational autoencoder without discretization.

  Note the parameter order: callers must pass the scope name second and the
  latent size third.

  Args:
    x: Input to the discretization bottleneck.
    name: Name for the bottleneck scope.
    z_size: Number of bits used to produce discrete code; discrete codes range
      from 1 to 2**z_size.

  Returns:
    Sampled latent z, the free-bits KL loss, mu and log_sigma.
  """
  with tf.variable_scope(name):
    mu = tf.layers.dense(x, z_size, name="mu")
    log_sigma = tf.layers.dense(x, z_size, name="log_sigma")
    shape = common_layers.shape_list(x)
    # Reparameterization trick: z = mu + sigma * epsilon.
    epsilon = tf.random_normal([shape[0], shape[1], 1, z_size])
    z = mu + tf.exp(log_sigma / 2) * epsilon
    kl = 0.5 * tf.reduce_mean(
        tf.exp(log_sigma) + tf.square(mu) - 1. - log_sigma, axis=-1)
    # Free bits: only KL above z_size // 4 is penalized, preventing
    # posterior collapse early in training.
    free_bits = z_size // 4
    kl_loss = tf.reduce_mean(tf.maximum(kl - free_bits, 0.0))
    return z, kl_loss, mu, log_sigma
def top_k_softmax(x, k):
  """Calculate softmax(x), select top-k and rescale to sum to 1.

  Args:
    x: Input to softmax over.
    k: Number of top-k to select.

  Returns:
    softmax(x) and maximum item.
  """
  probs = tf.nn.softmax(x)
  # k+1 largest values; the smallest of them is the threshold below which
  # everything is zeroed out.
  top_probs, _ = tf.nn.top_k(probs, k=k + 1)
  threshold = tf.reduce_min(top_probs, axis=-1, keep_dims=True)
  clipped = tf.nn.relu((probs - threshold) + 1e-12)
  renormalized = clipped / tf.reduce_sum(clipped, axis=-1, keep_dims=True)
  return renormalized, tf.reduce_max(top_probs, axis=-1)
def gumbel_sample(shape):
  """Sample from the Gumbel distribution, protect from overflows.

  Args:
    shape: Shape of Gumbel samples.

  Returns:
    Noise drawn from Gumbel distribution.
  """
  # Keep the uniform draw strictly inside (0, 1) so neither log overflows.
  uniform = tf.random_uniform(shape, minval=0.00001, maxval=0.99998)
  return -tf.log(-tf.log(uniform))
def gumbel_softmax(x,
                   name,
                   z_size,
                   mode,
                   softmax_k=0,
                   kl_warmup_steps=150000,
                   summary=True):
  """Gumbel softmax discretization bottleneck.

  Args:
    x: Input to the discretization bottleneck.
    name: Name for the bottleneck scope.
    z_size: Number of bits used to produce discrete code; discrete codes range
      from 1 to 2**z_size.
    mode: Mode represents whether we are training or testing for bottlenecks
      that differ in behavior (Default: None).
    softmax_k: If > 1 then do top-k softmax (Default: 0).
    kl_warmup_steps: Number of steps for kl warmup (Default: 150000).
    summary: If True, then write summaries (Default: True).

  Returns:
    Embedding function, discrete code and loss.
  """
  with tf.variable_scope(name):
    m = tf.layers.dense(x, 2**z_size, name="mask")
    if softmax_k > 0:
      # Top-k variant bypasses the Gumbel machinery entirely.
      m, kl = top_k_softmax(m, softmax_k)
      return m, m, 1.0 - tf.reduce_mean(kl)
    logsm = tf.nn.log_softmax(m)
    # Gumbel-softmax sample; noise is warmed in over steps // 5 steps.
    gumbel_samples = gumbel_sample(common_layers.shape_list(m))
    steps = kl_warmup_steps
    gumbel_samples *= common_layers.inverse_exp_decay(steps // 5) * 0.5
    # Temperature anneals from 1.2 towards 0.2 as training progresses.
    temperature = 1.2 - common_layers.inverse_lin_decay(steps)
    # 10% of the time keep reasonably high temperature to keep learning.
    temperature = tf.cond(
        tf.less(tf.random_uniform([]), 0.9), lambda: temperature,
        lambda: tf.random_uniform([], minval=0.5, maxval=1.0))
    s = tf.nn.softmax((logsm + gumbel_samples) / temperature)
    m = tf.nn.softmax(m)
    # KL proxy: negative max log-probability (low when confident).
    kl = -tf.reduce_max(logsm, axis=-1)
    if summary:
      tf.summary.histogram("max-log", tf.reshape(kl, [-1]))
    # Calculate the argmax and construct hot vectors.
    maxvec = tf.reshape(tf.argmax(m, axis=-1), [-1])
    maxvhot = tf.stop_gradient(tf.one_hot(maxvec, 2**z_size))
    # Add losses that prevent too few being used: reward high variance of
    # the selected log-probabilities across the batch.
    distrib = tf.reshape(logsm, [-1, 2**z_size]) * maxvhot
    d_mean = tf.reduce_mean(distrib, axis=[0], keep_dims=True)
    d_variance = tf.reduce_mean(tf.square(distrib - d_mean), axis=[0])
    d_dev = -tf.reduce_mean(d_variance)
    ret = s
    if mode != tf.contrib.learn.ModeKeys.TRAIN:
      ret = tf.reshape(maxvhot, common_layers.shape_list(s))  # Just hot @eval.
    return m, ret, d_dev * 5.0 + tf.reduce_mean(kl) * 0.002
def discrete_bottleneck(x,
                        hidden_size,
                        z_size,
                        filter_size,
                        name,
                        mode=None,
                        startup_steps=50000,
                        bottleneck_kind="dvq",
                        num_blocks=2,
                        num_residuals=1,
                        reshape_method="slice",
                        projection_tensors=None,
                        means=None,
                        beta=0.25,
                        noise_dev=1.,
                        decay=0.999,
                        discrete_mix=0.5,
                        random_top_k=1,
                        soft_em=False,
                        num_samples=1,
                        epsilon=1e-5,
                        softmax_k=0,
                        kl_warmup_steps=150000,
                        ema=True,
                        ema_count=None,
                        ema_means=None,
                        summary=True):
  """Discretization bottleneck for latent variables.

  Args:
    x: Input to the discretization bottleneck.
    hidden_size: Dimension of the latent state.
    z_size: Number of bits used to produce discrete code; discrete codes range
      from 1 to 2**z_size.
    filter_size: Filter size to be used for the embedding function.
    name: Name for the bottleneck scope.
    mode: Mode represents whether we are training or testing for bottlenecks
      that differ in behavior (Default: None).
    startup_steps: Number of steps after which latent predictor is trained
      (Default: 50000).
    bottleneck_kind: Kind of discretization bottleneck to use; one of dvq,
      semhash, gumbel-softmax (Default: dvq).
    num_blocks: Number of blocks to use for decomposed vector
      quantization (Default: 2).
    num_residuals: Number of residual units used to compute nearest
      neighbors (Default: 1).
    reshape_method: Method to reshape for DVQ (Default: slice).
    projection_tensors: If the reshape method is project, then these are the
      tensors used to project (Default: None).
    means: The embedding table for dvq (Default: None).
    beta: Beta factor for the DVQ loss (Default: 0.25).
    noise_dev: Stddev for noise added for semhash (Default: 0).
    decay: Decay factor for the exponential moving average (Default: 0.999).
    discrete_mix: Factor for mixing discrete and non-discrete input for semhash
      (Default: 0.5).
    random_top_k: Noisy top-k for DVQ (Default: 1).
    soft_em: If True then use soft EM rather than hard EM (Default: False).
    num_samples: Number of samples for soft EM (Default: 1).
    epsilon: Epsilon parameter for DVQ (Default: 1e-5).
    softmax_k: If > 1 then do top-k softmax (Default: 0).
    kl_warmup_steps: Number of steps for kl warmup (Default: 150000).
    ema: If True update embeddings using exponential moving averages (Default:
      True).
    ema_count: Table of counts for each embedding corresponding to how many
      examples in a batch it was the closest to (Default: None).
    ema_means: Exponentially averaged version of the embeddings (Default: None).
    summary: If True, then write summaries (Default: True).

  Returns:
    Embedding to pass to the decoder, discrete latent, loss, and the embedding
    function.

  Raises:
    ValueError: If projection_tensors is None for reshape_method project, or
      ema_count or ema_means is None if we are using ema, or unknown args.
  """
  block_v_size = None
  if bottleneck_kind == "dvq":
    # Define the dvq parameters
    assert means is not None

    # Check block dimensions add up
    if hidden_size % num_blocks != 0:
      raise ValueError("num_blocks does not divide hidden size")

    if z_size % num_residuals != 0:
      raise ValueError("num_residuals does not divide embedding table size")

    z_size_per_residual = int(z_size / num_residuals)

    if z_size_per_residual % num_blocks != 0:
      raise ValueError("num_blocks does not divide embedding table size")

    block_v_size = 2**(z_size_per_residual / num_blocks)
    block_v_size = int(block_v_size)

    # Set the reshape method corresponding to projections or slices
    if reshape_method == "slice":
      reshape_fn = partial(
          slice_hidden, hidden_size=hidden_size, num_blocks=num_blocks)
    elif reshape_method == "project":
      if projection_tensors is None:
        raise ValueError(
            "Projection tensors is None for reshape_method project")
      reshape_fn = partial(
          project_hidden,
          projection_tensors=projection_tensors,
          hidden_size=hidden_size,
          num_blocks=num_blocks)
    else:
      raise ValueError("Unknown reshape_method")

    # Check if the ema settings make sense
    if ema:
      if ema_count is None:
        raise ValueError("ema_count is None but ema is True")
      if ema_means is None:
        raise ValueError("ema_means is None but ema is True")

  with tf.variable_scope(name, reuse=tf.AUTO_REUSE):
    l = tf.constant(0.0)

    if bottleneck_kind == "dense":
      c = tf.layers.dense(x, z_size, name="vcc")
      h1 = tf.layers.dense(c, filter_size, name="vch1")
    elif bottleneck_kind == "vae":
      # Bug fix: vae is declared as vae(x, name, z_size); the previous call
      # vae(x, z_size, "vae") passed the arguments swapped, handing an int to
      # tf.variable_scope and the string "vae" to tf.layers.dense as units.
      c, l, _, _ = vae(x, "vae", z_size)
      h1 = tf.layers.dense(c, filter_size, name="vch1")
    elif bottleneck_kind == "semhash":
      c = tf.layers.dense(x, z_size, name="vcc")
      y_clean = common_layers.saturating_sigmoid(c)
      if summary:
        tf.summary.histogram("y_clean", tf.reshape(y_clean, [-1]))
      if noise_dev > 0 and mode == tf.estimator.ModeKeys.TRAIN:
        # Gaussian noise before the saturating sigmoid pushes activations
        # towards 0/1 during training.
        noise = tf.truncated_normal(
            common_layers.shape_list(c), mean=0.0, stddev=noise_dev)
        y = common_layers.saturating_sigmoid(c + noise)
      else:
        y = y_clean
      d = tf.to_float(tf.less(0.5, y))
      # Straight-through estimator: forward uses the hard bits, gradients
      # flow through y.
      y_discrete = tf.stop_gradient(d) + y - tf.stop_gradient(y)
      pd = common_layers.inverse_exp_decay(startup_steps * 2)
      pd *= discrete_mix
      pd = pd if mode == tf.estimator.ModeKeys.TRAIN else 1.0
      # Per-example, mix discretized and continuous code with probability pd.
      c = tf.where(
          tf.less(tf.random_uniform([common_layers.shape_list(y)[0]]), pd),
          y_discrete, y)
      h1a = tf.layers.dense(c, filter_size, name="vch1a")
      h1b = tf.layers.dense(1.0 - c, filter_size, name="vch1b")
      h1 = h1a + h1b
      dx = tf.to_int32(tf.stop_gradient(d))
      c = bit_to_int(dx, z_size)
    elif bottleneck_kind == "gumbel-softmax":
      _, hot, l = gumbel_softmax(x, name, z_size, mode, softmax_k,
                                 kl_warmup_steps, summary)
      c = tf.argmax(hot, axis=-1)
      h1 = tf.layers.dense(hot, hidden_size, name="dae_dense")
    elif bottleneck_kind == "dvq":
      x_reshaped = reshape_fn(x)
      x_res = x_reshaped
      x_means_hot = []
      x_means = 0
      l = 0
      for i in range(num_residuals):
        x_means_hot_res, x_means_res, q_loss_res, e_loss_res = embedding_lookup(
            x_res, means[i], num_blocks, block_v_size, random_top_k, soft_em,
            num_samples)

        # Update the ema variables
        if ema:
          tf.logging.info("Using EMA with beta = {}".format(beta))
          updated_ema_count_res = moving_averages.assign_moving_average(
              ema_count[i],
              tf.reduce_sum(
                  tf.reshape(
                      x_means_hot_res, shape=[-1, num_blocks, block_v_size]),
                  axis=0),
              decay,
              zero_debias=False)

          dw = tf.matmul(
              tf.transpose(x_means_hot_res, perm=[1, 2, 0]),
              tf.transpose(x_res, perm=[1, 0, 2]))

          updated_ema_means_res = moving_averages.assign_moving_average(
              ema_means[i], dw, decay, zero_debias=False)
          n = tf.reduce_sum(updated_ema_count_res, axis=-1, keep_dims=True)
          # Laplace smoothing of the counts before normalizing the means.
          updated_ema_count_res = ((updated_ema_count_res + epsilon) /
                                   (n + 2**z_size * epsilon) * n)
          # pylint: disable=g-no-augmented-assignment
          updated_ema_means_res = updated_ema_means_res / tf.expand_dims(
              updated_ema_count_res, axis=-1)
          # pylint: enable=g-no-augmented-assignment

          with tf.control_dependencies([e_loss_res]):
            update_means_res = tf.assign(means[i], updated_ema_means_res)
            with tf.control_dependencies([update_means_res]):
              l += beta * e_loss_res
        else:
          l += q_loss_res + beta * e_loss_res

        # Update the residuals
        x_res -= x_means_res
        x_means += x_means_res
        x_means_hot.append(x_means_hot_res)

      # Get the discrete latent representation
      x_means_hot = tf.stack(x_means_hot, axis=1)
      x_means_idx = tf.argmax(x_means_hot, axis=-1)

      # Get the binary representation
      x_means_bits = int_to_bit(
          x_means_idx,
          num_bits=int(z_size / (num_residuals * num_blocks)),
          base=2)
      shape = common_layers.shape_list(x_means_bits)
      new_shape = shape[:-2]
      new_shape[-1] = z_size
      x_means_bits = tf.reshape(x_means_bits, shape=new_shape)
      c = bit_to_int(tf.to_int32(x_means_bits), num_bits=z_size, base=2)

      # Adjust shape of c
      shape_x = common_layers.shape_list(x)
      new_shape = shape_x[:-1]
      c = tf.reshape(c, new_shape)

      # If we are doing soft EM then c is x_means_hot
      if soft_em:
        c = x_means_hot
        new_shape.append(block_v_size)
        c = tf.reshape(c, new_shape)

      x_means = tf.reshape(x_means, shape_x)
      x_reshaped = tf.reshape(x_reshaped, shape_x)
      # Straight-through: decoder sees the quantized value, the encoder
      # receives the identity gradient.
      h1 = x_reshaped + tf.stop_gradient(x_means - x_reshaped)
    else:
      raise ValueError("Unknown discretization method.")

    h2 = tf.layers.dense(tf.nn.relu(h1), filter_size, name="vch2")
    res = tf.layers.dense(tf.nn.relu(h2), hidden_size, name="vcfin")

    embed_fn = partial(
        embed,
        hidden_size=hidden_size,
        z_size=z_size,
        filter_size=filter_size,
        name=name,
        bottleneck_kind=bottleneck_kind,
        soft_em=soft_em,
        num_blocks=num_blocks,
        num_residuals=num_residuals,
        block_v_size=block_v_size,
        means=means)

    return res, c, l, embed_fn
# New API for discretization bottlenecks:
# * Each method is separate and provides 2 functions:
# * The [method]_bottleneck function returns discretized state.
# * The [method]_unbottleneck function moves from discretized state to dense.
def tanh_discrete_bottleneck(x, bottleneck_size, bottleneck_noise,
                             discretize_warmup_steps, mode):
  """Simple discretization through tanh, flip bottleneck_noise many bits."""
  # Dense projection then tanh keeps activations in (-1, 1).
  activations = tf.tanh(
      tf.layers.dense(x, bottleneck_size, name="tanh_discrete_bottleneck"))
  # Straight-through sign: forward pass emits +/-1, gradient flows via tanh.
  signs = 2.0 * tf.to_float(tf.less(0.0, activations)) - 1.0
  discretized = activations + tf.stop_gradient(signs - activations)
  is_training = mode == tf.estimator.ModeKeys.TRAIN
  if is_training:
    # Randomly flip a bottleneck_noise fraction of bits during training.
    flips = tf.random_uniform(common_layers.shape_list(activations))
    discretized *= 2.0 * tf.to_float(tf.less(bottleneck_noise, flips)) - 1.0
  return common_layers.mix(discretized, activations, discretize_warmup_steps,
                           is_training)
def tanh_discrete_unbottleneck(x, hidden_size):
  """Simple un-discretization from tanh: a single dense projection."""
  return tf.layers.dense(x, hidden_size, name="tanh_discrete_unbottleneck")
def isemhash_bottleneck(x, bottleneck_size, bottleneck_noise,
                        discretize_warmup_steps, mode,
                        isemhash_noise_dev=0.5, isemhash_mix_prob=0.5):
  """Improved semantic hashing bottleneck."""
  with tf.variable_scope("isemhash_bottleneck"):
    logits = tf.layers.dense(x, bottleneck_size, name="dense")
    is_training = mode == tf.estimator.ModeKeys.TRAIN
    # During training, Gaussian noise before the saturating sigmoid pushes
    # activations towards the 0/1 corners.
    noisy_logits = logits
    if isemhash_noise_dev > 0 and is_training:
      noisy_logits = logits + tf.truncated_normal(
          common_layers.shape_list(logits), mean=0.0,
          stddev=isemhash_noise_dev)
    y = common_layers.saturating_sigmoid(noisy_logits)
    # Straight-through hard bits, then shift from [0, 1] to [-1, 1].
    d = tf.to_float(tf.less(0.5, y)) + y - tf.stop_gradient(y)
    d = 2.0 * d - 1.0
    if is_training:
      # Flip a bottleneck_noise fraction of bits.
      flips = tf.random_uniform(common_layers.shape_list(logits))
      d *= 2.0 * tf.to_float(tf.less(bottleneck_noise, flips)) - 1.0
    return common_layers.mix(d, 2.0 * y - 1.0, discretize_warmup_steps,
                             is_training, max_prob=isemhash_mix_prob)
def isemhash_unbottleneck(x, hidden_size, isemhash_filter_size_multiplier=1.0):
  """Improved semantic hashing un-bottleneck."""
  filter_size = int(hidden_size * isemhash_filter_size_multiplier)
  # NOTE(review): the inline comment claims a [-1, 1] -> [0, 1] mapping, but
  # 0.5 * (x - 1.0) actually maps [-1, 1] to [-1, 0]; the [0, 1] mapping
  # would be 0.5 * (x + 1.0). Confirm intent before changing — the dense
  # biases can absorb the shift, so existing checkpoints depend on this form.
  x = 0.5 * (x - 1.0)  # Move from [-1, 1] to [0, 1].
  with tf.variable_scope("isemhash_unbottleneck"):
    # Feed both the code and its complement, as in the bottleneck.
    h1a = tf.layers.dense(x, filter_size, name="hidden1a")
    h1b = tf.layers.dense(1.0 - x, filter_size, name="hidden1b")
    h2 = tf.layers.dense(tf.nn.relu(h1a + h1b), filter_size, name="hidden2")
    return tf.layers.dense(tf.nn.relu(h2), hidden_size, name="final")
def parametrized_bottleneck(x, hparams):
  """Meta-function calling all the above bottlenecks with hparams."""
  kind = hparams.bottleneck_kind
  if kind == "tanh_discrete":
    return tanh_discrete_bottleneck(
        x, hparams.bottleneck_size, hparams.bottleneck_noise * 0.5,
        hparams.discretize_warmup_steps, hparams.mode)
  if kind == "isemhash":
    return isemhash_bottleneck(
        x, hparams.bottleneck_size, hparams.bottleneck_noise * 0.5,
        hparams.discretize_warmup_steps, hparams.mode,
        hparams.isemhash_noise_dev, hparams.isemhash_mix_prob)
  raise ValueError("Unsupported hparams.bottleneck_kind %s" % kind)
def parametrized_unbottleneck(x, hidden_size, hparams):
  """Meta-function calling all the above un-bottlenecks with hparams."""
  kind = hparams.bottleneck_kind
  if kind == "tanh_discrete":
    return tanh_discrete_unbottleneck(x, hidden_size)
  if kind == "isemhash":
    return isemhash_unbottleneck(
        x, hidden_size, hparams.isemhash_filter_size_multiplier)
  raise ValueError("Unsupported hparams.bottleneck_kind %s" % kind)
| 38.877984 | 80 | 0.651395 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from functools import partial
from tensor2tensor.layers import common_layers
import tensorflow as tf
from tensorflow.python.training import moving_averages
def project_hidden(x, projection_tensors, hidden_size, num_blocks):
x = tf.reshape(x, shape=[1, -1, hidden_size])
x_tiled = tf.reshape(
tf.tile(x, multiples=[num_blocks, 1, 1]),
shape=[num_blocks, -1, hidden_size])
x_projected = tf.matmul(x_tiled, projection_tensors)
x_projected = tf.transpose(x_projected, perm=[1, 0, 2])
return x_projected
def slice_hidden(x, hidden_size, num_blocks):
block_dim = int(hidden_size // num_blocks)
x_sliced = tf.reshape(x, shape=[-1, num_blocks, block_dim])
return x_sliced
def nearest_neighbor(x,
means,
block_v_size,
random_top_k=1,
soft_em=False,
num_samples=1):
x_norm_sq = tf.reduce_sum(tf.square(x), axis=-1, keep_dims=True)
means_norm_sq = tf.reduce_sum(tf.square(means), axis=-1, keep_dims=True)
scalar_prod = tf.matmul(
tf.transpose(x, perm=[1, 0, 2]), tf.transpose(means, perm=[0, 2, 1]))
scalar_prod = tf.transpose(scalar_prod, perm=[1, 0, 2])
dist = x_norm_sq + tf.transpose(
means_norm_sq, perm=[2, 0, 1]) - 2 * scalar_prod
if soft_em:
num_blocks = common_layers.shape_list(dist)[1]
nearest_idx = tf.stack(
[
tf.multinomial(-dist[:, i, :], num_samples=num_samples)
for i in range(num_blocks)
],
axis=1)
nearest_hot = tf.one_hot(nearest_idx, depth=block_v_size)
nearest_hot = tf.reduce_mean(nearest_hot, axis=-2)
else:
if random_top_k > 1:
_, top_k_idx = tf.nn.top_k(-dist, k=random_top_k)
nearest_idx = tf.gather(
top_k_idx,
tf.random_uniform(
[1], minval=0, maxval=random_top_k - 1, dtype=tf.int32),
axis=-1)
else:
nearest_idx = tf.argmax(-dist, axis=-1)
nearest_hot = tf.one_hot(nearest_idx, block_v_size)
return nearest_hot
def embedding_lookup(x,
means,
num_blocks,
block_v_size,
random_top_k=1,
soft_em=False,
num_samples=1):
x_means_hot = nearest_neighbor(
x,
means,
block_v_size,
random_top_k,
soft_em=soft_em,
num_samples=num_samples)
x_means_hot_flat = tf.reshape(x_means_hot, [-1, num_blocks, block_v_size])
x_means = tf.matmul(tf.transpose(x_means_hot_flat, perm=[1, 0, 2]), means)
x_means = tf.transpose(x_means, [1, 0, 2])
q_loss = tf.reduce_mean(tf.square((tf.stop_gradient(x) - x_means)))
e_loss = tf.reduce_mean(tf.square(x - tf.stop_gradient(x_means)))
return x_means_hot, x_means, q_loss, e_loss
def bit_to_int(x_bit, num_bits, base=2):
x_l = tf.stop_gradient(tf.to_int32(tf.reshape(x_bit, [-1, num_bits])))
x_labels = []
for i in range(num_bits):
x_labels.append(x_l[:, i] * tf.to_int32(base)**tf.to_int32(i))
res = sum(x_labels)
return tf.to_int32(tf.reshape(res, common_layers.shape_list(x_bit)[:-1]))
def int_to_bit(x_int, num_bits, base=2):
x_l = tf.to_int32(tf.expand_dims(x_int, axis=-1))
x_labels = []
for i in range(num_bits):
x_labels.append(
tf.floormod(
tf.floordiv(tf.to_int32(x_l),
tf.to_int32(base)**i), tf.to_int32(base)))
res = tf.concat(x_labels, axis=-1)
return tf.to_float(res)
def int_to_bit_embed(x_int, num_bits, embedding_size, base=2):
shape = common_layers.shape_list(x_int)
inputs = int_to_bit(x_int, num_bits, base=base)
inputs = tf.reshape(inputs, shape[:-1] + [shape[-1] * 8])
inputs = 2.0 * tf.to_float(inputs) - 1.0
return tf.layers.dense(inputs, embedding_size, name="int_to_bit_embed")
def embed(x,
hidden_size,
z_size,
filter_size,
name,
bottleneck_kind="dvq",
soft_em=False,
num_blocks=2,
num_residuals=1,
block_v_size=None,
means=None):
with tf.variable_scope(name, reuse=tf.AUTO_REUSE):
if bottleneck_kind == "semhash":
c = int_to_bit(x, z_size)
h1a = tf.layers.dense(c, filter_size, name="vch1a")
h1b = tf.layers.dense(1.0 - c, filter_size, name="vch1b")
h1 = h1a + h1b
elif bottleneck_kind == "gumbel-softmax":
hot = tf.one_hot(x, 2**z_size)
h1 = tf.layers.dense(hot, hidden_size, name="dae_dense")
elif bottleneck_kind == "dvq":
if block_v_size is None:
raise ValueError("Bottleneck kind is dvq but block_v_size is None.")
if soft_em:
assert num_residuals == 1
x_hot_flat = tf.reshape(x, shape=[-1, num_blocks, block_v_size])
h1 = tf.matmul(tf.transpose(x_hot_flat, perm=[1, 0, 2]), means[0])
h1 = tf.transpose(h1, perm=[1, 0, 2])
new_shape = common_layers.shape_list(x)
new_shape[-1] = hidden_size
h1 = tf.reshape(h1, shape=new_shape)
else:
shape_x = common_layers.shape_list(x)
x_flat = tf.reshape(x, [-1, 1])
c = int_to_bit(x_flat, num_bits=z_size, base=2)
shape = common_layers.shape_list(c)
new_shape = shape
new_shape[-1] = num_residuals
new_shape.append(num_blocks)
new_shape.append(int(z_size / (num_residuals * num_blocks)))
c = tf.to_int32(tf.reshape(c, shape=new_shape))
h1_shape = shape_x
h1_shape.append(hidden_size)
h1 = tf.zeros(dtype=tf.float32, shape=h1_shape)
for i in range(num_residuals):
c_residual = bit_to_int(
c[:, :, i, :, :],
num_bits=int(z_size / (num_residuals * num_blocks)),
base=2)
c_hot = tf.one_hot(c_residual, depth=block_v_size, axis=-1)
c_hot_flat = tf.reshape(c_hot, shape=[-1, num_blocks, block_v_size])
h1_residual = tf.matmul(
tf.transpose(c_hot_flat, perm=[1, 0, 2]), means[i])
h1_residual = tf.transpose(h1_residual, perm=[1, 0, 2])
h1_residual = tf.reshape(h1_residual, shape=h1_shape)
h1 += h1_residual
elif bottleneck_kind == "rounding":
h1 = x
else:
raise ValueError("Unknown bottleneck kind.")
h2 = tf.layers.dense(tf.nn.relu(h1), filter_size, name="vch2")
return tf.layers.dense(tf.nn.relu(h2), hidden_size, name="vcfin")
def vae(x, name, z_size):
with tf.variable_scope(name):
mu = tf.layers.dense(x, z_size, name="mu")
log_sigma = tf.layers.dense(x, z_size, name="log_sigma")
shape = common_layers.shape_list(x)
epsilon = tf.random_normal([shape[0], shape[1], 1, z_size])
z = mu + tf.exp(log_sigma / 2) * epsilon
kl = 0.5 * tf.reduce_mean(
tf.exp(log_sigma) + tf.square(mu) - 1. - log_sigma, axis=-1)
free_bits = z_size // 4
kl_loss = tf.reduce_mean(tf.maximum(kl - free_bits, 0.0))
return z, kl_loss, mu, log_sigma
def top_k_softmax(x, k):
x = tf.nn.softmax(x)
top_x, _ = tf.nn.top_k(x, k=k + 1)
min_top = tf.reduce_min(top_x, axis=-1, keep_dims=True)
x = tf.nn.relu((x - min_top) + 1e-12)
x /= tf.reduce_sum(x, axis=-1, keep_dims=True)
return x, tf.reduce_max(top_x, axis=-1)
def gumbel_sample(shape):
uniform_samples = tf.random_uniform(shape, minval=0.00001, maxval=0.99998)
return -tf.log(-tf.log(uniform_samples))
def gumbel_softmax(x,
name,
z_size,
mode,
softmax_k=0,
kl_warmup_steps=150000,
summary=True):
with tf.variable_scope(name):
m = tf.layers.dense(x, 2**z_size, name="mask")
if softmax_k > 0:
m, kl = top_k_softmax(m, softmax_k)
return m, m, 1.0 - tf.reduce_mean(kl)
logsm = tf.nn.log_softmax(m)
gumbel_samples = gumbel_sample(common_layers.shape_list(m))
steps = kl_warmup_steps
gumbel_samples *= common_layers.inverse_exp_decay(steps // 5) * 0.5
temperature = 1.2 - common_layers.inverse_lin_decay(steps)
temperature = tf.cond(
tf.less(tf.random_uniform([]), 0.9), lambda: temperature,
lambda: tf.random_uniform([], minval=0.5, maxval=1.0))
s = tf.nn.softmax((logsm + gumbel_samples) / temperature)
m = tf.nn.softmax(m)
kl = -tf.reduce_max(logsm, axis=-1)
if summary:
tf.summary.histogram("max-log", tf.reshape(kl, [-1]))
maxvec = tf.reshape(tf.argmax(m, axis=-1), [-1])
maxvhot = tf.stop_gradient(tf.one_hot(maxvec, 2**z_size))
distrib = tf.reshape(logsm, [-1, 2**z_size]) * maxvhot
d_mean = tf.reduce_mean(distrib, axis=[0], keep_dims=True)
d_variance = tf.reduce_mean(tf.square(distrib - d_mean), axis=[0])
d_dev = -tf.reduce_mean(d_variance)
ret = s
if mode != tf.contrib.learn.ModeKeys.TRAIN:
ret = tf.reshape(maxvhot, common_layers.shape_list(s))
return m, ret, d_dev * 5.0 + tf.reduce_mean(kl) * 0.002
def discrete_bottleneck(x,
                        hidden_size,
                        z_size,
                        filter_size,
                        name,
                        mode=None,
                        startup_steps=50000,
                        bottleneck_kind="dvq",
                        num_blocks=2,
                        num_residuals=1,
                        reshape_method="slice",
                        projection_tensors=None,
                        means=None,
                        beta=0.25,
                        noise_dev=1.,
                        decay=0.999,
                        discrete_mix=0.5,
                        random_top_k=1,
                        soft_em=False,
                        num_samples=1,
                        epsilon=1e-5,
                        softmax_k=0,
                        kl_warmup_steps=150000,
                        ema=True,
                        ema_count=None,
                        ema_means=None,
                        summary=True):
  """Discretization bottleneck.

  Depending on `bottleneck_kind`, x is discretized with a plain dense layer
  ("dense"), a variational autoencoder ("vae"), semantic hashing ("semhash"),
  Gumbel-softmax ("gumbel-softmax"), or a decomposed vector-quantized
  codebook ("dvq" with `num_blocks` blocks and `num_residuals` residual
  quantization steps, optionally trained with EMA codebook updates).

  Returns:
    res: the reconstructed hidden representation (size hidden_size).
    c: the discrete codes (integers; soft assignments when soft_em).
    l: the extra loss contributed by the bottleneck.
    embed_fn: partial of `embed` mapping codes back to hidden vectors.

  Raises:
    ValueError: on inconsistent dvq configuration or unknown
      bottleneck_kind / reshape_method.
  """
  block_v_size = None
  if bottleneck_kind == "dvq":
    # Validate the dvq decomposition: hidden_size splits across num_blocks,
    # and the z_size bits split across residuals and blocks.
    assert means is not None
    if hidden_size % num_blocks != 0:
      raise ValueError("num_blocks does not divide hidden size")
    if z_size % num_residuals != 0:
      raise ValueError("num_residuals does not divide embedding table size")
    z_size_per_residual = int(z_size / num_residuals)
    if z_size_per_residual % num_blocks != 0:
      raise ValueError("num_blocks does not divide embedding table size")
    # Codebook size per block: each block gets its share of the bits.
    block_v_size = 2**(z_size_per_residual / num_blocks)
    block_v_size = int(block_v_size)
    # reshape_fn splits the hidden vector into per-block slices, either by
    # slicing or by learned projections.
    if reshape_method == "slice":
      reshape_fn = partial(
          slice_hidden, hidden_size=hidden_size, num_blocks=num_blocks)
    elif reshape_method == "project":
      if projection_tensors is None:
        raise ValueError(
            "Projection tensors is None for reshape_method project")
      reshape_fn = partial(
          project_hidden,
          projection_tensors=projection_tensors,
          hidden_size=hidden_size,
          num_blocks=num_blocks)
    else:
      raise ValueError("Unknown reshape_method")
    if ema:
      if ema_count is None:
        raise ValueError("ema_count is None but ema is True")
      if ema_means is None:
        raise ValueError("ema_means is None but ema is True")
  with tf.variable_scope(name, reuse=tf.AUTO_REUSE):
    l = tf.constant(0.0)
    if bottleneck_kind == "dense":
      c = tf.layers.dense(x, z_size, name="vcc")
      h1 = tf.layers.dense(c, filter_size, name="vch1")
    elif bottleneck_kind == "vae":
      c, l, _, _ = vae(x, z_size, "vae")
      h1 = tf.layers.dense(c, filter_size, name="vch1")
    elif bottleneck_kind == "semhash":
      c = tf.layers.dense(x, z_size, name="vcc")
      y_clean = common_layers.saturating_sigmoid(c)
      if summary:
        tf.summary.histogram("y_clean", tf.reshape(y_clean, [-1]))
      # Gaussian noise before the sigmoid encourages saturation (training).
      if noise_dev > 0 and mode == tf.estimator.ModeKeys.TRAIN:
        noise = tf.truncated_normal(
            common_layers.shape_list(c), mean=0.0, stddev=noise_dev)
        y = common_layers.saturating_sigmoid(c + noise)
      else:
        y = y_clean
      d = tf.to_float(tf.less(0.5, y))
      # Straight-through estimator: forward pass uses the binarized d,
      # gradients flow through y.
      y_discrete = tf.stop_gradient(d) + y - tf.stop_gradient(y)
      # Per-example mix of discrete and continuous codes, annealed towards
      # fully discrete over 2 * startup_steps; always discrete at eval.
      pd = common_layers.inverse_exp_decay(startup_steps * 2)
      pd *= discrete_mix
      pd = pd if mode == tf.estimator.ModeKeys.TRAIN else 1.0
      c = tf.where(
          tf.less(tf.random_uniform([common_layers.shape_list(y)[0]]), pd),
          y_discrete, y)
      h1a = tf.layers.dense(c, filter_size, name="vch1a")
      h1b = tf.layers.dense(1.0 - c, filter_size, name="vch1b")
      h1 = h1a + h1b
      dx = tf.to_int32(tf.stop_gradient(d))
      c = bit_to_int(dx, z_size)
    elif bottleneck_kind == "gumbel-softmax":
      _, hot, l = gumbel_softmax(x, name, z_size, mode, softmax_k,
                                 kl_warmup_steps, summary)
      c = tf.argmax(hot, axis=-1)
      h1 = tf.layers.dense(hot, hidden_size, name="dae_dense")
    elif bottleneck_kind == "dvq":
      x_reshaped = reshape_fn(x)
      x_res = x_reshaped
      x_means_hot = []
      x_means = 0
      l = 0
      # Residual quantization: each step quantizes what the previous
      # steps could not represent.
      for i in range(num_residuals):
        x_means_hot_res, x_means_res, q_loss_res, e_loss_res = embedding_lookup(
            x_res, means[i], num_blocks, block_v_size, random_top_k, soft_em,
            num_samples)
        if ema:
          # VQ-VAE style EMA codebook update instead of the codebook loss.
          tf.logging.info("Using EMA with beta = {}".format(beta))
          updated_ema_count_res = moving_averages.assign_moving_average(
              ema_count[i],
              tf.reduce_sum(
                  tf.reshape(
                      x_means_hot_res, shape=[-1, num_blocks, block_v_size]),
                  axis=0),
              decay,
              zero_debias=False)
          dw = tf.matmul(
              tf.transpose(x_means_hot_res, perm=[1, 2, 0]),
              tf.transpose(x_res, perm=[1, 0, 2]))
          updated_ema_means_res = moving_averages.assign_moving_average(
              ema_means[i], dw, decay, zero_debias=False)
          # Laplace smoothing of the cluster counts to avoid division by
          # zero for unused codebook entries.
          n = tf.reduce_sum(updated_ema_count_res, axis=-1, keep_dims=True)
          updated_ema_count_res = ((updated_ema_count_res + epsilon) /
                                   (n + 2**z_size * epsilon) * n)
          updated_ema_means_res = updated_ema_means_res / tf.expand_dims(
              updated_ema_count_res, axis=-1)
          # Control dependencies make sure the commitment loss is computed
          # before the means are overwritten, and the assignment runs
          # before the loss is consumed.
          with tf.control_dependencies([e_loss_res]):
            update_means_res = tf.assign(means[i], updated_ema_means_res)
            with tf.control_dependencies([update_means_res]):
              l += beta * e_loss_res
        else:
          l += q_loss_res + beta * e_loss_res
        x_res -= x_means_res
        x_means += x_means_res
        x_means_hot.append(x_means_hot_res)
      # Turn the per-residual one-hot assignments into a single integer
      # code per position via its bit representation.
      x_means_hot = tf.stack(x_means_hot, axis=1)
      x_means_idx = tf.argmax(x_means_hot, axis=-1)
      x_means_bits = int_to_bit(
          x_means_idx,
          num_bits=int(z_size / (num_residuals * num_blocks)),
          base=2)
      shape = common_layers.shape_list(x_means_bits)
      new_shape = shape[:-2]
      new_shape[-1] = z_size
      x_means_bits = tf.reshape(x_means_bits, shape=new_shape)
      c = bit_to_int(tf.to_int32(x_means_bits), num_bits=z_size, base=2)
      shape_x = common_layers.shape_list(x)
      new_shape = shape_x[:-1]
      c = tf.reshape(c, new_shape)
      if soft_em:
        # With soft EM the codes are the soft assignments themselves.
        c = x_means_hot
        new_shape.append(block_v_size)
        c = tf.reshape(c, new_shape)
      x_means = tf.reshape(x_means, shape_x)
      x_reshaped = tf.reshape(x_reshaped, shape_x)
      # Straight-through: forward uses the quantized means, gradients pass
      # to x_reshaped unchanged.
      h1 = x_reshaped + tf.stop_gradient(x_means - x_reshaped)
    else:
      raise ValueError("Unknown discretization method.")
    h2 = tf.layers.dense(tf.nn.relu(h1), filter_size, name="vch2")
    res = tf.layers.dense(tf.nn.relu(h2), hidden_size, name="vcfin")
    embed_fn = partial(
        embed,
        hidden_size=hidden_size,
        z_size=z_size,
        filter_size=filter_size,
        name=name,
        bottleneck_kind=bottleneck_kind,
        soft_em=soft_em,
        num_blocks=num_blocks,
        num_residuals=num_residuals,
        block_v_size=block_v_size,
        means=means)
  return res, c, l, embed_fn
def tanh_discrete_bottleneck(x, bottleneck_size, bottleneck_noise,
                             discretize_warmup_steps, mode):
  """Discretize x to {-1, 1} codes via tanh with a straight-through sign.

  During training, signs are randomly flipped with probability
  bottleneck_noise, and the discrete output is mixed with the continuous
  tanh activations over discretize_warmup_steps.
  """
  is_training = mode == tf.estimator.ModeKeys.TRAIN
  activations = tf.tanh(
      tf.layers.dense(x, bottleneck_size, name="tanh_discrete_bottleneck"))
  # Straight-through: forward pass sees the sign, gradient sees tanh.
  signs = 2.0 * tf.to_float(tf.less(0.0, activations)) - 1.0
  discrete = activations + tf.stop_gradient(signs - activations)
  if is_training:
    # Randomly flip each bit with probability bottleneck_noise.
    flips = tf.random_uniform(common_layers.shape_list(activations))
    flips = 2.0 * tf.to_float(tf.less(bottleneck_noise, flips)) - 1.0
    discrete *= flips
  return common_layers.mix(discrete, activations, discretize_warmup_steps,
                           is_training)
def tanh_discrete_unbottleneck(x, hidden_size):
  """Project the tanh-discretized codes back up to hidden_size."""
  return tf.layers.dense(x, hidden_size, name="tanh_discrete_unbottleneck")
def isemhash_bottleneck(x, bottleneck_size, bottleneck_noise,
                        discretize_warmup_steps, mode,
                        isemhash_noise_dev=0.5, isemhash_mix_prob=0.5):
  """Improved-semantic-hashing bottleneck producing {-1, 1} codes.

  A saturating sigmoid (optionally perturbed with truncated-normal noise at
  training time) is binarized with a straight-through estimator, random bit
  flips are applied with probability bottleneck_noise during training, and
  the result is mixed with the continuous codes over
  discretize_warmup_steps.
  """
  is_training = mode == tf.estimator.ModeKeys.TRAIN
  with tf.variable_scope("isemhash_bottleneck"):
    pre_act = tf.layers.dense(x, bottleneck_size, name="dense")
    soft_bits = common_layers.saturating_sigmoid(pre_act)
    if isemhash_noise_dev > 0 and is_training:
      # Noise before the sigmoid pushes activations towards saturation.
      gaussian = tf.truncated_normal(
          common_layers.shape_list(pre_act), mean=0.0,
          stddev=isemhash_noise_dev)
      soft_bits = common_layers.saturating_sigmoid(pre_act + gaussian)
    # Straight-through binarization at 0.5, then rescale {0, 1} -> {-1, 1}.
    hard_bits = (tf.to_float(tf.less(0.5, soft_bits)) + soft_bits
                 - tf.stop_gradient(soft_bits))
    hard_bits = 2.0 * hard_bits - 1.0
    if is_training:
      # Random sign flips with probability bottleneck_noise.
      flips = tf.random_uniform(common_layers.shape_list(pre_act))
      flips = 2.0 * tf.to_float(tf.less(bottleneck_noise, flips)) - 1.0
      hard_bits *= flips
    result = common_layers.mix(hard_bits, 2.0 * soft_bits - 1.0,
                               discretize_warmup_steps, is_training,
                               max_prob=isemhash_mix_prob)
  return result
def isemhash_unbottleneck(x, hidden_size, isemhash_filter_size_multiplier=1.0):
  """Map {-1, 1} isemhash codes back to a hidden_size representation."""
  filter_size = int(hidden_size * isemhash_filter_size_multiplier)
  # Rescale {-1, 1} to {-1, 0} so the two branches see complementary bits.
  rescaled = 0.5 * (x - 1.0)
  with tf.variable_scope("isemhash_unbottleneck"):
    branch_a = tf.layers.dense(rescaled, filter_size, name="hidden1a")
    branch_b = tf.layers.dense(1.0 - rescaled, filter_size, name="hidden1b")
    hidden = tf.layers.dense(
        tf.nn.relu(branch_a + branch_b), filter_size, name="hidden2")
    return tf.layers.dense(tf.nn.relu(hidden), hidden_size, name="final")
def parametrized_bottleneck(x, hparams):
  """Apply the bottleneck selected by hparams.bottleneck_kind to x.

  Raises:
    ValueError: if hparams.bottleneck_kind is not supported here.
  """
  kind = hparams.bottleneck_kind
  if kind == "tanh_discrete":
    return tanh_discrete_bottleneck(
        x, hparams.bottleneck_size, hparams.bottleneck_noise * 0.5,
        hparams.discretize_warmup_steps, hparams.mode)
  if kind == "isemhash":
    return isemhash_bottleneck(
        x, hparams.bottleneck_size, hparams.bottleneck_noise * 0.5,
        hparams.discretize_warmup_steps, hparams.mode,
        hparams.isemhash_noise_dev, hparams.isemhash_mix_prob)
  raise ValueError("Unsupported hparams.bottleneck_kind %s" % kind)
def parametrized_unbottleneck(x, hidden_size, hparams):
  """Undo the bottleneck selected by hparams.bottleneck_kind.

  Raises:
    ValueError: if hparams.bottleneck_kind is not supported here.
  """
  kind = hparams.bottleneck_kind
  if kind == "tanh_discrete":
    return tanh_discrete_unbottleneck(x, hidden_size)
  if kind == "isemhash":
    return isemhash_unbottleneck(
        x, hidden_size, hparams.isemhash_filter_size_multiplier)
  raise ValueError("Unsupported hparams.bottleneck_kind %s" % kind)
| true | true |
f72bc2026dd2bf64e09da4b3d2a51aebf0adea83 | 5,122 | py | Python | perfect_information_game/tablebases/symmetry_transform.py | amaarquadri/perfect-information-game | 6755f9633935be762d039ece9c0b646c64de6ab8 | [
"MIT"
] | null | null | null | perfect_information_game/tablebases/symmetry_transform.py | amaarquadri/perfect-information-game | 6755f9633935be762d039ece9c0b646c64de6ab8 | [
"MIT"
] | null | null | null | perfect_information_game/tablebases/symmetry_transform.py | amaarquadri/perfect-information-game | 6755f9633935be762d039ece9c0b646c64de6ab8 | [
"MIT"
] | null | null | null | import numpy as np
from perfect_information_game.games import Chess
from perfect_information_game.utils import iter_product
from perfect_information_game.tablebases import get_verified_chess_subclass
class SymmetryTransform:
    """Canonicalizing transform for chess positions based on board symmetry.

    A position is mapped to a canonical orientation by optionally swapping
    colours (so the stronger/attacking side is white) and flipping the board
    vertically, horizontally and/or about the main diagonal so the attacking
    king lands in a fixed fundamental region. Vertical and diagonal flips
    are only applied when no pawns are on the board, because pawns move in a
    single direction and those flips would change the game.
    """
    # Fundamental region for the king: the 10-square triangle (pawnless) or
    # the left half of the board (with pawns, only horizontal flips legal).
    # noinspection PyChainedComparisons
    PAWNLESS_UNIQUE_SQUARE_INDICES = [(i, j) for i, j in iter_product(Chess.BOARD_SHAPE)
                                      if i < 4 and j < 4 and i <= j]
    UNIQUE_SQUARE_INDICES = [(i, j) for i, j in iter_product(Chess.BOARD_SHAPE) if j < 4]
    def __init__(self, GameClass, state):
        """Decide which of the four flips canonicalize the given state."""
        self.GameClass = get_verified_chess_subclass(GameClass)
        self.flip_colors = self.flip_i = self.flip_j = self.flip_diagonal = False
        if self.should_swap_colours(state):
            # black is attacking, so switch white and black
            self.flip_colors = True
            i, j = self.GameClass.get_king_pos(state, self.GameClass.BLACK_SLICE)
            # colour swapping mirrors the board vertically, so mirror the row
            i = self.GameClass.ROWS - 1 - i
        else:
            i, j = self.GameClass.get_king_pos(state, self.GameClass.WHITE_SLICE)
        pawnless = np.all(state[:, :, self.GameClass.WHITE_PAWN] == 0) and \
                   np.all(state[:, :, self.GameClass.BLACK_PAWN] == 0)
        if pawnless and not (i < 4):
            self.flip_i = True
            i = self.GameClass.ROWS - 1 - i
        if not (j < 4):  # horizontal flipping can be done, even with pawns
            self.flip_j = True
            j = self.GameClass.COLUMNS - 1 - j
        if pawnless and not (i <= j):
            self.flip_diagonal = True
    def should_swap_colours(self, state):
        """Return True if black should become white in the canonical form."""
        heuristic = self.GameClass.heuristic(state)
        if heuristic > 0:
            # white is up in material, so don't swap colours
            return False
        if heuristic < 0:
            # black is up in material, so swap colours
            return True
        # compare the number of pawns on each rank, from most advanced to least advanced pawns
        # no need to check second rank pawns, because if everything else is equal they must be equal too
        for rank in range(7, 2, -1):
            if np.sum(state[rank - 1, :, self.GameClass.BLACK_PAWN]) > \
                    np.sum(state[8 - rank, :, self.GameClass.WHITE_PAWN]):
                # black has more pawns than white on this rank, so swap colours
                return True
        return False
    @staticmethod
    def identity(GameClass):
        """Return the identity transform (no flips at all)."""
        identity = SymmetryTransform(GameClass, GameClass.STARTING_STATE)
        identity.flip_colors = identity.flip_i = identity.flip_j = identity.flip_diagonal = False
        return identity
    @staticmethod
    def random(GameClass, descriptor):
        """
        Returns a random symmetry transform for the given descriptor.
        """
        random = SymmetryTransform.identity(GameClass)
        # vertical/diagonal flips are only legal when the descriptor has no pawns
        pawnless = 'p' not in descriptor and 'P' not in descriptor
        random.flip_colors = np.random.random() < 0.5
        random.flip_j = np.random.random() < 0.5
        if pawnless:
            random.flip_i = np.random.random() < 0.5
            random.flip_diagonal = np.random.random() < 0.5
        return random
    def is_identity(self):
        """Return True if this transform leaves every state unchanged."""
        return not self.flip_colors and not self.flip_i and not self.flip_j and not self.flip_diagonal
    def transform_state(self, state):
        """Apply the configured flips to state (colors, then i, j, diagonal)."""
        if self.flip_colors:
            state = self.flip_state_colors(self.GameClass, state)
        if self.flip_i:
            state = self.flip_state_i(state)
        if self.flip_j:
            state = self.flip_state_j(state)
        if self.flip_diagonal:
            state = self.flip_state_diagonal(state)
        return state
    def untransform_state(self, state):
        """Invert transform_state."""
        # since all transform_funcs are their own inverses, we can just run through them in reverse
        if self.flip_diagonal:
            state = self.flip_state_diagonal(state)
        if self.flip_j:
            state = self.flip_state_j(state)
        if self.flip_i:
            state = self.flip_state_i(state)
        if self.flip_colors:
            state = self.flip_state_colors(self.GameClass, state)
        return state
    def transform_outcome(self, outcome):
        """Negate the game outcome when colours were swapped."""
        return -outcome if self.flip_colors else outcome
    @staticmethod
    def flip_state_colors(GameClass, state):
        """Swap the white and black piece planes (and whose turn it is)."""
        special_layers = np.copy(state[..., -2:])
        special_layers[..., -1] = 1 - special_layers[..., -1]  # flip whose turn it is
        new_state = np.concatenate((state[..., GameClass.BLACK_SLICE], state[..., GameClass.WHITE_SLICE],
                                    special_layers),
                                   axis=-1)
        # need to flip board vertically after flipping colours
        # this ensures that the pawns move in the correct directions
        return SymmetryTransform.flip_state_i(new_state)
    @staticmethod
    def flip_state_i(state):
        """Mirror the board vertically (over the horizontal axis)."""
        return np.flip(state, axis=0)
    @staticmethod
    def flip_state_j(state):
        """Mirror the board horizontally (over the vertical axis)."""
        return np.flip(state, axis=1)
    @staticmethod
    def flip_state_diagonal(state):
        """Mirror the board about the main diagonal (transpose)."""
        return np.rot90(np.flip(state, axis=1), axes=(0, 1))
| 40.650794 | 105 | 0.625927 | import numpy as np
from perfect_information_game.games import Chess
from perfect_information_game.utils import iter_product
from perfect_information_game.tablebases import get_verified_chess_subclass
class SymmetryTransform:
    """Canonicalizing transform for chess positions based on board symmetry.

    Optionally swaps colours and flips the board vertically, horizontally
    and/or about the main diagonal so the attacking king lands in a fixed
    fundamental region. Vertical and diagonal flips are only applied when no
    pawns are on the board, since pawns move in one direction only.
    """
    # Fundamental region for the king with and without pawns on the board.
    PAWNLESS_UNIQUE_SQUARE_INDICES = [(i, j) for i, j in iter_product(Chess.BOARD_SHAPE)
                                      if i < 4 and j < 4 and i <= j]
    UNIQUE_SQUARE_INDICES = [(i, j) for i, j in iter_product(Chess.BOARD_SHAPE) if j < 4]
    def __init__(self, GameClass, state):
        """Decide which of the four flips canonicalize the given state."""
        self.GameClass = get_verified_chess_subclass(GameClass)
        self.flip_colors = self.flip_i = self.flip_j = self.flip_diagonal = False
        if self.should_swap_colours(state):
            # black is attacking, so track the black king (row mirrored by the swap)
            self.flip_colors = True
            i, j = self.GameClass.get_king_pos(state, self.GameClass.BLACK_SLICE)
            i = self.GameClass.ROWS - 1 - i
        else:
            i, j = self.GameClass.get_king_pos(state, self.GameClass.WHITE_SLICE)
        pawnless = np.all(state[:, :, self.GameClass.WHITE_PAWN] == 0) and \
                   np.all(state[:, :, self.GameClass.BLACK_PAWN] == 0)
        if pawnless and not (i < 4):
            self.flip_i = True
            i = self.GameClass.ROWS - 1 - i
        if not (j < 4):
            # horizontal flipping is legal even with pawns on the board
            self.flip_j = True
            j = self.GameClass.COLUMNS - 1 - j
        if pawnless and not (i <= j):
            self.flip_diagonal = True
    def should_swap_colours(self, state):
        """Return True if black should become white in the canonical form."""
        heuristic = self.GameClass.heuristic(state)
        if heuristic > 0:
            # white is up in material, so keep colours
            return False
        if heuristic < 0:
            # black is up in material, so swap colours
            return True
        # compare the number of pawns on each rank, from most advanced to least advanced pawns
        # no need to check second rank pawns, because if everything else is equal they must be equal too
        for rank in range(7, 2, -1):
            if np.sum(state[rank - 1, :, self.GameClass.BLACK_PAWN]) > \
                    np.sum(state[8 - rank, :, self.GameClass.WHITE_PAWN]):
                # black has more pawns than white on this rank, so swap colours
                return True
        return False
    @staticmethod
    def identity(GameClass):
        """Return the identity transform (no flips at all)."""
        identity = SymmetryTransform(GameClass, GameClass.STARTING_STATE)
        identity.flip_colors = identity.flip_i = identity.flip_j = identity.flip_diagonal = False
        return identity
    @staticmethod
    def random(GameClass, descriptor):
        """Return a random symmetry transform legal for the given descriptor."""
        random = SymmetryTransform.identity(GameClass)
        # vertical/diagonal flips require a pawnless descriptor
        pawnless = 'p' not in descriptor and 'P' not in descriptor
        random.flip_colors = np.random.random() < 0.5
        random.flip_j = np.random.random() < 0.5
        if pawnless:
            random.flip_i = np.random.random() < 0.5
            random.flip_diagonal = np.random.random() < 0.5
        return random
    def is_identity(self):
        """Return True if this transform leaves every state unchanged."""
        return not self.flip_colors and not self.flip_i and not self.flip_j and not self.flip_diagonal
    def transform_state(self, state):
        """Apply the configured flips to state (colors, then i, j, diagonal)."""
        if self.flip_colors:
            state = self.flip_state_colors(self.GameClass, state)
        if self.flip_i:
            state = self.flip_state_i(state)
        if self.flip_j:
            state = self.flip_state_j(state)
        if self.flip_diagonal:
            state = self.flip_state_diagonal(state)
        return state
    def untransform_state(self, state):
        """Invert transform_state."""
        # since all transform_funcs are their own inverses, we can just run through them in reverse
        if self.flip_diagonal:
            state = self.flip_state_diagonal(state)
        if self.flip_j:
            state = self.flip_state_j(state)
        if self.flip_i:
            state = self.flip_state_i(state)
        if self.flip_colors:
            state = self.flip_state_colors(self.GameClass, state)
        return state
    def transform_outcome(self, outcome):
        """Negate the game outcome when colours were swapped."""
        return -outcome if self.flip_colors else outcome
    @staticmethod
    def flip_state_colors(GameClass, state):
        """Swap the white and black piece planes (and whose turn it is)."""
        special_layers = np.copy(state[..., -2:])
        special_layers[..., -1] = 1 - special_layers[..., -1]  # flip whose turn it is
        new_state = np.concatenate((state[..., GameClass.BLACK_SLICE], state[..., GameClass.WHITE_SLICE],
                                    special_layers),
                                   axis=-1)
        # need to flip board vertically after flipping colours
        # this ensures that the pawns move in the correct directions
        return SymmetryTransform.flip_state_i(new_state)
    @staticmethod
    def flip_state_i(state):
        """Mirror the board vertically (over the horizontal axis)."""
        return np.flip(state, axis=0)
    @staticmethod
    def flip_state_j(state):
        """Mirror the board horizontally (over the vertical axis)."""
        return np.flip(state, axis=1)
    @staticmethod
    def flip_state_diagonal(state):
        """Mirror the board about the main diagonal (transpose)."""
        return np.rot90(np.flip(state, axis=1), axes=(0, 1))
| true | true |
f72bc25c098c14f516c7757477bbf04b559e5b10 | 8,291 | py | Python | custom_components/dahua/config_flow.py | ikke-zelf/dahua | 9fca03b3d0caf5661efee7d71668a838b0478587 | [
"MIT"
] | null | null | null | custom_components/dahua/config_flow.py | ikke-zelf/dahua | 9fca03b3d0caf5661efee7d71668a838b0478587 | [
"MIT"
] | null | null | null | custom_components/dahua/config_flow.py | ikke-zelf/dahua | 9fca03b3d0caf5661efee7d71668a838b0478587 | [
"MIT"
] | null | null | null | """Adds config flow (UI flow) for Dahua IP cameras."""
import logging
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from homeassistant.helpers import config_validation as cv
from .client import DahuaClient
from .const import (
CONF_PASSWORD,
CONF_USERNAME,
CONF_ADDRESS,
CONF_RTSP_PORT,
CONF_PORT,
CONF_STREAMS,
CONF_EVENTS,
CONF_NAME,
STREAM_MAIN,
STREAM_SUB,
STREAM_BOTH,
DOMAIN,
PLATFORMS,
)
from .rpc2 import DahuaRpc2Client
"""
https://developers.home-assistant.io/docs/config_entries_config_flow_handler
https://developers.home-assistant.io/docs/data_entry_flow_index/
"""
_LOGGER: logging.Logger = logging.getLogger(__package__)
STREAMS = [STREAM_MAIN, STREAM_SUB, STREAM_BOTH]
DEFAULT_EVENTS = ["VideoMotion", "CrossLineDetection", "AlarmLocal", "VideoLoss", "VideoBlind"]
ALL_EVENTS = ["VideoMotion",
"VideoLoss",
"AlarmLocal",
"CrossLineDetection",
"AudioAnomaly",
"AudioMutation",
"VideoMotionInfo",
"SmartMotionHuman",
"SmartMotionVehicle",
"NewFile",
"VideoBlind",
"IntelliFrame",
"CrossRegionDetection",
"LeftDetection",
"TakenAwayDetection",
"VideoAbnormalDetection",
"FaceDetection",
"VideoUnFocus",
"WanderDetection",
"RioterDetection",
"ParkingDetection",
"MoveDetection",
"StorageNotExist",
"StorageFailure",
"StorageLowSpace",
"AlarmOutput",
"InterVideoAccess",
"NTPAdjustTime",
"TimeChange",
"MDResult",
"HeatImagingTemper",
"CrowdDetection",
"FireWarning",
"FireWarningInfo",
]
"""
https://developers.home-assistant.io/docs/data_entry_flow_index
"""
class DahuaFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
    """Config flow for Dahua Camera API."""
    VERSION = 1
    CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
    def __init__(self):
        """Initialize."""
        self.dahua_config = {}
        self._errors = {}
        # Set after a successful credential check in async_step_user and
        # consumed by async_step_name when creating the entry.
        self.init_info = None
    async def async_step_user(self, user_input=None):
        """Handle a flow initialized by the user to add a camera."""
        self._errors = {}
        # Uncomment the next 2 lines if only a single instance of the integration is allowed:
        # if self._async_current_entries():
        # return self.async_abort(reason="single_instance_allowed")
        if user_input is not None:
            data = await self._test_credentials(
                user_input[CONF_USERNAME],
                user_input[CONF_PASSWORD],
                user_input[CONF_ADDRESS],
                user_input[CONF_PORT],
                user_input[CONF_RTSP_PORT],
            )
            if data is not None:
                # Only allow a camera to be setup once
                if "serialNumber" in data and data["serialNumber"] is not None:
                    await self.async_set_unique_id(data["serialNumber"])
                    self._abort_if_unique_id_configured()
                user_input[CONF_NAME] = data["name"]
                self.init_info = user_input
                # Move on to the "name" step, pre-filled with the device name.
                return await self._show_config_form_name(user_input)
            else:
                self._errors["base"] = "auth"
            return await self._show_config_form_user(user_input)
    async def async_step_name(self, user_input=None):
        """Handle a flow to configure the camera name."""
        self._errors = {}
        if user_input is not None:
            if self.init_info is not None:
                self.init_info.update(user_input)
                return self.async_create_entry(
                    title=self.init_info["name"],
                    data=self.init_info,
                )
        # NOTE(review): if user_input is None here, _show_config_form_name
        # indexes user_input[CONF_NAME] and would raise — presumably HA only
        # calls this step with submitted data; confirm against the flow docs.
        return await self._show_config_form_name(user_input)
    @staticmethod
    @callback
    def async_get_options_flow(config_entry):
        return DahuaOptionsFlowHandler(config_entry)
    async def _show_config_form_user(self, user_input):  # pylint: disable=unused-argument
        """Show the configuration form to edit camera name."""
        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_USERNAME): str,
                    vol.Required(CONF_PASSWORD): str,
                    vol.Required(CONF_ADDRESS): str,
                    vol.Required(CONF_PORT, default="80"): str,
                    vol.Required(CONF_RTSP_PORT, default="554"): str,
                    vol.Required(CONF_STREAMS, default=STREAMS[0]): vol.In(STREAMS),
                    vol.Optional(CONF_EVENTS, default=DEFAULT_EVENTS): cv.multi_select(ALL_EVENTS),
                }
            ),
            errors=self._errors,
        )
    async def _show_config_form_name(self, user_input):  # pylint: disable=unused-argument
        """Show the configuration form to edit location data."""
        return self.async_show_form(
            step_id="name",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_NAME, default=user_input[CONF_NAME]): str,
                }
            ),
            errors=self._errors,
        )
    async def _test_credentials(self, username, password, address, port, rtsp_port):
        """Return name and serialNumber if credentials is valid."""
        session = async_create_clientsession(self.hass)
        try:
            # The long term goal is to migrate to the DahuaRpc2Client client and remove the DahuaClient
            # Testing this out via login to see how it goes
            client = DahuaRpc2Client(username, password, address, port, rtsp_port, session)
            await client.login()
            _LOGGER.info("Authenticated with the RPC2 API")
            name = await client.get_device_name()
            serial = await client.get_serial_number()
            await client.logout()
            return {
                "name": name,
                "serialNumber": serial,
            }
        except Exception:  # pylint: disable=broad-except
            _LOGGER.warning("Could not connect to Dahua device via the RPC2 API, falling back to cgi")
        # Fallback: legacy CGI API.
        try:
            client2 = DahuaClient(username, password, address, port, rtsp_port, session)
            data = await client2.get_machine_name()
            serial = await client2.async_get_system_info()
            data.update(serial)
            if "name" in data:
                return data
        except Exception as exception:  # pylint: disable=broad-except
            _LOGGER.warning("Could not connect to Dahua device", exc_info=exception)
        return None
class DahuaOptionsFlowHandler(config_entries.OptionsFlow):
    """Options flow letting the user toggle which platforms are enabled."""

    def __init__(self, config_entry):
        """Remember the config entry and take a mutable copy of its options."""
        self.config_entry = config_entry
        self.options = dict(config_entry.options)

    async def async_step_init(self, user_input=None):  # pylint: disable=unused-argument
        """Entry point of the options flow; delegate to the user step."""
        return await self.async_step_user()

    async def async_step_user(self, user_input=None):
        """Show the platform-toggle form, or persist the submitted choices."""
        if user_input is None:
            # One boolean toggle per platform, defaulting to enabled.
            schema = {
                vol.Required(platform, default=self.options.get(platform, True)): bool
                for platform in sorted(PLATFORMS)
            }
            return self.async_show_form(
                step_id="user",
                data_schema=vol.Schema(schema),
            )
        self.options.update(user_input)
        return await self._update_options()

    async def _update_options(self):
        """Write the accumulated options back to the config entry."""
        return self.async_create_entry(
            title=self.config_entry.data.get(CONF_USERNAME), data=self.options
        )
| 34.983122 | 103 | 0.592329 | import logging
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from homeassistant.helpers import config_validation as cv
from .client import DahuaClient
from .const import (
CONF_PASSWORD,
CONF_USERNAME,
CONF_ADDRESS,
CONF_RTSP_PORT,
CONF_PORT,
CONF_STREAMS,
CONF_EVENTS,
CONF_NAME,
STREAM_MAIN,
STREAM_SUB,
STREAM_BOTH,
DOMAIN,
PLATFORMS,
)
from .rpc2 import DahuaRpc2Client
_LOGGER: logging.Logger = logging.getLogger(__package__)
# Stream selection choices offered in the setup form.
STREAMS = [STREAM_MAIN, STREAM_SUB, STREAM_BOTH]
# Events pre-selected by default in the setup form.
DEFAULT_EVENTS = ["VideoMotion", "CrossLineDetection", "AlarmLocal", "VideoLoss", "VideoBlind"]
# Full catalogue of Dahua event codes the user may subscribe to.
ALL_EVENTS = ["VideoMotion",
              "VideoLoss",
              "AlarmLocal",
              "CrossLineDetection",
              "AudioAnomaly",
              "AudioMutation",
              "VideoMotionInfo",
              "SmartMotionHuman",
              "SmartMotionVehicle",
              "NewFile",
              "VideoBlind",
              "IntelliFrame",
              "CrossRegionDetection",
              "LeftDetection",
              "TakenAwayDetection",
              "VideoAbnormalDetection",
              "FaceDetection",
              "VideoUnFocus",
              "WanderDetection",
              "RioterDetection",
              "ParkingDetection",
              "MoveDetection",
              "StorageNotExist",
              "StorageFailure",
              "StorageLowSpace",
              "AlarmOutput",
              "InterVideoAccess",
              "NTPAdjustTime",
              "TimeChange",
              "MDResult",
              "HeatImagingTemper",
              "CrowdDetection",
              "FireWarning",
              "FireWarningInfo",
              ]
class DahuaFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
    """Config flow for the Dahua Camera API."""
    VERSION = 1
    CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
    def __init__(self):
        """Initialize the flow state."""
        self.dahua_config = {}
        self._errors = {}
        # Set after a successful credential check in async_step_user and
        # consumed by async_step_name when creating the entry.
        self.init_info = None
    async def async_step_user(self, user_input=None):
        """Handle a flow initialized by the user to add a camera."""
        self._errors = {}
        if user_input is not None:
            data = await self._test_credentials(
                user_input[CONF_USERNAME],
                user_input[CONF_PASSWORD],
                user_input[CONF_ADDRESS],
                user_input[CONF_PORT],
                user_input[CONF_RTSP_PORT],
            )
            if data is not None:
                # Only allow a camera to be set up once (keyed by serial).
                if "serialNumber" in data and data["serialNumber"] is not None:
                    await self.async_set_unique_id(data["serialNumber"])
                    self._abort_if_unique_id_configured()
                user_input[CONF_NAME] = data["name"]
                self.init_info = user_input
                return await self._show_config_form_name(user_input)
            else:
                self._errors["base"] = "auth"
            return await self._show_config_form_user(user_input)
    async def async_step_name(self, user_input=None):
        """Handle the step that lets the user confirm the camera name."""
        self._errors = {}
        if user_input is not None:
            if self.init_info is not None:
                self.init_info.update(user_input)
                return self.async_create_entry(
                    title=self.init_info["name"],
                    data=self.init_info,
                )
        return await self._show_config_form_name(user_input)
    @staticmethod
    @callback
    def async_get_options_flow(config_entry):
        """Return the options flow handler for an existing entry."""
        return DahuaOptionsFlowHandler(config_entry)
    async def _show_config_form_user(self, user_input):
        """Show the connection/credentials form."""
        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_USERNAME): str,
                    vol.Required(CONF_PASSWORD): str,
                    vol.Required(CONF_ADDRESS): str,
                    vol.Required(CONF_PORT, default="80"): str,
                    vol.Required(CONF_RTSP_PORT, default="554"): str,
                    vol.Required(CONF_STREAMS, default=STREAMS[0]): vol.In(STREAMS),
                    vol.Optional(CONF_EVENTS, default=DEFAULT_EVENTS): cv.multi_select(ALL_EVENTS),
                }
            ),
            errors=self._errors,
        )
    async def _show_config_form_name(self, user_input):
        """Show the form to edit the camera name."""
        return self.async_show_form(
            step_id="name",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_NAME, default=user_input[CONF_NAME]): str,
                }
            ),
            errors=self._errors,
        )
    async def _test_credentials(self, username, password, address, port, rtsp_port):
        """Return {"name", "serialNumber"} if the credentials are valid, else None."""
        session = async_create_clientsession(self.hass)
        try:
            # Preferred path: authenticate via the RPC2 API.
            client = DahuaRpc2Client(username, password, address, port, rtsp_port, session)
            await client.login()
            _LOGGER.info("Authenticated with the RPC2 API")
            name = await client.get_device_name()
            serial = await client.get_serial_number()
            await client.logout()
            return {
                "name": name,
                "serialNumber": serial,
            }
        except Exception:
            _LOGGER.warning("Could not connect to Dahua device via the RPC2 API, falling back to cgi")
        # Fallback: legacy CGI API.
        try:
            client2 = DahuaClient(username, password, address, port, rtsp_port, session)
            data = await client2.get_machine_name()
            serial = await client2.async_get_system_info()
            data.update(serial)
            if "name" in data:
                return data
        except Exception as exception:
            _LOGGER.warning("Could not connect to Dahua device", exc_info=exception)
        return None
class DahuaOptionsFlowHandler(config_entries.OptionsFlow):
    """Options flow letting the user toggle which platforms are enabled."""
    def __init__(self, config_entry):
        """Remember the config entry and take a mutable copy of its options."""
        self.config_entry = config_entry
        self.options = dict(config_entry.options)
    async def async_step_init(self, user_input=None):
        """Entry point of the options flow; delegate to the user step."""
        return await self.async_step_user()
    async def async_step_user(self, user_input=None):
        """Show the platform-toggle form, or persist the submitted choices."""
        if user_input is not None:
            self.options.update(user_input)
            return await self._update_options()
        # One boolean toggle per platform, defaulting to enabled.
        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema(
                {
                    vol.Required(x, default=self.options.get(x, True)): bool
                    for x in sorted(PLATFORMS)
                }
            ),
        )
    async def _update_options(self):
        """Write the accumulated options back to the config entry."""
        return self.async_create_entry(
            title=self.config_entry.data.get(CONF_USERNAME), data=self.options
        )
| true | true |
f72bc3e123cf5d71ec9dfa9b87624f01ea910d05 | 7,358 | py | Python | src/ptb/ptb_enas_controller.py | tremblerz/enas | 329ee3f8beb5e715bf2dad1182cfb5120b3485f9 | [
"Apache-2.0"
] | null | null | null | src/ptb/ptb_enas_controller.py | tremblerz/enas | 329ee3f8beb5e715bf2dad1182cfb5120b3485f9 | [
"Apache-2.0"
] | null | null | null | src/ptb/ptb_enas_controller.py | tremblerz/enas | 329ee3f8beb5e715bf2dad1182cfb5120b3485f9 | [
"Apache-2.0"
] | null | null | null |
import sys
import os
import time
import numpy as np
import tensorflow as tf
from src.utils import get_train_ops
from src.common_ops import stack_lstm
from tensorflow.python.training import moving_averages
class PTBEnasController(object):
  def __init__(self,
               rhn_depth=5,
               lstm_size=32,
               lstm_num_layers=2,
               lstm_keep_prob=1.0,
               tanh_constant=None,
               temperature=None,
               num_funcs=2,
               lr_init=1e-3,
               lr_dec_start=0,
               lr_dec_every=100,
               lr_dec_rate=0.9,
               l2_reg=0,
               entropy_weight=None,
               clip_mode=None,
               grad_bound=None,
               bl_dec=0.999,
               optim_algo="adam",
               sync_replicas=False,
               num_aggregate=None,
               num_replicas=None,
               name="controller"):
    """Store hyper-parameters, then create variables and the sampler graph.

    Args:
      rhn_depth: number of cells/nodes the controller samples.
      lstm_size: hidden size of the controller LSTM.
      lstm_num_layers: number of stacked controller LSTM layers.
      lstm_keep_prob: dropout keep probability for the LSTM (stored only).
      tanh_constant: if set, logits are squashed to tanh_constant * tanh(.).
      temperature: if set, logits are divided by this before sampling.
      num_funcs: number of candidate activation functions to choose from.
      lr_init, lr_dec_start, lr_dec_every, lr_dec_rate: learning-rate
        schedule parameters for the controller optimizer.
      l2_reg: L2 regularization weight.
      entropy_weight: if set, weight of the entropy bonus on the reward.
      clip_mode, grad_bound: gradient clipping configuration.
      bl_dec: moving-average decay for the reward baseline.
      optim_algo: optimizer name (e.g. "adam").
      sync_replicas, num_aggregate, num_replicas: distributed-training knobs.
      name: variable-scope name for all controller parameters.
    """
    print("-" * 80)
    print("Building PTBEnasController")
    self.rhn_depth = rhn_depth
    self.lstm_size = lstm_size
    self.lstm_num_layers = lstm_num_layers
    self.lstm_keep_prob = lstm_keep_prob
    self.tanh_constant = tanh_constant
    self.temperature = temperature
    self.num_funcs = num_funcs
    self.lr_init = lr_init
    self.lr_dec_start = lr_dec_start
    self.lr_dec_every = lr_dec_every
    self.lr_dec_rate = lr_dec_rate
    self.l2_reg = l2_reg
    self.entropy_weight = entropy_weight
    self.clip_mode = clip_mode
    self.grad_bound = grad_bound
    self.bl_dec = bl_dec
    self.optim_algo = optim_algo
    self.sync_replicas = sync_replicas
    self.num_aggregate = num_aggregate
    self.num_replicas = num_replicas
    self.name = name
    # Build the variables first, then the architecture-sampling graph.
    self._create_params()
    self._build_sampler()
  def _create_params(self):
    """Create the controller's trainable variables.

    Builds, under the controller's variable scope:
      * w_lstm: one [2*lstm_size, 4*lstm_size] weight per LSTM layer.
      * g_emb: learned "go" embedding fed as the first sampler input.
      * w_emb: embeddings for the num_funcs activation-function choices.
      * w_soft: softmax projection from LSTM output to function logits.
      * attn_w_1 / attn_w_2 / attn_v: attention parameters used when
        sampling skip connections in _build_sampler.
    """
    initializer = tf.random_uniform_initializer(minval=-0.1, maxval=0.1)
    with tf.variable_scope(self.name, initializer=initializer):
      with tf.variable_scope("lstm"):
        self.w_lstm = []
        for layer_id in range(self.lstm_num_layers):
          with tf.variable_scope("layer_{}".format(layer_id)):
            w = tf.get_variable("w", [2 * self.lstm_size, 4 * self.lstm_size])
            self.w_lstm.append(w)
      num_funcs = self.num_funcs
      with tf.variable_scope("embedding"):
        self.g_emb = tf.get_variable("g_emb", [1, self.lstm_size])
        self.w_emb = tf.get_variable("w", [num_funcs, self.lstm_size])
      with tf.variable_scope("softmax"):
        self.w_soft = tf.get_variable("w", [self.lstm_size, num_funcs])
      with tf.variable_scope("attention"):
        self.attn_w_1 = tf.get_variable("w_1", [self.lstm_size, self.lstm_size])
        self.attn_w_2 = tf.get_variable("w_2", [self.lstm_size, self.lstm_size])
        self.attn_v = tf.get_variable("v", [self.lstm_size, 1])
  def _build_sampler(self):
    """Build the sampler ops and the log_prob ops.

    For each of ``rhn_depth`` steps the controller samples two decisions:
    (1) for steps > 0, which previous step to connect to, via an
    attention over the hidden states of earlier steps; (2) which
    activation function to use, via a softmax over ``num_funcs``.
    Exposes:
      * self.sample_arc: the sampled architecture as a 1-D int tensor.
      * self.sample_log_probs / self.ppl: log-probs of the samples and
        their perplexity.
      * self.sample_entropy: summed (gradient-stopped) sample entropy.
    """
    arc_seq = []
    sample_log_probs = []
    sample_entropy = []
    all_h = []
    all_h_w = []

    # sampler ops
    inputs = self.g_emb
    prev_c, prev_h = [], []
    for _ in range(self.lstm_num_layers):
      prev_c.append(tf.zeros([1, self.lstm_size], dtype=tf.float32))
      prev_h.append(tf.zeros([1, self.lstm_size], dtype=tf.float32))

    for layer_id in range(self.rhn_depth):
      # First decision: which earlier step to attach to (attention).
      next_c, next_h = stack_lstm(inputs, prev_c, prev_h, self.w_lstm)
      prev_c, prev_h = next_c, next_h
      all_h.append(next_h[-1])
      all_h_w.append(tf.matmul(next_h[-1], self.attn_w_1))
      if layer_id > 0:
        query = tf.matmul(next_h[-1], self.attn_w_2)
        query = query + tf.concat(all_h_w[:-1], axis=0)
        query = tf.tanh(query)
        logits = tf.matmul(query, self.attn_v)
        logits = tf.reshape(logits, [1, layer_id])
        if self.temperature is not None:
          logits /= self.temperature
        if self.tanh_constant is not None:
          logits = self.tanh_constant * tf.tanh(logits)
        # Bias sampling toward nearby steps: penalize distant candidates
        # quadratically in their distance from the current step.
        diff = tf.to_float(layer_id - tf.range(0, layer_id)) ** 2
        logits -= tf.reshape(diff, [1, layer_id]) / 6.0
        skip_index = tf.multinomial(logits, 1)
        skip_index = tf.to_int32(skip_index)
        skip_index = tf.reshape(skip_index, [1])
        arc_seq.append(skip_index)
        # Cross-entropy of the drawn sample == its negative log-prob.
        log_prob = tf.nn.sparse_softmax_cross_entropy_with_logits(
          logits=logits, labels=skip_index)
        sample_log_probs.append(log_prob)
        entropy = log_prob * tf.exp(-log_prob)
        sample_entropy.append(tf.stop_gradient(entropy))
        # Next input: hidden state of the chosen step, damped by distance.
        inputs = tf.nn.embedding_lookup(
          tf.concat(all_h[:-1], axis=0), skip_index)
        inputs /= (0.1 + tf.to_float(layer_id - skip_index))
      else:
        inputs = self.g_emb
      # Second decision: which activation function to use at this step.
      next_c, next_h = stack_lstm(inputs, prev_c, prev_h, self.w_lstm)
      prev_c, prev_h = next_c, next_h
      logits = tf.matmul(next_h[-1], self.w_soft)
      if self.temperature is not None:
        logits /= self.temperature
      if self.tanh_constant is not None:
        logits = self.tanh_constant * tf.tanh(logits)
      func = tf.multinomial(logits, 1)
      func = tf.to_int32(func)
      func = tf.reshape(func, [1])
      arc_seq.append(func)
      log_prob = tf.nn.sparse_softmax_cross_entropy_with_logits(
        logits=logits, labels=func)
      sample_log_probs.append(log_prob)
      entropy = log_prob * tf.exp(-log_prob)
      sample_entropy.append(tf.stop_gradient(entropy))
      inputs = tf.nn.embedding_lookup(self.w_emb, func)

    arc_seq = tf.concat(arc_seq, axis=0)
    self.sample_arc = arc_seq
    self.sample_log_probs = tf.concat(sample_log_probs, axis=0)
    self.ppl = tf.exp(tf.reduce_mean(self.sample_log_probs))
    sample_entropy = tf.concat(sample_entropy, axis=0)
    self.sample_entropy = tf.reduce_sum(sample_entropy)
    self.all_h = all_h
  def build_trainer(self, child_model):
    """Build the REINFORCE training ops for the controller.

    Args:
      child_model: the child network; its ``rl_loss`` supplies the
        validation signal from which the reward is derived.
    """
    # actor: reward is inversely proportional to the child's validation
    # perplexity; gradients never flow into the child model.
    self.valid_loss = tf.to_float(child_model.rl_loss)
    self.valid_loss = tf.stop_gradient(self.valid_loss)
    self.valid_ppl = tf.exp(self.valid_loss)
    self.reward = 80.0 / self.valid_ppl

    if self.entropy_weight is not None:
      self.reward += self.entropy_weight * self.sample_entropy

    # moving-average baseline for variance reduction; the control
    # dependency forces the baseline update to run whenever the reward
    # is evaluated.
    self.sample_log_probs = tf.reduce_sum(self.sample_log_probs)
    self.baseline = tf.Variable(0.0, dtype=tf.float32, trainable=False)
    baseline_update = tf.assign_sub(
      self.baseline, (1 - self.bl_dec) * (self.baseline - self.reward))
    with tf.control_dependencies([baseline_update]):
      self.reward = tf.identity(self.reward)

    # REINFORCE objective: log-prob scaled by baseline-adjusted reward.
    self.loss = self.sample_log_probs * (self.reward - self.baseline)
    self.train_step = tf.Variable(
      0, dtype=tf.int32, trainable=False, name="train_step")
    # Only optimize variables that live under this controller's scope.
    tf_variables = [var
        for var in tf.trainable_variables() if var.name.startswith(self.name)]

    self.train_op, self.lr, self.grad_norm, self.optimizer = get_train_ops(
      self.loss,
      tf_variables,
      self.train_step,
      clip_mode=self.clip_mode,
      grad_bound=self.grad_bound,
      l2_reg=self.l2_reg,
      lr_init=self.lr_init,
      lr_dec_start=self.lr_dec_start,
      lr_dec_every=self.lr_dec_every,
      lr_dec_rate=self.lr_dec_rate,
      optim_algo=self.optim_algo,
      sync_replicas=self.sync_replicas,
      num_aggregate=self.num_aggregate,
      num_replicas=self.num_replicas)
| 33.907834 | 80 | 0.651536 |
import sys
import os
import time
import numpy as np
import tensorflow as tf
from src.utils import get_train_ops
from src.common_ops import stack_lstm
from tensorflow.python.training import moving_averages
class PTBEnasController(object):
def __init__(self,
rhn_depth=5,
lstm_size=32,
lstm_num_layers=2,
lstm_keep_prob=1.0,
tanh_constant=None,
temperature=None,
num_funcs=2,
lr_init=1e-3,
lr_dec_start=0,
lr_dec_every=100,
lr_dec_rate=0.9,
l2_reg=0,
entropy_weight=None,
clip_mode=None,
grad_bound=None,
bl_dec=0.999,
optim_algo="adam",
sync_replicas=False,
num_aggregate=None,
num_replicas=None,
name="controller"):
print("-" * 80)
print("Building PTBEnasController")
self.rhn_depth = rhn_depth
self.lstm_size = lstm_size
self.lstm_num_layers = lstm_num_layers
self.lstm_keep_prob = lstm_keep_prob
self.tanh_constant = tanh_constant
self.temperature = temperature
self.num_funcs = num_funcs
self.lr_init = lr_init
self.lr_dec_start = lr_dec_start
self.lr_dec_every = lr_dec_every
self.lr_dec_rate = lr_dec_rate
self.l2_reg = l2_reg
self.entropy_weight = entropy_weight
self.clip_mode = clip_mode
self.grad_bound = grad_bound
self.bl_dec = bl_dec
self.optim_algo = optim_algo
self.sync_replicas = sync_replicas
self.num_aggregate = num_aggregate
self.num_replicas = num_replicas
self.name = name
self._create_params()
self._build_sampler()
def _create_params(self):
initializer = tf.random_uniform_initializer(minval=-0.1, maxval=0.1)
with tf.variable_scope(self.name, initializer=initializer):
with tf.variable_scope("lstm"):
self.w_lstm = []
for layer_id in range(self.lstm_num_layers):
with tf.variable_scope("layer_{}".format(layer_id)):
w = tf.get_variable("w", [2 * self.lstm_size, 4 * self.lstm_size])
self.w_lstm.append(w)
num_funcs = self.num_funcs
with tf.variable_scope("embedding"):
self.g_emb = tf.get_variable("g_emb", [1, self.lstm_size])
self.w_emb = tf.get_variable("w", [num_funcs, self.lstm_size])
with tf.variable_scope("softmax"):
self.w_soft = tf.get_variable("w", [self.lstm_size, num_funcs])
with tf.variable_scope("attention"):
self.attn_w_1 = tf.get_variable("w_1", [self.lstm_size, self.lstm_size])
self.attn_w_2 = tf.get_variable("w_2", [self.lstm_size, self.lstm_size])
self.attn_v = tf.get_variable("v", [self.lstm_size, 1])
def _build_sampler(self):
arc_seq = []
sample_log_probs = []
sample_entropy = []
all_h = []
all_h_w = []
inputs = self.g_emb
prev_c, prev_h = [], []
for _ in range(self.lstm_num_layers):
prev_c.append(tf.zeros([1, self.lstm_size], dtype=tf.float32))
prev_h.append(tf.zeros([1, self.lstm_size], dtype=tf.float32))
for layer_id in range(self.rhn_depth):
next_c, next_h = stack_lstm(inputs, prev_c, prev_h, self.w_lstm)
prev_c, prev_h = next_c, next_h
all_h.append(next_h[-1])
all_h_w.append(tf.matmul(next_h[-1], self.attn_w_1))
if layer_id > 0:
query = tf.matmul(next_h[-1], self.attn_w_2)
query = query + tf.concat(all_h_w[:-1], axis=0)
query = tf.tanh(query)
logits = tf.matmul(query, self.attn_v)
logits = tf.reshape(logits, [1, layer_id])
if self.temperature is not None:
logits /= self.temperature
if self.tanh_constant is not None:
logits = self.tanh_constant * tf.tanh(logits)
diff = tf.to_float(layer_id - tf.range(0, layer_id)) ** 2
logits -= tf.reshape(diff, [1, layer_id]) / 6.0
skip_index = tf.multinomial(logits, 1)
skip_index = tf.to_int32(skip_index)
skip_index = tf.reshape(skip_index, [1])
arc_seq.append(skip_index)
log_prob = tf.nn.sparse_softmax_cross_entropy_with_logits(
logits=logits, labels=skip_index)
sample_log_probs.append(log_prob)
entropy = log_prob * tf.exp(-log_prob)
sample_entropy.append(tf.stop_gradient(entropy))
inputs = tf.nn.embedding_lookup(
tf.concat(all_h[:-1], axis=0), skip_index)
inputs /= (0.1 + tf.to_float(layer_id - skip_index))
else:
inputs = self.g_emb
next_c, next_h = stack_lstm(inputs, prev_c, prev_h, self.w_lstm)
prev_c, prev_h = next_c, next_h
logits = tf.matmul(next_h[-1], self.w_soft)
if self.temperature is not None:
logits /= self.temperature
if self.tanh_constant is not None:
logits = self.tanh_constant * tf.tanh(logits)
func = tf.multinomial(logits, 1)
func = tf.to_int32(func)
func = tf.reshape(func, [1])
arc_seq.append(func)
log_prob = tf.nn.sparse_softmax_cross_entropy_with_logits(
logits=logits, labels=func)
sample_log_probs.append(log_prob)
entropy = log_prob * tf.exp(-log_prob)
sample_entropy.append(tf.stop_gradient(entropy))
inputs = tf.nn.embedding_lookup(self.w_emb, func)
arc_seq = tf.concat(arc_seq, axis=0)
self.sample_arc = arc_seq
self.sample_log_probs = tf.concat(sample_log_probs, axis=0)
self.ppl = tf.exp(tf.reduce_mean(self.sample_log_probs))
sample_entropy = tf.concat(sample_entropy, axis=0)
self.sample_entropy = tf.reduce_sum(sample_entropy)
self.all_h = all_h
def build_trainer(self, child_model):
self.valid_loss = tf.to_float(child_model.rl_loss)
self.valid_loss = tf.stop_gradient(self.valid_loss)
self.valid_ppl = tf.exp(self.valid_loss)
self.reward = 80.0 / self.valid_ppl
if self.entropy_weight is not None:
self.reward += self.entropy_weight * self.sample_entropy
self.sample_log_probs = tf.reduce_sum(self.sample_log_probs)
self.baseline = tf.Variable(0.0, dtype=tf.float32, trainable=False)
baseline_update = tf.assign_sub(
self.baseline, (1 - self.bl_dec) * (self.baseline - self.reward))
with tf.control_dependencies([baseline_update]):
self.reward = tf.identity(self.reward)
self.loss = self.sample_log_probs * (self.reward - self.baseline)
self.train_step = tf.Variable(
0, dtype=tf.int32, trainable=False, name="train_step")
tf_variables = [var
for var in tf.trainable_variables() if var.name.startswith(self.name)]
self.train_op, self.lr, self.grad_norm, self.optimizer = get_train_ops(
self.loss,
tf_variables,
self.train_step,
clip_mode=self.clip_mode,
grad_bound=self.grad_bound,
l2_reg=self.l2_reg,
lr_init=self.lr_init,
lr_dec_start=self.lr_dec_start,
lr_dec_every=self.lr_dec_every,
lr_dec_rate=self.lr_dec_rate,
optim_algo=self.optim_algo,
sync_replicas=self.sync_replicas,
num_aggregate=self.num_aggregate,
num_replicas=self.num_replicas)
| true | true |
f72bc4838f0722457e558e34b578faff5078826d | 27,139 | py | Python | swift/container/sync.py | ctvera/swift-deb | cf889b6fa8b8059ac5ce2680064d60db7602d166 | [
"Apache-2.0"
] | null | null | null | swift/container/sync.py | ctvera/swift-deb | cf889b6fa8b8059ac5ce2680064d60db7602d166 | [
"Apache-2.0"
] | null | null | null | swift/container/sync.py | ctvera/swift-deb | cf889b6fa8b8059ac5ce2680064d60db7602d166 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import errno
import os
import uuid
from swift import gettext_ as _
from time import ctime, time
from random import choice, random
from struct import unpack_from
from eventlet import sleep, Timeout
import swift.common.db
from swift.common.db import DatabaseConnectionError
from swift.container.backend import ContainerBroker
from swift.container.sync_store import ContainerSyncStore
from swift.common.container_sync_realms import ContainerSyncRealms
from swift.common.internal_client import (
delete_object, put_object, head_object,
InternalClient, UnexpectedResponse)
from swift.common.exceptions import ClientException
from swift.common.ring import Ring
from swift.common.ring.utils import is_local_device
from swift.common.utils import (
clean_content_type, config_true_value,
FileLikeIter, get_logger, hash_path, quote, urlparse, validate_sync_to,
whataremyips, Timestamp, decode_timestamps)
from swift.common.daemon import Daemon
from swift.common.http import HTTP_UNAUTHORIZED, HTTP_NOT_FOUND
from swift.common.wsgi import ConfigString
# The default internal client config body is to support upgrades without
# requiring deployment of the new /etc/swift/internal-client.conf.
# Used only when internal_client_conf_path is unset in the
# [container-sync] section (see ContainerSync.__init__ below).
ic_conf_body = """
[DEFAULT]
# swift_dir = /etc/swift
# user = swift
# You can specify default log routing here if you want:
# log_name = swift
# log_facility = LOG_LOCAL0
# log_level = INFO
# log_address = /dev/log
#
# comma separated list of functions to call to setup custom log handlers.
# functions get passed: conf, name, log_to_console, log_route, fmt, logger,
# adapted_logger
# log_custom_handlers =
#
# If set, log_udp_host will override log_address
# log_udp_host =
# log_udp_port = 514
#
# You can enable StatsD logging here:
# log_statsd_host =
# log_statsd_port = 8125
# log_statsd_default_sample_rate = 1.0
# log_statsd_sample_rate_factor = 1.0
# log_statsd_metric_prefix =
[pipeline:main]
pipeline = catch_errors proxy-logging cache proxy-server
[app:proxy-server]
use = egg:swift#proxy
# See proxy-server.conf-sample for options
[filter:cache]
use = egg:swift#memcache
# See proxy-server.conf-sample for options
[filter:proxy-logging]
use = egg:swift#proxy_logging
[filter:catch_errors]
use = egg:swift#catch_errors
# See proxy-server.conf-sample for options
""".lstrip()
class ContainerSync(Daemon):
"""
Daemon to sync syncable containers.
This is done by scanning the local devices for container databases and
checking for x-container-sync-to and x-container-sync-key metadata values.
If they exist, newer rows since the last sync will trigger PUTs or DELETEs
to the other container.
The actual syncing is slightly more complicated to make use of the three
(or number-of-replicas) main nodes for a container without each trying to
do the exact same work but also without missing work if one node happens to
be down.
Two sync points are kept per container database. All rows between the two
sync points trigger updates. Any rows newer than both sync points cause
updates depending on the node's position for the container (primary nodes
do one third, etc. depending on the replica count of course). After a sync
run, the first sync point is set to the newest ROWID known and the second
sync point is set to newest ROWID for which all updates have been sent.
An example may help. Assume replica count is 3 and perfectly matching
ROWIDs starting at 1.
First sync run, database has 6 rows:
* SyncPoint1 starts as -1.
* SyncPoint2 starts as -1.
* No rows between points, so no "all updates" rows.
* Six rows newer than SyncPoint1, so a third of the rows are sent
by node 1, another third by node 2, remaining third by node 3.
* SyncPoint1 is set as 6 (the newest ROWID known).
* SyncPoint2 is left as -1 since no "all updates" rows were synced.
Next sync run, database has 12 rows:
* SyncPoint1 starts as 6.
* SyncPoint2 starts as -1.
* The rows between -1 and 6 all trigger updates (most of which
should short-circuit on the remote end as having already been
done).
* Six more rows newer than SyncPoint1, so a third of the rows are
sent by node 1, another third by node 2, remaining third by node
3.
* SyncPoint1 is set as 12 (the newest ROWID known).
* SyncPoint2 is set as 6 (the newest "all updates" ROWID).
In this way, under normal circumstances each node sends its share of
updates each run and just sends a batch of older updates to ensure nothing
was missed.
:param conf: The dict of configuration values from the [container-sync]
section of the container-server.conf
:param container_ring: If None, the <swift_dir>/container.ring.gz will be
loaded. This is overridden by unit tests.
"""
    def __init__(self, conf, container_ring=None, logger=None):
        """Initialize daemon state from config; may raise SystemExit if the
        internal client config cannot be loaded.

        :param conf: dict of config values from [container-sync]
        :param container_ring: optional pre-loaded container ring
                               (overridden by unit tests)
        :param logger: optional pre-built logger (overridden by unit tests)
        """
        #: The dict of configuration values from the [container-sync] section
        #: of the container-server.conf.
        self.conf = conf
        #: Logger to use for container-sync log lines.
        self.logger = logger or get_logger(conf, log_route='container-sync')
        #: Path to the local device mount points.
        self.devices = conf.get('devices', '/srv/node')
        #: Indicates whether mount points should be verified as actual mount
        #: points (normally true, false for tests and SAIO).
        self.mount_check = config_true_value(conf.get('mount_check', 'true'))
        #: Minimum time between full scans. This is to keep the daemon from
        #: running wild on near empty systems.
        self.interval = int(conf.get('interval', 300))
        #: Maximum amount of time to spend syncing a container before moving on
        #: to the next one. If a container sync hasn't finished in this time,
        #: it'll just be resumed next scan.
        self.container_time = int(conf.get('container_time', 60))
        #: ContainerSyncCluster instance for validating sync-to values.
        self.realms_conf = ContainerSyncRealms(
            os.path.join(
                conf.get('swift_dir', '/etc/swift'),
                'container-sync-realms.conf'),
            self.logger)
        #: The list of hosts we're allowed to send syncs to. This can be
        #: overridden by data in self.realms_conf
        self.allowed_sync_hosts = [
            h.strip()
            for h in conf.get('allowed_sync_hosts', '127.0.0.1').split(',')
            if h.strip()]
        #: Optional HTTP proxies to route sync traffic through (one is
        #: chosen at random per request; see select_http_proxy).
        self.http_proxies = [
            a.strip()
            for a in conf.get('sync_proxy', '').split(',')
            if a.strip()]
        #: ContainerSyncStore instance for iterating over synced containers
        self.sync_store = ContainerSyncStore(self.devices,
                                             self.logger,
                                             self.mount_check)
        #: Number of containers with sync turned on that were successfully
        #: synced.
        self.container_syncs = 0
        #: Number of successful DELETEs triggered.
        self.container_deletes = 0
        #: Number of successful PUTs triggered.
        self.container_puts = 0
        #: Number of containers whose sync has been turned off, but
        #: are not yet cleared from the sync store.
        self.container_skips = 0
        #: Number of containers that had a failure of some type.
        self.container_failures = 0
        #: Time of last stats report.
        self.reported = time()
        self.swift_dir = conf.get('swift_dir', '/etc/swift')
        #: swift.common.ring.Ring for locating containers.
        self.container_ring = container_ring or Ring(self.swift_dir,
                                                     ring_name='container')
        bind_ip = conf.get('bind_ip', '0.0.0.0')
        self._myips = whataremyips(bind_ip)
        self._myport = int(conf.get('bind_port', 6001))
        swift.common.db.DB_PREALLOCATION = \
            config_true_value(conf.get('db_preallocation', 'f'))
        self.conn_timeout = float(conf.get('conn_timeout', 5))
        request_tries = int(conf.get('request_tries') or 3)

        internal_client_conf_path = conf.get('internal_client_conf_path')
        if not internal_client_conf_path:
            self.logger.warning(
                _('Configuration option internal_client_conf_path not '
                  'defined. Using default configuration, See '
                  'internal-client.conf-sample for options'))
            internal_client_conf = ConfigString(ic_conf_body)
        else:
            internal_client_conf = internal_client_conf_path
        try:
            self.swift = InternalClient(
                internal_client_conf, 'Swift Container Sync', request_tries)
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            # ENOENT means the configured conf file is missing -- that's a
            # fatal misconfiguration for this daemon.
            raise SystemExit(
                _('Unable to load internal client from config: %r (%s)') %
                (internal_client_conf_path, err))
def run_forever(self, *args, **kwargs):
"""
Runs container sync scans until stopped.
"""
sleep(random() * self.interval)
while True:
begin = time()
for path in self.sync_store.synced_containers_generator():
self.container_sync(path)
if time() - self.reported >= 3600: # once an hour
self.report()
elapsed = time() - begin
if elapsed < self.interval:
sleep(self.interval - elapsed)
def run_once(self, *args, **kwargs):
"""
Runs a single container sync scan.
"""
self.logger.info(_('Begin container sync "once" mode'))
begin = time()
for path in self.sync_store.synced_containers_generator():
self.container_sync(path)
if time() - self.reported >= 3600: # once an hour
self.report()
self.report()
elapsed = time() - begin
self.logger.info(
_('Container sync "once" mode completed: %.02fs'), elapsed)
def report(self):
"""
Writes a report of the stats to the logger and resets the stats for the
next report.
"""
self.logger.info(
_('Since %(time)s: %(sync)s synced [%(delete)s deletes, %(put)s '
'puts], %(skip)s skipped, %(fail)s failed'),
{'time': ctime(self.reported),
'sync': self.container_syncs,
'delete': self.container_deletes,
'put': self.container_puts,
'skip': self.container_skips,
'fail': self.container_failures})
self.reported = time()
self.container_syncs = 0
self.container_deletes = 0
self.container_puts = 0
self.container_skips = 0
self.container_failures = 0
    def container_sync(self, path):
        """
        Checks the given path for a container database, determines if syncing
        is turned on for that database and, if so, sends any updates to the
        other container.

        Increments the skip/failure/sync counters and statsd metrics as it
        goes; all exceptions are caught and counted as failures.

        :param path: the path to a container db
        """
        broker = None
        try:
            broker = ContainerBroker(path)
            # The path we pass to the ContainerBroker is a real path of
            # a container DB. If we get here, however, it means that this
            # path is linked from the sync_containers dir. In rare cases
            # of race or processes failures the link can be stale and
            # the get_info below will raise a DB doesn't exist exception
            # In this case we remove the stale link and raise an error
            # since in most cases the db should be there.
            try:
                info = broker.get_info()
            except DatabaseConnectionError as db_err:
                if str(db_err).endswith("DB doesn't exist"):
                    self.sync_store.remove_synced_container(broker)
                raise

            x, nodes = self.container_ring.get_nodes(info['account'],
                                                     info['container'])
            for ordinal, node in enumerate(nodes):
                if is_local_device(self._myips, self._myport,
                                   node['ip'], node['port']):
                    break
            else:
                # Not a primary node for this container -- nothing to do.
                return
            if not broker.is_deleted():
                sync_to = None
                user_key = None
                sync_point1 = info['x_container_sync_point1']
                sync_point2 = info['x_container_sync_point2']
                for key, (value, timestamp) in broker.metadata.items():
                    if key.lower() == 'x-container-sync-to':
                        sync_to = value
                    elif key.lower() == 'x-container-sync-key':
                        user_key = value
                if not sync_to or not user_key:
                    # Sync is not (fully) configured on this container.
                    self.container_skips += 1
                    self.logger.increment('skips')
                    return
                err, sync_to, realm, realm_key = validate_sync_to(
                    sync_to, self.allowed_sync_hosts, self.realms_conf)
                if err:
                    self.logger.info(
                        _('ERROR %(db_file)s: %(validate_sync_to_err)s'),
                        {'db_file': str(broker),
                         'validate_sync_to_err': err})
                    self.container_failures += 1
                    self.logger.increment('failures')
                    return
                stop_at = time() + self.container_time
                next_sync_point = None
                # Phase 1: rows between the two sync points -- updates every
                # node must (re)send. See the class docstring for the
                # two-sync-point protocol.
                while time() < stop_at and sync_point2 < sync_point1:
                    rows = broker.get_items_since(sync_point2, 1)
                    if not rows:
                        break
                    row = rows[0]
                    if row['ROWID'] > sync_point1:
                        break
                    # This node will only initially sync out one third of the
                    # objects (if 3 replicas, 1/4 if 4, etc.) and will skip
                    # problematic rows as needed in case of faults.
                    # This section will attempt to sync previously skipped
                    # rows in case the previous attempts by any of the nodes
                    # didn't succeed.
                    if not self.container_sync_row(
                            row, sync_to, user_key, broker, info, realm,
                            realm_key):
                        # Remember the first failed row so sync_point2 can be
                        # rewound to it after the loop.
                        if not next_sync_point:
                            next_sync_point = sync_point2
                    sync_point2 = row['ROWID']
                    broker.set_x_container_sync_points(None, sync_point2)
                if next_sync_point:
                    broker.set_x_container_sync_points(None, next_sync_point)
                # Phase 2: rows newer than sync_point1 -- this node handles
                # only its hash-assigned share of them.
                while time() < stop_at:
                    rows = broker.get_items_since(sync_point1, 1)
                    if not rows:
                        break
                    row = rows[0]
                    key = hash_path(info['account'], info['container'],
                                    row['name'], raw_digest=True)
                    # This node will only initially sync out one third of the
                    # objects (if 3 replicas, 1/4 if 4, etc.). It'll come back
                    # around to the section above and attempt to sync
                    # previously skipped rows in case the other nodes didn't
                    # succeed or in case it failed to do so the first time.
                    if unpack_from('>I', key)[0] % \
                            len(nodes) == ordinal:
                        self.container_sync_row(
                            row, sync_to, user_key, broker, info, realm,
                            realm_key)
                    sync_point1 = row['ROWID']
                    broker.set_x_container_sync_points(sync_point1, None)
                self.container_syncs += 1
                self.logger.increment('syncs')
        except (Exception, Timeout):
            self.container_failures += 1
            self.logger.increment('failures')
            self.logger.exception(_('ERROR Syncing %s'),
                                  broker if broker else path)
def _update_sync_to_headers(self, name, sync_to, user_key,
realm, realm_key, method, headers):
"""
Updates container sync headers
:param name: The name of the object
:param sync_to: The URL to the remote container.
:param user_key: The X-Container-Sync-Key to use when sending requests
to the other container.
:param realm: The realm from self.realms_conf, if there is one.
If None, fallback to using the older allowed_sync_hosts
way of syncing.
:param realm_key: The realm key from self.realms_conf, if there
is one. If None, fallback to using the older
allowed_sync_hosts way of syncing.
:param method: HTTP method to create sig with
:param headers: headers to update with container sync headers
"""
if realm and realm_key:
nonce = uuid.uuid4().hex
path = urlparse(sync_to).path + '/' + quote(name)
sig = self.realms_conf.get_sig(method, path,
headers.get('x-timestamp', 0),
nonce, realm_key,
user_key)
headers['x-container-sync-auth'] = '%s %s %s' % (realm,
nonce,
sig)
else:
headers['x-container-sync-key'] = user_key
def _object_in_remote_container(self, name, sync_to, user_key,
realm, realm_key, timestamp):
"""
Performs head object on remote to eliminate extra remote put and
local get object calls
:param name: The name of the object in the updated row in the local
database triggering the sync update.
:param sync_to: The URL to the remote container.
:param user_key: The X-Container-Sync-Key to use when sending requests
to the other container.
:param realm: The realm from self.realms_conf, if there is one.
If None, fallback to using the older allowed_sync_hosts
way of syncing.
:param realm_key: The realm key from self.realms_conf, if there
is one. If None, fallback to using the older
allowed_sync_hosts way of syncing.
:param timestamp: last modified date of local object
:returns: True if object already exists in remote
"""
headers = {'x-timestamp': timestamp.internal}
self._update_sync_to_headers(name, sync_to, user_key, realm,
realm_key, 'HEAD', headers)
try:
metadata, _ = head_object(sync_to, name=name,
headers=headers,
proxy=self.select_http_proxy(),
logger=self.logger,
retries=0)
remote_ts = Timestamp(metadata.get('x-timestamp', 0))
self.logger.debug("remote obj timestamp %s local obj %s" %
(timestamp.internal, remote_ts.internal))
if timestamp <= remote_ts:
return True
# Object in remote should be updated
return False
except ClientException as http_err:
# Object not in remote
if http_err.http_status == 404:
return False
raise http_err
    def container_sync_row(self, row, sync_to, user_key, broker, info,
                           realm, realm_key):
        """
        Sends the update the row indicates to the sync_to container.
        Update can be either delete or put.

        :param row: The updated row in the local database triggering the sync
                    update.
        :param sync_to: The URL to the remote container.
        :param user_key: The X-Container-Sync-Key to use when sending requests
                         to the other container.
        :param broker: The local container database broker.
        :param info: The get_info result from the local container database
                     broker.
        :param realm: The realm from self.realms_conf, if there is one.
                      If None, fallback to using the older allowed_sync_hosts
                      way of syncing.
        :param realm_key: The realm key from self.realms_conf, if there
                          is one. If None, fallback to using the older
                          allowed_sync_hosts way of syncing.
        :returns: True on success
        """
        try:
            start_time = time()
            # extract last modified time from the created_at value
            ts_data, ts_ctype, ts_meta = decode_timestamps(
                row['created_at'])
            if row['deleted']:
                # when sync'ing a deleted object, use ts_data - this is the
                # timestamp of the source tombstone
                try:
                    headers = {'x-timestamp': ts_data.internal}
                    self._update_sync_to_headers(row['name'], sync_to,
                                                 user_key, realm, realm_key,
                                                 'DELETE', headers)
                    delete_object(sync_to, name=row['name'], headers=headers,
                                  proxy=self.select_http_proxy(),
                                  logger=self.logger,
                                  timeout=self.conn_timeout)
                except ClientException as err:
                    # Remote 404 is fine: the object is already gone there.
                    if err.http_status != HTTP_NOT_FOUND:
                        raise
                self.container_deletes += 1
                self.logger.increment('deletes')
                self.logger.timing_since('deletes.timing', start_time)
            else:
                # when sync'ing a live object, use ts_meta - this is the time
                # at which the source object was last modified by a PUT or POST
                if self._object_in_remote_container(row['name'],
                                                    sync_to, user_key, realm,
                                                    realm_key, ts_meta):
                    return True
                exc = None
                # look up for the newest one
                headers_out = {'X-Newest': True,
                               'X-Backend-Storage-Policy-Index':
                               str(info['storage_policy_index'])}
                try:
                    source_obj_status, headers, body = \
                        self.swift.get_object(info['account'],
                                              info['container'], row['name'],
                                              headers=headers_out,
                                              acceptable_statuses=(2, 4))
                except (Exception, UnexpectedResponse, Timeout) as err:
                    headers = {}
                    body = None
                    exc = err
                timestamp = Timestamp(headers.get('x-timestamp', 0))
                if timestamp < ts_meta:
                    # Local GET returned something older than the row says
                    # exists -- re-raise the GET failure, or report the
                    # mismatch if the GET nominally succeeded.
                    if exc:
                        raise exc
                    # NOTE(review): the dict here is passed as a second
                    # Exception argument, not %-interpolated into the
                    # message -- looks like a latent formatting bug; confirm
                    # against upstream before changing.
                    raise Exception(
                        _('Unknown exception trying to GET: '
                          '%(account)r %(container)r %(object)r'),
                        {'account': info['account'],
                         'container': info['container'],
                         'object': row['name']})
                # Strip/normalize headers that must not be forwarded as-is.
                for key in ('date', 'last-modified'):
                    if key in headers:
                        del headers[key]
                if 'etag' in headers:
                    headers['etag'] = headers['etag'].strip('"')
                if 'content-type' in headers:
                    headers['content-type'] = clean_content_type(
                        headers['content-type'])
                self._update_sync_to_headers(row['name'], sync_to, user_key,
                                             realm, realm_key, 'PUT', headers)
                put_object(sync_to, name=row['name'], headers=headers,
                           contents=FileLikeIter(body),
                           proxy=self.select_http_proxy(), logger=self.logger,
                           timeout=self.conn_timeout)
                self.container_puts += 1
                self.logger.increment('puts')
                self.logger.timing_since('puts.timing', start_time)
        except ClientException as err:
            if err.http_status == HTTP_UNAUTHORIZED:
                self.logger.info(
                    _('Unauth %(sync_from)r => %(sync_to)r'),
                    {'sync_from': '%s/%s' %
                     (quote(info['account']), quote(info['container'])),
                     'sync_to': sync_to})
            elif err.http_status == HTTP_NOT_FOUND:
                self.logger.info(
                    _('Not found %(sync_from)r => %(sync_to)r \
                      - object %(obj_name)r'),
                    {'sync_from': '%s/%s' %
                     (quote(info['account']), quote(info['container'])),
                     'sync_to': sync_to, 'obj_name': row['name']})
            else:
                self.logger.exception(
                    _('ERROR Syncing %(db_file)s %(row)s'),
                    {'db_file': str(broker), 'row': row})
            self.container_failures += 1
            self.logger.increment('failures')
            return False
        except (Exception, Timeout) as err:
            self.logger.exception(
                _('ERROR Syncing %(db_file)s %(row)s'),
                {'db_file': str(broker), 'row': row})
            self.container_failures += 1
            self.logger.increment('failures')
            return False
        return True
def select_http_proxy(self):
return choice(self.http_proxies) if self.http_proxies else None
| 45.765599 | 79 | 0.562659 |
import errno
import os
import uuid
from swift import gettext_ as _
from time import ctime, time
from random import choice, random
from struct import unpack_from
from eventlet import sleep, Timeout
import swift.common.db
from swift.common.db import DatabaseConnectionError
from swift.container.backend import ContainerBroker
from swift.container.sync_store import ContainerSyncStore
from swift.common.container_sync_realms import ContainerSyncRealms
from swift.common.internal_client import (
delete_object, put_object, head_object,
InternalClient, UnexpectedResponse)
from swift.common.exceptions import ClientException
from swift.common.ring import Ring
from swift.common.ring.utils import is_local_device
from swift.common.utils import (
clean_content_type, config_true_value,
FileLikeIter, get_logger, hash_path, quote, urlparse, validate_sync_to,
whataremyips, Timestamp, decode_timestamps)
from swift.common.daemon import Daemon
from swift.common.http import HTTP_UNAUTHORIZED, HTTP_NOT_FOUND
from swift.common.wsgi import ConfigString
ic_conf_body = """
[DEFAULT]
# swift_dir = /etc/swift
# user = swift
# You can specify default log routing here if you want:
# log_name = swift
# log_facility = LOG_LOCAL0
# log_level = INFO
# log_address = /dev/log
#
# comma separated list of functions to call to setup custom log handlers.
# functions get passed: conf, name, log_to_console, log_route, fmt, logger,
# adapted_logger
# log_custom_handlers =
#
# If set, log_udp_host will override log_address
# log_udp_host =
# log_udp_port = 514
#
# You can enable StatsD logging here:
# log_statsd_host =
# log_statsd_port = 8125
# log_statsd_default_sample_rate = 1.0
# log_statsd_sample_rate_factor = 1.0
# log_statsd_metric_prefix =
[pipeline:main]
pipeline = catch_errors proxy-logging cache proxy-server
[app:proxy-server]
use = egg:swift#proxy
# See proxy-server.conf-sample for options
[filter:cache]
use = egg:swift#memcache
# See proxy-server.conf-sample for options
[filter:proxy-logging]
use = egg:swift#proxy_logging
[filter:catch_errors]
use = egg:swift#catch_errors
# See proxy-server.conf-sample for options
""".lstrip()
class ContainerSync(Daemon):
def __init__(self, conf, container_ring=None, logger=None):
self.conf = conf
self.logger = logger or get_logger(conf, log_route='container-sync')
self.devices = conf.get('devices', '/srv/node')
self.mount_check = config_true_value(conf.get('mount_check', 'true'))
self.interval = int(conf.get('interval', 300))
#: it'll just be resumed next scan.
self.container_time = int(conf.get('container_time', 60))
self.realms_conf = ContainerSyncRealms(
os.path.join(
conf.get('swift_dir', '/etc/swift'),
'container-sync-realms.conf'),
self.logger)
#: overridden by data in self.realms_conf
self.allowed_sync_hosts = [
h.strip()
for h in conf.get('allowed_sync_hosts', '127.0.0.1').split(',')
if h.strip()]
self.http_proxies = [
a.strip()
for a in conf.get('sync_proxy', '').split(',')
if a.strip()]
#: ContainerSyncStore instance for iterating over synced containers
self.sync_store = ContainerSyncStore(self.devices,
self.logger,
self.mount_check)
#: Number of containers with sync turned on that were successfully
#: synced.
self.container_syncs = 0
#: Number of successful DELETEs triggered.
self.container_deletes = 0
#: Number of successful PUTs triggered.
self.container_puts = 0
#: Number of containers whose sync has been turned off, but
#: are not yet cleared from the sync store.
self.container_skips = 0
#: Number of containers that had a failure of some type.
self.container_failures = 0
#: Time of last stats report.
self.reported = time()
self.swift_dir = conf.get('swift_dir', '/etc/swift')
#: swift.common.ring.Ring for locating containers.
self.container_ring = container_ring or Ring(self.swift_dir,
ring_name='container')
bind_ip = conf.get('bind_ip', '0.0.0.0')
self._myips = whataremyips(bind_ip)
self._myport = int(conf.get('bind_port', 6001))
swift.common.db.DB_PREALLOCATION = \
config_true_value(conf.get('db_preallocation', 'f'))
self.conn_timeout = float(conf.get('conn_timeout', 5))
request_tries = int(conf.get('request_tries') or 3)
internal_client_conf_path = conf.get('internal_client_conf_path')
if not internal_client_conf_path:
self.logger.warning(
_('Configuration option internal_client_conf_path not '
'defined. Using default configuration, See '
'internal-client.conf-sample for options'))
internal_client_conf = ConfigString(ic_conf_body)
else:
internal_client_conf = internal_client_conf_path
try:
self.swift = InternalClient(
internal_client_conf, 'Swift Container Sync', request_tries)
except IOError as err:
if err.errno != errno.ENOENT:
raise
raise SystemExit(
_('Unable to load internal client from config: %r (%s)') %
(internal_client_conf_path, err))
    def run_forever(self, *args, **kwargs):
        """Daemon loop: repeatedly sync every tracked container.

        Sleeps out the remainder of ``self.interval`` after each full pass,
        and emits a stats report roughly once an hour.
        """
        # Random initial delay so replicas of this daemon do not all start
        # their sync passes in lock-step.
        sleep(random() * self.interval)
        while True:
            begin = time()
            for path in self.sync_store.synced_containers_generator():
                self.container_sync(path)
                if time() - self.reported >= 3600: # once an hour
                    self.report()
            elapsed = time() - begin
            if elapsed < self.interval:
                sleep(self.interval - elapsed)
    def run_once(self, *args, **kwargs):
        """Single pass: sync every tracked container once, then report.

        Same per-container work as :meth:`run_forever`, but returns after
        one sweep and logs the elapsed time.
        """
        self.logger.info(_('Begin container sync "once" mode'))
        begin = time()
        for path in self.sync_store.synced_containers_generator():
            self.container_sync(path)
            if time() - self.reported >= 3600: # once an hour
                self.report()
        # Final report so the last partial hour's counters are not lost.
        self.report()
        elapsed = time() - begin
        self.logger.info(
            _('Container sync "once" mode completed: %.02fs'), elapsed)
def report(self):
self.logger.info(
_('Since %(time)s: %(sync)s synced [%(delete)s deletes, %(put)s '
'puts], %(skip)s skipped, %(fail)s failed'),
{'time': ctime(self.reported),
'sync': self.container_syncs,
'delete': self.container_deletes,
'put': self.container_puts,
'skip': self.container_skips,
'fail': self.container_failures})
self.reported = time()
self.container_syncs = 0
self.container_deletes = 0
self.container_puts = 0
self.container_skips = 0
self.container_failures = 0
def container_sync(self, path):
broker = None
try:
broker = ContainerBroker(path)
# The path we pass to the ContainerBroker is a real path of
# a container DB. If we get here, however, it means that this
# path is linked from the sync_containers dir. In rare cases
# of race or processes failures the link can be stale and
# the get_info below will raise a DB doesn't exist exception
try:
info = broker.get_info()
except DatabaseConnectionError as db_err:
if str(db_err).endswith("DB doesn't exist"):
self.sync_store.remove_synced_container(broker)
raise
x, nodes = self.container_ring.get_nodes(info['account'],
info['container'])
for ordinal, node in enumerate(nodes):
if is_local_device(self._myips, self._myport,
node['ip'], node['port']):
break
else:
return
if not broker.is_deleted():
sync_to = None
user_key = None
sync_point1 = info['x_container_sync_point1']
sync_point2 = info['x_container_sync_point2']
for key, (value, timestamp) in broker.metadata.items():
if key.lower() == 'x-container-sync-to':
sync_to = value
elif key.lower() == 'x-container-sync-key':
user_key = value
if not sync_to or not user_key:
self.container_skips += 1
self.logger.increment('skips')
return
err, sync_to, realm, realm_key = validate_sync_to(
sync_to, self.allowed_sync_hosts, self.realms_conf)
if err:
self.logger.info(
_('ERROR %(db_file)s: %(validate_sync_to_err)s'),
{'db_file': str(broker),
'validate_sync_to_err': err})
self.container_failures += 1
self.logger.increment('failures')
return
stop_at = time() + self.container_time
next_sync_point = None
while time() < stop_at and sync_point2 < sync_point1:
rows = broker.get_items_since(sync_point2, 1)
if not rows:
break
row = rows[0]
if row['ROWID'] > sync_point1:
break
# This node will only initially sync out one third of the
# objects (if 3 replicas, 1/4 if 4, etc.) and will skip
# problematic rows as needed in case of faults.
# This section will attempt to sync previously skipped
# rows in case the previous attempts by any of the nodes
# didn't succeed.
if not self.container_sync_row(
row, sync_to, user_key, broker, info, realm,
realm_key):
if not next_sync_point:
next_sync_point = sync_point2
sync_point2 = row['ROWID']
broker.set_x_container_sync_points(None, sync_point2)
if next_sync_point:
broker.set_x_container_sync_points(None, next_sync_point)
while time() < stop_at:
rows = broker.get_items_since(sync_point1, 1)
if not rows:
break
row = rows[0]
key = hash_path(info['account'], info['container'],
row['name'], raw_digest=True)
# around to the section above and attempt to sync
# previously skipped rows in case the other nodes didn't
if unpack_from('>I', key)[0] % \
len(nodes) == ordinal:
self.container_sync_row(
row, sync_to, user_key, broker, info, realm,
realm_key)
sync_point1 = row['ROWID']
broker.set_x_container_sync_points(sync_point1, None)
self.container_syncs += 1
self.logger.increment('syncs')
except (Exception, Timeout):
self.container_failures += 1
self.logger.increment('failures')
self.logger.exception(_('ERROR Syncing %s'),
broker if broker else path)
    def _update_sync_to_headers(self, name, sync_to, user_key,
                                realm, realm_key, method, headers):
        """Mutate *headers* in place with auth info for an outgoing sync request.

        When a realm and its key are configured, attach a signed
        'x-container-sync-auth' header (realm, nonce, signature); otherwise
        fall back to the plain per-container 'x-container-sync-key'.
        """
        if realm and realm_key:
            # Fresh nonce per request; the signature covers the HTTP method,
            # destination path, timestamp, nonce and both keys.
            nonce = uuid.uuid4().hex
            path = urlparse(sync_to).path + '/' + quote(name)
            sig = self.realms_conf.get_sig(method, path,
                                           headers.get('x-timestamp', 0),
                                           nonce, realm_key,
                                           user_key)
            headers['x-container-sync-auth'] = '%s %s %s' % (realm,
                                                             nonce,
                                                             sig)
        else:
            headers['x-container-sync-key'] = user_key
    def _object_in_remote_container(self, name, sync_to, user_key,
                                    realm, realm_key, timestamp):
        """Return True if the remote copy of *name* is at least as new as
        *timestamp*, False if it is older or missing (404).

        Issues a HEAD to the sync destination; any non-404 ClientException
        is re-raised for the caller to count as a failure.
        """
        headers = {'x-timestamp': timestamp.internal}
        self._update_sync_to_headers(name, sync_to, user_key, realm,
                                     realm_key, 'HEAD', headers)
        try:
            # retries=0: a transient failure is treated like "don't know",
            # and the exception propagates to the caller.
            metadata, _ = head_object(sync_to, name=name,
                                      headers=headers,
                                      proxy=self.select_http_proxy(),
                                      logger=self.logger,
                                      retries=0)
            remote_ts = Timestamp(metadata.get('x-timestamp', 0))
            self.logger.debug("remote obj timestamp %s local obj %s" %
                              (timestamp.internal, remote_ts.internal))
            if timestamp <= remote_ts:
                return True
            return False
        except ClientException as http_err:
            if http_err.http_status == 404:
                # Remote object does not exist -> needs to be synced.
                return False
            raise http_err
def container_sync_row(self, row, sync_to, user_key, broker, info,
realm, realm_key):
try:
start_time = time()
ts_data, ts_ctype, ts_meta = decode_timestamps(
row['created_at'])
if row['deleted']:
# timestamp of the source tombstone
try:
headers = {'x-timestamp': ts_data.internal}
self._update_sync_to_headers(row['name'], sync_to,
user_key, realm, realm_key,
'DELETE', headers)
delete_object(sync_to, name=row['name'], headers=headers,
proxy=self.select_http_proxy(),
logger=self.logger,
timeout=self.conn_timeout)
except ClientException as err:
if err.http_status != HTTP_NOT_FOUND:
raise
self.container_deletes += 1
self.logger.increment('deletes')
self.logger.timing_since('deletes.timing', start_time)
else:
# when sync'ing a live object, use ts_meta - this is the time
if self._object_in_remote_container(row['name'],
sync_to, user_key, realm,
realm_key, ts_meta):
return True
exc = None
headers_out = {'X-Newest': True,
'X-Backend-Storage-Policy-Index':
str(info['storage_policy_index'])}
try:
source_obj_status, headers, body = \
self.swift.get_object(info['account'],
info['container'], row['name'],
headers=headers_out,
acceptable_statuses=(2, 4))
except (Exception, UnexpectedResponse, Timeout) as err:
headers = {}
body = None
exc = err
timestamp = Timestamp(headers.get('x-timestamp', 0))
if timestamp < ts_meta:
if exc:
raise exc
raise Exception(
_('Unknown exception trying to GET: '
'%(account)r %(container)r %(object)r'),
{'account': info['account'],
'container': info['container'],
'object': row['name']})
for key in ('date', 'last-modified'):
if key in headers:
del headers[key]
if 'etag' in headers:
headers['etag'] = headers['etag'].strip('"')
if 'content-type' in headers:
headers['content-type'] = clean_content_type(
headers['content-type'])
self._update_sync_to_headers(row['name'], sync_to, user_key,
realm, realm_key, 'PUT', headers)
put_object(sync_to, name=row['name'], headers=headers,
contents=FileLikeIter(body),
proxy=self.select_http_proxy(), logger=self.logger,
timeout=self.conn_timeout)
self.container_puts += 1
self.logger.increment('puts')
self.logger.timing_since('puts.timing', start_time)
except ClientException as err:
if err.http_status == HTTP_UNAUTHORIZED:
self.logger.info(
_('Unauth %(sync_from)r => %(sync_to)r'),
{'sync_from': '%s/%s' %
(quote(info['account']), quote(info['container'])),
'sync_to': sync_to})
elif err.http_status == HTTP_NOT_FOUND:
self.logger.info(
_('Not found %(sync_from)r => %(sync_to)r \
- object %(obj_name)r'),
{'sync_from': '%s/%s' %
(quote(info['account']), quote(info['container'])),
'sync_to': sync_to, 'obj_name': row['name']})
else:
self.logger.exception(
_('ERROR Syncing %(db_file)s %(row)s'),
{'db_file': str(broker), 'row': row})
self.container_failures += 1
self.logger.increment('failures')
return False
except (Exception, Timeout) as err:
self.logger.exception(
_('ERROR Syncing %(db_file)s %(row)s'),
{'db_file': str(broker), 'row': row})
self.container_failures += 1
self.logger.increment('failures')
return False
return True
def select_http_proxy(self):
return choice(self.http_proxies) if self.http_proxies else None
| true | true |
f72bc6e7a5a7d63e1045e7006d2cabcd2f8a9690 | 1,305 | py | Python | oauth2_provider/generators.py | dqfort/django-oauth-toolkit | 492a867499b50f348c28db4ef3e429e8f46dc412 | [
"BSD-2-Clause-FreeBSD"
] | 1,406 | 2018-04-09T18:46:01.000Z | 2022-03-30T00:42:23.000Z | oauth2_provider/generators.py | dqfort/django-oauth-toolkit | 492a867499b50f348c28db4ef3e429e8f46dc412 | [
"BSD-2-Clause-FreeBSD"
] | 625 | 2018-04-08T06:06:29.000Z | 2022-03-28T20:48:19.000Z | oauth2_provider/generators.py | dqfort/django-oauth-toolkit | 492a867499b50f348c28db4ef3e429e8f46dc412 | [
"BSD-2-Clause-FreeBSD"
] | 378 | 2018-04-11T20:08:11.000Z | 2022-03-30T17:53:21.000Z | from oauthlib.common import UNICODE_ASCII_CHARACTER_SET
from oauthlib.common import generate_client_id as oauthlib_generate_client_id
from .settings import oauth2_settings
class BaseHashGenerator:
    """
    Abstract base for hash generators.

    Concrete generators must override the `.hash()` method to return
    a generated value.
    """

    def hash(self):
        """Subclasses provide the actual generation logic."""
        raise NotImplementedError()
class ClientIdGenerator(BaseHashGenerator):
    def hash(self):
        """
        Generate a 40-character client_id suitable for the Basic
        Authentication scheme: it contains no colon char, as required by
        http://tools.ietf.org/html/rfc2617#section-2
        """
        generated = oauthlib_generate_client_id(length=40, chars=UNICODE_ASCII_CHARACTER_SET)
        return generated
class ClientSecretGenerator(BaseHashGenerator):
    def hash(self):
        """Generate a client secret; its length comes from the settings."""
        secret_chars = UNICODE_ASCII_CHARACTER_SET
        secret_length = oauth2_settings.CLIENT_SECRET_GENERATOR_LENGTH
        return oauthlib_generate_client_id(length=secret_length, chars=secret_chars)
def generate_client_id():
    """
    Generate a suitable client id using the generator class configured
    in the oauth2 settings.
    """
    generator = oauth2_settings.CLIENT_ID_GENERATOR_CLASS()
    return generator.hash()
def generate_client_secret():
    """
    Generate a suitable client secret using the generator class configured
    in the oauth2 settings.
    """
    generator = oauth2_settings.CLIENT_SECRET_GENERATOR_CLASS()
    return generator.hash()
| 28.369565 | 88 | 0.743295 | from oauthlib.common import UNICODE_ASCII_CHARACTER_SET
from oauthlib.common import generate_client_id as oauthlib_generate_client_id
from .settings import oauth2_settings
class BaseHashGenerator:
def hash(self):
raise NotImplementedError()
class ClientIdGenerator(BaseHashGenerator):
def hash(self):
return oauthlib_generate_client_id(length=40, chars=UNICODE_ASCII_CHARACTER_SET)
class ClientSecretGenerator(BaseHashGenerator):
def hash(self):
length = oauth2_settings.CLIENT_SECRET_GENERATOR_LENGTH
chars = UNICODE_ASCII_CHARACTER_SET
return oauthlib_generate_client_id(length=length, chars=chars)
def generate_client_id():
client_id_generator = oauth2_settings.CLIENT_ID_GENERATOR_CLASS()
return client_id_generator.hash()
def generate_client_secret():
client_secret_generator = oauth2_settings.CLIENT_SECRET_GENERATOR_CLASS()
return client_secret_generator.hash()
| true | true |
f72bc75458786940b2244977b48352162a62d379 | 767 | py | Python | src/data_consumer.py | perrycao/docker_project | e07c4a388eb4936a5f88f63e291ecb8a115bcbe2 | [
"MIT"
] | null | null | null | src/data_consumer.py | perrycao/docker_project | e07c4a388eb4936a5f88f63e291ecb8a115bcbe2 | [
"MIT"
] | null | null | null | src/data_consumer.py | perrycao/docker_project | e07c4a388eb4936a5f88f63e291ecb8a115bcbe2 | [
"MIT"
] | null | null | null | # -*- coding:utf-8 -*-
# version 0.1 2019-05-03 First update
import argparse
from kafka import KafkaConsumer
def consume(_topic_name, _kafka_broker):
    """
    Subscribe to the given topic on the given Kafka broker and print
    every message received (blocks forever).
    """
    topic_consumer = KafkaConsumer(_topic_name, bootstrap_servers=_kafka_broker)
    for msg in topic_consumer:
        print(msg)
if __name__ == '__main__':
    # CLI entry point: read the topic and broker location, then consume.
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("topic_name", help="the kafka topic to pull from.")
    arg_parser.add_argument("kafka_broker", help="the location of kafka broker.")
    parsed_args = arg_parser.parse_args()
    consume(parsed_args.topic_name, parsed_args.kafka_broker)
| 26.448276 | 77 | 0.711864 |
import argparse
from kafka import KafkaConsumer
def consume(_topic_name, _kafka_broker):
consumer = KafkaConsumer(_topic_name, bootstrap_servers=_kafka_broker)
for message in consumer:
print(message)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("topic_name", help="the kafka topic to pull from.")
parser.add_argument("kafka_broker", help="the location of kafka broker.")
args = parser.parse_args()
topic_name = args.topic_name
kafka_broker = args.kafka_broker
consume(topic_name, kafka_broker)
| true | true |
f72bc80b55e85e3fe6182a7b4565c13a421a0651 | 1,734 | py | Python | bioseq/management/commands/import_genome.py | ezequieljsosa/sndg-web | 7763c8fbc83dc92abb9c53326e2fe227bcabf607 | [
"MIT"
] | null | null | null | bioseq/management/commands/import_genome.py | ezequieljsosa/sndg-web | 7763c8fbc83dc92abb9c53326e2fe227bcabf607 | [
"MIT"
] | 2 | 2021-03-10T17:08:59.000Z | 2021-09-29T17:40:48.000Z | bioseq/management/commands/import_genome.py | sndg-arg/covid19-web | fe809c87563a00bbec53c3646338d571fdde6a69 | [
"MIT"
] | null | null | null | import os
import warnings
from tqdm import tqdm
import subprocess as sp
import gzip
from io import StringIO
import Bio.SeqIO as bpio
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from bioseq.io.BioIO import BioIO
from Bio import BiopythonWarning, BiopythonParserWarning, BiopythonDeprecationWarning, BiopythonExperimentalWarning
warnings.simplefilter('ignore', RuntimeWarning)
warnings.simplefilter('ignore', BiopythonWarning)
warnings.simplefilter('ignore', BiopythonParserWarning)
warnings.simplefilter('ignore', BiopythonDeprecationWarning)
warnings.simplefilter('ignore', BiopythonExperimentalWarning)
class Command(BaseCommand):
    # Django management command:
    #   ./manage.py import_genome -i genome.gb[.gz] -a ACCESSION -n NAME -t TAXID
    help = 'Loads a genome in the database'
    def add_arguments(self, parser):
        # Register the required CLI options for this command.
        parser.add_argument('--input', '-i', required=True)
        parser.add_argument('--accession', '-a', required=True)
        parser.add_argument('--name', '-n', required=True)
        parser.add_argument('--taxon', '-t', type=int, required=True)
    def handle(self, *args, **options):
        """Parse every GenBank record from the input file and load it."""
        input_file = options['input']
        accession = options['accession']
        name = options['name']  # NOTE(review): parsed but never used below -- confirm intent
        taxon = options['taxon']
        assert os.path.exists(input_file),"'%s' does not exists" % input_file
        io = BioIO(accession, taxon)
        # Count records up front (one "FEATURES ...Qualifiers" header per
        # GenBank record) so the importer knows the total for progress display.
        grep_cmd = 'grep -c "FEATURES *Location/Qualifiers" "%s"' % input_file
        if input_file.endswith(".gz"):
            # zgrep handles the compressed file; the same variable is then
            # rebound to an open text handle for the parser below.
            grep_cmd = 'z' + grep_cmd
            input_file = gzip.open(input_file, "rt")
        total = int(sp.check_output(grep_cmd, shell=True))
        io.create_db()
        io.process_record_list(bpio.parse(input_file, "gb"), total)
        self.stderr.write("genome imported!")
| 36.125 | 115 | 0.703576 | import os
import warnings
from tqdm import tqdm
import subprocess as sp
import gzip
from io import StringIO
import Bio.SeqIO as bpio
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from bioseq.io.BioIO import BioIO
from Bio import BiopythonWarning, BiopythonParserWarning, BiopythonDeprecationWarning, BiopythonExperimentalWarning
warnings.simplefilter('ignore', RuntimeWarning)
warnings.simplefilter('ignore', BiopythonWarning)
warnings.simplefilter('ignore', BiopythonParserWarning)
warnings.simplefilter('ignore', BiopythonDeprecationWarning)
warnings.simplefilter('ignore', BiopythonExperimentalWarning)
class Command(BaseCommand):
help = 'Loads a genome in the database'
def add_arguments(self, parser):
parser.add_argument('--input', '-i', required=True)
parser.add_argument('--accession', '-a', required=True)
parser.add_argument('--name', '-n', required=True)
parser.add_argument('--taxon', '-t', type=int, required=True)
def handle(self, *args, **options):
input_file = options['input']
accession = options['accession']
name = options['name']
taxon = options['taxon']
assert os.path.exists(input_file),"'%s' does not exists" % input_file
io = BioIO(accession, taxon)
grep_cmd = 'grep -c "FEATURES *Location/Qualifiers" "%s"' % input_file
if input_file.endswith(".gz"):
grep_cmd = 'z' + grep_cmd
input_file = gzip.open(input_file, "rt")
total = int(sp.check_output(grep_cmd, shell=True))
io.create_db()
io.process_record_list(bpio.parse(input_file, "gb"), total)
self.stderr.write("genome imported!")
| true | true |
f72bc81ec96d1edc6e8d005ef6645e2c6fb9e483 | 14,906 | py | Python | models/SHL_2018/transforms.py | HuguesMoreau/Sensors_similariy | 4b8592049c83b03a11f5c57fab247290ee29b8f5 | [
"Apache-2.0"
] | null | null | null | models/SHL_2018/transforms.py | HuguesMoreau/Sensors_similariy | 4b8592049c83b03a11f5c57fab247290ee29b8f5 | [
"Apache-2.0"
] | null | null | null | models/SHL_2018/transforms.py | HuguesMoreau/Sensors_similariy | 4b8592049c83b03a11f5c57fab247290ee29b8f5 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This file contains diverse preprocessing functions (mostly norms ans spectrograms),
and basic tests and visualizations.
If you are to work with any IPython console (ex: with Jupyter or spyder), is is advised
to launch a '%matplotlib qt' ,to get clean widow
"""
if __name__ == '__main__': # this is used to launch the file from anywhere
import sys
sys.path.append("../..")
import numpy as np
import torch
import scipy.signal, scipy.interpolate, scipy.ndimage
from param import classes_names, fs, duration_window, duration_overlap, spectro_batch_size
from models.SHL_2018 import Datasets
if __name__ == "__main__":
import matplotlib.pyplot as plt
n_classes = len(classes_names)
# We will need this for the tests
DS = Datasets.SignalsDataSet(mode='train', transform=None)
#%% transform functions
"""In all following functions, the input parameter (data) is, by default,
a dict of numpy arrays, containing signal names (eg. "Gyr_z") as keys, and 1-dimensional
arrays as values
Most of this part contains basic visualizations to make sure the preprocessing is correct"""
class TemporalTransform():
    """ create the base transform to use to each element of the data

    Parameters
    ----------
    signal_name: a string (ex: 'Gyr_y', 'Ori_x')
        If the string ends by "_norm" (ex: "Mag_norm"), the output will
        be the norm of the three (or four) axis of the signal.

    Returns
    -------
    a function with input: a dict of (_, 6000) arrays (key example: 'Gyr_y')
        and output: an array with the same shape.
    """
    def __init__(self, signal_name):
        super(TemporalTransform, self).__init__()
        self.signal_name = signal_name

    def __call__(self, data):
        """
        Parameters
        ----------
        data: a dict of (B, 6000) arrays (key example: 'Gyr_y')

        Returns
        -------
        an array with shape (B, 6000), where B depends on the input shape.

        Raises
        ------
        ValueError if the signal name does not end with '_x', '_y', '_z',
        '_w' or '_norm' (and is not 'Pressure').
        """
        if self.signal_name[-2:] in ['_x', '_y', '_z', '_w'] or self.signal_name == "Pressure":
            processed_signal = data[self.signal_name]
        elif self.signal_name[-5:] == '_norm':
            suffix_location = self.signal_name.index("_")  # 4 if signal_name == "LAcc", 3 otherwise
            sensor = self.signal_name[:suffix_location]    # ex: 'Acc', 'LAcc'
            if sensor == "Ori":
                # quaternion: the norm over the four components (in that case
                # the result should be close to 1.0)
                processed_signal = np.sqrt(data[sensor+"_x"]**2 + data[sensor+"_y"]**2 + data[sensor+"_z"]**2 \
                                           + data[sensor+"_w"]**2)
            else :
                processed_signal = np.sqrt(data[sensor+"_x"]**2 + data[sensor+"_y"]**2 + data[sensor+"_z"]**2)
        else :
            # BUG FIX: this previously formatted the undefined local
            # `signal_name`, raising NameError instead of the intended
            # ValueError whenever the module was imported (the bug was masked
            # when running as __main__ because a global of that name exists).
            raise ValueError("unknown signal name: '{}'. Signal names should end with either '_x', '_y', '_z', '_w', or '_norm'".format(self.signal_name))
        return processed_signal

    def __str__(self):
        """purely for visual purposes, so that we can print() the function"""
        str_to_return = "Temporal_transform"
        str_to_return += f"\n\t Signal: {self.signal_name}"
        return str_to_return
if __name__ == "__main__":
# plot one figure per sensor
# on each figure, one subplot per class,
# to find one instance per each class, we start looking at index = index0
index0 = 0
for tested_signal_name in ["Acc_norm", "Ori_norm", "Mag_norm", "LAcc_x"]:
# plot 1 segment from each class.
plt.figure()
if tested_signal_name != 'Pressure':
suffix_location = tested_signal_name.index("_")
tested_sensor = tested_signal_name[:suffix_location] # ex: 'Acc', 'LAcc'
else:
tested_sensor = 'Pressure'
sensor_axis = [tested_sensor + axis for axis in ["_x", "_y", "_z"]] if tested_sensor != 'Pressure' else ['Pressure']
if tested_sensor == "Ori" : sensor_axis.append(tested_sensor+"_w")
temporal_transform = TemporalTransform(tested_signal_name)
remaining_classes = classes_names.copy()
index = index0
while len(remaining_classes)>0:
data_tensor, class_tensor = DS[index] # data is a dict of 2D tensors (1,nb)
data_cpu = {signal:data_tensor[signal].to(torch.device('cpu')).detach().numpy() for signal in data_tensor.keys()}
class_index = int(class_tensor)
class_name = classes_names[class_index-1]
if class_name in remaining_classes:
remaining_classes.remove(class_name)
plt.subplot(2, 4, n_classes - len(remaining_classes))
for k,signal in enumerate(sensor_axis):
if k==0: # compute the temporal axis once
nb = data_cpu[signal].shape[1]
x_t = np.linspace(0, nb/fs, nb)
plt.plot(x_t, data_cpu[signal][0,:])
selected_signal = temporal_transform(data_cpu)
error_message_dtype = "One of the signals does not have the correct type: {}, {} \n dtype should be float32, is actually {}".format(tested_signal_name, str(temporal_transform), selected_signal.dtype)
assert (selected_signal.dtype == 'float32'), error_message_dtype
plt.plot(x_t, selected_signal[0,:], '--')
plt.xlabel("t (s)")
legend = sensor_axis + [tested_signal_name+' (selected)']
plt.legend(legend)
plt.title("{} ({}, index={})".format(tested_sensor, classes_names[class_index-1], index))
index +=1
plt.show()
#%%
# ---------------- Spectrogram transforms ---------------------
# Interpolation functions
def interpol_log(f, t, spectrogram, out_size):
    """interpolates the spectrogram in input using a linear axis for the timestamps and a LOG axis for the frequencies

    Parameters
    ----------
    f : numpy array, shape: (F_in,), frequencies of the spectrogram
    t : numpy array, shape: (T_in,), timestamps of the spectrogram
    spectrogram : (B, F_in, T_in), B is batch size; 3D numpy array
    out_size : couple of ints (F_out, T_out)

    Returns
    -------
    f_interpolated : numpy array, shape: (F_out,), frequencies of the spectrogram AFTER interpolation
    t_interpolated : numpy array, shape: (T_out,), timestamps of the spectrogram AFTER interpolation
    a spectrogram, where the f axis (second dimension) has been re-interpolated
    using a log axis

    NOTE(review): scipy.interpolate.interp2d is deprecated and was removed in
    SciPy 1.14; this function requires an older SciPy -- confirm pinned version.
    """
    B = spectrogram.shape[0]
    out_f, out_t = out_size
    # Shift by f[1] before the log so that f=0 does not produce -inf.
    log_f = np.log(f+f[1]) # log between 0.2 Hz and 50.2 Hz
    log_f_normalized = (log_f-log_f[0])/(log_f[-1]-log_f[0]) # between 0.0 and 1.0
    t_normalized = (t-t[0])/(t[-1]-t[0])
    # Map normalized axes onto the output grid indices.
    rescaled_f = out_f*log_f_normalized # 0 and 48
    # rescaled_f = (out_f-1)*log_f_normalized ??
    rescaled_t = out_t*t_normalized
    spectrogram_interpolated = np.zeros( (B, out_f, out_t), dtype='float32')
    index_f, index_t = np.arange(out_f), np.arange(out_t) # between 0 and 47
    for i in range(B):
        # One 2D interpolator per batch element (interp2d only handles 2D).
        spectrogram_fn = scipy.interpolate.interp2d(rescaled_t, rescaled_f, spectrogram[i,:,:], copy=False)
        # interp2d returns a 2D function
        spectrogram_interpolated[i,:,:] = spectrogram_fn(index_t, index_f) # care to the order
    # Recover the physical frequency/time values of the new grid points.
    f_fn = scipy.interpolate.interp1d(rescaled_f, f, copy=False)
    f_interpolated = f_fn(index_f)
    t_fn = scipy.interpolate.interp1d(rescaled_t, t, copy=False)
    t_interpolated = t_fn(index_t)
    return f_interpolated, t_interpolated, spectrogram_interpolated
#%%
# ---------------- The spectrogram class --------------
class SpectrogramTransform():
    """ create the transform to work with spectrograms. This class behaves
        essentially the same as TemporalTransform, except the created transform
        returns log-power spectrograms (3d arrays) instead of 2d signals

    Parameters
    ----------
    signal_name: a string signal (ex: 'Gyr_y', 'Ori_x')
        If the string ends by "_norm" (ex: "Mag_norm"), the output will
        be the norm of the three (or four) axis of the signal.

    Returns
    -------
    a function with input: data : a dict of (_, 6000) arrays (key example: 'Gyr_y')
        and output: a (B, 48, 48) float32 array of log-spectrograms.
    """
    def __init__(self, signal_name):
        super(SpectrogramTransform, self).__init__()
        # Reuse the temporal selection / norm logic before the STFT.
        self.temporal_transform = TemporalTransform(signal_name)
        self.fs = fs
        self.duration_window = duration_window
        self.duration_overlap = duration_overlap
        self.spectro_batch_size = spectro_batch_size # these values were loaded from the param file
        self.signal_name = signal_name
        # (F_out, T_out) of the interpolated output spectrogram
        self.out_size = (48, 48)
    def __call__(self, data):
        """
        Parameters
        ----------
        data : a dict of (B, 6000) arrays (key example: 'Gyr_y')

        Returns
        -------
        An array with shape (B, F, T), where B (dataset size) depends on the
        input shape, and F and T are equal to 48 here.

        Side effect: stores the interpolated axes on self.f_interpolated and
        self.t_interpolated for later plotting.
        """
        temporal_signal = self.temporal_transform(data)
        del data # free some memory
        fs = self.fs
        nperseg = int(self.duration_window * fs)
        noverlap = int(self.duration_overlap * fs)
        spectro_batch_size = self.spectro_batch_size
        # turning 13,000 temporal signals into (550, 500) array
        # spectrograms at once is too much: a single (13000, 550, 500) array,
        # with simple precision requires 7.15 Go !
        # This is why we work with batches of 1000 instead. For each batch,
        # we compute the complete spectrogram (1000 x 550 x 500), then
        # interpolate it to smaller sizes, before working with the following batch.
        current_spectro_batch_size = temporal_signal.shape[0]
        if current_spectro_batch_size < spectro_batch_size :
            # Small input: compute and interpolate everything in one go.
            f, t, spectrogram = scipy.signal.spectrogram(temporal_signal, fs=fs, nperseg=nperseg, noverlap=noverlap)
            f_interpolated, t_interpolated, interpolated_spectrogram = interpol_log(f, t, spectrogram, self.out_size)
            # f, t, and possibly out_size will be ignored when the function does not need them
        else :
            n_batches = (current_spectro_batch_size-1)//spectro_batch_size +1
            nb_interp_f, nb_interp_t = self.out_size
            # Pre-allocate the (small) interpolated output; only one batch of
            # full-size spectrograms lives in memory at a time.
            interpolated_spectrogram = np.zeros((current_spectro_batch_size, nb_interp_f, nb_interp_t), dtype='float32')
            for i in range(n_batches):
                i_min = i * spectro_batch_size
                i_max = (i+1) * spectro_batch_size # does not matter if it goes beyond current_spectro_batch_size
                this_temporal_signal = temporal_signal[i_min:i_max,:]
                f, t, spectrogram = scipy.signal.spectrogram(this_temporal_signal, fs=fs, nperseg=nperseg, noverlap=noverlap)
                f_interpolated, t_interpolated, interpolated_spectrogram[i_min:i_max,:,:] = interpol_log(f, t, spectrogram, self.out_size)
        del temporal_signal
        # Log-power; the epsilon avoids log(0).
        np.log(interpolated_spectrogram + 1e-10, dtype='float32', out=interpolated_spectrogram) # in-place operation
        self.f_interpolated = f_interpolated
        self.t_interpolated = t_interpolated
        return interpolated_spectrogram
    def __str__(self):
        """purely for visual purposes, so that we can print() the function"""
        str_to_return = "Spectrogram transform"
        str_to_return += f"\n\t Signals: {self.signal_name}"
        str_to_return += f"\n\t Output size: {self.out_size}"
        str_to_return += f"\n\t Interpolation: log-interpolation"
        str_to_return += "\n\t Log-power"
        return str_to_return
#%%
if __name__ == "__main__":
fontdict = {'fontsize':10}
n_ticks = 10
# we plot the raw spectrogram and two interpolated spectrograms for the following classes
selected_classes = ["Run", "Walk"]
remaining_classes = selected_classes.copy()
nsel = len(selected_classes)
index = 3204 # where to tart the search
plt.figure(figsize=(12,8))
signal_name = "Acc_norm"
temporal_transform = TemporalTransform(signal_name) # we will plot the result
spectrogram_transform = SpectrogramTransform(signal_name)
while len(remaining_classes)>0:
data_tensor, class_tensor = DS[index]
data_cpu = {signal:data_tensor[signal].cpu().detach().numpy() for signal in data_tensor.keys()}
class_index = int(class_tensor)
class_name = classes_names[class_index-1]
if class_name in remaining_classes:
remaining_classes.remove(class_name)
i_class = nsel - len(remaining_classes) # between 1 and n
temporal_signal = temporal_transform(data_cpu)
nb = temporal_signal.shape[1]
x_t = np.linspace(0, nb/fs, nb)
plt.subplot(2,nsel,i_class)
plt.plot(x_t, temporal_signal[0,:])
plt.title(f'{class_name} (index={index})', fontdict)
plt.xlabel("t (sec)")
plt.ylabel(signal_name)
data_tensor, _ = DS[index] # we need to recreate data because the variable is deleted
data_cpu = {signal:data_tensor[signal].to(torch.device('cpu')).detach().numpy() for signal in data_tensor.keys()}
spectrogram_interpolated = spectrogram_transform(data_cpu)
f_interpolated = spectrogram_transform.f_interpolated
t_interpolated = spectrogram_transform.t_interpolated
plt.subplot(2,nsel,i_class + nsel)
t_interpolated = spectrogram_transform.t_interpolated
f_interpolated = spectrogram_transform.f_interpolated
matrix_shape = spectrogram_interpolated.shape
time_list = [f'{t_interpolated[i]:.0f}' for i in np.round(np.linspace(0, matrix_shape[2]-1,n_ticks)).astype(int)]
freq_list = [f'{f_interpolated[i]:.1f}' for i in np.round(np.linspace(0, matrix_shape[1]-1,n_ticks)).astype(int)]
plt.xticks(np.linspace(0, matrix_shape[2]-1, n_ticks), time_list)
plt.yticks(np.linspace(0, matrix_shape[1]-1, n_ticks), freq_list)
plt.imshow(spectrogram_interpolated[0,:,:])
plt.ylabel("f (Hz)")
plt.xlabel("t (s)")
plt.colorbar()
index += 1
plt.show()
#%%
| 40.395664 | 216 | 0.619616 |
if __name__ == '__main__':
    # Make the project root importable when this module runs as a script.
    import sys
    sys.path.append("../..")
import numpy as np
import torch
import scipy.signal, scipy.interpolate, scipy.ndimage
from param import classes_names, fs, duration_window, duration_overlap, spectro_batch_size
from models.SHL_2018 import Datasets
if __name__ == "__main__":
    import matplotlib.pyplot as plt
    # Total number of activity classes and the raw (untransformed) training set.
    n_classes = len(classes_names)
    DS = Datasets.SignalsDataSet(mode='train', transform=None)
class TemporalTransform():
    """Select (or compute) a single temporal signal from a dict of sensor channels.

    signal_name is either a raw channel name ending in '_x', '_y', '_z', '_w'
    (or the special name "Pressure"), which is returned unchanged, or a
    "<Sensor>_norm" name, for which the Euclidean norm of the sensor's axes is
    computed ("Ori" is a quaternion and combines four components x, y, z, w).
    """
    def __init__(self, signal_name):
        super(TemporalTransform, self).__init__()
        self.signal_name = signal_name
    def __call__(self, data):
        """Return the selected signal.

        data: dict mapping channel names (e.g. "Acc_x") to arrays — assumed
        shape (batch, time) by the callers in this file; TODO confirm.
        Raises ValueError for an unrecognized signal name.
        """
        if self.signal_name[-2:] in ['_x', '_y', '_z', '_w'] or self.signal_name == "Pressure":
            # Raw channel: pass it through unchanged.
            processed_signal = data[self.signal_name]
        elif self.signal_name[-5:] == '_norm':
            # "<Sensor>_norm": Euclidean norm over the sensor's axes.
            suffix_location = self.signal_name.index("_")
            sensor = self.signal_name[:suffix_location]
            if sensor == "Ori":
                # Orientation is a quaternion: four components.
                processed_signal = np.sqrt(data[sensor+"_x"]**2 + data[sensor+"_y"]**2 + data[sensor+"_z"]**2
                                           + data[sensor+"_w"]**2)
            else :
                processed_signal = np.sqrt(data[sensor+"_x"]**2 + data[sensor+"_y"]**2 + data[sensor+"_z"]**2)
        else :
            # Bug fix: the message previously formatted the undefined local
            # `signal_name`, so this branch raised NameError instead of ValueError.
            raise ValueError("unknown signal name: '{}'. Signal names should end with either '_x', '_y', '_z', '_w', or '_norm'".format(self.signal_name))
        return processed_signal
    def __str__(self):
        str_to_return = "Temporal_transform"
        str_to_return += f"\n\t Signal: {self.signal_name}"
        return str_to_return
if __name__ == "__main__":
    # Visual sanity check: for a few signal names, plot each raw axis plus the
    # signal selected by TemporalTransform, once per class (first occurrence).
    index0 = 0
    for tested_signal_name in ["Acc_norm", "Ori_norm", "Mag_norm", "LAcc_x"]:
        plt.figure()
        # Derive the sensor prefix from the signal name ("Acc_norm" -> "Acc").
        if tested_signal_name != 'Pressure':
            suffix_location = tested_signal_name.index("_")
            tested_sensor = tested_signal_name[:suffix_location]
        else:
            tested_sensor = 'Pressure'
        # Channels to plot for this sensor; orientation also has a w component.
        sensor_axis = [tested_sensor + axis for axis in ["_x", "_y", "_z"]] if tested_sensor != 'Pressure' else ['Pressure']
        if tested_sensor == "Ori" : sensor_axis.append(tested_sensor+"_w")
        temporal_transform = TemporalTransform(tested_signal_name)
        remaining_classes = classes_names.copy()
        index = index0
        # Walk the dataset until every class has been plotted once.
        while len(remaining_classes)>0:
            data_tensor, class_tensor = DS[index]
            # Move tensors to CPU numpy for matplotlib.
            data_cpu = {signal:data_tensor[signal].to(torch.device('cpu')).detach().numpy() for signal in data_tensor.keys()}
            class_index = int(class_tensor)
            # Labels are 1-based; classes_names is a 0-based list.
            class_name = classes_names[class_index-1]
            if class_name in remaining_classes:
                remaining_classes.remove(class_name)
                plt.subplot(2, 4, n_classes - len(remaining_classes))
                for k,signal in enumerate(sensor_axis):
                    if k==0:
                        # Time axis built once from the first channel's length.
                        nb = data_cpu[signal].shape[1]
                        x_t = np.linspace(0, nb/fs, nb)
                    plt.plot(x_t, data_cpu[signal][0,:])
                selected_signal = temporal_transform(data_cpu)
                error_message_dtype = "One of the signals does not have the correct type: {}, {} \n dtype should be float32, is actually {}".format(tested_signal_name, str(temporal_transform), selected_signal.dtype)
                # Downstream models expect float32 inputs.
                assert (selected_signal.dtype == 'float32'), error_message_dtype
                plt.plot(x_t, selected_signal[0,:], '--')
                plt.xlabel("t (s)")
                legend = sensor_axis + [tested_signal_name+' (selected)']
                plt.legend(legend)
                plt.title("{} ({}, index={})".format(tested_sensor, classes_names[class_index-1], index))
            index +=1
    plt.show()
def interpol_log(f, t, spectrogram, out_size):
    """Resample a batch of spectrograms onto an (out_f, out_t) grid that is
    logarithmic in frequency and linear in time.

    f, t: frequency and time bins from scipy.signal.spectrogram.
    spectrogram: array of shape (B, len(f), len(t)).
    Returns (f_interpolated, t_interpolated, spectrogram_interpolated) where
    the interpolated axes give the physical value of each output bin.
    """
    B = spectrogram.shape[0]
    out_f, out_t = out_size
    # Shift by f[1] so log(0) is never taken at the first bin.
    log_f = np.log(f+f[1])
    # Map both source axes to [0, out_*] so integer output indices can be
    # fed straight to the interpolators below.
    log_f_normalized = (log_f-log_f[0])/(log_f[-1]-log_f[0])
    t_normalized = (t-t[0])/(t[-1]-t[0])
    rescaled_f = out_f*log_f_normalized
    rescaled_t = out_t*t_normalized
    spectrogram_interpolated = np.zeros( (B, out_f, out_t), dtype='float32')
    index_f, index_t = np.arange(out_f), np.arange(out_t)
    for i in range(B):
        # NOTE(review): scipy.interpolate.interp2d is deprecated in recent
        # SciPy releases — consider RegularGridInterpolator when upgrading.
        spectrogram_fn = scipy.interpolate.interp2d(rescaled_t, rescaled_f, spectrogram[i,:,:], copy=False)
        spectrogram_interpolated[i,:,:] = spectrogram_fn(index_t, index_f)
    # Recover the physical frequency/time value of each output bin.
    f_fn = scipy.interpolate.interp1d(rescaled_f, f, copy=False)
    f_interpolated = f_fn(index_f)
    t_fn = scipy.interpolate.interp1d(rescaled_t, t, copy=False)
    t_interpolated = t_fn(index_t)
    return f_interpolated, t_interpolated, spectrogram_interpolated
class SpectrogramTransform():
    """Turn a dict of raw sensor channels into log-power spectrograms.

    Selects a signal with TemporalTransform, computes a spectrogram with the
    module-level window parameters, and log-interpolates it to self.out_size.
    The axes of the last computed spectrogram are kept on self.f_interpolated
    and self.t_interpolated for plotting.
    """
    def __init__(self, signal_name):
        super(SpectrogramTransform, self).__init__()
        self.temporal_transform = TemporalTransform(signal_name)
        # Window parameters come from the module-level `param` import.
        self.fs = fs
        self.duration_window = duration_window
        self.duration_overlap = duration_overlap
        self.spectro_batch_size = spectro_batch_size
        self.signal_name = signal_name
        # Target (frequency, time) resolution of the interpolated spectrogram.
        self.out_size = (48, 48)
    def __call__(self, data):
        """Return a (B, out_f, out_t) float32 array of log-power spectrograms."""
        temporal_signal = self.temporal_transform(data)
        # Free the raw channels early — spectrograms are memory hungry.
        del data
        fs = self.fs
        nperseg = int(self.duration_window * fs)
        noverlap = int(self.duration_overlap * fs)
        spectro_batch_size = self.spectro_batch_size
        current_spectro_batch_size = temporal_signal.shape[0]
        if current_spectro_batch_size < spectro_batch_size :
            # Small batch: process in one shot.
            f, t, spectrogram = scipy.signal.spectrogram(temporal_signal, fs=fs, nperseg=nperseg, noverlap=noverlap)
            f_interpolated, t_interpolated, interpolated_spectrogram = interpol_log(f, t, spectrogram, self.out_size)
        else :
            # Large batch: chunk to bound peak memory of the raw spectrogram.
            n_batches = (current_spectro_batch_size-1)//spectro_batch_size +1
            nb_interp_f, nb_interp_t = self.out_size
            interpolated_spectrogram = np.zeros((current_spectro_batch_size, nb_interp_f, nb_interp_t), dtype='float32')
            for i in range(n_batches):
                i_min = i * spectro_batch_size
                i_max = (i+1) * spectro_batch_size
                this_temporal_signal = temporal_signal[i_min:i_max,:]
                f, t, spectrogram = scipy.signal.spectrogram(this_temporal_signal, fs=fs, nperseg=nperseg, noverlap=noverlap)
                f_interpolated, t_interpolated, interpolated_spectrogram[i_min:i_max,:,:] = interpol_log(f, t, spectrogram, self.out_size)
        del temporal_signal
        # In-place log-power; epsilon avoids log(0).
        np.log(interpolated_spectrogram + 1e-10, dtype='float32', out=interpolated_spectrogram)
        # Keep the axes of the last chunk for callers that want to label plots.
        self.f_interpolated = f_interpolated
        self.t_interpolated = t_interpolated
        return interpolated_spectrogram
    def __str__(self):
        str_to_return = "Spectrogram transform"
        str_to_return += f"\n\t Signals: {self.signal_name}"
        str_to_return += f"\n\t Output size: {self.out_size}"
        str_to_return += f"\n\t Interpolation: log-interpolation"
        str_to_return += "\n\t Log-power"
        return str_to_return
if __name__ == "__main__":
    # Visual sanity check: temporal signal (top row) and its log-interpolated
    # spectrogram (bottom row) for one sample of each selected class.
    fontdict = {'fontsize':10}
    n_ticks = 10
    selected_classes = ["Run", "Walk"]
    remaining_classes = selected_classes.copy()
    nsel = len(selected_classes)
    index = 3204
    plt.figure(figsize=(12,8))
    signal_name = "Acc_norm"
    temporal_transform = TemporalTransform(signal_name)
    spectrogram_transform = SpectrogramTransform(signal_name)
    while len(remaining_classes)>0:
        data_tensor, class_tensor = DS[index]
        data_cpu = {signal:data_tensor[signal].cpu().detach().numpy() for signal in data_tensor.keys()}
        class_index = int(class_tensor)
        # Labels are 1-based.
        class_name = classes_names[class_index-1]
        if class_name in remaining_classes:
            remaining_classes.remove(class_name)
            # Column for this class: between 1 and nsel.
            i_class = nsel - len(remaining_classes)
            temporal_signal = temporal_transform(data_cpu)
            nb = temporal_signal.shape[1]
            x_t = np.linspace(0, nb/fs, nb)
            plt.subplot(2,nsel,i_class)
            plt.plot(x_t, temporal_signal[0,:])
            plt.title(f'{class_name} (index={index})', fontdict)
            plt.xlabel("t (sec)")
            plt.ylabel(signal_name)
            # Reload the sample: SpectrogramTransform deletes its input dict.
            data_tensor, _ = DS[index]
            data_cpu = {signal:data_tensor[signal].to(torch.device('cpu')).detach().numpy() for signal in data_tensor.keys()}
            spectrogram_interpolated = spectrogram_transform(data_cpu)
            f_interpolated = spectrogram_transform.f_interpolated
            t_interpolated = spectrogram_transform.t_interpolated
            plt.subplot(2,nsel,i_class + nsel)
            t_interpolated = spectrogram_transform.t_interpolated
            f_interpolated = spectrogram_transform.f_interpolated
            matrix_shape = spectrogram_interpolated.shape
            # Human-readable tick labels from the physical axis values.
            time_list = [f'{t_interpolated[i]:.0f}' for i in np.round(np.linspace(0, matrix_shape[2]-1,n_ticks)).astype(int)]
            freq_list = [f'{f_interpolated[i]:.1f}' for i in np.round(np.linspace(0, matrix_shape[1]-1,n_ticks)).astype(int)]
            plt.xticks(np.linspace(0, matrix_shape[2]-1, n_ticks), time_list)
            plt.yticks(np.linspace(0, matrix_shape[1]-1, n_ticks), freq_list)
            plt.imshow(spectrogram_interpolated[0,:,:])
            plt.ylabel("f (Hz)")
            plt.xlabel("t (s)")
            plt.colorbar()
        index += 1
    plt.show()
| true | true |
f72bc8fafdff910faf1d81e8260be95f5fd4d2f1 | 6,823 | py | Python | lib/rpn_layer/proposal_target_layer.py | aditya2592/PoseCNN | a763120ce0ceb55cf3432980287ef463728f8052 | [
"MIT"
] | 655 | 2018-03-21T19:55:45.000Z | 2022-03-25T20:41:21.000Z | lib/rpn_layer/proposal_target_layer.py | SergioRAgostinho/PoseCNN | da9eaae850eed7521a2a48a4d27474d655caab42 | [
"MIT"
] | 122 | 2018-04-04T13:57:49.000Z | 2022-03-18T09:28:44.000Z | lib/rpn_layer/proposal_target_layer.py | SergioRAgostinho/PoseCNN | da9eaae850eed7521a2a48a4d27474d655caab42 | [
"MIT"
] | 226 | 2018-03-22T01:40:04.000Z | 2022-03-17T11:56:14.000Z | # --------------------------------------------------------
# Faster R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick, Sean Bell and Xinlei Chen
# --------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import numpy.random as npr
from fcn.config import cfg
from utils.bbox_transform import bbox_transform
from utils.cython_bbox import bbox_overlaps
def proposal_target_layer(rpn_rois, rpn_scores, gt_boxes, poses, _num_classes):
    """
    Assign object detection proposals to ground-truth targets. Produces proposal
    classification labels and bounding-box regression targets.

    Returns (rois, roi_scores, labels, bbox_targets, bbox_inside_weights,
    bbox_outside_weights, poses_target, poses_weight); rois are
    (0, x1, y1, x2, y2) rows and the bbox arrays are N x (4 * _num_classes).
    """
    # Proposal ROIs (0, x1, y1, x2, y2) coming from RPN
    # (i.e., rpn.proposal_layer.ProposalLayer), or any other source
    all_rois = rpn_rois
    all_scores = rpn_scores
    # Include ground-truth boxes in the set of candidate rois
    if cfg.TRAIN.USE_GT:
        zeros = np.zeros((gt_boxes.shape[0], 1), dtype=gt_boxes.dtype)
        all_rois = np.vstack(
            (all_rois, np.hstack((zeros, gt_boxes[:, :-1])))
        )
        # not sure if it a wise appending, but anyway i am not using it
        all_scores = np.vstack((all_scores, zeros))
    num_images = 1
    rois_per_image = cfg.TRAIN.BATCH_SIZE / num_images
    fg_rois_per_image = np.round(cfg.TRAIN.FG_FRACTION * rois_per_image)
    # Sample rois with classification labels and bounding box regression
    # targets
    labels, rois, roi_scores, bbox_targets, bbox_inside_weights, poses_target, poses_weight = _sample_rois(
        all_rois, all_scores, gt_boxes, poses, fg_rois_per_image,
        rois_per_image, _num_classes)
    rois = rois.reshape(-1, 5)
    roi_scores = roi_scores.reshape(-1)
    labels = labels.reshape(-1, 1)
    bbox_targets = bbox_targets.reshape(-1, _num_classes * 4)
    bbox_inside_weights = bbox_inside_weights.reshape(-1, _num_classes * 4)
    # Outside weights simply mirror the inside-weight mask as 0/1 floats.
    bbox_outside_weights = np.array(bbox_inside_weights > 0).astype(np.float32)
    return rois, roi_scores, labels, bbox_targets, bbox_inside_weights, bbox_outside_weights, poses_target, poses_weight
def _get_bbox_regression_labels(bbox_target_data, num_classes):
    """Expand compact regression targets into per-class form.

    Bounding-box regression targets (bbox_target_data) are stored compactly as
    N x (class, tx, ty, tw, th); the network expects a 4-of-4*K layout where
    only the ground-truth class has non-zero targets.

    Returns:
        bbox_target (ndarray): N x 4K blob of regression targets
        bbox_inside_weights (ndarray): N x 4K blob of loss weights
    """
    labels_col = bbox_target_data[:, 0]
    bbox_targets = np.zeros((labels_col.size, 4 * num_classes), dtype=np.float32)
    bbox_inside_weights = np.zeros_like(bbox_targets)
    # Background rows (class 0) keep all-zero targets and weights.
    for row in np.where(labels_col > 0)[0]:
        col = int(4 * labels_col[row])
        bbox_targets[row, col:col + 4] = bbox_target_data[row, 1:]
        bbox_inside_weights[row, col:col + 4] = cfg.TRAIN.BBOX_INSIDE_WEIGHTS
    return bbox_targets, bbox_inside_weights
def _compute_targets(ex_rois, gt_rois, labels):
    """Compute bounding-box regression targets for an image.

    Returns an N x 5 float32 array of (label, tx, ty, tw, th) rows.
    """
    assert ex_rois.shape[0] == gt_rois.shape[0]
    assert ex_rois.shape[1] == 4
    assert gt_rois.shape[1] == 4
    targets = bbox_transform(ex_rois, gt_rois)
    if cfg.TRAIN.BBOX_NORMALIZE_TARGETS_PRECOMPUTED:
        # Optionally normalize targets by a precomputed mean and stdev
        targets = ((targets - np.array(cfg.TRAIN.BBOX_NORMALIZE_MEANS))
                   / np.array(cfg.TRAIN.BBOX_NORMALIZE_STDS))
    return np.hstack(
        (labels[:, np.newaxis], targets)).astype(np.float32, copy=False)
def _compute_pose_targets(quaternions, labels, num_classes):
"""Compute pose regression targets for an image."""
num = quaternions.shape[0]
poses_target = np.zeros((num, 4 * num_classes), dtype=np.float32)
poses_weight = np.zeros((num, 4 * num_classes), dtype=np.float32)
for i in xrange(num):
cls = labels[i]
if cls > 0:
start = int(4 * cls)
end = start + 4
poses_target[i, start:end] = quaternions[i, :]
poses_weight[i, start:end] = 1.0
return poses_target, poses_weight
def _sample_rois(all_rois, all_scores, gt_boxes, poses, fg_rois_per_image, rois_per_image, num_classes):
    """Generate a random sample of RoIs comprising foreground and background
    examples.

    Returns (labels, rois, roi_scores, bbox_targets, bbox_inside_weights,
    poses_target, poses_weight) for the sampled RoIs.
    """
    # overlaps: (rois x gt_boxes)
    # Fix: np.float is a deprecated alias of the builtin float (removed in
    # NumPy 1.24); np.float64 is the identical concrete dtype.
    overlaps = bbox_overlaps(
        np.ascontiguousarray(all_rois[:, 1:5], dtype=np.float64),
        np.ascontiguousarray(gt_boxes[:, :4], dtype=np.float64))
    gt_assignment = overlaps.argmax(axis=1)
    max_overlaps = overlaps.max(axis=1)
    labels = gt_boxes[gt_assignment, 4]
    quaternions = poses[gt_assignment, 6:10]
    # Select foreground RoIs as those with >= FG_THRESH overlap
    fg_inds = np.where(max_overlaps >= cfg.TRAIN.FG_THRESH)[0]
    # Guard against the case when an image has fewer than fg_rois_per_image
    # Select background RoIs as those within [BG_THRESH_LO, BG_THRESH_HI)
    bg_inds = np.where((max_overlaps < cfg.TRAIN.BG_THRESH_HI) &
                       (max_overlaps >= cfg.TRAIN.BG_THRESH_LO))[0]
    # Small modification to the original version where we ensure a fixed number of regions are sampled
    if fg_inds.size > 0 and bg_inds.size > 0:
        fg_rois_per_image = min(fg_rois_per_image, fg_inds.size)
        fg_inds = npr.choice(fg_inds, size=int(fg_rois_per_image), replace=False)
        bg_rois_per_image = rois_per_image - fg_rois_per_image
        to_replace = bg_inds.size < bg_rois_per_image
        bg_inds = npr.choice(bg_inds, size=int(bg_rois_per_image), replace=to_replace)
    elif fg_inds.size > 0:
        to_replace = fg_inds.size < rois_per_image
        fg_inds = npr.choice(fg_inds, size=int(rois_per_image), replace=to_replace)
        fg_rois_per_image = rois_per_image
    elif bg_inds.size > 0:
        to_replace = bg_inds.size < rois_per_image
        bg_inds = npr.choice(bg_inds, size=int(rois_per_image), replace=to_replace)
        fg_rois_per_image = 0
    else:
        # NOTE(review): debugger left in for the "no fg and no bg" case;
        # consider raising an explicit error instead of dropping into pdb.
        import pdb
        pdb.set_trace()
    # The indices that we're selecting (both fg and bg)
    keep_inds = np.append(fg_inds, bg_inds)
    # Select sampled values from various arrays:
    labels = labels[keep_inds]
    # Clamp labels for the background RoIs to 0
    labels[int(fg_rois_per_image):] = 0
    rois = all_rois[keep_inds]
    roi_scores = all_scores[keep_inds]
    # pose regression targets and weights
    poses_target, poses_weight = _compute_pose_targets(quaternions[keep_inds], labels, num_classes)
    bbox_target_data = _compute_targets(
        rois[:, 1:5], gt_boxes[gt_assignment[keep_inds], :4], labels)
    bbox_targets, bbox_inside_weights = \
        _get_bbox_regression_labels(bbox_target_data, num_classes)
    return labels, rois, roi_scores, bbox_targets, bbox_inside_weights, poses_target, poses_weight
| 39.212644 | 118 | 0.725194 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import numpy.random as npr
from fcn.config import cfg
from utils.bbox_transform import bbox_transform
from utils.cython_bbox import bbox_overlaps
def proposal_target_layer(rpn_rois, rpn_scores, gt_boxes, poses, _num_classes):
    """Assign detection proposals to ground-truth targets.

    Produces proposal classification labels, bounding-box regression targets,
    and pose (quaternion) regression targets for a sampled set of RoIs.
    """
    # Proposal RoIs are (0, x1, y1, x2, y2) rows coming from the RPN.
    all_rois = rpn_rois
    all_scores = rpn_scores
    # Optionally include the ground-truth boxes as candidate RoIs.
    if cfg.TRAIN.USE_GT:
        zeros = np.zeros((gt_boxes.shape[0], 1), dtype=gt_boxes.dtype)
        all_rois = np.vstack(
            (all_rois, np.hstack((zeros, gt_boxes[:, :-1])))
        )
        # Ground-truth boxes get placeholder (zero) scores.
        all_scores = np.vstack((all_scores, zeros))
    num_images = 1
    rois_per_image = cfg.TRAIN.BATCH_SIZE / num_images
    fg_rois_per_image = np.round(cfg.TRAIN.FG_FRACTION * rois_per_image)
    # Sample RoIs with classification labels and regression targets.
    labels, rois, roi_scores, bbox_targets, bbox_inside_weights, poses_target, poses_weight = _sample_rois(
        all_rois, all_scores, gt_boxes, poses, fg_rois_per_image,
        rois_per_image, _num_classes)
    rois = rois.reshape(-1, 5)
    roi_scores = roi_scores.reshape(-1)
    labels = labels.reshape(-1, 1)
    bbox_targets = bbox_targets.reshape(-1, _num_classes * 4)
    bbox_inside_weights = bbox_inside_weights.reshape(-1, _num_classes * 4)
    # Outside weights mirror the inside-weight mask as 0/1 floats.
    bbox_outside_weights = np.array(bbox_inside_weights > 0).astype(np.float32)
    return rois, roi_scores, labels, bbox_targets, bbox_inside_weights, bbox_outside_weights, poses_target, poses_weight
def _get_bbox_regression_labels(bbox_target_data, num_classes):
    """Expand compact N x (class, tx, ty, tw, th) targets into the 4-of-4*K
    layout used by the network (only the true class has non-zero targets).

    Returns (bbox_targets, bbox_inside_weights), both N x 4K float32.
    """
    clss = bbox_target_data[:, 0]
    bbox_targets = np.zeros((clss.size, 4 * num_classes), dtype=np.float32)
    bbox_inside_weights = np.zeros(bbox_targets.shape, dtype=np.float32)
    # Background rows (class 0) keep all-zero targets and weights.
    inds = np.where(clss > 0)[0]
    for ind in inds:
        cls = clss[ind]
        start = int(4 * cls)
        end = start + 4
        bbox_targets[ind, start:end] = bbox_target_data[ind, 1:]
        bbox_inside_weights[ind, start:end] = cfg.TRAIN.BBOX_INSIDE_WEIGHTS
    return bbox_targets, bbox_inside_weights
def _compute_targets(ex_rois, gt_rois, labels):
    """Compute bounding-box regression targets for an image.

    Returns an N x 5 float32 array of (label, tx, ty, tw, th) rows.
    """
    assert ex_rois.shape[0] == gt_rois.shape[0]
    assert ex_rois.shape[1] == 4
    assert gt_rois.shape[1] == 4
    targets = bbox_transform(ex_rois, gt_rois)
    if cfg.TRAIN.BBOX_NORMALIZE_TARGETS_PRECOMPUTED:
        # Optionally normalize targets by a precomputed mean and stdev.
        targets = ((targets - np.array(cfg.TRAIN.BBOX_NORMALIZE_MEANS))
                   / np.array(cfg.TRAIN.BBOX_NORMALIZE_STDS))
    return np.hstack(
        (labels[:, np.newaxis], targets)).astype(np.float32, copy=False)
def _compute_pose_targets(quaternions, labels, num_classes):
num = quaternions.shape[0]
poses_target = np.zeros((num, 4 * num_classes), dtype=np.float32)
poses_weight = np.zeros((num, 4 * num_classes), dtype=np.float32)
for i in xrange(num):
cls = labels[i]
if cls > 0:
start = int(4 * cls)
end = start + 4
poses_target[i, start:end] = quaternions[i, :]
poses_weight[i, start:end] = 1.0
return poses_target, poses_weight
def _sample_rois(all_rois, all_scores, gt_boxes, poses, fg_rois_per_image, rois_per_image, num_classes):
    """Generate a random sample of foreground and background RoIs.

    Returns (labels, rois, roi_scores, bbox_targets, bbox_inside_weights,
    poses_target, poses_weight) for the sampled RoIs.
    """
    # IoU matrix: (num_rois x num_gt_boxes).
    # Fix: np.float is a deprecated alias of the builtin float (removed in
    # NumPy 1.24); np.float64 is the identical concrete dtype.
    overlaps = bbox_overlaps(
        np.ascontiguousarray(all_rois[:, 1:5], dtype=np.float64),
        np.ascontiguousarray(gt_boxes[:, :4], dtype=np.float64))
    gt_assignment = overlaps.argmax(axis=1)
    max_overlaps = overlaps.max(axis=1)
    labels = gt_boxes[gt_assignment, 4]
    quaternions = poses[gt_assignment, 6:10]
    # Foreground RoIs: overlap >= FG_THRESH; background: [BG_THRESH_LO, BG_THRESH_HI).
    fg_inds = np.where(max_overlaps >= cfg.TRAIN.FG_THRESH)[0]
    bg_inds = np.where((max_overlaps < cfg.TRAIN.BG_THRESH_HI) &
                       (max_overlaps >= cfg.TRAIN.BG_THRESH_LO))[0]
    # Ensure a fixed number of regions is sampled, padding with replacement
    # when one of the two pools is too small.
    if fg_inds.size > 0 and bg_inds.size > 0:
        fg_rois_per_image = min(fg_rois_per_image, fg_inds.size)
        fg_inds = npr.choice(fg_inds, size=int(fg_rois_per_image), replace=False)
        bg_rois_per_image = rois_per_image - fg_rois_per_image
        to_replace = bg_inds.size < bg_rois_per_image
        bg_inds = npr.choice(bg_inds, size=int(bg_rois_per_image), replace=to_replace)
    elif fg_inds.size > 0:
        to_replace = fg_inds.size < rois_per_image
        fg_inds = npr.choice(fg_inds, size=int(rois_per_image), replace=to_replace)
        fg_rois_per_image = rois_per_image
    elif bg_inds.size > 0:
        to_replace = bg_inds.size < rois_per_image
        bg_inds = npr.choice(bg_inds, size=int(rois_per_image), replace=to_replace)
        fg_rois_per_image = 0
    else:
        # NOTE(review): debugger left in for the "no fg and no bg" case;
        # consider raising an explicit error instead of dropping into pdb.
        import pdb
        pdb.set_trace()
    keep_inds = np.append(fg_inds, bg_inds)
    # Select sampled values from various arrays:
    labels = labels[keep_inds]
    # Clamp labels for the background RoIs to 0
    labels[int(fg_rois_per_image):] = 0
    rois = all_rois[keep_inds]
    roi_scores = all_scores[keep_inds]
    # pose regression targets and weights
    poses_target, poses_weight = _compute_pose_targets(quaternions[keep_inds], labels, num_classes)
    bbox_target_data = _compute_targets(
        rois[:, 1:5], gt_boxes[gt_assignment[keep_inds], :4], labels)
    bbox_targets, bbox_inside_weights = \
        _get_bbox_regression_labels(bbox_target_data, num_classes)
    return labels, rois, roi_scores, bbox_targets, bbox_inside_weights, poses_target, poses_weight
| true | true |
f72bca34371f9e866f547515c6eb963685418158 | 3,878 | py | Python | alveo/apps/whole_app_acceleration/classification/test_classify_pp.py | dendisuhubdy/Vitis-AI | 524f65224c52314155dafc011d488ed30e458fcb | [
"Apache-2.0"
] | 3 | 2020-10-29T15:00:30.000Z | 2021-10-21T08:09:34.000Z | alveo/apps/whole_app_acceleration/classification/test_classify_pp.py | dendisuhubdy/Vitis-AI | 524f65224c52314155dafc011d488ed30e458fcb | [
"Apache-2.0"
] | 20 | 2020-10-31T03:19:03.000Z | 2020-11-02T18:59:49.000Z | alveo/apps/whole_app_acceleration/classification/test_classify_pp.py | dendisuhubdy/Vitis-AI | 524f65224c52314155dafc011d488ed30e458fcb | [
"Apache-2.0"
] | 9 | 2020-10-14T02:04:10.000Z | 2020-12-01T08:23:02.000Z | # Copyright 2019 Xilinx Inc.
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from six import itervalues, iteritems
from ctypes import *
import numpy as np
import os, sys
from vai.dpuv1.rt import xdnn, xdnn_io
from vai.dpuv1.rt.vitis.python.dpu.runner import Runner
import waa_rt
import multiprocessing as mp
import ctypes
def pre_process(q,args):
    """Run the FPGA pre-processing kernel over all input images.

    Pushes each pre-processed array onto multiprocessing queue `q`, in the
    same batched order that process_xdnn consumes them.
    """
    xclbin_p=str(args['xclbin']+"/xdnn_v3_96x16_2pe_8b_9mb_bank03.xclbin")
    kernelName_p="pp_pipeline_accel"
    deviceIdx_p=args['deviceid']
    # Handle to the hardware pre-processing pipeline (WAA runtime).
    fpga_pp = waa_rt.PreProcess(xclbin_p,kernelName_p,deviceIdx_p, 0)
    batch_sz = args['batch_sz']
    img_paths = xdnn_io.getFilePaths(args['images'])
    print("Pre-processing handle created. Populating Queue")
    for i in range(0, len(img_paths), batch_sz):
        for j, p in enumerate(img_paths[i:i + batch_sz]):
            # ht (image height) is unused here; only the array is queued.
            arr, ht = fpga_pp.preprocess_input(p)
            q.put(arr)
    print("Queue populated")
def process_xdnn(q,args):
    """Consume pre-processed images from queue `q` and run XDNN inference.

    Classifies in batches; if args['golden'] is set, accumulates and prints
    top-1/top-5 accuracy, otherwise prints per-image classifications.
    """
    runner = Runner(args['vitis_rundir'])
    inTensors = runner.get_input_tensors()
    outTensors = runner.get_output_tensors()
    batch_sz = args['batch_sz']
    if batch_sz == -1:
        # use Runner's suggested batch size
        batch_sz = inTensors[0].dims[0]
    if args['golden']:
        goldenMap = xdnn_io.getGoldenMap(args['golden'])
        top5Count = 0
        top1Count = 0
    # Pre-allocate one float32 buffer per input/output tensor.
    fpgaBlobs = []
    for io in [inTensors, outTensors]:
        blobs = []
        for t in io:
            shape = (batch_sz,) + tuple([t.dims[i] for i in range(t.ndims)][1:])
            blobs.append(np.empty((shape), dtype=np.float32, order='C'))
        fpgaBlobs.append(blobs)
    img_paths = xdnn_io.getFilePaths(args['images'])
    labels = xdnn_io.get_labels(args['labels'])
    # CPU ops for the final fully-connected layer and softmax.
    xdnnCPUOp = xdnn.XDNNCPUOp("%s/weights.h5" % args['vitis_rundir'])
    fcOutput = np.empty((batch_sz, args['outsz'],), dtype=np.float32, order='C')
    fpgaInput = fpgaBlobs[0][0]
    for i in range(0, len(img_paths), batch_sz):
        pl = []
        # fill tensor input data from image file
        for j, p in enumerate(img_paths[i:i + batch_sz]):
            # Images arrive on the queue in the same order as img_paths.
            img, _ = q.get(), None
            pl.append(p)
            np.copyto(fpgaInput[j], img)
        jid = runner.execute_async(fpgaBlobs[0], fpgaBlobs[1])
        runner.wait(jid)
        xdnnCPUOp.computeFC(fpgaBlobs[1][0], fcOutput)
        softmaxOut = xdnnCPUOp.computeSoftmax(fcOutput)
        if args['golden']:
            for j,p in enumerate(img_paths[i:i + batch_sz]):
                top1Count += xdnn_io.isTopK(softmaxOut[j], goldenMap, p, labels, 1)
                top5Count += xdnn_io.isTopK(softmaxOut[j], goldenMap, p, labels, 5)
        else:
            xdnn_io.printClassification(softmaxOut, pl, labels)
    if args['golden']:
        print ( ("\nAverage accuracy (n=%d) Top-1: %.1f%%, Top-5: %.1f%%\n") % (len(img_paths), float(top1Count)/float(len(img_paths))*100., float(top5Count)/float(len(img_paths))*100.) )
if __name__ == '__main__':
    # Entry point: run HW pre-processing and XDNN inference concurrently,
    # connected by a multiprocessing queue.
    print("\n\n\n\n\n\n\n\n" + '\33[32m' + "Running Inference with HW Pre-processing" + '\33[0m')
    args = xdnn_io.processCommandLine()
    #Create a queue for passing the pre-processed data
    q = mp.Queue()
    #Creating a process to run HW pre-processing kernel
    p_preprocess = mp.Process(target=pre_process,args=(q,args))
    #Process to run XDNN
    p_xdnn = mp.Process(target=process_xdnn,args=(q,args))
    p_preprocess.start()
    p_xdnn.start()
    # Wait for both producer and consumer to finish.
    p_preprocess.join()
    p_xdnn.join()
| 34.318584 | 183 | 0.687726 |
from __future__ import print_function
from six import itervalues, iteritems
from ctypes import *
import numpy as np
import os, sys
from vai.dpuv1.rt import xdnn, xdnn_io
from vai.dpuv1.rt.vitis.python.dpu.runner import Runner
import waa_rt
import multiprocessing as mp
import ctypes
def pre_process(q,args):
    """Run the FPGA pre-processing kernel over all input images and push each
    pre-processed array onto multiprocessing queue `q`."""
    xclbin_p=str(args['xclbin']+"/xdnn_v3_96x16_2pe_8b_9mb_bank03.xclbin")
    kernelName_p="pp_pipeline_accel"
    deviceIdx_p=args['deviceid']
    # Handle to the hardware pre-processing pipeline (WAA runtime).
    fpga_pp = waa_rt.PreProcess(xclbin_p,kernelName_p,deviceIdx_p, 0)
    batch_sz = args['batch_sz']
    img_paths = xdnn_io.getFilePaths(args['images'])
    print("Pre-processing handle created. Populating Queue")
    for i in range(0, len(img_paths), batch_sz):
        for j, p in enumerate(img_paths[i:i + batch_sz]):
            # ht (image height) is unused; only the array is queued.
            arr, ht = fpga_pp.preprocess_input(p)
            q.put(arr)
    print("Queue populated")
def process_xdnn(q,args):
    """Consume pre-processed images from queue `q`, run XDNN inference, and
    print classifications or top-1/top-5 accuracy (when args['golden'] set)."""
    runner = Runner(args['vitis_rundir'])
    inTensors = runner.get_input_tensors()
    outTensors = runner.get_output_tensors()
    batch_sz = args['batch_sz']
    if batch_sz == -1:
        # Fall back to the Runner's suggested batch size.
        batch_sz = inTensors[0].dims[0]
    if args['golden']:
        goldenMap = xdnn_io.getGoldenMap(args['golden'])
        top5Count = 0
        top1Count = 0
    # Pre-allocate one float32 buffer per input/output tensor.
    fpgaBlobs = []
    for io in [inTensors, outTensors]:
        blobs = []
        for t in io:
            shape = (batch_sz,) + tuple([t.dims[i] for i in range(t.ndims)][1:])
            blobs.append(np.empty((shape), dtype=np.float32, order='C'))
        fpgaBlobs.append(blobs)
    img_paths = xdnn_io.getFilePaths(args['images'])
    labels = xdnn_io.get_labels(args['labels'])
    # CPU ops for the final fully-connected layer and softmax.
    xdnnCPUOp = xdnn.XDNNCPUOp("%s/weights.h5" % args['vitis_rundir'])
    fcOutput = np.empty((batch_sz, args['outsz'],), dtype=np.float32, order='C')
    fpgaInput = fpgaBlobs[0][0]
    for i in range(0, len(img_paths), batch_sz):
        pl = []
        # fill tensor input data from image file
        for j, p in enumerate(img_paths[i:i + batch_sz]):
            # Images arrive on the queue in the same order as img_paths.
            img, _ = q.get(), None
            pl.append(p)
            np.copyto(fpgaInput[j], img)
        jid = runner.execute_async(fpgaBlobs[0], fpgaBlobs[1])
        runner.wait(jid)
        xdnnCPUOp.computeFC(fpgaBlobs[1][0], fcOutput)
        softmaxOut = xdnnCPUOp.computeSoftmax(fcOutput)
        if args['golden']:
            for j,p in enumerate(img_paths[i:i + batch_sz]):
                top1Count += xdnn_io.isTopK(softmaxOut[j], goldenMap, p, labels, 1)
                top5Count += xdnn_io.isTopK(softmaxOut[j], goldenMap, p, labels, 5)
        else:
            xdnn_io.printClassification(softmaxOut, pl, labels)
    if args['golden']:
        print ( ("\nAverage accuracy (n=%d) Top-1: %.1f%%, Top-5: %.1f%%\n") % (len(img_paths), float(top1Count)/float(len(img_paths))*100., float(top5Count)/float(len(img_paths))*100.) )
print("\n\n\n\n\n\n\n\n" + '\33[32m' + "Running Inference with HW Pre-processing" + '\33[0m')
args = xdnn_io.processCommandLine()
#Create a queue for passing the pre-processed data
q = mp.Queue()
#Creating a process to run HW pre-processing kernel
p_preprocess = mp.Process(target=pre_process,args=(q,args))
#Process to run XDNN
p_xdnn = mp.Process(target=process_xdnn,args=(q,args))
p_preprocess.start()
p_xdnn.start()
p_preprocess.join()
p_xdnn.join()
| true | true |
f72bca80139a374bf9bcf9170ec60403226e508e | 5,931 | py | Python | code/networks/Unet.py | loveredcarrot/ssl_multi_seg | 5315dbcc2c44e8effab28699c1491dd67b7ce00b | [
"Apache-2.0"
] | 3 | 2021-04-28T09:36:32.000Z | 2021-11-17T02:52:07.000Z | code/networks/Unet.py | loveredcarrot/ssl_multi_seg | 5315dbcc2c44e8effab28699c1491dd67b7ce00b | [
"Apache-2.0"
] | null | null | null | code/networks/Unet.py | loveredcarrot/ssl_multi_seg | 5315dbcc2c44e8effab28699c1491dd67b7ce00b | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# @Time : 2021/4/8 15:52
# @Author : aurorazeng
# @File : Unet.py
# @license: (C) Copyright 2021-2026, aurorazeng; No reprobaiction without permission.
"""
The implementation is borrowed from: https://github.com/HiLab-git/PyMIC
"""
from __future__ import division, print_function
import numpy as np
import torch
import torch.nn as nn
from torch.distributions.uniform import Uniform
class ConvBlock(nn.Module):
    """Two 3x3 convolutions, each followed by batch norm and ReLU, with a
    dropout layer between the two conv stages."""

    def __init__(self, in_channels, out_channels, dropout_p):
        super(ConvBlock, self).__init__()
        stages = [
            nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1),
            nn.BatchNorm2d(out_channels),
            nn.ReLU(),
            nn.Dropout(dropout_p),
            nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1),
            nn.BatchNorm2d(out_channels),
            nn.ReLU(),
        ]
        self.conv_conv = nn.Sequential(*stages)

    def forward(self, x):
        return self.conv_conv(x)
class DownBlock(nn.Module):
    """2x2 max-pool downsampling followed by a ConvBlock."""

    def __init__(self, in_channels, out_channels, dropout_p):
        super(DownBlock, self).__init__()
        pool = nn.MaxPool2d(2)
        conv = ConvBlock(in_channels, out_channels, dropout_p)
        self.maxpool_conv = nn.Sequential(pool, conv)

    def forward(self, x):
        return self.maxpool_conv(x)
class UpBlock(nn.Module):
    """Upsampling followed by concatenation with the skip feature and a ConvBlock.

    bilinear=True uses a 1x1 conv to reduce channels plus bilinear upsampling;
    bilinear=False uses a transposed convolution instead.
    """

    def __init__(self, in_channels1, in_channels2, out_channels, dropout_p,
                 bilinear=True):
        super(UpBlock, self).__init__()
        self.bilinear = bilinear
        if bilinear:
            # Reduce channels before upsampling so the concat sizes match.
            self.conv1x1 = nn.Conv2d(in_channels1, in_channels2, kernel_size=1)
            self.up = nn.Upsample(
                scale_factor=2, mode='bilinear', align_corners=True)
        else:
            self.up = nn.ConvTranspose2d(
                in_channels1, in_channels2, kernel_size=2, stride=2)
        # Input is the concatenation of skip feature and upsampled feature.
        self.conv = ConvBlock(in_channels2 * 2, out_channels, dropout_p)

    def forward(self, x1, x2):
        # x1: deeper (coarser) feature to upsample; x2: skip connection.
        if self.bilinear:
            x1 = self.conv1x1(x1)
        x1 = self.up(x1)
        x = torch.cat([x2, x1], dim=1)
        return self.conv(x)
class Encoder(nn.Module):
    """U-Net encoder: an input ConvBlock followed by four DownBlocks.

    params must provide 'in_chns', a 5-element 'feature_chns', 'class_num',
    'bilinear', and a 5-element 'dropout' list (one probability per stage).
    """

    def __init__(self, params):
        super(Encoder, self).__init__()
        self.params = params
        self.in_chns = self.params['in_chns']
        self.ft_chns = self.params['feature_chns']
        self.n_class = self.params['class_num']
        self.bilinear = self.params['bilinear']
        self.dropout = self.params['dropout']
        # The architecture is fixed at five resolution levels.
        assert (len(self.ft_chns) == 5)
        self.in_conv = ConvBlock(
            self.in_chns, self.ft_chns[0], self.dropout[0])
        self.down1 = DownBlock(
            self.ft_chns[0], self.ft_chns[1], self.dropout[1])
        self.down2 = DownBlock(
            self.ft_chns[1], self.ft_chns[2], self.dropout[2])
        self.down3 = DownBlock(
            self.ft_chns[2], self.ft_chns[3], self.dropout[3])
        self.down4 = DownBlock(
            self.ft_chns[3], self.ft_chns[4], self.dropout[4])

    def forward(self, x):
        """Return the feature pyramid [x0..x4], finest to coarsest."""
        x0 = self.in_conv(x)
        x1 = self.down1(x0)
        x2 = self.down2(x1)
        x3 = self.down3(x2)
        x4 = self.down4(x3)
        return [x0, x1, x2, x3, x4]
class Decoder(nn.Module):
    """U-Net decoder: four UpBlocks consuming the encoder pyramid, then a
    1x1 conv producing per-class logits."""

    def __init__(self, params):
        super(Decoder, self).__init__()
        self.params = params
        self.in_chns = self.params['in_chns']
        self.ft_chns = self.params['feature_chns']
        self.n_class = self.params['class_num']
        self.bilinear = self.params['bilinear']
        assert (len(self.ft_chns) == 5)
        # No dropout on the decoding path.
        self.up1 = UpBlock(
            self.ft_chns[4], self.ft_chns[3], self.ft_chns[3], dropout_p=0.0)
        self.up2 = UpBlock(
            self.ft_chns[3], self.ft_chns[2], self.ft_chns[2], dropout_p=0.0)
        self.up3 = UpBlock(
            self.ft_chns[2], self.ft_chns[1], self.ft_chns[1], dropout_p=0.0)
        self.up4 = UpBlock(
            self.ft_chns[1], self.ft_chns[0], self.ft_chns[0], dropout_p=0.0)
        self.out_conv = nn.Conv2d(self.ft_chns[0], self.n_class,
                                  kernel_size=1, padding=0)

    def forward(self, feature):
        """feature: the [x0..x4] pyramid returned by Encoder.forward."""
        x0 = feature[0]
        x1 = feature[1]
        x2 = feature[2]
        x3 = feature[3]
        x4 = feature[4]
        x = self.up1(x4, x3)
        x = self.up2(x, x2)
        x = self.up3(x, x1)
        x = self.up4(x, x0)
        output = self.out_conv(x)
        return output
class UNet(nn.Module):
    """Standard 2D U-Net (encoder + decoder) without dropout."""

    def __init__(self, in_chns, class_num):
        super(UNet, self).__init__()
        params = {'in_chns': in_chns,
                  # lighter alternative: 'feature_chns': [16, 32, 64, 128, 256]
                  'feature_chns': [32, 64, 128, 256, 512],
                  'dropout': [0, 0, 0, 0, 0],
                  'class_num': class_num,
                  'bilinear': False,
                  'acti_func': 'relu'}
        self.encoder = Encoder(params)
        self.decoder = Decoder(params)

    def forward(self, x):
        """Return per-class logits with the same spatial size as x."""
        feature = self.encoder(x)
        output = self.decoder(feature)
        return output
class UNetWithDrop(nn.Module):
    """2D U-Net with per-stage encoder dropout (probabilities grow with depth)."""

    def __init__(self, in_chns, class_num):
        super(UNetWithDrop, self).__init__()
        params = {'in_chns': in_chns,
                  # lighter alternative: 'feature_chns': [16, 32, 64, 128, 256]
                  'feature_chns': [32, 64, 128, 256, 512],
                  'dropout': [0.05, 0.1, 0.2, 0.3, 0.5],
                  'class_num': class_num,
                  'bilinear': False,
                  'acti_func': 'relu'}
        self.encoder = Encoder(params)
        self.decoder = Decoder(params)

    def forward(self, x):
        """Return per-class logits with the same spatial size as x."""
        feature = self.encoder(x)
        output = self.decoder(feature)
        return output
| 31.887097 | 85 | 0.572079 |
from __future__ import division, print_function
import numpy as np
import torch
import torch.nn as nn
from torch.distributions.uniform import Uniform
class ConvBlock(nn.Module):
    """Two 3x3 convolutions (stride 1, padding 1), each followed by batch
    norm and ReLU, with dropout between them.

    Spatial size is preserved; channels go in_channels -> out_channels.
    """

    def __init__(self, in_channels, out_channels, dropout_p):
        super(ConvBlock, self).__init__()
        layers = [
            nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1),
            nn.BatchNorm2d(out_channels),
            nn.ReLU(),
            nn.Dropout(dropout_p),
            nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1),
            nn.BatchNorm2d(out_channels),
            nn.ReLU(),
        ]
        self.conv_conv = nn.Sequential(*layers)

    def forward(self, x):
        return self.conv_conv(x)
class DownBlock(nn.Module):
    """Halve the spatial resolution with 2x2 max pooling, then apply a
    ConvBlock to change the channel count."""
    def __init__(self, in_channels, out_channels, dropout_p):
        super(DownBlock, self).__init__()
        self.maxpool_conv = nn.Sequential(
            nn.MaxPool2d(2),
            ConvBlock(in_channels, out_channels, dropout_p)
        )
    def forward(self, x):
        return self.maxpool_conv(x)
class UpBlock(nn.Module):
    """Upsample the deeper feature map, concatenate it with the skip
    connection, and fuse both with a ConvBlock.

    in_channels1 is the channel count of the deeper map x1;
    in_channels2 that of the skip map x2.
    """
    def __init__(self, in_channels1, in_channels2, out_channels, dropout_p,
                 bilinear=True):
        super(UpBlock, self).__init__()
        self.bilinear = bilinear
        if bilinear:
            # Reduce channels with a 1x1 conv, then bilinear upsample x2.
            self.conv1x1 = nn.Conv2d(in_channels1, in_channels2, kernel_size=1)
            self.up = nn.Upsample(
                scale_factor=2, mode='bilinear', align_corners=True)
        else:
            # Learned upsampling: transposed conv doubles the resolution.
            self.up = nn.ConvTranspose2d(
                in_channels1, in_channels2, kernel_size=2, stride=2)
        # After concatenation the input carries in_channels2 * 2 channels.
        self.conv = ConvBlock(in_channels2 * 2, out_channels, dropout_p)
    def forward(self, x1, x2):
        # x1: deeper feature map to upsample; x2: skip connection.
        if self.bilinear:
            x1 = self.conv1x1(x1)
        x1 = self.up(x1)
        x = torch.cat([x2, x1], dim=1)
        return self.conv(x)
class Encoder(nn.Module):
    """Contracting path: an input ConvBlock followed by four DownBlocks.

    forward() returns the feature maps of all five scales so a decoder
    can use them as skip connections.
    """
    def __init__(self, params):
        super(Encoder, self).__init__()
        self.params = params
        self.in_chns = self.params['in_chns']
        self.ft_chns = self.params['feature_chns']
        self.n_class = self.params['class_num']
        self.bilinear = self.params['bilinear']
        self.dropout = self.params['dropout']
        # The architecture is hard-wired to five resolution levels.
        assert (len(self.ft_chns) == 5)
        self.in_conv = ConvBlock(
            self.in_chns, self.ft_chns[0], self.dropout[0])
        self.down1 = DownBlock(
            self.ft_chns[0], self.ft_chns[1], self.dropout[1])
        self.down2 = DownBlock(
            self.ft_chns[1], self.ft_chns[2], self.dropout[2])
        self.down3 = DownBlock(
            self.ft_chns[2], self.ft_chns[3], self.dropout[3])
        self.down4 = DownBlock(
            self.ft_chns[3], self.ft_chns[4], self.dropout[4])
    def forward(self, x):
        # Finest (x0) to coarsest (x4) feature maps.
        x0 = self.in_conv(x)
        x1 = self.down1(x0)
        x2 = self.down2(x1)
        x3 = self.down3(x2)
        x4 = self.down4(x3)
        return [x0, x1, x2, x3, x4]
class Decoder(nn.Module):
    """Expanding path: four UpBlock stages plus a 1x1 output conv that
    maps the finest features to per-pixel class logits.

    NOTE(review): params['bilinear'] is stored but never passed to
    UpBlock, so the UpBlock default (bilinear=True) is used — confirm
    this is intended.
    """
    def __init__(self, params):
        super(Decoder, self).__init__()
        self.params = params
        self.in_chns = self.params['in_chns']
        self.ft_chns = self.params['feature_chns']
        self.n_class = self.params['class_num']
        self.bilinear = self.params['bilinear']
        # Hard-wired to five resolution levels, matching the Encoder.
        assert (len(self.ft_chns) == 5)
        self.up1 = UpBlock(
            self.ft_chns[4], self.ft_chns[3], self.ft_chns[3], dropout_p=0.0)
        self.up2 = UpBlock(
            self.ft_chns[3], self.ft_chns[2], self.ft_chns[2], dropout_p=0.0)
        self.up3 = UpBlock(
            self.ft_chns[2], self.ft_chns[1], self.ft_chns[1], dropout_p=0.0)
        self.up4 = UpBlock(
            self.ft_chns[1], self.ft_chns[0], self.ft_chns[0], dropout_p=0.0)
        self.out_conv = nn.Conv2d(self.ft_chns[0], self.n_class,
                                  kernel_size=1, padding=0)
    def forward(self, feature):
        # feature holds the encoder outputs, finest (x0) to coarsest (x4).
        x0 = feature[0]
        x1 = feature[1]
        x2 = feature[2]
        x3 = feature[3]
        x4 = feature[4]
        x = self.up1(x4, x3)
        x = self.up2(x, x2)
        x = self.up3(x, x1)
        x = self.up4(x, x0)
        output = self.out_conv(x)
        return output
class UNet(nn.Module):
    """U-Net assembled from Encoder and Decoder, with all dropout at 0."""
    def __init__(self, in_chns, class_num):
        super(UNet, self).__init__()
        params = {'in_chns': in_chns,
                  'feature_chns': [32, 64, 128, 256, 512],
                  'dropout': [0, 0, 0, 0, 0],
                  'class_num': class_num,
                  'bilinear': False,
                  'acti_func': 'relu'}
        self.encoder = Encoder(params)
        self.decoder = Decoder(params)
    def forward(self, x):
        feature = self.encoder(x)
        output = self.decoder(feature)
        return output
class UNetWithDrop(nn.Module):
    """U-Net whose encoder applies per-level dropout, increasing with depth."""
    def __init__(self, in_chns, class_num):
        super(UNetWithDrop, self).__init__()
        params = {'in_chns': in_chns,
                  'feature_chns': [32, 64, 128, 256, 512],
                  'dropout': [0.05, 0.1, 0.2, 0.3, 0.5],
                  'class_num': class_num,
                  'bilinear': False,
                  'acti_func': 'relu'}
        self.encoder = Encoder(params)
        self.decoder = Decoder(params)
    def forward(self, x):
        feature = self.encoder(x)
        output = self.decoder(feature)
        return output
| true | true |
f72bca982790961f6bbe8bc31f1a60032438e1a2 | 154 | py | Python | chat_assistant/chat_assistant_app/urls.py | mrhegemon/Rasa_zero_rpc_XR_bot | a468cc1f2b1a4e935ce18e97dcb7a11070bbea0b | [
"MIT"
] | 1 | 2021-06-21T10:44:51.000Z | 2021-06-21T10:44:51.000Z | chat_assistant/chat_assistant_app/urls.py | mrhegemon/Rasa_zero_rpc_XR_bot | a468cc1f2b1a4e935ce18e97dcb7a11070bbea0b | [
"MIT"
] | null | null | null | chat_assistant/chat_assistant_app/urls.py | mrhegemon/Rasa_zero_rpc_XR_bot | a468cc1f2b1a4e935ce18e97dcb7a11070bbea0b | [
"MIT"
] | 1 | 2021-06-07T23:09:30.000Z | 2021-06-07T23:09:30.000Z | from django.conf.urls import url
from . import views

# Route registrations for the chat assistant app.
# The specific '^chat/' pattern must come before the empty pattern:
# url('') is matched by regex search, so it matches every request path
# and would otherwise shadow every later entry.
urlpatterns = [
    url('^chat/', views.chat, name='chat'),
    url('', views.chat, name='chat'),
]
| 19.25 | 43 | 0.62987 | from django.conf.urls import url
from . import views
# NOTE(review): url('', ...) is matched by regex search, so it matches
# every request path and the '^chat/' route below is never reached.
# Both entries point at the same view, so dispatch is unaffected, but
# the specific pattern should come first.
urlpatterns = [
    url('', views.chat, name='chat'),
    url('^chat/', views.chat, name='chat'),
]
| true | true |
f72bcb05e1a89751517adfac4596a0dd60fe06f8 | 2,516 | py | Python | actions/macro/building_positioning.py | drakonnan1st/JackBot | 345df784098cb9eb055b3901fe7455807c58a4e1 | [
"MIT"
] | null | null | null | actions/macro/building_positioning.py | drakonnan1st/JackBot | 345df784098cb9eb055b3901fe7455807c58a4e1 | [
"MIT"
] | null | null | null | actions/macro/building_positioning.py | drakonnan1st/JackBot | 345df784098cb9eb055b3901fe7455807c58a4e1 | [
"MIT"
] | null | null | null | """Everything related to building positioning goes here"""
from sc2.constants import EVOLUTIONCHAMBER, ENGINEERINGBAY
from sc2.data import ACTION_RESULT
from sc2.position import Point2
class BuildingPositioning:
    """Pre-computes and serves placement points for tech buildings."""
    async def prepare_building_positions(self, center):
        """Collect buildable points behind the mineral line of *center*.

        Candidates lie on a ring 9-11 cells from the hatchery center and
        about 3 cells from the nearest mineral patch, then are validated
        with placement queries for both building footprints.
        """
        mineral_field = self.state.mineral_field
        if mineral_field:
            close_points = range(-11, 12)
            center_position = center.position
            add_positions = self.building_positions.append
            # Ring constraint: 81 <= x^2 + y^2 <= 121 keeps offsets 9-11
            # cells from the base; the distance test keeps only points
            # whose distance to the closest nearby mineral patch is 3±0.5.
            viable_points = [
                point
                for point in (
                    Point2((x + center_position.x, y + center_position.y))
                    for x in close_points
                    for y in close_points
                    if 121 >= x * x + y * y >= 81
                )
                if abs(point.distance_to(mineral_field.closer_than(10, center).closest_to(point)) - 3) < 0.5
            ]
            # Keep only points where both an engineering bay and an
            # evolution chamber placement query succeeds.
            e_bay_ability = self.game_data.units[ENGINEERINGBAY.value].creation_ability
            e_bay_mask = await self.client.query_building_placement(e_bay_ability, viable_points)
            evo_ability = self.game_data.units[EVOLUTIONCHAMBER.value].creation_ability
            evo_mask = await self.client.query_building_placement(evo_ability, viable_points)
            viable_points = [
                point
                for i, point in enumerate(viable_points)
                if e_bay_mask[i] == ACTION_RESULT.Success and evo_mask[i] == ACTION_RESULT.Success
            ]
            # Store a point only if it is >= 3 cells away on at least one
            # axis from every already-stored point (the first point is
            # always stored) — presumably to keep footprints apart.
            for point in viable_points:
                if self.building_positions:
                    if all(
                        abs(already_found.x - point.x) >= 3 or abs(already_found.y - point.y) >= 3
                        for already_found in self.building_positions
                    ):
                        add_positions(point)
                else:
                    add_positions(point)
    async def get_production_position(self):
        """Return the first stored position that can currently hold an
        evolution chamber, or None when no stored position is usable."""
        if self.building_positions:
            for building_position in self.building_positions:
                if await self.can_place(EVOLUTIONCHAMBER, building_position):
                    return building_position
        return None
| 45.745455 | 114 | 0.604134 | from sc2.constants import EVOLUTIONCHAMBER, ENGINEERINGBAY
from sc2.data import ACTION_RESULT
from sc2.position import Point2
class BuildingPositioning:
    """Pre-computes and serves placement points for tech buildings."""
    async def prepare_building_positions(self, center):
        """Collect buildable points on a ring around *center*, validated
        by placement queries for both building types."""
        mineral_field = self.state.mineral_field
        if mineral_field:
            close_points = range(-11, 12)
            center_position = center.position
            add_positions = self.building_positions.append
            # Candidates sit 9-11 cells from the base (81 <= r^2 <= 121)
            # and roughly 3 cells from the closest nearby mineral patch.
            viable_points = [
                point
                for point in (
                    Point2((x + center_position.x, y + center_position.y))
                    for x in close_points
                    for y in close_points
                    if 121 >= x * x + y * y >= 81
                )
                if abs(point.distance_to(mineral_field.closer_than(10, center).closest_to(point)) - 3) < 0.5
            ]
            # Keep only points where both building placements succeed.
            e_bay_ability = self.game_data.units[ENGINEERINGBAY.value].creation_ability
            e_bay_mask = await self.client.query_building_placement(e_bay_ability, viable_points)
            evo_ability = self.game_data.units[EVOLUTIONCHAMBER.value].creation_ability
            evo_mask = await self.client.query_building_placement(evo_ability, viable_points)
            viable_points = [
                point
                for i, point in enumerate(viable_points)
                if e_bay_mask[i] == ACTION_RESULT.Success and evo_mask[i] == ACTION_RESULT.Success
            ]
            # Store only points >= 3 cells apart on at least one axis from
            # every already-stored point; the first point is always stored.
            for point in viable_points:
                if self.building_positions:
                    if all(
                        abs(already_found.x - point.x) >= 3 or abs(already_found.y - point.y) >= 3
                        for already_found in self.building_positions
                    ):
                        add_positions(point)
                else:
                    add_positions(point)
    async def get_production_position(self):
        """Return the first stored position that can currently hold an
        evolution chamber, or None."""
        if self.building_positions:
            for building_position in self.building_positions:
                if await self.can_place(EVOLUTIONCHAMBER, building_position):
                    return building_position
        return None
| true | true |
f72bcbbdf799538cf2dba7ece50cc212bc7de632 | 1,341 | py | Python | 2020/day03/toboggan_trajectory.py | rycmak/advent-of-code | 2a3289516f4c1d0bc1d24a38d495a93edcb19e29 | [
"MIT"
] | 1 | 2021-03-03T01:40:09.000Z | 2021-03-03T01:40:09.000Z | 2020/day03/toboggan_trajectory.py | rycmak/advent-of-code | 2a3289516f4c1d0bc1d24a38d495a93edcb19e29 | [
"MIT"
] | null | null | null | 2020/day03/toboggan_trajectory.py | rycmak/advent-of-code | 2a3289516f4c1d0bc1d24a38d495a93edcb19e29 | [
"MIT"
] | null | null | null | # Pseudo code:
# Advent of Code 2020, day 3: count the trees ('#') hit while sledding
# across a horizontally repeating map with a slope of right 3, down 1.
#
# The map tiles infinitely to the right, so we concatenate enough
# horizontal copies of the original grid to cover the whole descent
# (indexing with modulo would avoid the copies, but the tiled approach
# mirrors the original solution).
with open("input.txt", "r") as map_file:  # fix: close the file when done
    # 2D grid of the original (non-repeated) map.
    map_original = [list(line.strip()) for line in map_file]
num_rows = len(map_original)

# The toboggan drifts 3 columns per row, so the widened map must span
# at least 3 * (num_rows - 1) + 1 columns.
num_copies = int((num_rows - 1) * 3 / len(map_original[0])) + 1
map_full = [row * (num_copies + 1) for row in map_original]

# Walk from (0, 0), moving right 3 / down 1 each step, counting trees.
num_trees = 0
for row in range(1, num_rows):
    if map_full[row][row * 3] == "#":
        num_trees += 1
print("num_trees: ", num_trees)
| 31.928571 | 108 | 0.681581 |
# NB: If using numpy, could make use of concatenate, hstack, etc.
# to stack (repeat) copies of original map to the right.
# But without numpy, we'll try to use zip instead...
# Read the map into a 2D grid; NOTE(review): the handle is never closed.
file = open("input.txt", "r")
map_original = []
num_rows = 0
for line in file:
    num_rows += 1
    map_original.append(list(line.strip()))
map_full = map_original
# Number of extra horizontal copies needed so the right-3/down-1 path
# (3 * (num_rows - 1) columns) stays inside the widened map.
num_copies = int((num_rows - 1) * 3 / len(map_original[0])) + 1
for i in range(num_copies):
    # Extend every row by one copy of the corresponding original row.
    map_full = [(map_full + map_original) for map_full, map_original in zip(map_full, map_original)]
# Walk from (0, 0) moving right 3 / down 1, counting trees ('#').
column = 0
row = 0
num_trees = 0
while row < (num_rows - 1):
    column += 3
    row += 1
    if map_full[row][column] == "#":
        num_trees += 1
print("num_trees: ", num_trees)
| true | true |
f72bcbc28f844d28924574ebaa52197a6519cc46 | 380 | py | Python | api.py | BabakShah/DS-SafetyPrediction | bc4c99512d04fa73994616584e3f4ab78c3e979e | [
"MIT"
] | null | null | null | api.py | BabakShah/DS-SafetyPrediction | bc4c99512d04fa73994616584e3f4ab78c3e979e | [
"MIT"
] | null | null | null | api.py | BabakShah/DS-SafetyPrediction | bc4c99512d04fa73994616584e3f4ab78c3e979e | [
"MIT"
] | 2 | 2021-06-29T17:28:03.000Z | 2022-02-26T08:41:35.000Z | import pickle
import numpy as np

# Load the fitted scaler and XGBoost classifier once at import time.
# Fix: use context managers so the pickle file handles are closed.
# NOTE(review): pickle.load can execute arbitrary code; only load
# model files from a trusted source.
with open('./xgboost.pkl', 'rb') as _model_file:
    xgboost = pickle.load(_model_file)
with open('./scaler.pkl', 'rb') as _scaler_file:
    scaler = pickle.load(_scaler_file)


def transform_input(input):
    """Scale one raw feature vector into a (1, n_features) array."""
    return scaler.transform([input])


def make_hard_prediction(input):
    """Return the class label predicted for one feature vector."""
    return xgboost.predict(transform_input(input))


def make_soft_prediction(input):
    """Return the predicted probability of the positive class (column 1)."""
    return xgboost.predict_proba(transform_input(input))[0, 1]
| 23.75 | 61 | 0.747368 | import pickle
import numpy as np
# Load the fitted scaler and classifier once at import time.
# NOTE(review): the file handles passed to pickle.load are never closed,
# and unpickling executes arbitrary code — load only trusted files.
xgboost = pickle.load(open('./xgboost.pkl', 'rb'))
scaler = pickle.load(open('./scaler.pkl', 'rb'))
def transform_input(input):
    """Scale one raw feature vector into a (1, n_features) array."""
    return scaler.transform([input])
def make_hard_prediction(input):
    """Return the class label predicted for one feature vector."""
    return xgboost.predict(transform_input(input))
def make_soft_prediction(input):
    """Return the predicted probability of the positive class."""
    return xgboost.predict_proba(transform_input(input))[0,1]
| true | true |
f72bcd0625e2740abc5c1c5b36b3afdb5cde844c | 6,402 | py | Python | train_dalle.py | Atica57/DALLE-pytorch | 4fa108271aeb1972fcb118390ec15b656f2c328a | [
"MIT"
] | 1 | 2021-03-08T12:26:49.000Z | 2021-03-08T12:26:49.000Z | train_dalle.py | Atica57/DALLE-pytorch | 4fa108271aeb1972fcb118390ec15b656f2c328a | [
"MIT"
] | null | null | null | train_dalle.py | Atica57/DALLE-pytorch | 4fa108271aeb1972fcb118390ec15b656f2c328a | [
"MIT"
] | null | null | null | import argparse
from random import choice
from pathlib import Path
# torch
import torch
from torch.optim import Adam
from torch.nn.utils import clip_grad_norm_
# vision imports
from PIL import Image
from torchvision import transforms as T
from torch.utils.data import DataLoader, Dataset
from torchvision.datasets import ImageFolder
from torchvision.utils import make_grid, save_image
# dalle related classes and utils
from dalle_pytorch import OpenAIDiscreteVAE, DiscreteVAE, DALLE
from dalle_pytorch.simple_tokenizer import tokenize, tokenizer, VOCAB_SIZE
# argument parsing
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required = False)
group.add_argument('--vae_path', type = str,
help='path to your trained discrete VAE')
group.add_argument('--dalle_path', type = str,
help='path to your partially trained DALL-E')
parser.add_argument('--image_text_folder', type = str, required = True,
help='path to your folder of images and text for learning the DALL-E')
args = parser.parse_args()
# helpers
def exists(val):
    """True unless *val* is None (falsy values like 0 still count)."""
    return not (val is None)
# constants
VAE_PATH = args.vae_path
DALLE_PATH = args.dalle_path
RESUME = exists(DALLE_PATH)
EPOCHS = 20
BATCH_SIZE = 4
LEARNING_RATE = 3e-4
GRAD_CLIP_NORM = 0.5
MODEL_DIM = 512
TEXT_SEQ_LEN = 256
DEPTH = 2
HEADS = 4
DIM_HEAD = 64
# reconstitute vae
if RESUME:
dalle_path = Path(DALLE_PATH)
assert dalle_path.exists(), 'DALL-E model file does not exist'
loaded_obj = torch.load(str(dalle_path))
dalle_params, vae_params, weights = loaded_obj['hparams'], loaded_obj['vae_params'], loaded_obj['weights']
vae = DiscreteVAE(**vae_params)
dalle_params = dict(
vae = vae,
**dalle_params
)
IMAGE_SIZE = vae_params['image_size']
else:
if exists(VAE_PATH):
vae_path = Path(VAE_PATH)
assert vae_path.exists(), 'VAE model file does not exist'
loaded_obj = torch.load(str(vae_path))
vae_params, weights = loaded_obj['hparams'], loaded_obj['weights']
vae = DiscreteVAE(**vae_params)
vae.load_state_dict(weights)
else:
print('using OpenAIs pretrained VAE for encoding images to tokens')
vae_params = None
vae = OpenAIDiscreteVAE()
IMAGE_SIZE = vae.image_size
dalle_params = dict(
vae = vae,
num_text_tokens = VOCAB_SIZE,
text_seq_len = TEXT_SEQ_LEN,
dim = MODEL_DIM,
depth = DEPTH,
heads = HEADS,
dim_head = DIM_HEAD
)
# helpers
def save_model(path):
    """Serialize the DALL-E checkpoint to *path*.

    Bundles the module-level dalle_params / vae_params hyperparameters
    with the model weights, matching the keys the RESUME branch reads
    when reloading a checkpoint.
    """
    save_obj = {
        'hparams': dalle_params,
        'vae_params': vae_params,
        'weights': dalle.state_dict()
    }
    torch.save(save_obj, path)
# dataset loading
class TextImageDataset(Dataset):
    """Pairs images with caption .txt files that share the same stem.

    Only stems present in both sets are kept. __getitem__ returns a
    (token_ids, image_tensor, mask) triple with one randomly chosen
    caption line per access.
    """
    def __init__(self, folder, text_len = 256, image_size = 128):
        super().__init__()
        path = Path(folder)
        text_files = [*path.glob('**/*.txt')]
        image_files = [
            *path.glob('**/*.png'),
            *path.glob('**/*.jpg'),
            *path.glob('**/*.jpeg')
        ]
        # Index both sides by file stem and keep the intersection.
        text_files = {t.stem: t for t in text_files}
        image_files = {i.stem: i for i in image_files}
        keys = (image_files.keys() & text_files.keys())
        self.keys = list(keys)
        self.text_files = {k: v for k, v in text_files.items() if k in keys}
        self.image_files = {k: v for k, v in image_files.items() if k in keys}
        # Crop/resize to a square, force 3 channels, scale to [-1, 1].
        self.image_tranform = T.Compose([
            T.Lambda(lambda img: img.convert('RGB') if img.mode != 'RGB' else img),
            T.CenterCrop(image_size),
            T.Resize(image_size),
            T.ToTensor(),
            T.Lambda(lambda t: t.expand(3, -1, -1)),
            T.Normalize((0.5,) * 3, (0.5,) * 3)
        ])
    def __len__(self):
        return len(self.keys)
    def __getitem__(self, ind):
        key = self.keys[ind]
        text_file = self.text_files[key]
        image_file = self.image_files[key]
        image = Image.open(image_file)
        # One caption per non-empty line; sample a different one each call.
        descriptions = text_file.read_text().split('\n')
        descriptions = list(filter(lambda t: len(t) > 0, descriptions))
        description = choice(descriptions)
        tokenized_text = tokenize(description).squeeze(0)
        # Mask is True at non-zero token positions.
        mask = tokenized_text != 0
        image_tensor = self.image_tranform(image)
        return tokenized_text, image_tensor, mask
# create dataset and dataloader
ds = TextImageDataset(
args.image_text_folder,
text_len = TEXT_SEQ_LEN,
image_size = IMAGE_SIZE
)
assert len(ds) > 0, 'dataset is empty'
print(f'{len(ds)} image-text pairs found for training')
dl = DataLoader(ds, batch_size = BATCH_SIZE, shuffle = True, drop_last = True)
# initialize DALL-E
dalle = DALLE(**dalle_params).cuda()
if RESUME:
dalle.load_state_dict(weights)
# optimizer
opt = Adam(dalle.parameters(), lr = LEARNING_RATE)
# experiment tracker
import wandb
wandb.config.depth = DEPTH
wandb.config.heads = HEADS
wandb.config.dim_head = DIM_HEAD
wandb.init(project = 'dalle_train_transformer', resume = RESUME)
# training
# Main training loop: one optimizer step per (text, image, mask) batch.
for epoch in range(EPOCHS):
    for i, (text, images, mask) in enumerate(dl):
        text, images, mask = map(lambda t: t.cuda(), (text, images, mask))
        loss = dalle(text, images, mask = mask, return_loss = True)
        loss.backward()
        # Clip gradients before stepping to stabilize training.
        clip_grad_norm_(dalle.parameters(), GRAD_CLIP_NORM)
        opt.step()
        opt.zero_grad()
        log = {}
        # Scalar logging every 10 steps.
        if i % 10 == 0:
            print(epoch, i, f'loss - {loss.item()}')
            log = {
                **log,
                'epoch': epoch,
                'iter': i,
                'loss': loss.item()
            }
        # Every 100 steps: sample an image for the first caption in the
        # batch and write a checkpoint.
        if i % 100 == 0:
            sample_text = text[:1]
            token_list = sample_text.masked_select(sample_text != 0).tolist()
            decoded_text = tokenizer.decode(token_list)
            image = dalle.generate_images(
                text[:1],
                mask = mask[:1],
                filter_thres = 0.9 # topk sampling at 0.9
            )
            save_model(f'./dalle.pt')
            wandb.save(f'./dalle.pt')
            log = {
                **log,
                'image': wandb.Image(image, caption = decoded_text)
            }
        wandb.log(log)
# Final checkpoint after all epochs, then close the wandb run.
save_model(f'./dalle-final.pt')
wandb.save('./dalle-final.pt')
wandb.finish()
| 24.813953 | 110 | 0.617619 | import argparse
from random import choice
from pathlib import Path
import torch
from torch.optim import Adam
from torch.nn.utils import clip_grad_norm_
from PIL import Image
from torchvision import transforms as T
from torch.utils.data import DataLoader, Dataset
from torchvision.datasets import ImageFolder
from torchvision.utils import make_grid, save_image
from dalle_pytorch import OpenAIDiscreteVAE, DiscreteVAE, DALLE
from dalle_pytorch.simple_tokenizer import tokenize, tokenizer, VOCAB_SIZE
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required = False)
group.add_argument('--vae_path', type = str,
help='path to your trained discrete VAE')
group.add_argument('--dalle_path', type = str,
help='path to your partially trained DALL-E')
parser.add_argument('--image_text_folder', type = str, required = True,
help='path to your folder of images and text for learning the DALL-E')
args = parser.parse_args()
def exists(val):
return val is not None
VAE_PATH = args.vae_path
DALLE_PATH = args.dalle_path
RESUME = exists(DALLE_PATH)
EPOCHS = 20
BATCH_SIZE = 4
LEARNING_RATE = 3e-4
GRAD_CLIP_NORM = 0.5
MODEL_DIM = 512
TEXT_SEQ_LEN = 256
DEPTH = 2
HEADS = 4
DIM_HEAD = 64
if RESUME:
dalle_path = Path(DALLE_PATH)
assert dalle_path.exists(), 'DALL-E model file does not exist'
loaded_obj = torch.load(str(dalle_path))
dalle_params, vae_params, weights = loaded_obj['hparams'], loaded_obj['vae_params'], loaded_obj['weights']
vae = DiscreteVAE(**vae_params)
dalle_params = dict(
vae = vae,
**dalle_params
)
IMAGE_SIZE = vae_params['image_size']
else:
if exists(VAE_PATH):
vae_path = Path(VAE_PATH)
assert vae_path.exists(), 'VAE model file does not exist'
loaded_obj = torch.load(str(vae_path))
vae_params, weights = loaded_obj['hparams'], loaded_obj['weights']
vae = DiscreteVAE(**vae_params)
vae.load_state_dict(weights)
else:
print('using OpenAIs pretrained VAE for encoding images to tokens')
vae_params = None
vae = OpenAIDiscreteVAE()
IMAGE_SIZE = vae.image_size
dalle_params = dict(
vae = vae,
num_text_tokens = VOCAB_SIZE,
text_seq_len = TEXT_SEQ_LEN,
dim = MODEL_DIM,
depth = DEPTH,
heads = HEADS,
dim_head = DIM_HEAD
)
def save_model(path):
save_obj = {
'hparams': dalle_params,
'vae_params': vae_params,
'weights': dalle.state_dict()
}
torch.save(save_obj, path)
class TextImageDataset(Dataset):
def __init__(self, folder, text_len = 256, image_size = 128):
super().__init__()
path = Path(folder)
text_files = [*path.glob('**/*.txt')]
image_files = [
*path.glob('**/*.png'),
*path.glob('**/*.jpg'),
*path.glob('**/*.jpeg')
]
text_files = {t.stem: t for t in text_files}
image_files = {i.stem: i for i in image_files}
keys = (image_files.keys() & text_files.keys())
self.keys = list(keys)
self.text_files = {k: v for k, v in text_files.items() if k in keys}
self.image_files = {k: v for k, v in image_files.items() if k in keys}
self.image_tranform = T.Compose([
T.Lambda(lambda img: img.convert('RGB') if img.mode != 'RGB' else img),
T.CenterCrop(image_size),
T.Resize(image_size),
T.ToTensor(),
T.Lambda(lambda t: t.expand(3, -1, -1)),
T.Normalize((0.5,) * 3, (0.5,) * 3)
])
def __len__(self):
return len(self.keys)
def __getitem__(self, ind):
key = self.keys[ind]
text_file = self.text_files[key]
image_file = self.image_files[key]
image = Image.open(image_file)
descriptions = text_file.read_text().split('\n')
descriptions = list(filter(lambda t: len(t) > 0, descriptions))
description = choice(descriptions)
tokenized_text = tokenize(description).squeeze(0)
mask = tokenized_text != 0
image_tensor = self.image_tranform(image)
return tokenized_text, image_tensor, mask
ds = TextImageDataset(
args.image_text_folder,
text_len = TEXT_SEQ_LEN,
image_size = IMAGE_SIZE
)
assert len(ds) > 0, 'dataset is empty'
print(f'{len(ds)} image-text pairs found for training')
dl = DataLoader(ds, batch_size = BATCH_SIZE, shuffle = True, drop_last = True)
dalle = DALLE(**dalle_params).cuda()
if RESUME:
dalle.load_state_dict(weights)
opt = Adam(dalle.parameters(), lr = LEARNING_RATE)
import wandb
wandb.config.depth = DEPTH
wandb.config.heads = HEADS
wandb.config.dim_head = DIM_HEAD
wandb.init(project = 'dalle_train_transformer', resume = RESUME)
for epoch in range(EPOCHS):
for i, (text, images, mask) in enumerate(dl):
text, images, mask = map(lambda t: t.cuda(), (text, images, mask))
loss = dalle(text, images, mask = mask, return_loss = True)
loss.backward()
clip_grad_norm_(dalle.parameters(), GRAD_CLIP_NORM)
opt.step()
opt.zero_grad()
log = {}
if i % 10 == 0:
print(epoch, i, f'loss - {loss.item()}')
log = {
**log,
'epoch': epoch,
'iter': i,
'loss': loss.item()
}
if i % 100 == 0:
sample_text = text[:1]
token_list = sample_text.masked_select(sample_text != 0).tolist()
decoded_text = tokenizer.decode(token_list)
image = dalle.generate_images(
text[:1],
mask = mask[:1],
filter_thres = 0.9
)
save_model(f'./dalle.pt')
wandb.save(f'./dalle.pt')
log = {
**log,
'image': wandb.Image(image, caption = decoded_text)
}
wandb.log(log)
save_model(f'./dalle-final.pt')
wandb.save('./dalle-final.pt')
wandb.finish()
| true | true |
f72bcdcacf211151e2476c6876771adc77e8a368 | 930 | py | Python | CartPole/_CartPole_mathematical_helpers.py | jhuebotter/CartpoleSNNdemo | d18a85cbc45bff48295c46c9cd8c9fc00192318c | [
"MIT"
] | null | null | null | CartPole/_CartPole_mathematical_helpers.py | jhuebotter/CartpoleSNNdemo | d18a85cbc45bff48295c46c9cd8c9fc00192318c | [
"MIT"
] | null | null | null | CartPole/_CartPole_mathematical_helpers.py | jhuebotter/CartpoleSNNdemo | d18a85cbc45bff48295c46c9cd8c9fc00192318c | [
"MIT"
] | null | null | null | """
Small general mathematical functions.
This file was necessary to make CartPole module self-contained.
"""
from math import fmod
import numpy as np
# Wraps the angle into range [-π, π]
def wrap_angle_rad(angle: float) -> float:
Modulo = fmod(angle, 2 * np.pi) # positive modulo
if Modulo < -np.pi:
angle = Modulo + 2 * np.pi
elif Modulo > np.pi:
angle = Modulo - 2 * np.pi
else:
angle = Modulo
return angle
def wrap_angle_rad_inplace(angle: np.ndarray) -> None:
Modulo = np.fmod(angle, 2 * np.pi) # positive modulo
neg_wrap, pos_wrap = Modulo < -np.pi, Modulo > np.pi
angle[neg_wrap] = Modulo[neg_wrap] + 2 * np.pi
angle[pos_wrap] = Modulo[pos_wrap] - 2 * np.pi
angle[~(neg_wrap | pos_wrap)] = Modulo[~(neg_wrap | pos_wrap)]
def conditional_decorator(dec, cond):
def decorator(func):
return dec(func) if cond else func
return decorator
| 25.833333 | 66 | 0.649462 |
from math import fmod
import numpy as np
def wrap_angle_rad(angle: float) -> float:
Modulo = fmod(angle, 2 * np.pi)
if Modulo < -np.pi:
angle = Modulo + 2 * np.pi
elif Modulo > np.pi:
angle = Modulo - 2 * np.pi
else:
angle = Modulo
return angle
def wrap_angle_rad_inplace(angle: np.ndarray) -> None:
Modulo = np.fmod(angle, 2 * np.pi)
neg_wrap, pos_wrap = Modulo < -np.pi, Modulo > np.pi
angle[neg_wrap] = Modulo[neg_wrap] + 2 * np.pi
angle[pos_wrap] = Modulo[pos_wrap] - 2 * np.pi
angle[~(neg_wrap | pos_wrap)] = Modulo[~(neg_wrap | pos_wrap)]
def conditional_decorator(dec, cond):
def decorator(func):
return dec(func) if cond else func
return decorator
| true | true |
f72bce4067eef5d32a5ccf8b7a440fc4ed4d0c42 | 1,114 | py | Python | dadmatools/models/flair/parser/utils/vocab.py | njzr/DadmaTools | 64ff407d5d818d5a9216340cccf0d1cc909d3b1b | [
"Apache-2.0"
] | 25 | 2021-12-01T15:19:36.000Z | 2022-03-12T12:50:28.000Z | dadmatools/models/flair/parser/utils/vocab.py | ebad84/DadmaTools | b26ad8aa834f642d49bd120bd7cf1fdf40741be1 | [
"Apache-2.0"
] | 3 | 2021-12-14T06:34:52.000Z | 2022-02-17T08:23:20.000Z | dadmatools/models/flair/parser/utils/vocab.py | ebad84/DadmaTools | b26ad8aa834f642d49bd120bd7cf1fdf40741be1 | [
"Apache-2.0"
] | 6 | 2021-10-12T13:44:17.000Z | 2022-03-07T13:54:17.000Z | # -*- coding: utf-8 -*-
from collections.abc import Iterable
from dadmatools.models.flair.parser.utils.common import unk
class Vocab(object):
def __init__(self, counter, min_freq=1, specials=[]):
self.itos = specials
self.stoi = {token: i for i, token in enumerate(self.itos)}
self.extend([token for token, freq in counter.items()
if freq >= min_freq])
self.unk_index = self.stoi.get(unk, 0)
self.n_init = len(self)
def __len__(self):
return len(self.itos)
def __getitem__(self, key):
return self.stoi[key]
def __contains__(self, token):
return token in self.stoi
def token2id(self, sequence):
return [self.stoi.get(token, self.unk_index) for token in sequence]
def id2token(self, ids):
if isinstance(ids, Iterable):
return [self.itos[i] for i in ids]
else:
return self.itos[ids]
def extend(self, tokens):
self.itos.extend(sorted(set(tokens).difference(self.stoi)))
self.stoi = {token: i for i, token in enumerate(self.itos)}
| 28.564103 | 75 | 0.61939 |
from collections.abc import Iterable
from dadmatools.models.flair.parser.utils.common import unk
class Vocab(object):
def __init__(self, counter, min_freq=1, specials=[]):
self.itos = specials
self.stoi = {token: i for i, token in enumerate(self.itos)}
self.extend([token for token, freq in counter.items()
if freq >= min_freq])
self.unk_index = self.stoi.get(unk, 0)
self.n_init = len(self)
def __len__(self):
return len(self.itos)
def __getitem__(self, key):
return self.stoi[key]
def __contains__(self, token):
return token in self.stoi
def token2id(self, sequence):
return [self.stoi.get(token, self.unk_index) for token in sequence]
def id2token(self, ids):
if isinstance(ids, Iterable):
return [self.itos[i] for i in ids]
else:
return self.itos[ids]
def extend(self, tokens):
self.itos.extend(sorted(set(tokens).difference(self.stoi)))
self.stoi = {token: i for i, token in enumerate(self.itos)}
| true | true |
f72bd002b56ea30cddba78b9525df205cb9088df | 7,561 | py | Python | tests/lint/check_file_type.py | PhilippvK/tvm | e7748aac40bd4c263882323393ea8896837614a9 | [
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | null | null | null | tests/lint/check_file_type.py | PhilippvK/tvm | e7748aac40bd4c263882323393ea8896837614a9 | [
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | null | null | null | tests/lint/check_file_type.py | PhilippvK/tvm | e7748aac40bd4c263882323393ea8896837614a9 | [
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Helper tool to check file types that are allowed to checkin."""
import os
import sys
import subprocess
# List of file types we allow
ALLOW_EXTENSION = {
# source code
"cc",
"c",
"h",
"s",
"rs",
"m",
"mm",
"g4",
"gradle",
"js",
"tcl",
"scala",
"java",
"go",
"ts",
"sh",
"py",
"pyi",
"pxi",
"pyd",
"pyx",
"cu",
"bat",
# relay text format
"rly",
# configurations
"mk",
"in",
"cmake",
"xml",
"toml",
"yml",
"yaml",
"json",
# docs
"txt",
"md",
"rst",
# sgx
"edl",
"lds",
# ios
"pbxproj",
"plist",
"xcworkspacedata",
"storyboard",
# hw/chisel
"sbt",
"properties",
"v",
"sdc",
# generated parser
"interp",
"tokens",
# interface definition
"idl",
# opencl file
"cl",
# zephyr config file
"conf",
# linker scripts
"ld",
}
# List of file names allowed
ALLOW_FILE_NAME = {
".gitignore",
".eslintignore",
".gitattributes",
"README",
"Makefile",
"Doxyfile",
"pylintrc",
"rat-excludes",
"log4j.properties",
".clang-format",
".gitmodules",
"CODEOWNERS",
".scalafmt.conf",
"Cargo.lock",
"with_the_same_user",
}
# List of specific files allowed in relpath to <proj_root>
ALLOW_SPECIFIC_FILE = {
"LICENSE",
"NOTICE",
"KEYS",
"DISCLAIMER",
"Jenkinsfile",
"mypy.ini",
# cargo config
"rust/runtime/tests/test_wasm32/.cargo/config",
"rust/tvm-graph-rt/tests/test_wasm32/.cargo/config",
"apps/sgx/.cargo/config",
"apps/wasm-standalone/wasm-graph/.cargo/config",
# html for demo purposes
"web/apps/browser/rpc_server.html",
# images are normally not allowed
# discuss with committers before add more images
"apps/android_rpc/app/src/main/res/mipmap-hdpi/ic_launcher.png",
"apps/android_rpc/app/src/main/res/mipmap-mdpi/ic_launcher.png",
# documentation related files
"docs/_static/css/tvm_theme.css",
"docs/_static/img/tvm-logo-small.png",
"docs/_static/img/tvm-logo-square.png",
# pytest config
"pytest.ini",
# microTVM tests
"tests/micro/zephyr/testdata/digit-2.jpg",
"tests/micro/zephyr/testdata/digit-9.jpg",
"tests/micro/zephyr/testdata/mnist-8.onnx",
"tests/micro/zephyr/testdata/ic_sample_fp32_8.npy",
# microTVM Zephyr runtime
"apps/microtvm/zephyr/template_project/CMakeLists.txt.template",
"apps/microtvm/zephyr/template_project/qemu-hack/qemu-system-arm",
"apps/microtvm/zephyr/template_project/qemu-hack/qemu-system-xilinx-aarch64",
"apps/microtvm/zephyr/template_project/qemu-hack/qemu-system-i386",
"apps/microtvm/zephyr/template_project/qemu-hack/qemu-system-riscv32",
"apps/microtvm/zephyr/template_project/qemu-hack/qemu-system-riscv64",
# microTVM Virtual Machines
"apps/microtvm/reference-vm/zephyr/Vagrantfile",
"apps/microtvm/reference-vm/zephyr/base-box/Vagrantfile.packer-template",
}
def filename_allowed(name):
    """Check if name is allowed by the current policy.

    Parameters
    ----------
    name : str
        Input name (repo-relative path).

    Returns
    -------
    allowed : bool
        Whether the filename is allowed.
    """
    # Extension check: text after the last dot.  A name with no dot leaves
    # arr == [name], so arr[-1] is the whole name and normally won't match.
    arr = name.rsplit(".", 1)
    if arr[-1] in ALLOW_EXTENSION:
        return True
    if os.path.basename(name) in ALLOW_FILE_NAME:
        return True
    # Any Dockerfile variant (e.g. Dockerfile.ci_gpu) is allowed.
    if os.path.basename(name).startswith("Dockerfile"):
        return True
    # Vendored third-party code is exempt from this policy.
    if name.startswith("3rdparty"):
        return True
    # Exact repo-relative path whitelist.
    if name in ALLOW_SPECIFIC_FILE:
        return True
    return False
def copyright_line(line):
    """Return True if *line* contains an explicit copyright notice.

    The match patterns are assembled at runtime (concatenation / separate
    variable) on purpose, so this detector never matches its own source.
    """
    marker = "Copyright"
    if (marker + " " + "(c)") in line:
        return True
    if marker in line and "by" in line:
        return True
    return False
def check_asf_copyright(fname):
    """Return True if *fname* passes the ASF-header/copyright check.

    A file fails (returns False) only when it contains BOTH an ASF license
    header and an explicit copyright line: files carrying the ASF header
    must not also carry copyright notices.  Binary .png files, missing
    paths, and undecodable files are treated as passing.
    """
    if fname.endswith(".png"):
        return True
    if not os.path.isfile(fname):
        return True
    has_asf_header = False
    has_copyright = False
    try:
        # Fix: use a context manager so the handle is closed deterministically
        # (the original relied on garbage collection to close the file).
        with open(fname) as fin:
            for line in fin:
                if line.find("Licensed to the Apache Software Foundation") != -1:
                    has_asf_header = True
                if copyright_line(line):
                    has_copyright = True
                # Early exit: both markers seen -> violation.
                if has_asf_header and has_copyright:
                    return False
    except UnicodeDecodeError:
        # Binary-ish content that slipped past the extension check.
        pass
    return True
def main():
    """Lint entry point.

    Verifies that every git-tracked file has an allowed type and that
    files with an ASF header carry no extra copyright lines.  On failure,
    writes a report to stderr and exits non-zero.
    """
    cmd = ["git", "ls-files"]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    (out, _) = proc.communicate()
    assert proc.returncode == 0, f'{" ".join(cmd)} errored: {out}'
    res = out.decode("utf-8")
    flist = res.split()
    error_list = []
    for fname in flist:
        if not filename_allowed(fname):
            error_list.append(fname)
    if error_list:
        report = "------File type check report----\n"
        report += "\n".join(error_list)
        # Fix: original said "now allowed" — these files are NOT allowed.
        report += "\nFound %d files that are not allowed\n" % len(error_list)
        report += (
            "We do not check in binary files into the repo.\n"
            # Fix: trailing space added — the adjacent literals previously
            # concatenated into "committers andmodify".
            "If necessary, please discuss with committers and "
            "modify tests/lint/check_file_type.py to enable the file you need.\n"
        )
        sys.stderr.write(report)
        sys.stderr.flush()
        sys.exit(-1)
    asf_copyright_list = []
    # Reuse the already-split file list instead of re-splitting `res`.
    for fname in flist:
        if not check_asf_copyright(fname):
            asf_copyright_list.append(fname)
    if asf_copyright_list:
        report = "------File type check report----\n"
        report += "\n".join(asf_copyright_list) + "\n"
        report += "------Found %d files that has ASF header with copyright message----\n" % len(
            asf_copyright_list
        )
        report += "--- Files with ASF header do not need Copyright lines.\n"
        report += "--- Contributors retain copyright to their contribution by default.\n"
        report += "--- If a file comes with a different license, consider put it under the 3rdparty folder instead.\n"
        report += "---\n"
        report += "--- You can use the following steps to remove the copyright lines\n"
        report += "--- Create file_list.txt in your text editor\n"
        report += "--- Copy paste the above content in file-list into file_list.txt\n"
        report += "--- python3 tests/lint/add_asf_header.py file_list.txt\n"
        sys.stderr.write(report)
        sys.stderr.flush()
        sys.exit(-1)
    print("check_file_type.py: all checks passed..")
| 28.003704 | 118 | 0.624653 |
import os
import sys
import subprocess
# File extensions allowed anywhere in the repository.
ALLOW_EXTENSION = {
    "cc",
    "c",
    "h",
    "s",
    "rs",
    "m",
    "mm",
    "g4",
    "gradle",
    "js",
    "tcl",
    "scala",
    "java",
    "go",
    "ts",
    "sh",
    "py",
    "pyi",
    "pxi",
    "pyd",
    "pyx",
    "cu",
    "bat",
    "rly",
    "mk",
    "in",
    "cmake",
    "xml",
    "toml",
    "yml",
    "yaml",
    "json",
    "txt",
    "md",
    "rst",
    "edl",
    "lds",
    "pbxproj",
    "plist",
    "xcworkspacedata",
    "storyboard",
    "sbt",
    "properties",
    "v",
    "sdc",
    "interp",
    "tokens",
    "idl",
    "cl",
    "conf",
    "ld",
}
# Basenames allowed in any directory.
ALLOW_FILE_NAME = {
    ".gitignore",
    ".eslintignore",
    ".gitattributes",
    "README",
    "Makefile",
    "Doxyfile",
    "pylintrc",
    "rat-excludes",
    "log4j.properties",
    ".clang-format",
    ".gitmodules",
    "CODEOWNERS",
    ".scalafmt.conf",
    "Cargo.lock",
    "with_the_same_user",
}
# Exact repo-relative paths that are allowed as one-off exceptions.
ALLOW_SPECIFIC_FILE = {
    "LICENSE",
    "NOTICE",
    "KEYS",
    "DISCLAIMER",
    "Jenkinsfile",
    "mypy.ini",
    "rust/runtime/tests/test_wasm32/.cargo/config",
    "rust/tvm-graph-rt/tests/test_wasm32/.cargo/config",
    "apps/sgx/.cargo/config",
    "apps/wasm-standalone/wasm-graph/.cargo/config",
    "web/apps/browser/rpc_server.html",
    "apps/android_rpc/app/src/main/res/mipmap-hdpi/ic_launcher.png",
    "apps/android_rpc/app/src/main/res/mipmap-mdpi/ic_launcher.png",
    "docs/_static/css/tvm_theme.css",
    "docs/_static/img/tvm-logo-small.png",
    "docs/_static/img/tvm-logo-square.png",
    "pytest.ini",
    "tests/micro/zephyr/testdata/digit-2.jpg",
    "tests/micro/zephyr/testdata/digit-9.jpg",
    "tests/micro/zephyr/testdata/mnist-8.onnx",
    "tests/micro/zephyr/testdata/ic_sample_fp32_8.npy",
    "apps/microtvm/zephyr/template_project/CMakeLists.txt.template",
    "apps/microtvm/zephyr/template_project/qemu-hack/qemu-system-arm",
    "apps/microtvm/zephyr/template_project/qemu-hack/qemu-system-xilinx-aarch64",
    "apps/microtvm/zephyr/template_project/qemu-hack/qemu-system-i386",
    "apps/microtvm/zephyr/template_project/qemu-hack/qemu-system-riscv32",
    "apps/microtvm/zephyr/template_project/qemu-hack/qemu-system-riscv64",
    "apps/microtvm/reference-vm/zephyr/Vagrantfile",
    "apps/microtvm/reference-vm/zephyr/base-box/Vagrantfile.packer-template",
}
def filename_allowed(name):
    """Return True if *name* is permitted by the file-type policy.

    Allowed when its extension, basename, Dockerfile-style basename prefix,
    3rdparty/ location, or exact repo-relative path is whitelisted above.
    """
    # arr[-1] is the extension; for dot-less names it is the whole name.
    arr = name.rsplit(".", 1)
    if arr[-1] in ALLOW_EXTENSION:
        return True
    if os.path.basename(name) in ALLOW_FILE_NAME:
        return True
    # Any Dockerfile variant (e.g. Dockerfile.ci_gpu) is allowed.
    if os.path.basename(name).startswith("Dockerfile"):
        return True
    # Vendored third-party code is exempt.
    if name.startswith("3rdparty"):
        return True
    if name in ALLOW_SPECIFIC_FILE:
        return True
    return False
def copyright_line(line):
    """Return True if *line* contains an explicit copyright notice.

    Patterns are assembled at runtime so this file never detects itself.
    """
    if line.find("Copyright " + "(c)") != -1:
        return True
    # break pattern into two lines to avoid false-negative check
    spattern1 = "Copyright"
    if line.find(spattern1) != -1 and line.find("by") != -1:
        return True
    return False
def check_asf_copyright(fname):
    """Return True if *fname* passes the ASF-header/copyright check.

    Returns False only when the file contains BOTH an ASF license header
    and an explicit copyright line.  Binary .png files, missing paths, and
    undecodable files are treated as passing.
    """
    if fname.endswith(".png"):
        return True
    if not os.path.isfile(fname):
        return True
    has_asf_header = False
    has_copyright = False
    try:
        # NOTE(review): the handle from open() is never closed explicitly;
        # closing is left to garbage collection.
        for line in open(fname):
            if line.find("Licensed to the Apache Software Foundation") != -1:
                has_asf_header = True
            if copyright_line(line):
                has_copyright = True
            # Both markers present -> violation.
            if has_asf_header and has_copyright:
                return False
    except UnicodeDecodeError:
        # Binary-ish content that slipped past the extension check.
        pass
    return True
def main():
    """Lint entry point: check file types and ASF-header/copyright rules
    for every git-tracked file; report to stderr and exit non-zero on
    failure.
    """
    cmd = ["git", "ls-files"]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    (out, _) = proc.communicate()
    assert proc.returncode == 0, f'{" ".join(cmd)} errored: {out}'
    res = out.decode("utf-8")
    flist = res.split()
    error_list = []
    for fname in flist:
        if not filename_allowed(fname):
            error_list.append(fname)
    if error_list:
        report = "------File type check report----\n"
        report += "\n".join(error_list)
        # NOTE(review): "now allowed" reads like a typo for "not allowed".
        report += "\nFound %d files that are now allowed\n" % len(error_list)
        # NOTE(review): adjacent literals concatenate to "andmodify" —
        # looks like a missing trailing space.
        report += (
            "We do not check in binary files into the repo.\n"
            "If necessary, please discuss with committers and"
            "modify tests/lint/check_file_type.py to enable the file you need.\n"
        )
        sys.stderr.write(report)
        sys.stderr.flush()
        sys.exit(-1)
    asf_copyright_list = []
    for fname in res.split():
        if not check_asf_copyright(fname):
            asf_copyright_list.append(fname)
    if asf_copyright_list:
        report = "------File type check report----\n"
        report += "\n".join(asf_copyright_list) + "\n"
        report += "------Found %d files that has ASF header with copyright message----\n" % len(
            asf_copyright_list
        )
        report += "--- Files with ASF header do not need Copyright lines.\n"
        report += "--- Contributors retain copyright to their contribution by default.\n"
        report += "--- If a file comes with a different license, consider put it under the 3rdparty folder instead.\n"
        report += "---\n"
        report += "--- You can use the following steps to remove the copyright lines\n"
        report += "--- Create file_list.txt in your text editor\n"
        report += "--- Copy paste the above content in file-list into file_list.txt\n"
        report += "--- python3 tests/lint/add_asf_header.py file_list.txt\n"
        sys.stderr.write(report)
        sys.stderr.flush()
        sys.exit(-1)
    print("check_file_type.py: all checks passed..")
| true | true |
f72bd0034cfdcd02a88b42c82d265f8271a74c19 | 30,361 | py | Python | covid/wallet/cc_wallet/cc_wallet.py | grayfallstown/covid-blockchain | 194d5351c70d3ee5d928f767e21c7894cfbb59a7 | [
"Apache-2.0"
] | null | null | null | covid/wallet/cc_wallet/cc_wallet.py | grayfallstown/covid-blockchain | 194d5351c70d3ee5d928f767e21c7894cfbb59a7 | [
"Apache-2.0"
] | null | null | null | covid/wallet/cc_wallet/cc_wallet.py | grayfallstown/covid-blockchain | 194d5351c70d3ee5d928f767e21c7894cfbb59a7 | [
"Apache-2.0"
] | null | null | null | from __future__ import annotations
import logging
import time
from dataclasses import replace
from secrets import token_bytes
from typing import Any, Dict, List, Optional, Set
from blspy import AugSchemeMPL, G2Element
from covid.consensus.cost_calculator import calculate_cost_of_program, NPCResult
from covid.full_node.bundle_tools import simple_solution_generator
from covid.full_node.mempool_check_conditions import get_name_puzzle_conditions
from covid.protocols.wallet_protocol import PuzzleSolutionResponse
from covid.types.blockchain_format.coin import Coin
from covid.types.blockchain_format.program import Program
from covid.types.blockchain_format.sized_bytes import bytes32
from covid.types.coin_spend import CoinSpend
from covid.types.generator_types import BlockGenerator
from covid.types.spend_bundle import SpendBundle
from covid.util.byte_types import hexstr_to_bytes
from covid.util.condition_tools import conditions_dict_for_solution, pkm_pairs_for_conditions_dict
from covid.util.ints import uint8, uint32, uint64, uint128
from covid.util.json_util import dict_to_json_str
from covid.wallet.block_record import HeaderBlockRecord
from covid.wallet.cc_wallet.cc_info import CCInfo
from covid.wallet.cc_wallet.cc_utils import (
CC_MOD,
SpendableCC,
cc_puzzle_for_inner_puzzle,
cc_puzzle_hash_for_inner_puzzle_hash,
get_lineage_proof_from_coin_and_puz,
spend_bundle_for_spendable_ccs,
uncurry_cc,
)
from covid.wallet.derivation_record import DerivationRecord
from covid.wallet.puzzles.genesis_by_coin_id_with_0 import (
create_genesis_or_zero_coin_checker,
genesis_coin_id_for_genesis_coin_checker,
lineage_proof_for_genesis,
)
from covid.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import (
DEFAULT_HIDDEN_PUZZLE_HASH,
calculate_synthetic_secret_key,
)
from covid.wallet.transaction_record import TransactionRecord
from covid.wallet.util.transaction_type import TransactionType
from covid.wallet.util.wallet_types import WalletType
from covid.wallet.wallet import Wallet
from covid.wallet.wallet_coin_record import WalletCoinRecord
from covid.wallet.wallet_info import WalletInfo
class CCWallet:
wallet_state_manager: Any
log: logging.Logger
wallet_info: WalletInfo
cc_coin_record: WalletCoinRecord
cc_info: CCInfo
standard_wallet: Wallet
base_puzzle_program: Optional[bytes]
base_inner_puzzle_hash: Optional[bytes32]
cost_of_single_tx: Optional[int]
    @staticmethod
    async def create_new_cc(
        wallet_state_manager: Any,
        wallet: Wallet,
        amount: uint64,
    ):
        """Create a new coloured-coin wallet and mint *amount* of a brand
        new colour.

        The wallet DB record is created first; if generating the genesis
        spend fails, the record is deleted and the exception re-raised.
        Pushes an outgoing tx on the standard wallet and an incoming tx on
        the new CC wallet, then returns the CCWallet instance.
        """
        self = CCWallet()
        self.cost_of_single_tx = None
        self.base_puzzle_program = None
        self.base_inner_puzzle_hash = None
        self.standard_wallet = wallet
        self.log = logging.getLogger(__name__)
        self.wallet_state_manager = wallet_state_manager
        # Genesis checker is filled in by generate_new_coloured_coin below.
        self.cc_info = CCInfo(None, [])
        info_as_string = bytes(self.cc_info).hex()
        self.wallet_info = await wallet_state_manager.user_store.create_wallet(
            "CC Wallet", WalletType.COLOURED_COIN, info_as_string
        )
        if self.wallet_info is None:
            raise ValueError("Internal Error")
        try:
            spend_bundle = await self.generate_new_coloured_coin(amount)
        except Exception:
            # Roll back the half-created wallet record on any failure.
            await wallet_state_manager.user_store.delete_wallet(self.id())
            raise
        await self.wallet_state_manager.add_new_wallet(self, self.id())
        # Change and actual coloured coin
        non_ephemeral_spends: List[Coin] = spend_bundle.not_ephemeral_additions()
        cc_coin = None
        puzzle_store = self.wallet_state_manager.puzzle_store
        # Find the addition that belongs to this wallet: the minted CC coin.
        for c in non_ephemeral_spends:
            info = await puzzle_store.wallet_info_for_puzzle_hash(c.puzzle_hash)
            if info is None:
                raise ValueError("Internal Error")
            id, wallet_type = info
            if id == self.id():
                cc_coin = c
        if cc_coin is None:
            raise ValueError("Internal Error, unable to generate new coloured coin")
        # Outgoing record on the funding (standard) wallet; carries the bundle.
        regular_record = TransactionRecord(
            confirmed_at_height=uint32(0),
            created_at_time=uint64(int(time.time())),
            to_puzzle_hash=cc_coin.puzzle_hash,
            amount=uint64(cc_coin.amount),
            fee_amount=uint64(0),
            confirmed=False,
            sent=uint32(0),
            spend_bundle=spend_bundle,
            additions=spend_bundle.additions(),
            removals=spend_bundle.removals(),
            wallet_id=self.wallet_state_manager.main_wallet.id(),
            sent_to=[],
            trade_id=None,
            type=uint32(TransactionType.OUTGOING_TX.value),
            name=token_bytes(),
        )
        # Incoming record on this CC wallet; sent=10 marks it already pushed.
        cc_record = TransactionRecord(
            confirmed_at_height=uint32(0),
            created_at_time=uint64(int(time.time())),
            to_puzzle_hash=cc_coin.puzzle_hash,
            amount=uint64(cc_coin.amount),
            fee_amount=uint64(0),
            confirmed=False,
            sent=uint32(10),
            spend_bundle=None,
            additions=spend_bundle.additions(),
            removals=spend_bundle.removals(),
            wallet_id=self.id(),
            sent_to=[],
            trade_id=None,
            type=uint32(TransactionType.INCOMING_TX.value),
            name=token_bytes(),
        )
        await self.standard_wallet.push_transaction(regular_record)
        await self.standard_wallet.push_transaction(cc_record)
        return self
    @staticmethod
    async def create_wallet_for_cc(
        wallet_state_manager: Any,
        wallet: Wallet,
        genesis_checker_hex: str,
    ) -> CCWallet:
        """Create a wallet for an EXISTING colour, identified by its
        serialized genesis-checker program (hex), and register it with the
        wallet state manager.
        """
        self = CCWallet()
        self.cost_of_single_tx = None
        self.base_puzzle_program = None
        self.base_inner_puzzle_hash = None
        self.standard_wallet = wallet
        self.log = logging.getLogger(__name__)
        self.wallet_state_manager = wallet_state_manager
        # No lineage proofs yet; they accumulate as coins are observed.
        self.cc_info = CCInfo(Program.from_bytes(bytes.fromhex(genesis_checker_hex)), [])
        info_as_string = bytes(self.cc_info).hex()
        self.wallet_info = await wallet_state_manager.user_store.create_wallet(
            "CC Wallet", WalletType.COLOURED_COIN, info_as_string
        )
        if self.wallet_info is None:
            raise Exception("wallet_info is None")
        await self.wallet_state_manager.add_new_wallet(self, self.id())
        return self
    @staticmethod
    async def create(
        wallet_state_manager: Any,
        wallet: Wallet,
        wallet_info: WalletInfo,
    ) -> CCWallet:
        """Rehydrate a CCWallet from a persisted WalletInfo record
        (CCInfo is stored hex-encoded in wallet_info.data).
        """
        self = CCWallet()
        self.log = logging.getLogger(__name__)
        self.cost_of_single_tx = None
        self.wallet_state_manager = wallet_state_manager
        self.wallet_info = wallet_info
        self.standard_wallet = wallet
        self.cc_info = CCInfo.from_bytes(hexstr_to_bytes(self.wallet_info.data))
        self.base_puzzle_program = None
        self.base_inner_puzzle_hash = None
        return self
    @classmethod
    def type(cls) -> uint8:
        """Wallet type discriminator: coloured coin."""
        return uint8(WalletType.COLOURED_COIN)

    def id(self) -> uint32:
        """This wallet's DB id."""
        return self.wallet_info.id
    async def get_confirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint64:
        """Sum the amounts of unspent coins for which a lineage proof is
        known; coins without a proof are excluded from the balance.
        """
        if record_list is None:
            record_list = await self.wallet_state_manager.coin_store.get_unspent_coins_for_wallet(self.id())
        amount: uint64 = uint64(0)
        for record in record_list:
            lineage = await self.get_lineage_proof_for_coin(record.coin)
            if lineage is not None:
                amount = uint64(amount + record.coin.amount)
        self.log.info(f"Confirmed balance for cc wallet {self.id()} is {amount}")
        return uint64(amount)
    async def get_unconfirmed_balance(self, unspent_records=None) -> uint128:
        """Confirmed balance adjusted for pending transactions:
        plus unconfirmed incoming amounts, minus unconfirmed outgoing.
        """
        confirmed = await self.get_confirmed_balance(unspent_records)
        unconfirmed_tx: List[TransactionRecord] = await self.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(
            self.id()
        )
        addition_amount = 0
        removal_amount = 0
        for record in unconfirmed_tx:
            if TransactionType(record.type) is TransactionType.INCOMING_TX:
                addition_amount += record.amount
            else:
                removal_amount += record.amount
        result = confirmed - removal_amount + addition_amount
        self.log.info(f"Unconfirmed balance for cc wallet {self.id()} is {result}")
        return uint128(result)
async def get_max_send_amount(self, records=None):
spendable: List[WalletCoinRecord] = list(
await self.wallet_state_manager.get_spendable_coins_for_wallet(self.id(), records)
)
if len(spendable) == 0:
return 0
spendable.sort(reverse=True, key=lambda record: record.coin.amount)
if self.cost_of_single_tx is None:
coin = spendable[0].coin
tx = await self.generate_signed_transaction(
[coin.amount], [coin.puzzle_hash], coins={coin}, ignore_max_send_amount=True
)
program: BlockGenerator = simple_solution_generator(tx.spend_bundle)
# npc contains names of the coins removed, puzzle_hashes and their spend conditions
result: NPCResult = get_name_puzzle_conditions(
program,
self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM,
cost_per_byte=self.wallet_state_manager.constants.COST_PER_BYTE,
safe_mode=True,
)
cost_result: uint64 = calculate_cost_of_program(
program.program, result, self.wallet_state_manager.constants.COST_PER_BYTE
)
self.cost_of_single_tx = cost_result
self.log.info(f"Cost of a single tx for standard wallet: {self.cost_of_single_tx}")
max_cost = self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM / 2 # avoid full block TXs
current_cost = 0
total_amount = 0
total_coin_count = 0
for record in spendable:
current_cost += self.cost_of_single_tx
total_amount += record.coin.amount
total_coin_count += 1
if current_cost + self.cost_of_single_tx > max_cost:
break
return total_amount
    async def get_name(self):
        """Return the wallet's display name."""
        return self.wallet_info.name

    async def set_name(self, new_name: str):
        """Rename the wallet and persist the change."""
        new_info = replace(self.wallet_info, name=new_name)
        self.wallet_info = new_info
        await self.wallet_state_manager.user_store.update_wallet(self.wallet_info, False)

    def get_colour(self) -> str:
        """The colour identifier: hex of the genesis-checker program."""
        assert self.cc_info.my_genesis_checker is not None
        return bytes(self.cc_info.my_genesis_checker).hex()
    async def coin_added(self, coin: Coin, height: uint32):
        """Notification from wallet state manager that wallet has been received.

        Records a lineage proof for the new coin and, if the parent's
        lineage is unknown, queues a request_puzzle_solution action so the
        parent's proof can be fetched from a full node.
        """
        self.log.info(f"CC wallet has been notified that {coin} was added")
        search_for_parent: bool = True
        inner_puzzle = await self.inner_puzzle_for_cc_puzhash(coin.puzzle_hash)
        # (1, [parent_id, inner_puzzle_hash, amount]) — proof that this coin
        # descends from a valid coloured coin.
        lineage_proof = Program.to((1, [coin.parent_coin_info, inner_puzzle.get_tree_hash(), coin.amount]))
        await self.add_lineage(coin.name(), lineage_proof, True)
        for name, lineage_proofs in self.cc_info.lineage_proofs:
            if coin.parent_coin_info == name:
                # Parent already known: no need to query the network.
                search_for_parent = False
                break
        if search_for_parent:
            data: Dict[str, Any] = {
                "data": {
                    "action_data": {
                        "api_name": "request_puzzle_solution",
                        "height": height,
                        "coin_name": coin.parent_coin_info,
                        "received_coin": coin.name(),
                    }
                }
            }
            data_str = dict_to_json_str(data)
            await self.wallet_state_manager.create_action(
                name="request_puzzle_solution",
                wallet_id=self.id(),
                wallet_type=self.type(),
                callback="puzzle_solution_received",
                done=False,
                data=data_str,
                in_transaction=True,
            )
    async def puzzle_solution_received(self, response: PuzzleSolutionResponse, action_id: int):
        """Callback for a request_puzzle_solution action: derive and store
        the parent coin's lineage proof from its revealed puzzle, then mark
        the action done.
        """
        coin_name = response.coin_name
        height = response.height
        puzzle: Program = response.puzzle
        # Uncurry to confirm the parent really is a CC puzzle.
        r = uncurry_cc(puzzle)
        header_hash = self.wallet_state_manager.blockchain.height_to_hash(height)
        block: Optional[
            HeaderBlockRecord
        ] = await self.wallet_state_manager.blockchain.block_store.get_header_block_record(header_hash)
        if block is None:
            return None
        removals = block.removals
        if r is not None:
            mod_hash, genesis_coin_checker, inner_puzzle = r
            self.log.info(f"parent: {coin_name} inner_puzzle for parent is {inner_puzzle}")
            parent_coin = None
            for coin in removals:
                if coin.name() == coin_name:
                    parent_coin = coin
            if parent_coin is None:
                raise ValueError("Error in finding parent")
            lineage_proof = get_lineage_proof_from_coin_and_puz(parent_coin, puzzle)
            await self.add_lineage(coin_name, lineage_proof)
        await self.wallet_state_manager.action_store.action_done(action_id)
    # Thin delegators: inner puzzles/addresses come from the standard wallet.
    async def get_new_inner_hash(self) -> bytes32:
        """New inner-puzzle hash from the standard wallet's key hierarchy."""
        return await self.standard_wallet.get_new_puzzlehash()

    async def get_new_inner_puzzle(self) -> Program:
        """New inner puzzle from the standard wallet."""
        return await self.standard_wallet.get_new_puzzle()

    async def get_puzzle_hash(self, new: bool):
        """Current (or new, if *new*) standard-wallet puzzle hash."""
        return await self.standard_wallet.get_puzzle_hash(new)

    async def get_new_puzzlehash(self) -> bytes32:
        """New standard-wallet puzzle hash."""
        return await self.standard_wallet.get_new_puzzlehash()
    def puzzle_for_pk(self, pubkey) -> Program:
        """Wrap the standard p2 puzzle for *pubkey* in this colour's CC
        outer puzzle; caches the serialized result and inner hash.
        """
        inner_puzzle = self.standard_wallet.puzzle_for_pk(bytes(pubkey))
        cc_puzzle: Program = cc_puzzle_for_inner_puzzle(CC_MOD, self.cc_info.my_genesis_checker, inner_puzzle)
        self.base_puzzle_program = bytes(cc_puzzle)
        self.base_inner_puzzle_hash = inner_puzzle.get_tree_hash()
        return cc_puzzle

    async def get_new_cc_puzzle_hash(self):
        """Next unused CC (outer) puzzle hash for this wallet."""
        return (await self.wallet_state_manager.get_unused_derivation_record(self.id())).puzzle_hash
    # Create a new coin of value 0 with a given colour
    async def generate_zero_val_coin(self, send=True, exclude: List[Coin] = None) -> SpendBundle:
        """Create a zero-value coin of this wallet's colour (used e.g. as a
        trade placeholder).  If *send*, pushes the transaction; always
        returns the spend bundle.
        """
        if self.cc_info.my_genesis_checker is None:
            raise ValueError("My genesis checker is None")
        if exclude is None:
            exclude = []
        coins = await self.standard_wallet.select_coins(0, exclude)
        assert coins != set()
        origin = coins.copy().pop()
        origin_id = origin.name()
        cc_inner = await self.get_new_inner_hash()
        # NOTE(review): annotation corrected — this helper returns a puzzle
        # *hash* (used below as Coin.puzzle_hash), not a Program.
        cc_puzzle_hash: bytes32 = cc_puzzle_hash_for_inner_puzzle_hash(
            CC_MOD, self.cc_info.my_genesis_checker, cc_inner
        )
        tx: TransactionRecord = await self.standard_wallet.generate_signed_transaction(
            uint64(0), cc_puzzle_hash, uint64(0), origin_id, coins
        )
        assert tx.spend_bundle is not None
        full_spend: SpendBundle = tx.spend_bundle
        self.log.info(f"Generate zero val coin: cc_puzzle_hash is {cc_puzzle_hash}")
        # generate eve coin so we can add future lineage_proofs even if we don't eve spend
        eve_coin = Coin(origin_id, cc_puzzle_hash, uint64(0))
        await self.add_lineage(
            eve_coin.name(),
            Program.to(
                (
                    1,
                    [eve_coin.parent_coin_info, cc_inner, eve_coin.amount],
                )
            ),
        )
        # Genesis-style proof for the eve coin's parent.
        await self.add_lineage(eve_coin.parent_coin_info, Program.to((0, [origin.as_list(), 1])))
        if send:
            # Record for the funding wallet (sent=10: already pushed)…
            regular_record = TransactionRecord(
                confirmed_at_height=uint32(0),
                created_at_time=uint64(int(time.time())),
                to_puzzle_hash=cc_puzzle_hash,
                amount=uint64(0),
                fee_amount=uint64(0),
                confirmed=False,
                sent=uint32(10),
                spend_bundle=full_spend,
                additions=full_spend.additions(),
                removals=full_spend.removals(),
                wallet_id=uint32(1),
                sent_to=[],
                trade_id=None,
                type=uint32(TransactionType.INCOMING_TX.value),
                name=token_bytes(),
            )
            # …and the pending record for this CC wallet.
            cc_record = TransactionRecord(
                confirmed_at_height=uint32(0),
                created_at_time=uint64(int(time.time())),
                to_puzzle_hash=cc_puzzle_hash,
                amount=uint64(0),
                fee_amount=uint64(0),
                confirmed=False,
                sent=uint32(0),
                spend_bundle=full_spend,
                additions=full_spend.additions(),
                removals=full_spend.removals(),
                wallet_id=self.id(),
                sent_to=[],
                trade_id=None,
                type=uint32(TransactionType.INCOMING_TX.value),
                name=full_spend.name(),
            )
            await self.wallet_state_manager.add_transaction(regular_record)
            await self.wallet_state_manager.add_pending_transaction(cc_record)
        return full_spend
    async def get_spendable_balance(self, records=None) -> uint64:
        """Sum of amounts of currently spendable CC coins (those with a
        known lineage proof and not locked by pending spends).
        """
        coins = await self.get_cc_spendable_coins(records)
        amount = 0
        for record in coins:
            amount += record.coin.amount
        return uint64(amount)
    async def get_pending_change_balance(self) -> uint64:
        """Amount of change expected back from our own in-mempool spends:
        sums additions to this wallet from transactions where we spent at
        least one of our own (non-eve) coins.
        """
        unconfirmed_tx = await self.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(self.id())
        addition_amount = 0
        for record in unconfirmed_tx:
            if not record.is_in_mempool():
                continue
            our_spend = False
            for coin in record.removals:
                # Don't count eve spend as change
                if coin.parent_coin_info.hex() == self.get_colour():
                    continue
                if await self.wallet_state_manager.does_coin_belong_to_wallet(coin, self.id()):
                    our_spend = True
                    break
            if our_spend is not True:
                continue
            for coin in record.additions:
                if await self.wallet_state_manager.does_coin_belong_to_wallet(coin, self.id()):
                    addition_amount += coin.amount
        return uint64(addition_amount)
    async def get_cc_spendable_coins(self, records=None) -> List[WalletCoinRecord]:
        """Spendable coin records filtered to those whose lineage proof is
        known (a CC coin can only be spent with its proof).
        """
        result: List[WalletCoinRecord] = []
        record_list: Set[WalletCoinRecord] = await self.wallet_state_manager.get_spendable_coins_for_wallet(
            self.id(), records
        )
        for record in record_list:
            lineage = await self.get_lineage_proof_for_coin(record.coin)
            if lineage is not None:
                result.append(record)
        return result
async def select_coins(self, amount: uint64) -> Set[Coin]:
"""
Returns a set of coins that can be used for generating a new transaction.
Note: Must be called under wallet state manager lock
"""
spendable_am = await self.get_confirmed_balance()
if amount > spendable_am:
error_msg = f"Can't select amount higher than our spendable balance {amount}, spendable {spendable_am}"
self.log.warning(error_msg)
raise ValueError(error_msg)
self.log.info(f"About to select coins for amount {amount}")
spendable: List[WalletCoinRecord] = await self.get_cc_spendable_coins()
sum = 0
used_coins: Set = set()
# Use older coins first
spendable.sort(key=lambda r: r.confirmed_block_height)
# Try to use coins from the store, if there isn't enough of "unused"
# coins use change coins that are not confirmed yet
unconfirmed_removals: Dict[bytes32, Coin] = await self.wallet_state_manager.unconfirmed_removals_for_wallet(
self.id()
)
for coinrecord in spendable:
if sum >= amount and len(used_coins) > 0:
break
if coinrecord.coin.name() in unconfirmed_removals:
continue
sum += coinrecord.coin.amount
used_coins.add(coinrecord.coin)
self.log.info(f"Selected coin: {coinrecord.coin.name()} at height {coinrecord.confirmed_block_height}!")
# This happens when we couldn't use one of the coins because it's already used
# but unconfirmed, and we are waiting for the change. (unconfirmed_additions)
if sum < amount:
raise ValueError(
"Can't make this transaction at the moment. Waiting for the change from the previous transaction."
)
self.log.info(f"Successfully selected coins: {used_coins}")
return used_coins
    async def get_sigs(self, innerpuz: Program, innersol: Program, coin_name: bytes32) -> List[G2Element]:
        """BLS signatures for every AGG_SIG condition produced by running
        *innerpuz* with *innersol*, signed with the synthetic key derived
        from the puzzle's owner key.
        """
        puzzle_hash = innerpuz.get_tree_hash()
        pubkey, private = await self.wallet_state_manager.get_keys(puzzle_hash)
        synthetic_secret_key = calculate_synthetic_secret_key(private, DEFAULT_HIDDEN_PUZZLE_HASH)
        sigs: List[G2Element] = []
        error, conditions, cost = conditions_dict_for_solution(
            innerpuz, innersol, self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM
        )
        if conditions is not None:
            # One signature per (pk, message) pair demanded by the conditions.
            for _, msg in pkm_pairs_for_conditions_dict(
                conditions, coin_name, self.wallet_state_manager.constants.AGG_SIG_ME_ADDITIONAL_DATA
            ):
                signature = AugSchemeMPL.sign(synthetic_secret_key, msg)
                sigs.append(signature)
        return sigs
    async def inner_puzzle_for_cc_puzhash(self, cc_hash: bytes32) -> Program:
        """Reconstruct the inner (standard) puzzle whose wrapped CC puzzle
        hashes to *cc_hash*, via the stored derivation record.
        """
        record: DerivationRecord = await self.wallet_state_manager.puzzle_store.get_derivation_record_for_puzzle_hash(
            cc_hash.hex()
        )
        inner_puzzle: Program = self.standard_wallet.puzzle_for_pk(bytes(record.pubkey))
        return inner_puzzle
    async def get_lineage_proof_for_coin(self, coin) -> Optional[Program]:
        """Stored lineage proof for *coin* (keyed by its parent's id), or
        None if the parent is unknown.
        """
        for name, proof in self.cc_info.lineage_proofs:
            if name == coin.parent_coin_info:
                return proof
        return None
    async def generate_signed_transaction(
        self,
        amounts: List[uint64],
        puzzle_hashes: List[bytes32],
        fee: uint64 = uint64(0),
        origin_id: bytes32 = None,
        coins: Set[Coin] = None,
        ignore_max_send_amount: bool = False,
    ) -> TransactionRecord:
        """Build and sign a CC spend paying `amounts[i]` to
        `puzzle_hashes[i]` (plus change to a fresh inner hash), returning
        an unsent TransactionRecord carrying the spend bundle.

        Raises ValueError when the total exceeds the per-tx maximum (unless
        *ignore_max_send_amount*) or a selected coin lacks a lineage proof.
        """
        # Get coins and calculate amount of change required
        outgoing_amount = uint64(sum(amounts))
        total_outgoing = outgoing_amount + fee
        if not ignore_max_send_amount:
            max_send = await self.get_max_send_amount()
            if total_outgoing > max_send:
                raise ValueError(f"Can't send more than {max_send} in a single transaction")
        if coins is None:
            selected_coins: Set[Coin] = await self.select_coins(uint64(total_outgoing))
        else:
            selected_coins = coins
        total_amount = sum([x.amount for x in selected_coins])
        change = total_amount - total_outgoing
        primaries = []
        for amount, puzzle_hash in zip(amounts, puzzle_hashes):
            primaries.append({"puzzlehash": puzzle_hash, "amount": amount})
        if change > 0:
            changepuzzlehash = await self.get_new_inner_hash()
            primaries.append({"puzzlehash": changepuzzlehash, "amount": change})
        coin = list(selected_coins)[0]
        inner_puzzle = await self.inner_puzzle_for_cc_puzhash(coin.puzzle_hash)
        if self.cc_info.my_genesis_checker is None:
            raise ValueError("My genesis checker is None")
        genesis_id = genesis_coin_id_for_genesis_coin_checker(self.cc_info.my_genesis_checker)
        spendable_cc_list = []
        innersol_list = []
        sigs: List[G2Element] = []
        first = True
        # Only the first coin's solution creates outputs (and pays the fee);
        # the rest contribute value with empty solutions.
        for coin in selected_coins:
            coin_inner_puzzle = await self.inner_puzzle_for_cc_puzhash(coin.puzzle_hash)
            if first:
                first = False
                if fee > 0:
                    innersol = self.standard_wallet.make_solution(primaries=primaries, fee=fee)
                else:
                    innersol = self.standard_wallet.make_solution(primaries=primaries)
            else:
                innersol = self.standard_wallet.make_solution()
            innersol_list.append(innersol)
            lineage_proof = await self.get_lineage_proof_for_coin(coin)
            assert lineage_proof is not None
            spendable_cc_list.append(SpendableCC(coin, genesis_id, inner_puzzle, lineage_proof))
            sigs = sigs + await self.get_sigs(coin_inner_puzzle, innersol, coin.name())
        spend_bundle = spend_bundle_for_spendable_ccs(
            CC_MOD,
            self.cc_info.my_genesis_checker,
            spendable_cc_list,
            innersol_list,
            sigs,
        )
        # TODO add support for array in stored records
        return TransactionRecord(
            confirmed_at_height=uint32(0),
            created_at_time=uint64(int(time.time())),
            to_puzzle_hash=puzzle_hashes[0],
            amount=uint64(outgoing_amount),
            fee_amount=uint64(0),
            confirmed=False,
            sent=uint32(0),
            spend_bundle=spend_bundle,
            additions=spend_bundle.additions(),
            removals=spend_bundle.removals(),
            wallet_id=self.id(),
            sent_to=[],
            trade_id=None,
            type=uint32(TransactionType.OUTGOING_TX.value),
            name=spend_bundle.name(),
        )
    async def add_lineage(self, name: bytes32, lineage: Optional[Program], in_transaction=False):
        """Append a (coin id -> lineage proof) entry to cc_info and persist
        the updated wallet info.
        """
        self.log.info(f"Adding parent {name}: {lineage}")
        current_list = self.cc_info.lineage_proofs.copy()
        current_list.append((name, lineage))
        cc_info: CCInfo = CCInfo(self.cc_info.my_genesis_checker, current_list)
        await self.save_info(cc_info, in_transaction)
    async def save_info(self, cc_info: CCInfo, in_transaction):
        """Serialize *cc_info* into wallet_info.data and persist it via the
        user store (inside an existing DB transaction if *in_transaction*).
        """
        self.cc_info = cc_info
        current_info = self.wallet_info
        data_str = bytes(cc_info).hex()
        wallet_info = WalletInfo(current_info.id, current_info.name, current_info.type, data_str)
        self.wallet_info = wallet_info
        await self.wallet_state_manager.user_store.update_wallet(wallet_info, in_transaction)
    async def generate_new_coloured_coin(self, amount: uint64) -> SpendBundle:
        """Mint the genesis coin of a new colour worth *amount*.

        The colour is defined by a genesis checker curried with the origin
        coin's id; stores the genesis lineage proof and returns the signed
        spend bundle (does not push it).
        """
        coins = await self.standard_wallet.select_coins(amount)
        origin = coins.copy().pop()
        origin_id = origin.name()
        cc_inner_hash = await self.get_new_inner_hash()
        await self.add_lineage(origin_id, Program.to((0, [origin.as_list(), 0])))
        genesis_coin_checker = create_genesis_or_zero_coin_checker(origin_id)
        minted_cc_puzzle_hash = cc_puzzle_hash_for_inner_puzzle_hash(CC_MOD, genesis_coin_checker, cc_inner_hash)
        tx_record: TransactionRecord = await self.standard_wallet.generate_signed_transaction(
            amount, minted_cc_puzzle_hash, uint64(0), origin_id, coins
        )
        assert tx_record.spend_bundle is not None
        lineage_proof: Optional[Program] = lineage_proof_for_genesis(origin)
        lineage_proofs = [(origin_id, lineage_proof)]
        cc_info: CCInfo = CCInfo(genesis_coin_checker, lineage_proofs)
        await self.save_info(cc_info, False)
        return tx_record.spend_bundle
    async def create_spend_bundle_relative_amount(self, cc_amount, zero_coin: Coin = None) -> Optional[SpendBundle]:
        """Build a spend bundle that changes this wallet's CC holdings by a
        RELATIVE *cc_amount* (negative = give value away, positive = expect
        to receive it via *zero_coin*).  Used for trade offers; returns
        None when no suitable coins are available.
        """
        # If we're losing value then get coloured coins with at least that much value
        # If we're gaining value then our amount doesn't matter
        if cc_amount < 0:
            cc_spends = await self.select_coins(abs(cc_amount))
        else:
            if zero_coin is None:
                return None
            cc_spends = set()
            cc_spends.add(zero_coin)
        if cc_spends is None:
            return None
        # Calculate output amount given relative difference and sum of actual values
        spend_value = sum([coin.amount for coin in cc_spends])
        cc_amount = spend_value + cc_amount
        # Loop through coins and create solution for innerpuzzle
        list_of_solutions = []
        output_created = None
        sigs: List[G2Element] = []
        for coin in cc_spends:
            if output_created is None:
                # First coin creates the single output for the new total.
                newinnerpuzhash = await self.get_new_inner_hash()
                innersol = self.standard_wallet.make_solution(
                    primaries=[{"puzzlehash": newinnerpuzhash, "amount": cc_amount}]
                )
                output_created = coin
            else:
                innersol = self.standard_wallet.make_solution(consumed=[output_created.name()])
            innerpuz: Program = await self.inner_puzzle_for_cc_puzhash(coin.puzzle_hash)
            sigs = sigs + await self.get_sigs(innerpuz, innersol, coin.name())
            lineage_proof = await self.get_lineage_proof_for_coin(coin)
            puzzle_reveal = cc_puzzle_for_inner_puzzle(CC_MOD, self.cc_info.my_genesis_checker, innerpuz)
            # Use coin info to create solution and add coin and solution to list of CoinSpends
            solution = [
                innersol,
                coin.as_list(),
                lineage_proof,
                None,
                None,
                None,
                None,
                None,
            ]
            list_of_solutions.append(CoinSpend(coin, puzzle_reveal, Program.to(solution)))
        aggsig = AugSchemeMPL.aggregate(sigs)
        return SpendBundle(list_of_solutions, aggsig)
| 40.589572 | 118 | 0.647969 | from __future__ import annotations
import logging
import time
from dataclasses import replace
from secrets import token_bytes
from typing import Any, Dict, List, Optional, Set
from blspy import AugSchemeMPL, G2Element
from covid.consensus.cost_calculator import calculate_cost_of_program, NPCResult
from covid.full_node.bundle_tools import simple_solution_generator
from covid.full_node.mempool_check_conditions import get_name_puzzle_conditions
from covid.protocols.wallet_protocol import PuzzleSolutionResponse
from covid.types.blockchain_format.coin import Coin
from covid.types.blockchain_format.program import Program
from covid.types.blockchain_format.sized_bytes import bytes32
from covid.types.coin_spend import CoinSpend
from covid.types.generator_types import BlockGenerator
from covid.types.spend_bundle import SpendBundle
from covid.util.byte_types import hexstr_to_bytes
from covid.util.condition_tools import conditions_dict_for_solution, pkm_pairs_for_conditions_dict
from covid.util.ints import uint8, uint32, uint64, uint128
from covid.util.json_util import dict_to_json_str
from covid.wallet.block_record import HeaderBlockRecord
from covid.wallet.cc_wallet.cc_info import CCInfo
from covid.wallet.cc_wallet.cc_utils import (
CC_MOD,
SpendableCC,
cc_puzzle_for_inner_puzzle,
cc_puzzle_hash_for_inner_puzzle_hash,
get_lineage_proof_from_coin_and_puz,
spend_bundle_for_spendable_ccs,
uncurry_cc,
)
from covid.wallet.derivation_record import DerivationRecord
from covid.wallet.puzzles.genesis_by_coin_id_with_0 import (
create_genesis_or_zero_coin_checker,
genesis_coin_id_for_genesis_coin_checker,
lineage_proof_for_genesis,
)
from covid.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import (
DEFAULT_HIDDEN_PUZZLE_HASH,
calculate_synthetic_secret_key,
)
from covid.wallet.transaction_record import TransactionRecord
from covid.wallet.util.transaction_type import TransactionType
from covid.wallet.util.wallet_types import WalletType
from covid.wallet.wallet import Wallet
from covid.wallet.wallet_coin_record import WalletCoinRecord
from covid.wallet.wallet_info import WalletInfo
class CCWallet:
wallet_state_manager: Any
log: logging.Logger
wallet_info: WalletInfo
cc_coin_record: WalletCoinRecord
cc_info: CCInfo
standard_wallet: Wallet
base_puzzle_program: Optional[bytes]
base_inner_puzzle_hash: Optional[bytes32]
cost_of_single_tx: Optional[int]
@staticmethod
async def create_new_cc(
wallet_state_manager: Any,
wallet: Wallet,
amount: uint64,
):
self = CCWallet()
self.cost_of_single_tx = None
self.base_puzzle_program = None
self.base_inner_puzzle_hash = None
self.standard_wallet = wallet
self.log = logging.getLogger(__name__)
self.wallet_state_manager = wallet_state_manager
self.cc_info = CCInfo(None, [])
info_as_string = bytes(self.cc_info).hex()
self.wallet_info = await wallet_state_manager.user_store.create_wallet(
"CC Wallet", WalletType.COLOURED_COIN, info_as_string
)
if self.wallet_info is None:
raise ValueError("Internal Error")
try:
spend_bundle = await self.generate_new_coloured_coin(amount)
except Exception:
await wallet_state_manager.user_store.delete_wallet(self.id())
raise
await self.wallet_state_manager.add_new_wallet(self, self.id())
non_ephemeral_spends: List[Coin] = spend_bundle.not_ephemeral_additions()
cc_coin = None
puzzle_store = self.wallet_state_manager.puzzle_store
for c in non_ephemeral_spends:
info = await puzzle_store.wallet_info_for_puzzle_hash(c.puzzle_hash)
if info is None:
raise ValueError("Internal Error")
id, wallet_type = info
if id == self.id():
cc_coin = c
if cc_coin is None:
raise ValueError("Internal Error, unable to generate new coloured coin")
regular_record = TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=uint64(int(time.time())),
to_puzzle_hash=cc_coin.puzzle_hash,
amount=uint64(cc_coin.amount),
fee_amount=uint64(0),
confirmed=False,
sent=uint32(0),
spend_bundle=spend_bundle,
additions=spend_bundle.additions(),
removals=spend_bundle.removals(),
wallet_id=self.wallet_state_manager.main_wallet.id(),
sent_to=[],
trade_id=None,
type=uint32(TransactionType.OUTGOING_TX.value),
name=token_bytes(),
)
cc_record = TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=uint64(int(time.time())),
to_puzzle_hash=cc_coin.puzzle_hash,
amount=uint64(cc_coin.amount),
fee_amount=uint64(0),
confirmed=False,
sent=uint32(10),
spend_bundle=None,
additions=spend_bundle.additions(),
removals=spend_bundle.removals(),
wallet_id=self.id(),
sent_to=[],
trade_id=None,
type=uint32(TransactionType.INCOMING_TX.value),
name=token_bytes(),
)
await self.standard_wallet.push_transaction(regular_record)
await self.standard_wallet.push_transaction(cc_record)
return self
@staticmethod
async def create_wallet_for_cc(
wallet_state_manager: Any,
wallet: Wallet,
genesis_checker_hex: str,
) -> CCWallet:
self = CCWallet()
self.cost_of_single_tx = None
self.base_puzzle_program = None
self.base_inner_puzzle_hash = None
self.standard_wallet = wallet
self.log = logging.getLogger(__name__)
self.wallet_state_manager = wallet_state_manager
self.cc_info = CCInfo(Program.from_bytes(bytes.fromhex(genesis_checker_hex)), [])
info_as_string = bytes(self.cc_info).hex()
self.wallet_info = await wallet_state_manager.user_store.create_wallet(
"CC Wallet", WalletType.COLOURED_COIN, info_as_string
)
if self.wallet_info is None:
raise Exception("wallet_info is None")
await self.wallet_state_manager.add_new_wallet(self, self.id())
return self
@staticmethod
async def create(
wallet_state_manager: Any,
wallet: Wallet,
wallet_info: WalletInfo,
) -> CCWallet:
self = CCWallet()
self.log = logging.getLogger(__name__)
self.cost_of_single_tx = None
self.wallet_state_manager = wallet_state_manager
self.wallet_info = wallet_info
self.standard_wallet = wallet
self.cc_info = CCInfo.from_bytes(hexstr_to_bytes(self.wallet_info.data))
self.base_puzzle_program = None
self.base_inner_puzzle_hash = None
return self
@classmethod
def type(cls) -> uint8:
return uint8(WalletType.COLOURED_COIN)
def id(self) -> uint32:
return self.wallet_info.id
async def get_confirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint64:
if record_list is None:
record_list = await self.wallet_state_manager.coin_store.get_unspent_coins_for_wallet(self.id())
amount: uint64 = uint64(0)
for record in record_list:
lineage = await self.get_lineage_proof_for_coin(record.coin)
if lineage is not None:
amount = uint64(amount + record.coin.amount)
self.log.info(f"Confirmed balance for cc wallet {self.id()} is {amount}")
return uint64(amount)
async def get_unconfirmed_balance(self, unspent_records=None) -> uint128:
confirmed = await self.get_confirmed_balance(unspent_records)
unconfirmed_tx: List[TransactionRecord] = await self.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(
self.id()
)
addition_amount = 0
removal_amount = 0
for record in unconfirmed_tx:
if TransactionType(record.type) is TransactionType.INCOMING_TX:
addition_amount += record.amount
else:
removal_amount += record.amount
result = confirmed - removal_amount + addition_amount
self.log.info(f"Unconfirmed balance for cc wallet {self.id()} is {result}")
return uint128(result)
async def get_max_send_amount(self, records=None):
spendable: List[WalletCoinRecord] = list(
await self.wallet_state_manager.get_spendable_coins_for_wallet(self.id(), records)
)
if len(spendable) == 0:
return 0
spendable.sort(reverse=True, key=lambda record: record.coin.amount)
if self.cost_of_single_tx is None:
coin = spendable[0].coin
tx = await self.generate_signed_transaction(
[coin.amount], [coin.puzzle_hash], coins={coin}, ignore_max_send_amount=True
)
program: BlockGenerator = simple_solution_generator(tx.spend_bundle)
result: NPCResult = get_name_puzzle_conditions(
program,
self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM,
cost_per_byte=self.wallet_state_manager.constants.COST_PER_BYTE,
safe_mode=True,
)
cost_result: uint64 = calculate_cost_of_program(
program.program, result, self.wallet_state_manager.constants.COST_PER_BYTE
)
self.cost_of_single_tx = cost_result
self.log.info(f"Cost of a single tx for standard wallet: {self.cost_of_single_tx}")
max_cost = self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM / 2
current_cost = 0
total_amount = 0
total_coin_count = 0
for record in spendable:
current_cost += self.cost_of_single_tx
total_amount += record.coin.amount
total_coin_count += 1
if current_cost + self.cost_of_single_tx > max_cost:
break
return total_amount
async def get_name(self):
return self.wallet_info.name
async def set_name(self, new_name: str):
new_info = replace(self.wallet_info, name=new_name)
self.wallet_info = new_info
await self.wallet_state_manager.user_store.update_wallet(self.wallet_info, False)
def get_colour(self) -> str:
assert self.cc_info.my_genesis_checker is not None
return bytes(self.cc_info.my_genesis_checker).hex()
async def coin_added(self, coin: Coin, height: uint32):
self.log.info(f"CC wallet has been notified that {coin} was added")
search_for_parent: bool = True
inner_puzzle = await self.inner_puzzle_for_cc_puzhash(coin.puzzle_hash)
lineage_proof = Program.to((1, [coin.parent_coin_info, inner_puzzle.get_tree_hash(), coin.amount]))
await self.add_lineage(coin.name(), lineage_proof, True)
for name, lineage_proofs in self.cc_info.lineage_proofs:
if coin.parent_coin_info == name:
search_for_parent = False
break
if search_for_parent:
data: Dict[str, Any] = {
"data": {
"action_data": {
"api_name": "request_puzzle_solution",
"height": height,
"coin_name": coin.parent_coin_info,
"received_coin": coin.name(),
}
}
}
data_str = dict_to_json_str(data)
await self.wallet_state_manager.create_action(
name="request_puzzle_solution",
wallet_id=self.id(),
wallet_type=self.type(),
callback="puzzle_solution_received",
done=False,
data=data_str,
in_transaction=True,
)
async def puzzle_solution_received(self, response: PuzzleSolutionResponse, action_id: int):
coin_name = response.coin_name
height = response.height
puzzle: Program = response.puzzle
r = uncurry_cc(puzzle)
header_hash = self.wallet_state_manager.blockchain.height_to_hash(height)
block: Optional[
HeaderBlockRecord
] = await self.wallet_state_manager.blockchain.block_store.get_header_block_record(header_hash)
if block is None:
return None
removals = block.removals
if r is not None:
mod_hash, genesis_coin_checker, inner_puzzle = r
self.log.info(f"parent: {coin_name} inner_puzzle for parent is {inner_puzzle}")
parent_coin = None
for coin in removals:
if coin.name() == coin_name:
parent_coin = coin
if parent_coin is None:
raise ValueError("Error in finding parent")
lineage_proof = get_lineage_proof_from_coin_and_puz(parent_coin, puzzle)
await self.add_lineage(coin_name, lineage_proof)
await self.wallet_state_manager.action_store.action_done(action_id)
async def get_new_inner_hash(self) -> bytes32:
return await self.standard_wallet.get_new_puzzlehash()
async def get_new_inner_puzzle(self) -> Program:
return await self.standard_wallet.get_new_puzzle()
async def get_puzzle_hash(self, new: bool):
return await self.standard_wallet.get_puzzle_hash(new)
async def get_new_puzzlehash(self) -> bytes32:
return await self.standard_wallet.get_new_puzzlehash()
def puzzle_for_pk(self, pubkey) -> Program:
inner_puzzle = self.standard_wallet.puzzle_for_pk(bytes(pubkey))
cc_puzzle: Program = cc_puzzle_for_inner_puzzle(CC_MOD, self.cc_info.my_genesis_checker, inner_puzzle)
self.base_puzzle_program = bytes(cc_puzzle)
self.base_inner_puzzle_hash = inner_puzzle.get_tree_hash()
return cc_puzzle
async def get_new_cc_puzzle_hash(self):
return (await self.wallet_state_manager.get_unused_derivation_record(self.id())).puzzle_hash
async def generate_zero_val_coin(self, send=True, exclude: List[Coin] = None) -> SpendBundle:
if self.cc_info.my_genesis_checker is None:
raise ValueError("My genesis checker is None")
if exclude is None:
exclude = []
coins = await self.standard_wallet.select_coins(0, exclude)
assert coins != set()
origin = coins.copy().pop()
origin_id = origin.name()
cc_inner = await self.get_new_inner_hash()
cc_puzzle_hash: Program = cc_puzzle_hash_for_inner_puzzle_hash(
CC_MOD, self.cc_info.my_genesis_checker, cc_inner
)
tx: TransactionRecord = await self.standard_wallet.generate_signed_transaction(
uint64(0), cc_puzzle_hash, uint64(0), origin_id, coins
)
assert tx.spend_bundle is not None
full_spend: SpendBundle = tx.spend_bundle
self.log.info(f"Generate zero val coin: cc_puzzle_hash is {cc_puzzle_hash}")
eve_coin = Coin(origin_id, cc_puzzle_hash, uint64(0))
await self.add_lineage(
eve_coin.name(),
Program.to(
(
1,
[eve_coin.parent_coin_info, cc_inner, eve_coin.amount],
)
),
)
await self.add_lineage(eve_coin.parent_coin_info, Program.to((0, [origin.as_list(), 1])))
if send:
regular_record = TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=uint64(int(time.time())),
to_puzzle_hash=cc_puzzle_hash,
amount=uint64(0),
fee_amount=uint64(0),
confirmed=False,
sent=uint32(10),
spend_bundle=full_spend,
additions=full_spend.additions(),
removals=full_spend.removals(),
wallet_id=uint32(1),
sent_to=[],
trade_id=None,
type=uint32(TransactionType.INCOMING_TX.value),
name=token_bytes(),
)
cc_record = TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=uint64(int(time.time())),
to_puzzle_hash=cc_puzzle_hash,
amount=uint64(0),
fee_amount=uint64(0),
confirmed=False,
sent=uint32(0),
spend_bundle=full_spend,
additions=full_spend.additions(),
removals=full_spend.removals(),
wallet_id=self.id(),
sent_to=[],
trade_id=None,
type=uint32(TransactionType.INCOMING_TX.value),
name=full_spend.name(),
)
await self.wallet_state_manager.add_transaction(regular_record)
await self.wallet_state_manager.add_pending_transaction(cc_record)
return full_spend
async def get_spendable_balance(self, records=None) -> uint64:
coins = await self.get_cc_spendable_coins(records)
amount = 0
for record in coins:
amount += record.coin.amount
return uint64(amount)
async def get_pending_change_balance(self) -> uint64:
unconfirmed_tx = await self.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(self.id())
addition_amount = 0
for record in unconfirmed_tx:
if not record.is_in_mempool():
continue
our_spend = False
for coin in record.removals:
# Don't count eve spend as change
if coin.parent_coin_info.hex() == self.get_colour():
continue
if await self.wallet_state_manager.does_coin_belong_to_wallet(coin, self.id()):
our_spend = True
break
if our_spend is not True:
continue
for coin in record.additions:
if await self.wallet_state_manager.does_coin_belong_to_wallet(coin, self.id()):
addition_amount += coin.amount
return uint64(addition_amount)
async def get_cc_spendable_coins(self, records=None) -> List[WalletCoinRecord]:
result: List[WalletCoinRecord] = []
record_list: Set[WalletCoinRecord] = await self.wallet_state_manager.get_spendable_coins_for_wallet(
self.id(), records
)
for record in record_list:
lineage = await self.get_lineage_proof_for_coin(record.coin)
if lineage is not None:
result.append(record)
return result
async def select_coins(self, amount: uint64) -> Set[Coin]:
spendable_am = await self.get_confirmed_balance()
if amount > spendable_am:
error_msg = f"Can't select amount higher than our spendable balance {amount}, spendable {spendable_am}"
self.log.warning(error_msg)
raise ValueError(error_msg)
self.log.info(f"About to select coins for amount {amount}")
spendable: List[WalletCoinRecord] = await self.get_cc_spendable_coins()
sum = 0
used_coins: Set = set()
# Use older coins first
spendable.sort(key=lambda r: r.confirmed_block_height)
# Try to use coins from the store, if there isn't enough of "unused"
unconfirmed_removals: Dict[bytes32, Coin] = await self.wallet_state_manager.unconfirmed_removals_for_wallet(
self.id()
)
for coinrecord in spendable:
if sum >= amount and len(used_coins) > 0:
break
if coinrecord.coin.name() in unconfirmed_removals:
continue
sum += coinrecord.coin.amount
used_coins.add(coinrecord.coin)
self.log.info(f"Selected coin: {coinrecord.coin.name()} at height {coinrecord.confirmed_block_height}!")
if sum < amount:
raise ValueError(
"Can't make this transaction at the moment. Waiting for the change from the previous transaction."
)
self.log.info(f"Successfully selected coins: {used_coins}")
return used_coins
async def get_sigs(self, innerpuz: Program, innersol: Program, coin_name: bytes32) -> List[G2Element]:
puzzle_hash = innerpuz.get_tree_hash()
pubkey, private = await self.wallet_state_manager.get_keys(puzzle_hash)
synthetic_secret_key = calculate_synthetic_secret_key(private, DEFAULT_HIDDEN_PUZZLE_HASH)
sigs: List[G2Element] = []
error, conditions, cost = conditions_dict_for_solution(
innerpuz, innersol, self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM
)
if conditions is not None:
for _, msg in pkm_pairs_for_conditions_dict(
conditions, coin_name, self.wallet_state_manager.constants.AGG_SIG_ME_ADDITIONAL_DATA
):
signature = AugSchemeMPL.sign(synthetic_secret_key, msg)
sigs.append(signature)
return sigs
async def inner_puzzle_for_cc_puzhash(self, cc_hash: bytes32) -> Program:
record: DerivationRecord = await self.wallet_state_manager.puzzle_store.get_derivation_record_for_puzzle_hash(
cc_hash.hex()
)
inner_puzzle: Program = self.standard_wallet.puzzle_for_pk(bytes(record.pubkey))
return inner_puzzle
async def get_lineage_proof_for_coin(self, coin) -> Optional[Program]:
for name, proof in self.cc_info.lineage_proofs:
if name == coin.parent_coin_info:
return proof
return None
async def generate_signed_transaction(
self,
amounts: List[uint64],
puzzle_hashes: List[bytes32],
fee: uint64 = uint64(0),
origin_id: bytes32 = None,
coins: Set[Coin] = None,
ignore_max_send_amount: bool = False,
) -> TransactionRecord:
# Get coins and calculate amount of change required
outgoing_amount = uint64(sum(amounts))
total_outgoing = outgoing_amount + fee
if not ignore_max_send_amount:
max_send = await self.get_max_send_amount()
if total_outgoing > max_send:
raise ValueError(f"Can't send more than {max_send} in a single transaction")
if coins is None:
selected_coins: Set[Coin] = await self.select_coins(uint64(total_outgoing))
else:
selected_coins = coins
total_amount = sum([x.amount for x in selected_coins])
change = total_amount - total_outgoing
primaries = []
for amount, puzzle_hash in zip(amounts, puzzle_hashes):
primaries.append({"puzzlehash": puzzle_hash, "amount": amount})
if change > 0:
changepuzzlehash = await self.get_new_inner_hash()
primaries.append({"puzzlehash": changepuzzlehash, "amount": change})
coin = list(selected_coins)[0]
inner_puzzle = await self.inner_puzzle_for_cc_puzhash(coin.puzzle_hash)
if self.cc_info.my_genesis_checker is None:
raise ValueError("My genesis checker is None")
genesis_id = genesis_coin_id_for_genesis_coin_checker(self.cc_info.my_genesis_checker)
spendable_cc_list = []
innersol_list = []
sigs: List[G2Element] = []
first = True
for coin in selected_coins:
coin_inner_puzzle = await self.inner_puzzle_for_cc_puzhash(coin.puzzle_hash)
if first:
first = False
if fee > 0:
innersol = self.standard_wallet.make_solution(primaries=primaries, fee=fee)
else:
innersol = self.standard_wallet.make_solution(primaries=primaries)
else:
innersol = self.standard_wallet.make_solution()
innersol_list.append(innersol)
lineage_proof = await self.get_lineage_proof_for_coin(coin)
assert lineage_proof is not None
spendable_cc_list.append(SpendableCC(coin, genesis_id, inner_puzzle, lineage_proof))
sigs = sigs + await self.get_sigs(coin_inner_puzzle, innersol, coin.name())
spend_bundle = spend_bundle_for_spendable_ccs(
CC_MOD,
self.cc_info.my_genesis_checker,
spendable_cc_list,
innersol_list,
sigs,
)
return TransactionRecord(
confirmed_at_height=uint32(0),
created_at_time=uint64(int(time.time())),
to_puzzle_hash=puzzle_hashes[0],
amount=uint64(outgoing_amount),
fee_amount=uint64(0),
confirmed=False,
sent=uint32(0),
spend_bundle=spend_bundle,
additions=spend_bundle.additions(),
removals=spend_bundle.removals(),
wallet_id=self.id(),
sent_to=[],
trade_id=None,
type=uint32(TransactionType.OUTGOING_TX.value),
name=spend_bundle.name(),
)
async def add_lineage(self, name: bytes32, lineage: Optional[Program], in_transaction=False):
self.log.info(f"Adding parent {name}: {lineage}")
current_list = self.cc_info.lineage_proofs.copy()
current_list.append((name, lineage))
cc_info: CCInfo = CCInfo(self.cc_info.my_genesis_checker, current_list)
await self.save_info(cc_info, in_transaction)
async def save_info(self, cc_info: CCInfo, in_transaction):
self.cc_info = cc_info
current_info = self.wallet_info
data_str = bytes(cc_info).hex()
wallet_info = WalletInfo(current_info.id, current_info.name, current_info.type, data_str)
self.wallet_info = wallet_info
await self.wallet_state_manager.user_store.update_wallet(wallet_info, in_transaction)
async def generate_new_coloured_coin(self, amount: uint64) -> SpendBundle:
coins = await self.standard_wallet.select_coins(amount)
origin = coins.copy().pop()
origin_id = origin.name()
cc_inner_hash = await self.get_new_inner_hash()
await self.add_lineage(origin_id, Program.to((0, [origin.as_list(), 0])))
genesis_coin_checker = create_genesis_or_zero_coin_checker(origin_id)
minted_cc_puzzle_hash = cc_puzzle_hash_for_inner_puzzle_hash(CC_MOD, genesis_coin_checker, cc_inner_hash)
tx_record: TransactionRecord = await self.standard_wallet.generate_signed_transaction(
amount, minted_cc_puzzle_hash, uint64(0), origin_id, coins
)
assert tx_record.spend_bundle is not None
lineage_proof: Optional[Program] = lineage_proof_for_genesis(origin)
lineage_proofs = [(origin_id, lineage_proof)]
cc_info: CCInfo = CCInfo(genesis_coin_checker, lineage_proofs)
await self.save_info(cc_info, False)
return tx_record.spend_bundle
async def create_spend_bundle_relative_amount(self, cc_amount, zero_coin: Coin = None) -> Optional[SpendBundle]:
# If we're gaining value then our amount doesn't matter
if cc_amount < 0:
cc_spends = await self.select_coins(abs(cc_amount))
else:
if zero_coin is None:
return None
cc_spends = set()
cc_spends.add(zero_coin)
if cc_spends is None:
return None
# Calculate output amount given relative difference and sum of actual values
spend_value = sum([coin.amount for coin in cc_spends])
cc_amount = spend_value + cc_amount
# Loop through coins and create solution for innerpuzzle
list_of_solutions = []
output_created = None
sigs: List[G2Element] = []
for coin in cc_spends:
if output_created is None:
newinnerpuzhash = await self.get_new_inner_hash()
innersol = self.standard_wallet.make_solution(
primaries=[{"puzzlehash": newinnerpuzhash, "amount": cc_amount}]
)
output_created = coin
else:
innersol = self.standard_wallet.make_solution(consumed=[output_created.name()])
innerpuz: Program = await self.inner_puzzle_for_cc_puzhash(coin.puzzle_hash)
sigs = sigs + await self.get_sigs(innerpuz, innersol, coin.name())
lineage_proof = await self.get_lineage_proof_for_coin(coin)
puzzle_reveal = cc_puzzle_for_inner_puzzle(CC_MOD, self.cc_info.my_genesis_checker, innerpuz)
# Use coin info to create solution and add coin and solution to list of CoinSpends
solution = [
innersol,
coin.as_list(),
lineage_proof,
None,
None,
None,
None,
None,
]
list_of_solutions.append(CoinSpend(coin, puzzle_reveal, Program.to(solution)))
aggsig = AugSchemeMPL.aggregate(sigs)
return SpendBundle(list_of_solutions, aggsig)
| true | true |
f72bd026e80c9b36f6569dca3e1e436f15855ecb | 417 | py | Python | tests/integration/test_segway_train.py | procha2/segway-pipeline | d0d3b8603eea9c9cbe92b56899c670dc41e89ca8 | [
"MIT"
] | 1 | 2021-03-13T11:34:45.000Z | 2021-03-13T11:34:45.000Z | tests/integration/test_segway_train.py | procha2/segway-pipeline | d0d3b8603eea9c9cbe92b56899c670dc41e89ca8 | [
"MIT"
] | null | null | null | tests/integration/test_segway_train.py | procha2/segway-pipeline | d0d3b8603eea9c9cbe92b56899c670dc41e89ca8 | [
"MIT"
] | 1 | 2020-10-01T11:48:17.000Z | 2020-10-01T11:48:17.000Z | from pathlib import Path
import pytest
@pytest.mark.workflow("test_segway_train")
def test_segway_train_traindirs_match(test_data_dir, workflow_dir, traindirs_match):
actual_traindir_path = workflow_dir / Path("test-output/traindir.tar.gz")
expected_traindir_path = test_data_dir / Path("segway_train_traindir.tar.gz")
assert traindirs_match(actual_traindir_path, expected_traindir_path, workflow_dir)
| 37.909091 | 86 | 0.822542 | from pathlib import Path
import pytest
@pytest.mark.workflow("test_segway_train")
def test_segway_train_traindirs_match(test_data_dir, workflow_dir, traindirs_match):
actual_traindir_path = workflow_dir / Path("test-output/traindir.tar.gz")
expected_traindir_path = test_data_dir / Path("segway_train_traindir.tar.gz")
assert traindirs_match(actual_traindir_path, expected_traindir_path, workflow_dir)
| true | true |
f72bd02dde43d0b610faacf919e7788a178ef0a9 | 1,359 | py | Python | vsts/vsts/service_endpoint/v4_1/models/service_endpoint_execution_owner.py | kenkuo/azure-devops-python-api | 9e920bd25e938fa89ff7f60153e5b9e113ca839d | [
"MIT"
] | null | null | null | vsts/vsts/service_endpoint/v4_1/models/service_endpoint_execution_owner.py | kenkuo/azure-devops-python-api | 9e920bd25e938fa89ff7f60153e5b9e113ca839d | [
"MIT"
] | null | null | null | vsts/vsts/service_endpoint/v4_1/models/service_endpoint_execution_owner.py | kenkuo/azure-devops-python-api | 9e920bd25e938fa89ff7f60153e5b9e113ca839d | [
"MIT"
] | null | null | null | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class ServiceEndpointExecutionOwner(Model):
"""ServiceEndpointExecutionOwner.
:param _links:
:type _links: :class:`ReferenceLinks <service-endpoint.v4_1.models.ReferenceLinks>`
:param id: Gets or sets the Id of service endpoint execution owner.
:type id: int
:param name: Gets or sets the name of service endpoint execution owner.
:type name: str
"""
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, _links=None, id=None, name=None):
super(ServiceEndpointExecutionOwner, self).__init__()
self._links = _links
self.id = id
self.name = name
| 39.970588 | 94 | 0.532009 |
from msrest.serialization import Model
class ServiceEndpointExecutionOwner(Model):
_attribute_map = {
'_links': {'key': '_links', 'type': 'ReferenceLinks'},
'id': {'key': 'id', 'type': 'int'},
'name': {'key': 'name', 'type': 'str'}
}
def __init__(self, _links=None, id=None, name=None):
super(ServiceEndpointExecutionOwner, self).__init__()
self._links = _links
self.id = id
self.name = name
| true | true |
f72bd05b364336d0dfa60b4f0cb91cab8257f3a8 | 17,511 | py | Python | cirq-google/cirq_google/serialization/op_serializer_test.py | augustehirth/Cirq | e616710a0fa243524a9f6d7bc0d35e6b952fe3d0 | [
"Apache-2.0"
] | null | null | null | cirq-google/cirq_google/serialization/op_serializer_test.py | augustehirth/Cirq | e616710a0fa243524a9f6d7bc0d35e6b952fe3d0 | [
"Apache-2.0"
] | null | null | null | cirq-google/cirq_google/serialization/op_serializer_test.py | augustehirth/Cirq | e616710a0fa243524a9f6d7bc0d35e6b952fe3d0 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Dict, List
import copy
import numpy as np
import pytest
import sympy
from google.protobuf import json_format
import cirq
import cirq_google as cg
from cirq_google.api import v2
DEFAULT_TOKEN = 'test_tag'
def op_proto(json: Dict) -> v2.program_pb2.Operation:
op = v2.program_pb2.Operation()
json_format.ParseDict(json, op)
return op
class GateWithAttribute(cirq.SingleQubitGate):
def __init__(self, val):
self.val = val
class GateWithProperty(cirq.SingleQubitGate):
def __init__(self, val, not_req=None):
self._val = val
self._not_req = not_req
@property
def val(self):
return self._val
class GateWithMethod(cirq.SingleQubitGate):
def __init__(self, val):
self._val = val
def get_val(self):
return self._val
class SubclassGate(GateWithAttribute):
pass
def get_val(op):
return op.gate.get_val()
TEST_CASES = (
(float, 1.0, {'arg_value': {'float_value': 1.0}}),
(str, 'abc', {'arg_value': {'string_value': 'abc'}}),
(float, 1, {'arg_value': {'float_value': 1.0}}),
(List[bool], [True, False], {'arg_value': {'bool_values': {'values': [True, False]}}}),
(List[bool], (True, False), {'arg_value': {'bool_values': {'values': [True, False]}}}),
(
List[bool],
np.array([True, False], dtype=bool),
{'arg_value': {'bool_values': {'values': [True, False]}}},
),
(sympy.Symbol, sympy.Symbol('x'), {'symbol': 'x'}),
(float, sympy.Symbol('x'), {'symbol': 'x'}),
(
float,
sympy.Symbol('x') - sympy.Symbol('y'),
{
'func': {
'type': 'add',
'args': [
{'symbol': 'x'},
{
'func': {
'type': 'mul',
'args': [{'arg_value': {'float_value': -1.0}}, {'symbol': 'y'}],
}
},
],
}
},
),
)
@pytest.mark.parametrize(('val_type', 'val', 'arg_value'), TEST_CASES)
def test_to_proto_attribute(val_type, val, arg_value):
serializer = cg.GateOpSerializer(
gate_type=GateWithAttribute,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(serialized_name='my_val', serialized_type=val_type, op_getter='val')
],
)
q = cirq.GridQubit(1, 2)
result = serializer.to_proto(GateWithAttribute(val)(q), arg_function_language='linear')
expected = op_proto(
{'gate': {'id': 'my_gate'}, 'args': {'my_val': arg_value}, 'qubits': [{'id': '1_2'}]}
)
assert result == expected
@pytest.mark.parametrize(('val_type', 'val', 'arg_value'), TEST_CASES)
def test_to_proto_property(val_type, val, arg_value):
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(serialized_name='my_val', serialized_type=val_type, op_getter='val')
],
)
q = cirq.GridQubit(1, 2)
result = serializer.to_proto(GateWithProperty(val)(q), arg_function_language='linear')
expected = op_proto(
{'gate': {'id': 'my_gate'}, 'args': {'my_val': arg_value}, 'qubits': [{'id': '1_2'}]}
)
assert result == expected
@pytest.mark.parametrize(('val_type', 'val', 'arg_value'), TEST_CASES)
def test_to_proto_callable(val_type, val, arg_value):
serializer = cg.GateOpSerializer(
gate_type=GateWithMethod,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(serialized_name='my_val', serialized_type=val_type, op_getter=get_val)
],
)
q = cirq.GridQubit(1, 2)
result = serializer.to_proto(GateWithMethod(val)(q), arg_function_language='linear')
expected = op_proto(
{'gate': {'id': 'my_gate'}, 'args': {'my_val': arg_value}, 'qubits': [{'id': '1_2'}]}
)
assert result == expected
def test_to_proto_gate_predicate():
serializer = cg.GateOpSerializer(
gate_type=GateWithAttribute,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
can_serialize_predicate=lambda x: x.gate.val == 1,
)
q = cirq.GridQubit(1, 2)
assert serializer.to_proto(GateWithAttribute(0)(q)) is None
assert serializer.to_proto(GateWithAttribute(1)(q)) is not None
assert not serializer.can_serialize_operation(GateWithAttribute(0)(q))
assert serializer.can_serialize_operation(GateWithAttribute(1)(q))
def test_to_proto_gate_mismatch():
    """Serializing an op of the wrong gate type raises, naming both gate types."""
    serializer = cg.GateOpSerializer(
        gate_type=GateWithProperty,
        serialized_gate_id='my_gate',
        args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
    )
    wrong_type_op = GateWithAttribute(1.0)(cirq.GridQubit(1, 2))
    with pytest.raises(ValueError, match='GateWithAttribute.*GateWithProperty'):
        serializer.to_proto(wrong_type_op)
def test_to_proto_unsupported_type():
    """An arg whose serialized_type is outside the supported set raises ValueError."""
    serializer = cg.GateOpSerializer(
        gate_type=GateWithProperty,
        serialized_gate_id='my_gate',
        args=[cg.SerializingArg(serialized_name='my_val', serialized_type=bytes, op_getter='val')],
    )
    op = GateWithProperty(b's')(cirq.GridQubit(1, 2))
    # The error message should mention the unsupported type name.
    with pytest.raises(ValueError, match='bytes'):
        serializer.to_proto(op)
def test_to_proto_named_qubit_supported():
    """NamedQubit names pass straight through as the proto qubit id."""
    serializer = cg.GateOpSerializer(
        gate_type=GateWithProperty,
        serialized_gate_id='my_gate',
        args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
    )
    val = 1.0
    op = GateWithProperty(val)(cirq.NamedQubit('a'))
    expected = op_proto(
        {
            'gate': {'id': 'my_gate'},
            'args': {'my_val': {'arg_value': {'float_value': val}}},
            'qubits': [{'id': 'a'}],
        }
    )
    assert serializer.to_proto(op) == expected
def test_to_proto_line_qubit_supported():
    """LineQubit coordinates serialize to their string form as the proto qubit id.

    Fix: the original passed the string ``'10'`` to ``cirq.LineQubit``, whose
    coordinate is declared as an integer; it only worked because serialization
    applies ``str()``. Use a proper int — the expected id ``'10'`` is unchanged.
    """
    serializer = cg.GateOpSerializer(
        gate_type=GateWithProperty,
        serialized_gate_id='my_gate',
        args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
    )
    q = cirq.LineQubit(10)
    arg_value = 1.0
    result = serializer.to_proto(GateWithProperty(arg_value)(q))
    expected = op_proto(
        {
            'gate': {'id': 'my_gate'},
            'args': {'my_val': {'arg_value': {'float_value': arg_value}}},
            'qubits': [{'id': '10'}],
        }
    )
    assert result == expected
def test_to_proto_required_but_not_present():
    """A required arg whose getter yields None makes serialization raise."""
    serializer = cg.GateOpSerializer(
        gate_type=GateWithProperty,
        serialized_gate_id='my_gate',
        args=[
            # The getter deliberately returns None for an arg that is
            # required by default.
            cg.SerializingArg(
                serialized_name='my_val', serialized_type=float, op_getter=lambda x: None
            )
        ],
    )
    op = GateWithProperty(1.0)(cirq.GridQubit(1, 2))
    with pytest.raises(ValueError, match='required'):
        serializer.to_proto(op)
def test_to_proto_no_getattr():
    """An op_getter naming a nonexistent attribute makes serialization raise."""
    serializer = cg.GateOpSerializer(
        gate_type=GateWithProperty,
        serialized_gate_id='my_gate',
        args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='nope')],
    )
    op = GateWithProperty(1.0)(cirq.GridQubit(1, 2))
    with pytest.raises(ValueError, match='does not have'):
        serializer.to_proto(op)
def test_to_proto_not_required_ok():
    """Optional args (required=False) may be absent without failing serialization."""
    serializer = cg.GateOpSerializer(
        gate_type=GateWithProperty,
        serialized_gate_id='my_gate',
        args=[
            cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val'),
            cg.SerializingArg(
                serialized_name='not_req',
                serialized_type=float,
                op_getter='not_req',
                required=False,
            ),
        ],
    )
    qubit = cirq.GridQubit(1, 2)
    # 'not_req' is left unset on the gate, so only 'my_val' shows up in the proto.
    expected = op_proto(
        {
            'gate': {'id': 'my_gate'},
            'args': {'my_val': {'arg_value': {'float_value': 0.125}}},
            'qubits': [{'id': '1_2'}],
        }
    )
    assert serializer.to_proto(GateWithProperty(0.125)(qubit)) == expected
@pytest.mark.parametrize(
    ('val_type', 'val'),
    (
        (float, 's'),
        (str, 1.0),
        (sympy.Symbol, 1.0),
        (List[bool], [1.0]),
        (List[bool], 'a'),
        (List[bool], (1.0,)),
    ),
)
def test_to_proto_type_mismatch(val_type, val):
    """A value whose runtime type disagrees with serialized_type raises ValueError."""
    serializer = cg.GateOpSerializer(
        gate_type=GateWithProperty,
        serialized_gate_id='my_gate',
        args=[
            cg.SerializingArg(serialized_name='my_val', serialized_type=val_type, op_getter='val')
        ],
    )
    op = GateWithProperty(val)(cirq.GridQubit(1, 2))
    # The error message should name the offending runtime type.
    with pytest.raises(ValueError, match=str(type(val))):
        serializer.to_proto(op)
def test_can_serialize_operation_subclass():
    """Gates subclassing gate_type are accepted, still subject to the predicate."""
    serializer = cg.GateOpSerializer(
        gate_type=GateWithAttribute,
        serialized_gate_id='my_gate',
        args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
        can_serialize_predicate=lambda x: x.gate.val == 1,
    )
    qubit = cirq.GridQubit(1, 1)
    assert serializer.can_serialize_operation(SubclassGate(1)(qubit))
    assert not serializer.can_serialize_operation(SubclassGate(0)(qubit))
def test_defaults_not_serialized():
    """Args equal to their declared default are omitted from the proto."""
    serializer = cg.GateOpSerializer(
        gate_type=GateWithAttribute,
        serialized_gate_id='my_gate',
        args=[
            cg.SerializingArg(
                serialized_name='my_val', serialized_type=float, default=1.0, op_getter='val'
            )
        ],
    )
    qubit = cirq.GridQubit(1, 2)
    # A non-default value is serialized explicitly.
    no_default = op_proto(
        {
            'gate': {'id': 'my_gate'},
            'args': {'my_val': {'arg_value': {'float_value': 0.125}}},
            'qubits': [{'id': '1_2'}],
        }
    )
    assert no_default == serializer.to_proto(GateWithAttribute(0.125)(qubit))
    # The default value (1.0) is dropped, leaving no args at all.
    with_default = op_proto({'gate': {'id': 'my_gate'}, 'qubits': [{'id': '1_2'}]})
    assert with_default == serializer.to_proto(GateWithAttribute(1.0)(qubit))
def test_token_serialization():
    """A CalibrationTag on the op serializes into the proto's token_value field."""
    serializer = cg.GateOpSerializer(
        gate_type=GateWithAttribute,
        serialized_gate_id='my_gate',
        args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
    )
    tagged_op = GateWithAttribute(0.125)(cirq.GridQubit(1, 2)).with_tags(
        cg.CalibrationTag('my_token')
    )
    expected = op_proto(
        {
            'gate': {'id': 'my_gate'},
            'args': {'my_val': {'arg_value': {'float_value': 0.125}}},
            'qubits': [{'id': '1_2'}],
            'token_value': 'my_token',
        }
    )
    assert expected == serializer.to_proto(tagged_op)
# Pre-built constants tables used to exercise token -> constant-index
# resolution in test_token_serialization_with_constant_reference.
ONE_CONSTANT = [v2.program_pb2.Constant(string_value='my_token')]
TWO_CONSTANTS = [
    v2.program_pb2.Constant(string_value='other_token'),
    v2.program_pb2.Constant(string_value='my_token'),
]
@pytest.mark.parametrize(
    ('constants', 'expected_index', 'expected_constants'),
    (
        ([], 0, ONE_CONSTANT),
        (ONE_CONSTANT, 0, ONE_CONSTANT),
        (TWO_CONSTANTS, 1, TWO_CONSTANTS),
    ),
)
def test_token_serialization_with_constant_reference(constants, expected_index, expected_constants):
    """With a constants table, tokens serialize as an index into that table.

    A token not yet in the table is appended; an existing one is referenced
    at its current position.
    """
    serializer = cg.GateOpSerializer(
        gate_type=GateWithAttribute,
        serialized_gate_id='my_gate',
        args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
    )
    # Work on a shallow copy: to_proto mutates the constants list in-place.
    constants = copy.copy(constants)
    tagged_op = GateWithAttribute(0.125)(cirq.GridQubit(1, 2)).with_tags(
        cg.CalibrationTag('my_token')
    )
    expected = op_proto(
        {
            'gate': {'id': 'my_gate'},
            'args': {'my_val': {'arg_value': {'float_value': 0.125}}},
            'qubits': [{'id': '1_2'}],
            'token_constant_index': expected_index,
        }
    )
    assert expected == serializer.to_proto(tagged_op, constants=constants)
    assert constants == expected_constants
def default_circuit_proto():
    """Build the v2 Circuit proto corresponding to the gate ops of default_circuit()."""
    sym_op = v2.program_pb2.Operation()
    sym_op.gate.id = 'x_pow'
    sym_op.args['half_turns'].arg_value.string_value = 'k'
    sym_op.qubits.add().id = '1_1'

    tagged_op = v2.program_pb2.Operation()
    tagged_op.gate.id = 'x_pow'
    tagged_op.args['half_turns'].arg_value.float_value = 1.0
    tagged_op.qubits.add().id = '1_2'
    # References constants[0], which holds DEFAULT_TOKEN in the tests below.
    tagged_op.token_constant_index = 0

    return v2.program_pb2.Circuit(
        scheduling_strategy=v2.program_pb2.Circuit.MOMENT_BY_MOMENT,
        moments=[v2.program_pb2.Moment(operations=[sym_op, tagged_op])],
    )
def default_circuit():
    """Frozen circuit shared by the CircuitOpSerializer tests below."""
    q1, q2 = cirq.GridQubit(1, 1), cirq.GridQubit(1, 2)
    return cirq.FrozenCircuit(
        cirq.X(q1) ** sympy.Symbol('k'),
        cirq.X(q2).with_tags(DEFAULT_TOKEN),
        cirq.measure(q1, key='m'),
    )
def test_circuit_op_serializer_properties():
    """CircuitOpSerializer advertises its serialized id and internal type."""
    serializer = cg.CircuitOpSerializer()
    assert serializer.serialized_id == 'circuit'
    assert serializer.internal_type == cirq.FrozenCircuit
def test_can_serialize_circuit_op():
    """Only CircuitOperations (not plain gate ops) are accepted for serialization."""
    serializer = cg.CircuitOpSerializer()
    circuit_op = cirq.CircuitOperation(default_circuit())
    plain_op = cirq.X(cirq.GridQubit(1, 1))
    assert serializer.can_serialize_operation(circuit_op)
    assert not serializer.can_serialize_operation(plain_op)
def test_circuit_op_to_proto_errors():
    """CircuitOpSerializer.to_proto validates its inputs before serializing."""
    serializer = cg.CircuitOpSerializer()
    op = cirq.CircuitOperation(default_circuit())
    constants = [
        v2.program_pb2.Constant(string_value=DEFAULT_TOKEN),
        v2.program_pb2.Constant(circuit_value=default_circuit_proto()),
    ]
    raw_constants = {DEFAULT_TOKEN: 0, default_circuit(): 1}

    # Both constants and raw_constants must be supplied together.
    with pytest.raises(ValueError, match='CircuitOp serialization requires a constants list'):
        serializer.to_proto(op)
    with pytest.raises(ValueError, match='CircuitOp serialization requires a constants list'):
        serializer.to_proto(op, constants=constants)
    with pytest.raises(ValueError, match='CircuitOp serialization requires a constants list'):
        serializer.to_proto(op, raw_constants=raw_constants)

    # Only CircuitOperation instances are accepted.
    with pytest.raises(ValueError, match='Serializer expected CircuitOperation'):
        serializer.to_proto(
            v2.program_pb2.Operation(), constants=constants, raw_constants=raw_constants
        )

    # The op's circuit must already be present in the constants table.
    with pytest.raises(ValueError, match='Encountered a circuit not in the constants table'):
        serializer.to_proto(op, constants=constants, raw_constants={cirq.FrozenCircuit(): 0})

    # Symbolic repetition counts cannot be serialized.
    with pytest.raises(ValueError, match='Cannot serialize repetitions of type'):
        serializer.to_proto(
            op ** sympy.Symbol('a'), constants=constants, raw_constants=raw_constants
        )
@pytest.mark.parametrize('repetitions', [1, 5, ['a', 'b', 'c']])
def test_circuit_op_to_proto(repetitions):
    """Serialize a fully-remapped CircuitOperation and compare to a hand-built proto.

    Parametrized over an integer repetition count and an explicit list of
    repetition ids.
    """
    serializer = cg.CircuitOpSerializer()
    if isinstance(repetitions, int):
        repetition_ids = None
    else:
        # List parametrization: treat the entries as explicit repetition ids.
        repetition_ids = repetitions
        repetitions = len(repetition_ids)
    to_serialize = cirq.CircuitOperation(
        circuit=default_circuit(),
        qubit_map={cirq.GridQubit(1, 1): cirq.GridQubit(1, 2)},
        measurement_key_map={'m': 'results'},
        param_resolver={'k': 1.0},
        repetitions=repetitions,
        repetition_ids=repetition_ids,
    )
    # Constants table: index 0 holds the token string, index 1 the subcircuit proto.
    constants = [
        v2.program_pb2.Constant(string_value=DEFAULT_TOKEN),
        v2.program_pb2.Constant(circuit_value=default_circuit_proto()),
    ]
    raw_constants = {
        DEFAULT_TOKEN: 0,
        default_circuit(): 1,
    }
    # Build the expected proto by hand, mirroring the CircuitOperation above.
    repetition_spec = v2.program_pb2.RepetitionSpecification()
    if repetition_ids is None:
        repetition_spec.repetition_count = repetitions
    else:
        for rep_id in repetition_ids:
            repetition_spec.repetition_ids.ids.append(rep_id)
    qubit_map = v2.program_pb2.QubitMapping()
    q_p1 = qubit_map.entries.add()
    q_p1.key.id = '1_1'
    q_p1.value.id = '1_2'
    measurement_key_map = v2.program_pb2.MeasurementKeyMapping()
    meas_p1 = measurement_key_map.entries.add()
    meas_p1.key.string_key = 'm'
    meas_p1.value.string_key = 'results'
    arg_map = v2.program_pb2.ArgMapping()
    arg_p1 = arg_map.entries.add()
    arg_p1.key.arg_value.string_value = 'k'
    arg_p1.value.arg_value.float_value = 1.0
    expected = v2.program_pb2.CircuitOperation(
        circuit_constant_index=1,
        repetition_specification=repetition_spec,
        qubit_map=qubit_map,
        measurement_key_map=measurement_key_map,
        arg_map=arg_map,
    )
    actual = serializer.to_proto(to_serialize, constants=constants, raw_constants=raw_constants)
    assert actual == expected
| 32.669776 | 100 | 0.640911 |
from typing import Dict, List
import copy
import numpy as np
import pytest
import sympy
from google.protobuf import json_format
import cirq
import cirq_google as cg
from cirq_google.api import v2
DEFAULT_TOKEN = 'test_tag'
def op_proto(json: Dict) -> v2.program_pb2.Operation:
op = v2.program_pb2.Operation()
json_format.ParseDict(json, op)
return op
class GateWithAttribute(cirq.SingleQubitGate):
def __init__(self, val):
self.val = val
class GateWithProperty(cirq.SingleQubitGate):
def __init__(self, val, not_req=None):
self._val = val
self._not_req = not_req
@property
def val(self):
return self._val
class GateWithMethod(cirq.SingleQubitGate):
def __init__(self, val):
self._val = val
def get_val(self):
return self._val
class SubclassGate(GateWithAttribute):
pass
def get_val(op):
return op.gate.get_val()
TEST_CASES = (
(float, 1.0, {'arg_value': {'float_value': 1.0}}),
(str, 'abc', {'arg_value': {'string_value': 'abc'}}),
(float, 1, {'arg_value': {'float_value': 1.0}}),
(List[bool], [True, False], {'arg_value': {'bool_values': {'values': [True, False]}}}),
(List[bool], (True, False), {'arg_value': {'bool_values': {'values': [True, False]}}}),
(
List[bool],
np.array([True, False], dtype=bool),
{'arg_value': {'bool_values': {'values': [True, False]}}},
),
(sympy.Symbol, sympy.Symbol('x'), {'symbol': 'x'}),
(float, sympy.Symbol('x'), {'symbol': 'x'}),
(
float,
sympy.Symbol('x') - sympy.Symbol('y'),
{
'func': {
'type': 'add',
'args': [
{'symbol': 'x'},
{
'func': {
'type': 'mul',
'args': [{'arg_value': {'float_value': -1.0}}, {'symbol': 'y'}],
}
},
],
}
},
),
)
@pytest.mark.parametrize(('val_type', 'val', 'arg_value'), TEST_CASES)
def test_to_proto_attribute(val_type, val, arg_value):
serializer = cg.GateOpSerializer(
gate_type=GateWithAttribute,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(serialized_name='my_val', serialized_type=val_type, op_getter='val')
],
)
q = cirq.GridQubit(1, 2)
result = serializer.to_proto(GateWithAttribute(val)(q), arg_function_language='linear')
expected = op_proto(
{'gate': {'id': 'my_gate'}, 'args': {'my_val': arg_value}, 'qubits': [{'id': '1_2'}]}
)
assert result == expected
@pytest.mark.parametrize(('val_type', 'val', 'arg_value'), TEST_CASES)
def test_to_proto_property(val_type, val, arg_value):
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(serialized_name='my_val', serialized_type=val_type, op_getter='val')
],
)
q = cirq.GridQubit(1, 2)
result = serializer.to_proto(GateWithProperty(val)(q), arg_function_language='linear')
expected = op_proto(
{'gate': {'id': 'my_gate'}, 'args': {'my_val': arg_value}, 'qubits': [{'id': '1_2'}]}
)
assert result == expected
@pytest.mark.parametrize(('val_type', 'val', 'arg_value'), TEST_CASES)
def test_to_proto_callable(val_type, val, arg_value):
serializer = cg.GateOpSerializer(
gate_type=GateWithMethod,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(serialized_name='my_val', serialized_type=val_type, op_getter=get_val)
],
)
q = cirq.GridQubit(1, 2)
result = serializer.to_proto(GateWithMethod(val)(q), arg_function_language='linear')
expected = op_proto(
{'gate': {'id': 'my_gate'}, 'args': {'my_val': arg_value}, 'qubits': [{'id': '1_2'}]}
)
assert result == expected
def test_to_proto_gate_predicate():
serializer = cg.GateOpSerializer(
gate_type=GateWithAttribute,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
can_serialize_predicate=lambda x: x.gate.val == 1,
)
q = cirq.GridQubit(1, 2)
assert serializer.to_proto(GateWithAttribute(0)(q)) is None
assert serializer.to_proto(GateWithAttribute(1)(q)) is not None
assert not serializer.can_serialize_operation(GateWithAttribute(0)(q))
assert serializer.can_serialize_operation(GateWithAttribute(1)(q))
def test_to_proto_gate_mismatch():
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
)
q = cirq.GridQubit(1, 2)
with pytest.raises(ValueError, match='GateWithAttribute.*GateWithProperty'):
serializer.to_proto(GateWithAttribute(1.0)(q))
def test_to_proto_unsupported_type():
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=bytes, op_getter='val')],
)
q = cirq.GridQubit(1, 2)
with pytest.raises(ValueError, match='bytes'):
serializer.to_proto(GateWithProperty(b's')(q))
def test_to_proto_named_qubit_supported():
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
)
q = cirq.NamedQubit('a')
arg_value = 1.0
result = serializer.to_proto(GateWithProperty(arg_value)(q))
expected = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {'my_val': {'arg_value': {'float_value': arg_value}}},
'qubits': [{'id': 'a'}],
}
)
assert result == expected
def test_to_proto_line_qubit_supported():
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
)
q = cirq.LineQubit('10')
arg_value = 1.0
result = serializer.to_proto(GateWithProperty(arg_value)(q))
expected = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {'my_val': {'arg_value': {'float_value': arg_value}}},
'qubits': [{'id': '10'}],
}
)
assert result == expected
def test_to_proto_required_but_not_present():
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(
serialized_name='my_val', serialized_type=float, op_getter=lambda x: None
)
],
)
q = cirq.GridQubit(1, 2)
with pytest.raises(ValueError, match='required'):
serializer.to_proto(GateWithProperty(1.0)(q))
def test_to_proto_no_getattr():
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='nope')],
)
q = cirq.GridQubit(1, 2)
with pytest.raises(ValueError, match='does not have'):
serializer.to_proto(GateWithProperty(1.0)(q))
def test_to_proto_not_required_ok():
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val'),
cg.SerializingArg(
serialized_name='not_req',
serialized_type=float,
op_getter='not_req',
required=False,
),
],
)
expected = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {'my_val': {'arg_value': {'float_value': 0.125}}},
'qubits': [{'id': '1_2'}],
}
)
q = cirq.GridQubit(1, 2)
assert serializer.to_proto(GateWithProperty(0.125)(q)) == expected
@pytest.mark.parametrize(
('val_type', 'val'),
(
(float, 's'),
(str, 1.0),
(sympy.Symbol, 1.0),
(List[bool], [1.0]),
(List[bool], 'a'),
(List[bool], (1.0,)),
),
)
def test_to_proto_type_mismatch(val_type, val):
serializer = cg.GateOpSerializer(
gate_type=GateWithProperty,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(serialized_name='my_val', serialized_type=val_type, op_getter='val')
],
)
q = cirq.GridQubit(1, 2)
with pytest.raises(ValueError, match=str(type(val))):
serializer.to_proto(GateWithProperty(val)(q))
def test_can_serialize_operation_subclass():
serializer = cg.GateOpSerializer(
gate_type=GateWithAttribute,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
can_serialize_predicate=lambda x: x.gate.val == 1,
)
q = cirq.GridQubit(1, 1)
assert serializer.can_serialize_operation(SubclassGate(1)(q))
assert not serializer.can_serialize_operation(SubclassGate(0)(q))
def test_defaults_not_serialized():
serializer = cg.GateOpSerializer(
gate_type=GateWithAttribute,
serialized_gate_id='my_gate',
args=[
cg.SerializingArg(
serialized_name='my_val', serialized_type=float, default=1.0, op_getter='val'
)
],
)
q = cirq.GridQubit(1, 2)
no_default = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {'my_val': {'arg_value': {'float_value': 0.125}}},
'qubits': [{'id': '1_2'}],
}
)
assert no_default == serializer.to_proto(GateWithAttribute(0.125)(q))
with_default = op_proto({'gate': {'id': 'my_gate'}, 'qubits': [{'id': '1_2'}]})
assert with_default == serializer.to_proto(GateWithAttribute(1.0)(q))
def test_token_serialization():
serializer = cg.GateOpSerializer(
gate_type=GateWithAttribute,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
)
q = cirq.GridQubit(1, 2)
tag = cg.CalibrationTag('my_token')
expected = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {'my_val': {'arg_value': {'float_value': 0.125}}},
'qubits': [{'id': '1_2'}],
'token_value': 'my_token',
}
)
assert expected == serializer.to_proto(GateWithAttribute(0.125)(q).with_tags(tag))
ONE_CONSTANT = [v2.program_pb2.Constant(string_value='my_token')]
TWO_CONSTANTS = [
v2.program_pb2.Constant(string_value='other_token'),
v2.program_pb2.Constant(string_value='my_token'),
]
@pytest.mark.parametrize(
('constants', 'expected_index', 'expected_constants'),
(
([], 0, ONE_CONSTANT),
(ONE_CONSTANT, 0, ONE_CONSTANT),
(TWO_CONSTANTS, 1, TWO_CONSTANTS),
),
)
def test_token_serialization_with_constant_reference(constants, expected_index, expected_constants):
serializer = cg.GateOpSerializer(
gate_type=GateWithAttribute,
serialized_gate_id='my_gate',
args=[cg.SerializingArg(serialized_name='my_val', serialized_type=float, op_getter='val')],
)
constants = copy.copy(constants)
q = cirq.GridQubit(1, 2)
tag = cg.CalibrationTag('my_token')
expected = op_proto(
{
'gate': {'id': 'my_gate'},
'args': {'my_val': {'arg_value': {'float_value': 0.125}}},
'qubits': [{'id': '1_2'}],
'token_constant_index': expected_index,
}
)
assert expected == serializer.to_proto(
GateWithAttribute(0.125)(q).with_tags(tag), constants=constants
)
assert constants == expected_constants
def default_circuit_proto():
op1 = v2.program_pb2.Operation()
op1.gate.id = 'x_pow'
op1.args['half_turns'].arg_value.string_value = 'k'
op1.qubits.add().id = '1_1'
op2 = v2.program_pb2.Operation()
op2.gate.id = 'x_pow'
op2.args['half_turns'].arg_value.float_value = 1.0
op2.qubits.add().id = '1_2'
op2.token_constant_index = 0
return v2.program_pb2.Circuit(
scheduling_strategy=v2.program_pb2.Circuit.MOMENT_BY_MOMENT,
moments=[
v2.program_pb2.Moment(
operations=[op1, op2],
),
],
)
def default_circuit():
return cirq.FrozenCircuit(
cirq.X(cirq.GridQubit(1, 1)) ** sympy.Symbol('k'),
cirq.X(cirq.GridQubit(1, 2)).with_tags(DEFAULT_TOKEN),
cirq.measure(cirq.GridQubit(1, 1), key='m'),
)
def test_circuit_op_serializer_properties():
serializer = cg.CircuitOpSerializer()
assert serializer.internal_type == cirq.FrozenCircuit
assert serializer.serialized_id == 'circuit'
def test_can_serialize_circuit_op():
serializer = cg.CircuitOpSerializer()
assert serializer.can_serialize_operation(cirq.CircuitOperation(default_circuit()))
assert not serializer.can_serialize_operation(cirq.X(cirq.GridQubit(1, 1)))
def test_circuit_op_to_proto_errors():
serializer = cg.CircuitOpSerializer()
to_serialize = cirq.CircuitOperation(default_circuit())
constants = [
v2.program_pb2.Constant(string_value=DEFAULT_TOKEN),
v2.program_pb2.Constant(circuit_value=default_circuit_proto()),
]
raw_constants = {
DEFAULT_TOKEN: 0,
default_circuit(): 1,
}
with pytest.raises(ValueError, match='CircuitOp serialization requires a constants list'):
serializer.to_proto(to_serialize)
with pytest.raises(ValueError, match='CircuitOp serialization requires a constants list'):
serializer.to_proto(to_serialize, constants=constants)
with pytest.raises(ValueError, match='CircuitOp serialization requires a constants list'):
serializer.to_proto(to_serialize, raw_constants=raw_constants)
with pytest.raises(ValueError, match='Serializer expected CircuitOperation'):
serializer.to_proto(
v2.program_pb2.Operation(), constants=constants, raw_constants=raw_constants
)
bad_raw_constants = {cirq.FrozenCircuit(): 0}
with pytest.raises(ValueError, match='Encountered a circuit not in the constants table'):
serializer.to_proto(to_serialize, constants=constants, raw_constants=bad_raw_constants)
with pytest.raises(ValueError, match='Cannot serialize repetitions of type'):
serializer.to_proto(
to_serialize ** sympy.Symbol('a'), constants=constants, raw_constants=raw_constants
)
@pytest.mark.parametrize('repetitions', [1, 5, ['a', 'b', 'c']])
def test_circuit_op_to_proto(repetitions):
serializer = cg.CircuitOpSerializer()
if isinstance(repetitions, int):
repetition_ids = None
else:
repetition_ids = repetitions
repetitions = len(repetition_ids)
to_serialize = cirq.CircuitOperation(
circuit=default_circuit(),
qubit_map={cirq.GridQubit(1, 1): cirq.GridQubit(1, 2)},
measurement_key_map={'m': 'results'},
param_resolver={'k': 1.0},
repetitions=repetitions,
repetition_ids=repetition_ids,
)
constants = [
v2.program_pb2.Constant(string_value=DEFAULT_TOKEN),
v2.program_pb2.Constant(circuit_value=default_circuit_proto()),
]
raw_constants = {
DEFAULT_TOKEN: 0,
default_circuit(): 1,
}
repetition_spec = v2.program_pb2.RepetitionSpecification()
if repetition_ids is None:
repetition_spec.repetition_count = repetitions
else:
for rep_id in repetition_ids:
repetition_spec.repetition_ids.ids.append(rep_id)
qubit_map = v2.program_pb2.QubitMapping()
q_p1 = qubit_map.entries.add()
q_p1.key.id = '1_1'
q_p1.value.id = '1_2'
measurement_key_map = v2.program_pb2.MeasurementKeyMapping()
meas_p1 = measurement_key_map.entries.add()
meas_p1.key.string_key = 'm'
meas_p1.value.string_key = 'results'
arg_map = v2.program_pb2.ArgMapping()
arg_p1 = arg_map.entries.add()
arg_p1.key.arg_value.string_value = 'k'
arg_p1.value.arg_value.float_value = 1.0
expected = v2.program_pb2.CircuitOperation(
circuit_constant_index=1,
repetition_specification=repetition_spec,
qubit_map=qubit_map,
measurement_key_map=measurement_key_map,
arg_map=arg_map,
)
actual = serializer.to_proto(to_serialize, constants=constants, raw_constants=raw_constants)
assert actual == expected
| true | true |
f72bd06d2f9f175effe7beb9a4509946d29b33e0 | 158 | py | Python | build.py | pengguanjun/zeno_learn | bd5298f14180f1b2ce1edb83305bbc2ce4c7d0c8 | [
"MIT"
] | 4 | 2021-08-03T16:26:52.000Z | 2022-03-30T10:32:23.000Z | build.py | pengguanjun/zeno_learn | bd5298f14180f1b2ce1edb83305bbc2ce4c7d0c8 | [
"MIT"
] | 1 | 2021-11-09T10:54:24.000Z | 2021-11-09T10:54:24.000Z | build.py | pengguanjun/zeno_learn | bd5298f14180f1b2ce1edb83305bbc2ce4c7d0c8 | [
"MIT"
] | 3 | 2021-11-09T10:48:16.000Z | 2021-11-09T15:18:02.000Z | #!/usr/bin/env python3
import subprocess
subprocess.check_call(['cmake', '-B', 'build'])
subprocess.check_call(['cmake', '--build', 'build', '--parallel'])
| 22.571429 | 66 | 0.670886 |
import subprocess
subprocess.check_call(['cmake', '-B', 'build'])
subprocess.check_call(['cmake', '--build', 'build', '--parallel'])
| true | true |
f72bd075c6cfb39433750fb4807078e2c2bb2e91 | 4,623 | py | Python | SuperB/api/serializers.py | rialrustamov/SuperB-E-Commerce-RR | 099fcfb50bd1623237fd352a87d19926dda52904 | [
"MIT"
] | null | null | null | SuperB/api/serializers.py | rialrustamov/SuperB-E-Commerce-RR | 099fcfb50bd1623237fd352a87d19926dda52904 | [
"MIT"
] | null | null | null | SuperB/api/serializers.py | rialrustamov/SuperB-E-Commerce-RR | 099fcfb50bd1623237fd352a87d19926dda52904 | [
"MIT"
] | null | null | null | from rest_framework import serializers
from django.db.models import fields
from product.models import Product, ProductVersion, Category, Review, Image
from order.models import ShoppingCart, Wishlist, CartItem
from blog.models import Category as BlogCategory, Blog
from django.contrib.auth import get_user_model
from user.models import *
from core.models import *
User = get_user_model()
class ProductOverviewSerializer(serializers.ModelSerializer):
review = serializers.SerializerMethodField()
class Meta:
model = Product
fields = '__all__'
def get_review(self, obj):
qs = obj.product_review.all()
return ReviewSerializer(qs, many=True).data
class ProductSerializer(serializers.ModelSerializer):
total_quantity = serializers.SerializerMethodField()
main_product = serializers.SerializerMethodField()
versions = serializers.SerializerMethodField()
main_image = serializers.SerializerMethodField()
product_review = serializers.SerializerMethodField()
class Meta:
model = Product
fields = ['title', 'price', 'discount_price', 'category', 'brand', 'description',
'total_quantity', 'main_product', 'main_image', 'versions', 'product_tag', 'product_review']
def get_total_quantity(self, obj):
return obj.total_quantity
def get_main_product(self, obj):
return ProductVersionSerializer(obj.main_product).data
def get_versions(self, obj):
qs = obj.versions.exclude(id=obj.main_product.id)
return ProductVersionSerializer(qs, many=True).data
def get_main_image(self, obj):
if obj.main_product.main_photo.image:
return obj.main_product.main_photo.image.url
return None
def get_product_review(self, obj):
qs = obj.product_review.all()
return ReviewSerializer(qs, many=True).data
class ImageSerializer(serializers.ModelSerializer):
class Meta:
model = Image
fields = "__all__"
class ProductVersionSerializer(serializers.ModelSerializer):
product = ProductOverviewSerializer()
image = serializers.SerializerMethodField()
class Meta:
model = ProductVersion
fields = '__all__'
def get_image(self, obj):
qs = obj.product_photo.all()
return ImageSerializer(qs, many=True).data
class ReviewSerializer(serializers.ModelSerializer):
class Meta:
model = Review
fields = "__all__"
class UserSerializer(serializers.ModelSerializer):
password_confirmation = serializers.CharField(
style={'input_type': 'password'}, write_only=True)
password = serializers.CharField(
style={'input_type': 'password'}, write_only=True)
def validate(self, attrs):
password = attrs['password']
password_confirmation = attrs.pop('password_confirmation')
if password != password_confirmation:
raise serializers.ValidationError(
{'password': 'Passwords must match.'})
return super().validate(attrs)
def create(self, validated_data):
password = validated_data.pop('password')
validated_data['username'] = validated_data['email']
user = super().create(validated_data=validated_data)
user.set_password(password)
user.save()
return user
class Meta:
model = User
fields = ('email', 'password', 'password_confirmation',
'first_name', 'last_name')
class CategorySerializer(serializers.ModelSerializer):
class Meta:
model = Category
fields = '__all__'
class BlogCategorySerializer(serializers.ModelSerializer):
class Meta:
model = BlogCategory
fields = '__all__'
class BlogSerializer(serializers.ModelSerializer):
class Meta:
model = Blog
fields = '__all__'
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = '__all__'
class CardSerializer(serializers.ModelSerializer):
product = ProductVersionSerializer(many=True)
class Meta:
model = ShoppingCart
fields = '__all__'
class CardItemSerializer(serializers.ModelSerializer):
product = ProductVersionSerializer()
class Meta:
model = CartItem
fields = '__all__'
class WishlistSerializer(serializers.ModelSerializer):
product = ProductVersionSerializer(many=True)
class Meta:
model = Wishlist
fields = '__all__'
class SubscriberSerializer(serializers.ModelSerializer):
class Meta:
model = Subscriber
fields = '__all__' | 28.361963 | 110 | 0.689812 | from rest_framework import serializers
from django.db.models import fields
from product.models import Product, ProductVersion, Category, Review, Image
from order.models import ShoppingCart, Wishlist, CartItem
from blog.models import Category as BlogCategory, Blog
from django.contrib.auth import get_user_model
from user.models import *
from core.models import *
User = get_user_model()
class ProductOverviewSerializer(serializers.ModelSerializer):
review = serializers.SerializerMethodField()
class Meta:
model = Product
fields = '__all__'
def get_review(self, obj):
qs = obj.product_review.all()
return ReviewSerializer(qs, many=True).data
class ProductSerializer(serializers.ModelSerializer):
total_quantity = serializers.SerializerMethodField()
main_product = serializers.SerializerMethodField()
versions = serializers.SerializerMethodField()
main_image = serializers.SerializerMethodField()
product_review = serializers.SerializerMethodField()
class Meta:
model = Product
fields = ['title', 'price', 'discount_price', 'category', 'brand', 'description',
'total_quantity', 'main_product', 'main_image', 'versions', 'product_tag', 'product_review']
def get_total_quantity(self, obj):
return obj.total_quantity
def get_main_product(self, obj):
return ProductVersionSerializer(obj.main_product).data
def get_versions(self, obj):
qs = obj.versions.exclude(id=obj.main_product.id)
return ProductVersionSerializer(qs, many=True).data
def get_main_image(self, obj):
if obj.main_product.main_photo.image:
return obj.main_product.main_photo.image.url
return None
def get_product_review(self, obj):
qs = obj.product_review.all()
return ReviewSerializer(qs, many=True).data
class ImageSerializer(serializers.ModelSerializer):
class Meta:
model = Image
fields = "__all__"
class ProductVersionSerializer(serializers.ModelSerializer):
product = ProductOverviewSerializer()
image = serializers.SerializerMethodField()
class Meta:
model = ProductVersion
fields = '__all__'
def get_image(self, obj):
qs = obj.product_photo.all()
return ImageSerializer(qs, many=True).data
class ReviewSerializer(serializers.ModelSerializer):
class Meta:
model = Review
fields = "__all__"
class UserSerializer(serializers.ModelSerializer):
    """Registration serializer: checks that the two password fields match and
    creates a user whose username mirrors the e-mail address."""

    password_confirmation = serializers.CharField(
        style={'input_type': 'password'}, write_only=True)
    password = serializers.CharField(
        style={'input_type': 'password'}, write_only=True)

    class Meta:
        model = User
        fields = ('email', 'password', 'password_confirmation',
                  'first_name', 'last_name')

    def validate(self, attrs):
        raw_password = attrs['password']
        # Drop the confirmation so it never reaches create().
        confirmation = attrs.pop('password_confirmation')
        if raw_password != confirmation:
            raise serializers.ValidationError(
                {'password': 'Passwords must match.'})
        return super().validate(attrs)

    def create(self, validated_data):
        raw_password = validated_data.pop('password')
        # The project uses the e-mail address as the username.
        validated_data['username'] = validated_data['email']
        user = super().create(validated_data=validated_data)
        # set_password hashes the raw password before saving.
        user.set_password(raw_password)
        user.save()
        return user
class CategorySerializer(serializers.ModelSerializer):
    """Expose every field of the product Category model."""
    class Meta:
        model = Category
        fields = '__all__'
class BlogCategorySerializer(serializers.ModelSerializer):
    """Expose every field of the blog Category model (imported as BlogCategory)."""
    class Meta:
        model = BlogCategory
        fields = '__all__'
class BlogSerializer(serializers.ModelSerializer):
    """Expose every field of the Blog model."""
    class Meta:
        model = Blog
        fields = '__all__'
class UserSerializer(serializers.ModelSerializer):
    """Expose every field of the User model.

    NOTE(review): this redefines the registration ``UserSerializer`` declared
    earlier in this module, so any later reference resolves to this class;
    confirm which serializer the views are meant to use.
    """
    class Meta:
        model = User
        fields = '__all__'
class CardSerializer(serializers.ModelSerializer):
    """Shopping-cart serializer with its products nested as versions."""
    product = ProductVersionSerializer(many=True)
    class Meta:
        model = ShoppingCart
        fields = '__all__'
class CardItemSerializer(serializers.ModelSerializer):
    """Single cart line: one nested product version plus the item fields."""
    product = ProductVersionSerializer()
    class Meta:
        model = CartItem
        fields = '__all__'
class WishlistSerializer(serializers.ModelSerializer):
    """Wishlist serializer with its products nested as versions."""
    product = ProductVersionSerializer(many=True)
    class Meta:
        model = Wishlist
        fields = '__all__'
class SubscriberSerializer(serializers.ModelSerializer):
    """Expose every field of the newsletter Subscriber model."""
    class Meta:
        model = Subscriber
        fields = '__all__'
f72bd109a480d8206801746e5910512f5d37e064 | 444 | py | Python | tests/test_cpplibhub.py | iotanbo/cpplibhub | 0758d416c8d2c0b29d70300f25ccc898a7ad64df | [
"MIT"
] | null | null | null | tests/test_cpplibhub.py | iotanbo/cpplibhub | 0758d416c8d2c0b29d70300f25ccc898a7ad64df | [
"MIT"
] | null | null | null | tests/test_cpplibhub.py | iotanbo/cpplibhub | 0758d416c8d2c0b29d70300f25ccc898a7ad64df | [
"MIT"
] | null | null | null |
import pytest
from click.testing import CliRunner
from cpplibhub.cli import main
@pytest.fixture(scope="module")
def runner():
    """Provide one click CliRunner shared by all tests in this module."""
    return CliRunner()
def test_main(runner):
    """Smoke-test the CLI entry point: invoking with no arguments exits 0."""
    # assert main([]) == 0  # run without click
    result = runner.invoke(main)
    # result = runner.invoke(main, ['--name', 'Amy'])
    assert result.exit_code == 0
    # assert result.output == 'Hello Amy!\n'
    # TODO: test more command line options and args
| 22.2 | 53 | 0.671171 |
import pytest
from click.testing import CliRunner
from cpplibhub.cli import main
@pytest.fixture(scope="module")
def runner():
return CliRunner()
def test_main(runner):
r.invoke(main)
assert result.exit_code == 0
| true | true |
f72bd1370d655bb4ca4fe412e26c161f507ca79b | 7,667 | py | Python | lib/dblatex-0.3.2/lib/dbtexmf/dblatex/grubber/plugins.py | jonathanmorley/HR-XSL | 799b1075cbec4cda3d686d588eea92a62d59963f | [
"Apache-2.0"
] | 1 | 2017-12-29T23:23:14.000Z | 2017-12-29T23:23:14.000Z | lib/dblatex-0.3.2/lib/dbtexmf/dblatex/grubber/plugins.py | jonathanmorley/HR-XSL | 799b1075cbec4cda3d686d588eea92a62d59963f | [
"Apache-2.0"
] | null | null | null | lib/dblatex-0.3.2/lib/dbtexmf/dblatex/grubber/plugins.py | jonathanmorley/HR-XSL | 799b1075cbec4cda3d686d588eea92a62d59963f | [
"Apache-2.0"
] | null | null | null | # This file is part of Rubber and thus covered by the GPL
# (c) Emmanuel Beffara, 2002--2006
"""
Mechanisms to dynamically load extra modules to help the LaTeX compilation.
All the modules must be derived from the TexModule class.
"""
import imp
from os.path import *
from msg import _, msg
import sys
class TexModule (object):
    """
    This is the base class for modules. Each module should define a class
    named 'Module' that derives from this one. The default implementation
    provides all required methods with no effects.
    """
    def __init__ (self, env, dict):
        """
        The constructor receives two arguments: 'env' is the compiling
        environment, 'dict' is a dictionary that describes the command that
        caused the module to load. ('dict' shadows the builtin; the name is
        kept for API compatibility with existing subclasses/callers.)
        """

    def pre_compile (self):
        """
        This method is called before the first LaTeX compilation. It is
        supposed to build any file that LaTeX would require to compile the
        document correctly. The method must return true on failure.
        """
        return 0

    def post_compile (self):
        """
        This method is called after each LaTeX compilation. It is supposed to
        process the compilation results and possibly request a new
        compilation. The method must return true on failure.
        """
        return 0

    def last_compile (self):
        """
        This method is called after the last LaTeX compilation.
        It is supposed to terminate the compilation for its specific needs.
        The method must return true on failure.
        """
        return 0

    def clean (self):
        """
        This method is called when cleaning the compiled files. It is supposed
        to remove all the files that this modules generates.
        """

    def command (self, cmd, args):
        """
        This is called when a directive for the module is found in the source.
        The method can raise 'AttributeError' when the directive does not
        exist and 'TypeError' if the syntax is wrong. By default, when called
        with argument "foo" it calls the method "do_foo" if it exists, and
        fails otherwise.
        """
        getattr(self, "do_" + cmd)(*args)

    def get_errors (self):
        """
        This is called if something has failed during an operation performed
        by this module. The method returns a generator with items of the same
        form as in LaTeXDep.get_errors.
        """
        # Standard empty-generator idiom: the bare 'return' stops iteration
        # immediately and the unreachable 'yield' makes this a generator
        # function (replaces the previous 'if None: yield None' trick).
        return
        yield
class Plugins (object):
    """
    This class gathers operations related to the management of external Python
    modules. Modules are requested through the `register' method, and
    they are searched for first in the current directory, then in the
    (possibly) specified Python package (using Python's path).
    """
    def __init__ (self, path=None):
        """
        Initialize the module set, possibly setting a path name in which
        modules will be searched for.
        """
        self.modules = {}
        if not path:
            # default to this package's directory and make it importable
            self.path = [dirname(__file__)]
            sys.path.append(self.path[0])
        else:
            self.path = path

    def __getitem__ (self, name):
        """
        Return the module object of the given name.
        """
        return self.modules[name]

    def register (self, name):
        """
        Attempt to register a module with the specified name. If an
        appropriate module is found, load it and store it in the object's
        dictionary. Return 0 if no module was found, 1 if a module was found
        and loaded, and 2 if the module was found but already loaded.
        """
        # 'in' instead of dict.has_key() so the code also runs on Python 3,
        # where has_key() was removed.
        if name in self.modules:
            return 2
        try:
            # look in the current directory first
            mod_file, mod_path, descr = imp.find_module(name, [""])
        except ImportError:
            if not self.path:
                return 0
            try:
                mod_file, mod_path, descr = imp.find_module(name, self.path)
            except ImportError:
                return 0
        module = imp.load_module(name, mod_file, mod_path, descr)
        # imp.find_module() returns an open file object that must be closed
        # (NOTE(review): it can be None for packages -- unchanged behaviour).
        mod_file.close()
        self.modules[name] = module
        return 1

    def clear(self):
        """
        Empty the module table, unregistering every module registered. No
        modules are unloaded, however, but this has no other effect than
        speeding the registration if the modules are loaded again.
        """
        self.modules.clear()
self.modules.clear()
class Modules (Plugins):
    """
    This class gathers all operations related to the management of modules.
    The modules are searched for first in the current directory, then as
    scripts in the 'modules' directory in the program's data directort, then
    as a Python module in the package `rubber.latex'.
    """
    def __init__ (self, env):
        #Plugins.__init__(self, rubber.rules.latex.__path__)
        Plugins.__init__(self)
        self.env = env
        self.objects = {}
        # directives received for modules not yet registered, replayed by
        # register() once the module is finally loaded
        self.commands = {}

    def __getitem__ (self, name):
        """
        Return the module object of the given name.
        """
        return self.objects[name]

    def has_key (self, name):
        """
        Check if a given module is loaded.
        """
        # 'in' instead of dict.has_key() so the code also runs on Python 3
        return name in self.objects

    def register (self, name, dict=None):
        """
        Attempt to register a package with the specified name. If a module is
        found, create an object from the module's class called `Module',
        passing it the environment and `dict' as arguments, and execute all
        delayed commands for this module. The dictionary describes the
        command that caused the registration.
        """
        # None-sentinel instead of a mutable {} default, so the dictionary
        # handed to Module() is fresh on every call.
        if dict is None:
            dict = {}
        if self.has_key(name):
            msg.debug(_("module %s already registered") % name)
            return 2
        # First look for a script
        moddir = ""
        mod = None
        for path in "", join(moddir, "modules"):
            script_path = join(path, name + ".rub")
            if exists(script_path):
                # ScriptModule is expected to be provided by the surrounding
                # module -- it is not defined in this file (TODO confirm).
                mod = ScriptModule(self.env, script_path)
                msg.log(_("script module %s registered") % name)
                break
        # Then look for a Python module
        if not mod:
            if Plugins.register(self, name) == 0:
                msg.debug(_("no support found for %s") % name)
                return 0
            mod = self.modules[name].Module(self.env, dict)
            msg.log(_("built-in module %s registered") % name)
        # Run any delayed commands.
        if name in self.commands:
            for (cmd, args, delayed_vars) in self.commands[name]:
                msg.push_pos(delayed_vars)
                try:
                    mod.command(cmd, args)
                except AttributeError:
                    msg.warn(_("unknown directive '%s.%s'") % (name, cmd))
                except TypeError:
                    msg.warn(_("wrong syntax for '%s.%s'") % (name, cmd))
                msg.pop_pos()
            del self.commands[name]
        self.objects[name] = mod
        return 1

    def clear (self):
        """
        Unregister all modules.
        """
        Plugins.clear(self)
        self.objects = {}
        self.commands = {}

    def command (self, mod, cmd, args):
        """
        Send a command to a particular module. If this module is not loaded,
        store the command so that it will be sent when the module is register.
        """
        if mod in self.objects:
            self.objects[mod].command(cmd, args)
        else:
            if mod not in self.commands:
                self.commands[mod] = []
            self.commands[mod].append((cmd, args, self.env.vars.copy()))
| 33.480349 | 78 | 0.589148 |
import imp
from os.path import *
from msg import _, msg
import sys
class TexModule (object):
def __init__ (self, env, dict):
def pre_compile (self):
return 0
def post_compile (self):
return 0
def last_compile (self):
return 0
def clean (self):
def command (self, cmd, args):
getattr(self, "do_" + cmd)(*args)
def get_errors (self):
if None:
yield None
class Plugins (object):
def __init__ (self, path=None):
self.modules = {}
if not path:
self.path = [dirname(__file__)]
sys.path.append(self.path[0])
else:
self.path = path
def __getitem__ (self, name):
return self.modules[name]
def register (self, name):
if self.modules.has_key(name):
return 2
try:
file, path, descr = imp.find_module(name, [""])
except ImportError:
if not self.path:
return 0
try:
file, path, descr = imp.find_module(name, self.path)
except ImportError:
return 0
module = imp.load_module(name, file, path, descr)
file.close()
self.modules[name] = module
return 1
def clear(self):
self.modules.clear()
class Modules (Plugins):
def __init__ (self, env):
Plugins.__init__(self)
self.env = env
self.objects = {}
self.commands = {}
def __getitem__ (self, name):
return self.objects[name]
def has_key (self, name):
return self.objects.has_key(name)
def register (self, name, dict={}):
if self.has_key(name):
msg.debug(_("module %s already registered") % name)
return 2
moddir = ""
mod = None
for path in "", join(moddir, "modules"):
file = join(path, name + ".rub")
if exists(file):
mod = ScriptModule(self.env, file)
msg.log(_("script module %s registered") % name)
break
if not mod:
if Plugins.register(self, name) == 0:
msg.debug(_("no support found for %s") % name)
return 0
mod = self.modules[name].Module(self.env, dict)
msg.log(_("built-in module %s registered") % name)
if self.commands.has_key(name):
for (cmd, args, vars) in self.commands[name]:
msg.push_pos(vars)
try:
mod.command(cmd, args)
except AttributeError:
msg.warn(_("unknown directive '%s.%s'") % (name, cmd))
except TypeError:
msg.warn(_("wrong syntax for '%s.%s'") % (name, cmd))
msg.pop_pos()
del self.commands[name]
self.objects[name] = mod
return 1
def clear (self):
Plugins.clear(self)
self.objects = {}
self.commands = {}
def command (self, mod, cmd, args):
if self.objects.has_key(mod):
self.objects[mod].command(cmd, args)
else:
if not self.commands.has_key(mod):
self.commands[mod] = []
self.commands[mod].append((cmd, args, self.env.vars.copy()))
| true | true |
f72bd2060174e51a551060a884d7fdfeb1276fa6 | 15,646 | py | Python | pandaharvester/harvesterbody/master.py | nikmagini/harvester | 1d62dd0e35b53a51919b0250fffec478778f460a | [
"Apache-2.0"
] | 11 | 2017-06-01T10:16:58.000Z | 2019-11-22T08:41:36.000Z | pandaharvester/harvesterbody/master.py | nikmagini/harvester | 1d62dd0e35b53a51919b0250fffec478778f460a | [
"Apache-2.0"
] | 34 | 2016-10-25T19:15:24.000Z | 2021-03-05T12:59:04.000Z | pandaharvester/harvesterbody/master.py | nikmagini/harvester | 1d62dd0e35b53a51919b0250fffec478778f460a | [
"Apache-2.0"
] | 17 | 2016-10-24T13:29:45.000Z | 2021-03-23T17:35:27.000Z | import os
import pwd
import grp
import sys
import socket
import signal
import logging
import daemon.pidfile
import argparse
import threading
import cProfile
import atexit
from future.utils import iteritems
try:
import pprofile
except Exception:
pass
from pandalogger import logger_config
from pandaharvester import commit_timestamp
from pandaharvester import panda_pkg_info
from pandaharvester.harvesterconfig import harvester_config
from pandaharvester.harvestercore import core_utils
from pandaharvester.harvestermisc.apfmon import Apfmon
# module-level logger for the master process
_logger = core_utils.setup_logger('master')
# singleton guard for the WSGI entry point at the bottom of this file:
# ensures main() is invoked only once per interpreter
master_instance = False
master_lock = threading.Lock()
# the master class which runs the main process
class Master(object):
# constructor
def __init__(self, single_mode=False, stop_event=None, daemon_mode=True):
# initialize database and config
self.singleMode = single_mode
self.stopEvent = stop_event
self.daemonMode = daemon_mode
from pandaharvester.harvestercore.communicator_pool import CommunicatorPool
self.communicatorPool = CommunicatorPool()
from pandaharvester.harvestercore.queue_config_mapper import QueueConfigMapper
self.queueConfigMapper = QueueConfigMapper()
from pandaharvester.harvestercore.db_proxy_pool import DBProxyPool as DBProxy
dbProxy = DBProxy()
dbProxy.make_tables(self.queueConfigMapper)
# main loop
def start(self):
# thread list
thrList = []
# Credential Manager
from pandaharvester.harvesterbody.cred_manager import CredManager
thr = CredManager(single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.execute()
thr.start()
thrList.append(thr)
# Command manager
from pandaharvester.harvesterbody.command_manager import CommandManager
thr = CommandManager(self.communicatorPool, self.queueConfigMapper, single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
# Cacher
from pandaharvester.harvesterbody.cacher import Cacher
thr = Cacher(self.communicatorPool, single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.execute(force_update=True, skip_lock=True)
thr.start()
thrList.append(thr)
# Watcher
from pandaharvester.harvesterbody.watcher import Watcher
thr = Watcher(single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
# Job Fetcher
from pandaharvester.harvesterbody.job_fetcher import JobFetcher
nThr = harvester_config.jobfetcher.nThreads
for iThr in range(nThr):
thr = JobFetcher(self.communicatorPool,
self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
# Propagator
from pandaharvester.harvesterbody.propagator import Propagator
nThr = harvester_config.propagator.nThreads
for iThr in range(nThr):
thr = Propagator(self.communicatorPool,
self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
# Monitor
from pandaharvester.harvesterbody.monitor import Monitor
nThr = harvester_config.monitor.nThreads
for iThr in range(nThr):
thr = Monitor(self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
# Preparator
from pandaharvester.harvesterbody.preparator import Preparator
nThr = harvester_config.preparator.nThreads
for iThr in range(nThr):
thr = Preparator(self.communicatorPool,
self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
# Submitter
from pandaharvester.harvesterbody.submitter import Submitter
nThr = harvester_config.submitter.nThreads
for iThr in range(nThr):
thr = Submitter(self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
# Stager
from pandaharvester.harvesterbody.stager import Stager
nThr = harvester_config.stager.nThreads
for iThr in range(nThr):
thr = Stager(self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
# EventFeeder
from pandaharvester.harvesterbody.event_feeder import EventFeeder
nThr = harvester_config.eventfeeder.nThreads
for iThr in range(nThr):
thr = EventFeeder(self.communicatorPool,
self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
# Sweeper
from pandaharvester.harvesterbody.sweeper import Sweeper
nThr = harvester_config.sweeper.nThreads
for iThr in range(nThr):
thr = Sweeper(self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
# Service monitor
try:
sm_active = harvester_config.service_monitor.active
except:
sm_active = False
if sm_active:
from pandaharvester.harvesterbody.service_monitor import ServiceMonitor
thr = ServiceMonitor(options.pid, single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
# Report itself to APF Mon
apf_mon = Apfmon(self.queueConfigMapper)
apf_mon.create_factory()
apf_mon.create_labels()
##################
# loop on stop event to be interruptable since thr.join blocks signal capture in python 2.7
while True:
if self.singleMode or not self.daemonMode:
break
self.stopEvent.wait(1)
if self.stopEvent.is_set():
break
##################
# join
if self.daemonMode:
for thr in thrList:
thr.join()
# dummy context
class DummyContext(object):
    """No-op context manager used when harvester runs without daemonization."""

    def __enter__(self):
        # Hand back the object itself; main() sets attributes on it.
        return self

    def __exit__(self, *exc_info):
        # Returning None (falsy) lets any in-flight exception propagate.
        pass
# wrapper for stderr
class StdErrWrapper(object):
    """File-like object that routes stderr writes into the master logger."""

    def write(self, message):
        # Delimit each message so multi-line tracebacks can be parsed back out
        # of the log stream later.
        _logger.error('#####START#####\n{0}#####END#####\n'.format(message))

    def flush(self):
        _logger.handlers[0].flush()

    def fileno(self):
        return _logger.handlers[0].stream.fileno()

    def isatty(self):
        return _logger.handlers[0].stream.isatty()
# profiler instance; populated in main() when --profile_output is given
prof = None
# parsed command-line options; shared with Master.start and signal handlers
options = None
# main
def main(daemon_mode=True):
    """Parse command-line options, optionally daemonize, install signal
    handlers and an optional profiler, then run the Master until shutdown.

    daemon_mode is False when invoked from the WSGI entry point below, in
    which case no daemon context is created and start() does not block.
    """
    global prof
    global options
    # parse option
    parser = argparse.ArgumentParser()
    parser.add_argument('--pid', action='store', dest='pid', default=None,
                        help='pid filename')
    parser.add_argument('--single', action='store_true', dest='singleMode', default=False,
                        help='use single mode')
    parser.add_argument('--hostname_file', action='store', dest='hostNameFile', default=None,
                        help='to record the hostname where harvester is launched')
    parser.add_argument('--rotate_log', action='store_true', dest='rotateLog', default=False,
                        help='rollover log files before launching harvester')
    parser.add_argument('--version', action='store_true', dest='showVersion', default=False,
                        help='show version information and exit')
    parser.add_argument('--profile_output', action='store', dest='profileOutput', default=None,
                        help='filename to save the results of profiler')
    parser.add_argument('--profile_mode', action='store', dest='profileMode', default='s',
                        help='profile mode. s (statistic), d (deterministic), or t (thread-aware)')
    parser.add_argument('--memory_logging', action='store_true', dest='memLogging', default=False,
                        help='add information of memory usage in each logging message')
    parser.add_argument('--foreground', action='store_true', dest='foreground', default=False,
                        help='run in the foreground not to be daemonized')
    options = parser.parse_args()
    # show version information
    if options.showVersion:
        print ("Version : {0}".format(panda_pkg_info.release_version))
        print ("Last commit : {0}".format(commit_timestamp.timestamp))
        return
    # check pid: refuse to start if a lock file is already present
    if options.pid is not None and os.path.exists(options.pid):
        print ("ERROR: Cannot start since lock file {0} already exists".format(options.pid))
        return
    # uid and gid to drop to inside the daemon context
    uid = pwd.getpwnam(harvester_config.master.uname).pw_uid
    gid = grp.getgrnam(harvester_config.master.gname).gr_gid
    # get umask (os.umask sets and returns the old value, so set it back)
    umask = os.umask(0)
    os.umask(umask)
    # memory logging
    if options.memLogging:
        core_utils.enable_memory_profiling()
    # hostname
    if options.hostNameFile is not None:
        with open(options.hostNameFile, 'w') as f:
            f.write(socket.getfqdn())
    # rollover log files
    if options.rotateLog:
        core_utils.do_log_rollover()
        if hasattr(_logger.handlers[0], 'doRollover'):
            _logger.handlers[0].doRollover()
    if daemon_mode and not options.foreground:
        # redirect messages to stdout
        stdoutHandler = logging.StreamHandler(sys.stdout)
        stdoutHandler.setFormatter(_logger.handlers[0].formatter)
        _logger.addHandler(stdoutHandler)
        # collect streams not to be closed by daemon
        files_preserve = []
        for loggerName, loggerObj in iteritems(logging.Logger.manager.loggerDict):
            if loggerName.startswith('panda'):
                for handler in loggerObj.handlers:
                    if hasattr(handler, 'stream'):
                        files_preserve.append(handler.stream)
        # route stderr (tracebacks etc.) into the logger
        sys.stderr = StdErrWrapper()
        # make daemon context
        dc = daemon.DaemonContext(stdout=sys.stdout,
                                  stderr=sys.stderr,
                                  uid=uid,
                                  gid=gid,
                                  umask=umask,
                                  files_preserve=files_preserve,
                                  pidfile=daemon.pidfile.PIDLockFile(options.pid))
    else:
        # no daemonization: use a no-op context manager instead
        dc = DummyContext()
    with dc:
        # remove pidfile to prevent child processes crashing in atexit
        if not options.singleMode:
            dc.pidfile = None
        if options.pid:
            core_utils.set_file_permission(options.pid)
        core_utils.set_file_permission(logger_config.daemon['logdir'])
        _logger.info("start : version = {0}, last_commit = {1}".format(panda_pkg_info.release_version,
                                                                       commit_timestamp.timestamp))
        # stop event shared with every agent thread
        stopEvent = threading.Event()
        # profiler
        prof = None
        if options.profileOutput is not None:
            # run with profiler
            if options.profileMode == 'd':
                # deterministic
                prof = pprofile.Profile()
            elif options.profileMode == 't':
                # thread-aware
                prof = pprofile.ThreadProfile()
            else:
                # statistic
                prof = cProfile.Profile()
        # post process for profiler
        def disable_profiler():
            global prof
            if prof is not None:
                # disable profiler
                prof.disable()
                # dump results
                prof.dump_stats(options.profileOutput)
                prof = None
        # delete PID
        def delete_pid(pid):
            try:
                os.remove(pid)
            except Exception:
                pass
        # signal handlers: dump profile, drop the pid file, then hard-kill
        # the whole process group (or just this process if not under init)
        def catch_sigkill(sig, frame):
            disable_profiler()
            _logger.info('got signal={0} to be killed'.format(sig))
            try:
                os.remove(options.pid)
            except Exception:
                pass
            try:
                if os.getppid() == 1:
                    os.killpg(os.getpgrp(), signal.SIGKILL)
                else:
                    os.kill(os.getpid(), signal.SIGKILL)
            except Exception:
                core_utils.dump_error_message(_logger)
                _logger.error('failed to be killed')
        # NOTE: the triple-quoted block below is intentionally disabled code
        # (a graceful-termination handler kept for reference).
        '''
        def catch_sigterm(sig, frame):
            _logger.info('got signal={0} to be terminated'.format(sig))
            stopEvent.set()
            # register del function
            if os.getppid() == 1 and options.pid:
                atexit.register(delete_pid, options.pid)
            # set alarm just in case
            signal.alarm(30)
        '''
        # attach a remote trepan debugger session on SIGUSR1
        def catch_debug(sig, frame):
            _logger.info('got signal={0} to go into debugger mode'.format(sig))
            from trepan.interfaces import server
            from trepan.api import debug
            try:
                portNum = harvester_config.master.debugger_port
            except Exception:
                portNum = 19550
            connection_opts = {'IO': 'TCP', 'PORT': portNum}
            interface = server.ServerInterface(connection_opts=connection_opts)
            dbg_opts = {'interface': interface}
            _logger.info('starting debugger on port {0}'.format(portNum))
            debug(dbg_opts=dbg_opts)
        # set handler
        if daemon_mode:
            signal.signal(signal.SIGINT, catch_sigkill)
            signal.signal(signal.SIGHUP, catch_sigkill)
            signal.signal(signal.SIGTERM, catch_sigkill)
            signal.signal(signal.SIGALRM, catch_sigkill)
            signal.signal(signal.SIGUSR1, catch_debug)
            signal.signal(signal.SIGUSR2, catch_sigkill)
        # start master
        master = Master(single_mode=options.singleMode, stop_event=stopEvent, daemon_mode=daemon_mode)
        if master is None:
            prof = None
        else:
            # enable profiler
            if prof is not None:
                prof.enable()
            # run master (blocks until stopEvent in daemon mode)
            master.start()
        # disable profiler
        disable_profiler()
    if daemon_mode:
        _logger.info('terminated')
# command-line launch: full daemon behaviour
if __name__ == "__main__":
    main()
else:
    # started by WSGI: run main() once, without daemonizing, guarded by the
    # module-level lock/flag so repeated imports do not spawn extra masters
    with master_lock:
        if not master_instance:
            main(daemon_mode=False)
            master_instance = True
    # import application entry for WSGI
    from pandaharvester.harvestermessenger.apache_messenger import application
| 37.430622 | 104 | 0.604244 | import os
import pwd
import grp
import sys
import socket
import signal
import logging
import daemon.pidfile
import argparse
import threading
import cProfile
import atexit
from future.utils import iteritems
try:
import pprofile
except Exception:
pass
from pandalogger import logger_config
from pandaharvester import commit_timestamp
from pandaharvester import panda_pkg_info
from pandaharvester.harvesterconfig import harvester_config
from pandaharvester.harvestercore import core_utils
from pandaharvester.harvestermisc.apfmon import Apfmon
_logger = core_utils.setup_logger('master')
master_instance = False
master_lock = threading.Lock()
class Master(object):
def __init__(self, single_mode=False, stop_event=None, daemon_mode=True):
self.singleMode = single_mode
self.stopEvent = stop_event
self.daemonMode = daemon_mode
from pandaharvester.harvestercore.communicator_pool import CommunicatorPool
self.communicatorPool = CommunicatorPool()
from pandaharvester.harvestercore.queue_config_mapper import QueueConfigMapper
self.queueConfigMapper = QueueConfigMapper()
from pandaharvester.harvestercore.db_proxy_pool import DBProxyPool as DBProxy
dbProxy = DBProxy()
dbProxy.make_tables(self.queueConfigMapper)
def start(self):
thrList = []
from pandaharvester.harvesterbody.cred_manager import CredManager
thr = CredManager(single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.execute()
thr.start()
thrList.append(thr)
from pandaharvester.harvesterbody.command_manager import CommandManager
thr = CommandManager(self.communicatorPool, self.queueConfigMapper, single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
from pandaharvester.harvesterbody.cacher import Cacher
thr = Cacher(self.communicatorPool, single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.execute(force_update=True, skip_lock=True)
thr.start()
thrList.append(thr)
from pandaharvester.harvesterbody.watcher import Watcher
thr = Watcher(single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
from pandaharvester.harvesterbody.job_fetcher import JobFetcher
nThr = harvester_config.jobfetcher.nThreads
for iThr in range(nThr):
thr = JobFetcher(self.communicatorPool,
self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
from pandaharvester.harvesterbody.propagator import Propagator
nThr = harvester_config.propagator.nThreads
for iThr in range(nThr):
thr = Propagator(self.communicatorPool,
self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
from pandaharvester.harvesterbody.monitor import Monitor
nThr = harvester_config.monitor.nThreads
for iThr in range(nThr):
thr = Monitor(self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
from pandaharvester.harvesterbody.preparator import Preparator
nThr = harvester_config.preparator.nThreads
for iThr in range(nThr):
thr = Preparator(self.communicatorPool,
self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
from pandaharvester.harvesterbody.submitter import Submitter
nThr = harvester_config.submitter.nThreads
for iThr in range(nThr):
thr = Submitter(self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
from pandaharvester.harvesterbody.stager import Stager
nThr = harvester_config.stager.nThreads
for iThr in range(nThr):
thr = Stager(self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
from pandaharvester.harvesterbody.event_feeder import EventFeeder
nThr = harvester_config.eventfeeder.nThreads
for iThr in range(nThr):
thr = EventFeeder(self.communicatorPool,
self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
from pandaharvester.harvesterbody.sweeper import Sweeper
nThr = harvester_config.sweeper.nThreads
for iThr in range(nThr):
thr = Sweeper(self.queueConfigMapper,
single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
try:
sm_active = harvester_config.service_monitor.active
except:
sm_active = False
if sm_active:
from pandaharvester.harvesterbody.service_monitor import ServiceMonitor
thr = ServiceMonitor(options.pid, single_mode=self.singleMode)
thr.set_stop_event(self.stopEvent)
thr.start()
thrList.append(thr)
apf_mon = Apfmon(self.queueConfigMapper)
apf_mon.create_factory()
apf_mon.create_labels()
if self.stopEvent.is_set():
break
return self
def __exit__(self, *x):
pass
class StdErrWrapper(object):
def write(self, message):
wrapped_message = '#####START#####\n{0}#####END#####\n'.format(message)
_logger.error(wrapped_message)
def flush(self):
_logger.handlers[0].flush()
def fileno(self):
return _logger.handlers[0].stream.fileno()
def isatty(self):
return _logger.handlers[0].stream.isatty()
prof = None
options = None
def main(daemon_mode=True):
global prof
global options
parser = argparse.ArgumentParser()
parser.add_argument('--pid', action='store', dest='pid', default=None,
help='pid filename')
parser.add_argument('--single', action='store_true', dest='singleMode', default=False,
help='use single mode')
parser.add_argument('--hostname_file', action='store', dest='hostNameFile', default=None,
help='to record the hostname where harvester is launched')
parser.add_argument('--rotate_log', action='store_true', dest='rotateLog', default=False,
help='rollover log files before launching harvester')
parser.add_argument('--version', action='store_true', dest='showVersion', default=False,
help='show version information and exit')
parser.add_argument('--profile_output', action='store', dest='profileOutput', default=None,
help='filename to save the results of profiler')
parser.add_argument('--profile_mode', action='store', dest='profileMode', default='s',
help='profile mode. s (statistic), d (deterministic), or t (thread-aware)')
parser.add_argument('--memory_logging', action='store_true', dest='memLogging', default=False,
help='add information of memory usage in each logging message')
parser.add_argument('--foreground', action='store_true', dest='foreground', default=False,
help='run in the foreground not to be daemonized')
options = parser.parse_args()
if options.showVersion:
print ("Version : {0}".format(panda_pkg_info.release_version))
print ("Last commit : {0}".format(commit_timestamp.timestamp))
return
if options.pid is not None and os.path.exists(options.pid):
print ("ERROR: Cannot start since lock file {0} already exists".format(options.pid))
return
uid = pwd.getpwnam(harvester_config.master.uname).pw_uid
gid = grp.getgrnam(harvester_config.master.gname).gr_gid
umask = os.umask(0)
os.umask(umask)
if options.memLogging:
core_utils.enable_memory_profiling()
if options.hostNameFile is not None:
with open(options.hostNameFile, 'w') as f:
f.write(socket.getfqdn())
if options.rotateLog:
core_utils.do_log_rollover()
if hasattr(_logger.handlers[0], 'doRollover'):
_logger.handlers[0].doRollover()
if daemon_mode and not options.foreground:
stdoutHandler = logging.StreamHandler(sys.stdout)
stdoutHandler.setFormatter(_logger.handlers[0].formatter)
_logger.addHandler(stdoutHandler)
files_preserve = []
for loggerName, loggerObj in iteritems(logging.Logger.manager.loggerDict):
if loggerName.startswith('panda'):
for handler in loggerObj.handlers:
if hasattr(handler, 'stream'):
files_preserve.append(handler.stream)
sys.stderr = StdErrWrapper()
dc = daemon.DaemonContext(stdout=sys.stdout,
stderr=sys.stderr,
uid=uid,
gid=gid,
umask=umask,
files_preserve=files_preserve,
pidfile=daemon.pidfile.PIDLockFile(options.pid))
else:
dc = DummyContext()
with dc:
if not options.singleMode:
dc.pidfile = None
if options.pid:
core_utils.set_file_permission(options.pid)
core_utils.set_file_permission(logger_config.daemon['logdir'])
_logger.info("start : version = {0}, last_commit = {1}".format(panda_pkg_info.release_version,
commit_timestamp.timestamp))
stopEvent = threading.Event()
prof = None
if options.profileOutput is not None:
if options.profileMode == 'd':
prof = pprofile.Profile()
elif options.profileMode == 't':
prof = pprofile.ThreadProfile()
else:
prof = cProfile.Profile()
        def disable_profiler():
            # Stop profiling (if active) and dump the collected stats to the
            # file given via --profile-output. Resetting prof to None makes
            # this safe to call more than once (e.g. from both the signal
            # handler and the normal shutdown path).
            # NOTE(review): 'global prof' only rebinds the module-level name;
            # this assumes the enclosing main() also declared 'global prof' --
            # confirm, otherwise the two names refer to different variables.
            global prof
            if prof is not None:
                prof.disable()
                prof.dump_stats(options.profileOutput)
                prof = None
        def delete_pid(pid):
            # Best-effort removal of the PID/lock file; a missing file or a
            # permission error is deliberately ignored.
            # NOTE(review): not referenced anywhere in the visible code --
            # catch_sigkill removes options.pid inline instead.
            try:
                os.remove(pid)
            except Exception:
                pass
        def catch_sigkill(sig, frame):
            # Termination-signal handler: flush profiler output, drop the
            # lock file, then hard-kill the process (SIGKILL cannot be
            # caught, so this guarantees exit even with hung threads).
            disable_profiler()
            _logger.info('got signal={0} to be killed'.format(sig))
            try:
                os.remove(options.pid)
            except Exception:
                pass
            try:
                # If re-parented to init (ppid == 1) kill the whole process
                # group so daemonized children die too; otherwise just kill
                # this process.
                if os.getppid() == 1:
                    os.killpg(os.getpgrp(), signal.SIGKILL)
                else:
                    os.kill(os.getpid(), signal.SIGKILL)
            except Exception:
                core_utils.dump_error_message(_logger)
                _logger.error('failed to be killed')
        def catch_debug(sig, frame):
            # SIGUSR1 handler: start a trepan remote debugger listening on
            # TCP so an operator can attach to the running daemon.
            _logger.info('got signal={0} to go into debugger mode'.format(sig))
            # Imported lazily so trepan is only required when debugging.
            from trepan.interfaces import server
            from trepan.api import debug
            try:
                portNum = harvester_config.master.debugger_port
            except Exception:
                # Fall back to the default port if the config key is absent.
                portNum = 19550
            connection_opts = {'IO': 'TCP', 'PORT': portNum}
            interface = server.ServerInterface(connection_opts=connection_opts)
            dbg_opts = {'interface': interface}
            _logger.info('starting debugger on port {0}'.format(portNum))
            debug(dbg_opts=dbg_opts)
if daemon_mode:
signal.signal(signal.SIGINT, catch_sigkill)
signal.signal(signal.SIGHUP, catch_sigkill)
signal.signal(signal.SIGTERM, catch_sigkill)
signal.signal(signal.SIGALRM, catch_sigkill)
signal.signal(signal.SIGUSR1, catch_debug)
signal.signal(signal.SIGUSR2, catch_sigkill)
master = Master(single_mode=options.singleMode, stop_event=stopEvent, daemon_mode=daemon_mode)
if master is None:
prof = None
else:
if prof is not None:
prof.enable()
master.start()
disable_profiler()
if daemon_mode:
_logger.info('terminated')
if __name__ == "__main__":
main()
else:
with master_lock:
if not master_instance:
main(daemon_mode=False)
master_instance = True
from pandaharvester.harvestermessenger.apache_messenger import application
| true | true |
f72bd257412e17141e60758b6a2232418acfb73b | 895 | py | Python | sdks/python/test/test_PurgeResponse.py | Brantone/appcenter-sdks | eeb063ecf79908b6e341fb00196d2cd9dc8f3262 | [
"MIT"
] | null | null | null | sdks/python/test/test_PurgeResponse.py | Brantone/appcenter-sdks | eeb063ecf79908b6e341fb00196d2cd9dc8f3262 | [
"MIT"
] | 6 | 2019-10-23T06:38:53.000Z | 2022-01-22T07:57:58.000Z | sdks/python/test/test_PurgeResponse.py | Brantone/appcenter-sdks | eeb063ecf79908b6e341fb00196d2cd9dc8f3262 | [
"MIT"
] | 2 | 2019-10-23T06:31:05.000Z | 2021-08-21T17:32:47.000Z | # coding: utf-8
"""
App Center Client
Microsoft Visual Studio App Center API # noqa: E501
OpenAPI spec version: preview
Contact: benedetto.abbenanti@gmail.com
Project Repository: https://github.com/b3nab/appcenter-sdks
"""
from __future__ import absolute_import
import unittest
import appcenter_sdk
from PurgeResponse.clsPurgeResponse import PurgeResponse # noqa: E501
from appcenter_sdk.rest import ApiException
class TestPurgeResponse(unittest.TestCase):
    """Unit test stubs for the PurgeResponse model."""

    def setUp(self):
        """No fixtures are required yet."""

    def tearDown(self):
        """Nothing to clean up."""

    def testPurgeResponse(self):
        """Test PurgeResponse"""
        # FIXME: construct object with mandatory attributes with example values
        # model = appcenter_sdk.models.clsPurgeResponse.PurgeResponse()  # noqa: E501
if __name__ == '__main__':
unittest.main()
| 22.375 | 85 | 0.709497 |
from __future__ import absolute_import
import unittest
import appcenter_sdk
from PurgeResponse.clsPurgeResponse import PurgeResponse
from appcenter_sdk.rest import ApiException
class TestPurgeResponse(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testPurgeResponse(self):
s
if __name__ == '__main__':
unittest.main()
| true | true |
f72bd2eca21eed63e0f6123f34b5568c86396f4a | 176,314 | py | Python | mkt/reviewers/tests/test_views.py | jasonthomas/zamboni | 948247609cb4b2ed72e6daa4da5257927bfe0c17 | [
"BSD-3-Clause"
] | null | null | null | mkt/reviewers/tests/test_views.py | jasonthomas/zamboni | 948247609cb4b2ed72e6daa4da5257927bfe0c17 | [
"BSD-3-Clause"
] | null | null | null | mkt/reviewers/tests/test_views.py | jasonthomas/zamboni | 948247609cb4b2ed72e6daa4da5257927bfe0c17 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
import json
import re
import time
from datetime import datetime, timedelta
from itertools import cycle
from os import path
from django import test
from django.conf import settings
from django.core import mail
from django.core.urlresolvers import reverse
from django.test.client import RequestFactory
from django.test.utils import override_settings
from django.utils import translation
import mock
import requests
import waffle
from cache_nuggets.lib import Token
from jingo.helpers import urlparams
from nose import SkipTest
from nose.tools import eq_, ok_
from post_request_task import task as post_request_task
from pyquery import PyQuery as pq
from requests.structures import CaseInsensitiveDict
import mkt
import mkt.ratings
import mkt.site.tests
from lib.crypto import packaged
from lib.crypto.tests import mock_sign
from mkt.abuse.models import AbuseReport
from mkt.api.tests.test_oauth import RestOAuth
from mkt.comm.tests.test_views import CommTestMixin
from mkt.comm.utils import create_comm_note
from mkt.constants import MANIFEST_CONTENT_TYPE, comm
from mkt.developers.models import ActivityLog, AppLog
from mkt.files.models import File
from mkt.ratings.models import Review, ReviewFlag
from mkt.reviewers.models import (SHOWCASE_TAG, CannedResponse,
EscalationQueue, RereviewQueue,
ReviewerScore)
from mkt.reviewers.utils import ReviewersQueuesHelper
from mkt.reviewers.views import (_progress, app_review, queue_apps,
route_reviewer)
from mkt.site.fixtures import fixture
from mkt.site.helpers import absolutify, isotime
from mkt.site.storage_utils import private_storage, public_storage
from mkt.site.tests import (check_links, days_ago, formset, initial,
req_factory_factory, user_factory)
from mkt.site.utils import app_factory, make_rated, paginate, version_factory
from mkt.submit.tests.test_views import BasePackagedAppTest, SetupFilesMixin
from mkt.tags.models import Tag
from mkt.users.models import UserProfile
from mkt.versions.models import Version
from mkt.webapps.indexers import WebappIndexer
from mkt.webapps.models import AddonDeviceType, Webapp
from mkt.webapps.tasks import unindex_webapps
from mkt.websites.utils import website_factory
from mkt.zadmin.models import get_config, set_config
TIME_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
TEST_PATH = path.dirname(path.abspath(__file__))
ATTACHMENTS_DIR = path.abspath(path.join(TEST_PATH, '..', '..', 'comm',
'tests', 'attachments'))
class AttachmentManagementMixin(object):
    """Helpers to build multipart POST data for review-note attachments."""

    def _attachment_management_form(self, num=1):
        """
        Generate and return data for a management form for `num` attachments.

        The formset always advertises at least one form (TOTAL_FORMS is
        clamped to a minimum of 1).
        """
        return {'attachment-TOTAL_FORMS': max(1, num),
                'attachment-INITIAL_FORMS': 0,
                'attachment-MAX_NUM_FORMS': 1000}

    def _attachments(self, num):
        """Generate and return data for `num` attachments.

        Alternates between a jpg and a txt fixture. Returns an empty dict
        for num <= 0. The file handles are intentionally left open: the
        Django test client reads them when the dict is POSTed as multipart
        form data.
        """
        data = {}
        files = ['bacon.jpg', 'bacon.txt']
        descriptions = ['mmm, bacon', '']
        if num > 0:
            for n in xrange(num):
                i = 0 if n % 2 else 1
                # Open in binary read-only mode: bacon.jpg is binary data
                # and nothing here needs write access ('r+' wrongly asked
                # for read/write text mode).
                attachment = open(path.join(ATTACHMENTS_DIR, files[i]), 'rb')
                data.update({
                    'attachment-%d-attachment' % n: attachment,
                    'attachment-%d-description' % n: descriptions[i]
                })
        return data
class TestedonManagementMixin(object):
    """Builds POST payloads for the "tested on" formset used in reviews."""

    def _testedon_management_form(self, num=0):
        """Return management-form fields for `num` tested-on platforms.

        TOTAL_FORMS never drops below 1, matching Django formset rules.
        """
        total = num if num > 1 else 1
        return {'testedon-TOTAL_FORMS': total,
                'testedon-INITIAL_FORMS': 0,
                'testedon-MAX_NUM_FORMS': 1000}

    def _platforms(self, num, device_types=[u'\xd0esktop', u'FirefoxOS'],
                   devices=[u'PC ', u'ZT\xc8 Open'],
                   versions=[u'34', u'1.3<']):
        """Return formset fields for `num` tested-on platform rows.

        Rows cycle through the supplied device/version fixtures; num <= 0
        yields an empty dict.
        """
        fields = {}
        for row in range(max(0, num)):
            pick = row % len(device_types)
            fields['testedon-%d-device_type' % row] = device_types[pick]
            fields['testedon-%d-device' % row] = devices[pick]
            fields['testedon-%d-version' % row] = versions[pick]
        return fields
class AppReviewerTest(mkt.site.tests.TestCase):
    """Base class for reviewer-tools tests.

    Creates the standard cast of users (reviewer, senior reviewer, admin,
    regular user, mozilla contact) and logs in as the plain reviewer.
    """

    def setUp(self):
        super(AppReviewerTest, self).setUp()
        self.reviewer_user = user_factory(email='editor')
        self.grant_permission(self.reviewer_user, 'Apps:Review')
        self.snr_reviewer_user = user_factory(email='snrreviewer')
        self.grant_permission(self.snr_reviewer_user, 'Apps:Review,Apps:Edit,'
                              'Apps:ReviewEscalated,Apps:ReviewPrivileged',
                              name='Senior App Reviewers')
        self.admin_user = user_factory(email='admin')
        self.grant_permission(self.admin_user, '*:*')
        self.regular_user = user_factory(email='regular')
        self.contact_user = user_factory(email='contact')
        # Default session: plain app reviewer.
        self.login_as_editor()

    def login_as_admin(self):
        self.login(self.admin_user)

    def login_as_editor(self):
        self.login(self.reviewer_user)

    def login_as_senior_reviewer(self):
        self.login(self.snr_reviewer_user)

    def check_actions(self, expected, elements):
        """Check the action buttons on the review page.

        `expected` is a list of tuples containing action name and action form
        value. `elements` is a PyQuery list of input elements.
        """
        for idx, item in enumerate(expected):
            text, form_value = item
            e = elements.eq(idx)
            eq_(e.parent().text(), text)
            eq_(e.attr('name'), 'action')
            eq_(e.val(), form_value)

    def uses_es(self):
        # True when the ES-backed queue views are active (flipped on by the
        # *ES test subclasses).
        return waffle.switch_is_active('reviewer-tools-elasticsearch')
class AccessMixin(object):
    """Shared checks that reviewer pages are reviewer-only."""

    def test_403_for_non_editor(self, *args, **kwargs):
        # A signed-in non-reviewer must be forbidden.
        self.login('regular@mozilla.com')
        response = self.client.head(self.url)
        eq_(response.status_code, 403)

    def test_302_for_anonymous(self, *args, **kwargs):
        # Anonymous users are redirected (to login).
        self.client.logout()
        response = self.client.head(self.url)
        eq_(response.status_code, 302)
class SearchMixin(object):
    """Shared smoke test: queue pages must tolerate a text search query."""

    def test_search_query(self):
        # Light test to make sure queues can handle search queries.
        response = self.client.get(self.url, {'text_query': 'test'})
        eq_(response.status_code, 200)
@mock.patch('mkt.webapps.models.Webapp.get_cached_manifest', mock.Mock)
class TestReviewersHome(AppReviewerTest, AccessMixin):
    """Tests for the reviewers dashboard: queue counts, progress bars and
    reviewer leaderboards. The fixture set deliberately includes apps that
    must NOT affect the stats (disabled, escalated, deleted-version)."""

    def setUp(self):
        super(TestReviewersHome, self).setUp()
        self.url = reverse('reviewers.home')
        # Three plain pending apps that should show up in the queue stats.
        self.apps = [app_factory(name='Antelope',
                                 status=mkt.STATUS_PENDING,
                                 file_kw={'status': mkt.STATUS_PENDING}),
                     app_factory(name='Bear',
                                 status=mkt.STATUS_PENDING,
                                 file_kw={'status': mkt.STATUS_PENDING}),
                     app_factory(name='Cougar',
                                 status=mkt.STATUS_PENDING,
                                 file_kw={'status': mkt.STATUS_PENDING})]
        # A public packaged app with a pending update (update queue).
        self.packaged_app = app_factory(name='Dinosaur',
                                        status=mkt.STATUS_PUBLIC,
                                        is_packaged=True)
        version_factory(addon=self.packaged_app,
                        file_kw={'status': mkt.STATUS_PENDING})
        # Add a disabled app for good measure.
        app_factory(name='Elephant', disabled_by_user=True,
                    status=mkt.STATUS_PENDING)
        # Escalate one app to make sure it doesn't affect stats.
        escalated = app_factory(name='Eyelash Pit Viper',
                                status=mkt.STATUS_PENDING)
        EscalationQueue.objects.create(addon=escalated)
        # Add a public app under re-review.
        rereviewed = app_factory(name='Finch', status=mkt.STATUS_PUBLIC)
        rq = RereviewQueue.objects.create(addon=rereviewed)
        rq.update(created=self.days_ago(1))
        # Add an app with latest update deleted. It shouldn't affect anything.
        app = app_factory(name='Great White Shark',
                          status=mkt.STATUS_PUBLIC,
                          version_kw={'version': '1.0'},
                          is_packaged=True)
        v = version_factory(addon=app,
                            version='2.1',
                            file_kw={'status': mkt.STATUS_PENDING})
        v.update(deleted=True)

    def test_route_reviewer(self):
        # App reviewers go to apps home.
        req = mkt.site.tests.req_factory_factory(
            reverse('reviewers'),
            user=UserProfile.objects.get(email='editor@mozilla.com'))
        r = route_reviewer(req)
        self.assert3xx(r, reverse('reviewers.home'))

    def test_progress_pending(self):
        # One app in each age bucket: new (<7 days), med, old (>14 days).
        self.apps[0].latest_version.update(nomination=self.days_ago(1))
        self.apps[1].latest_version.update(nomination=self.days_ago(8))
        self.apps[2].latest_version.update(nomination=self.days_ago(15))
        counts, percentages = _progress()
        eq_(counts['pending']['week'], 1)
        eq_(counts['pending']['new'], 1)
        eq_(counts['pending']['old'], 1)
        eq_(counts['pending']['med'], 1)
        self.assertAlmostEqual(percentages['pending']['new'], 33.333333333333)
        self.assertAlmostEqual(percentages['pending']['old'], 33.333333333333)
        self.assertAlmostEqual(percentages['pending']['med'], 33.333333333333)

    def test_progress_rereview(self):
        # Together with the setUp finch (1 day old) this gives one re-review
        # per age bucket.
        rq = RereviewQueue.objects.create(addon=self.apps[0])
        rq.update(created=self.days_ago(8))
        rq = RereviewQueue.objects.create(addon=self.apps[1])
        rq.update(created=self.days_ago(15))
        counts, percentages = _progress()
        eq_(counts['rereview']['week'], 1)
        eq_(counts['rereview']['new'], 1)
        eq_(counts['rereview']['old'], 1)
        eq_(counts['rereview']['med'], 1)
        self.assertAlmostEqual(percentages['rereview']['new'], 33.333333333333)
        self.assertAlmostEqual(percentages['rereview']['old'], 33.333333333333)
        self.assertAlmostEqual(percentages['rereview']['med'], 33.333333333333)

    def test_progress_updated(self):
        # Two extra public apps with pending updates, one per remaining age
        # bucket (the setUp Dinosaur provides the "new" one).
        extra_app = app_factory(name='Jackalope',
                                status=mkt.STATUS_PUBLIC,
                                is_packaged=True,
                                created=self.days_ago(35))
        version_factory(addon=extra_app,
                        file_kw={'status': mkt.STATUS_PENDING},
                        created=self.days_ago(25),
                        nomination=self.days_ago(8))
        extra_app = app_factory(name='Jackrabbit',
                                status=mkt.STATUS_PUBLIC,
                                is_packaged=True,
                                created=self.days_ago(35))
        version_factory(addon=extra_app,
                        file_kw={'status': mkt.STATUS_PENDING},
                        created=self.days_ago(25),
                        nomination=self.days_ago(25))
        counts, percentages = _progress()
        eq_(counts['updates']['week'], 1)
        eq_(counts['updates']['new'], 1)
        eq_(counts['updates']['old'], 1)
        eq_(counts['updates']['med'], 1)
        self.assertAlmostEqual(percentages['updates']['new'], 33.333333333333)
        self.assertAlmostEqual(percentages['updates']['old'], 33.333333333333)
        self.assertAlmostEqual(percentages['updates']['med'], 33.333333333333)

    def test_stats_waiting(self):
        # Exercises the dashboard markup: queue titles, weekly counts and the
        # percentage "waiting" bars.
        self.apps[0].latest_version.update(nomination=self.days_ago(1))
        self.apps[1].latest_version.update(nomination=self.days_ago(5))
        self.apps[2].latest_version.update(nomination=self.days_ago(15))
        self.packaged_app.update(created=self.days_ago(1))
        doc = pq(self.client.get(self.url).content)
        anchors = doc('.editor-stats-title a')
        eq_(anchors.eq(0).text(), '3 Pending App Reviews')
        eq_(anchors.eq(1).text(), '1 Re-review')
        eq_(anchors.eq(2).text(), '1 Update Review')
        divs = doc('.editor-stats-table > div')
        # Pending review.
        eq_(divs.eq(0).text(), '2 unreviewed app submissions this week.')
        # Re-reviews.
        eq_(divs.eq(2).text(), '1 unreviewed app submission this week.')
        # Update review.
        eq_(divs.eq(4).text(), '1 unreviewed app submission this week.')
        # Maths.
        # Pending review.
        eq_(doc('.waiting_new').eq(0).attr('title')[-3:], '33%')
        eq_(doc('.waiting_med').eq(0).attr('title')[-3:], '33%')
        eq_(doc('.waiting_old').eq(0).attr('title')[-3:], '33%')
        # Re-reviews.
        eq_(doc('.waiting_new').eq(1).attr('title')[-4:], '100%')
        eq_(doc('.waiting_med').eq(1).attr('title')[-3:], ' 0%')
        eq_(doc('.waiting_old').eq(1).attr('title')[-3:], ' 0%')
        # Update review.
        eq_(doc('.waiting_new').eq(2).attr('title')[-4:], '100%')
        eq_(doc('.waiting_med').eq(2).attr('title')[-3:], ' 0%')
        eq_(doc('.waiting_old').eq(2).attr('title')[-3:], ' 0%')

    def test_reviewer_leaders(self):
        reviewers = UserProfile.objects.all()[:2]
        # 1st user reviews 2, 2nd user only 1.
        users = cycle(reviewers)
        for app in self.apps:
            mkt.log(mkt.LOG.APPROVE_VERSION, app, app.latest_version,
                    user=users.next(), details={'comments': 'hawt'})
        doc = pq(self.client.get(self.url).content.decode('utf-8'))
        # Top Reviews.
        table = doc('#editors-stats .editor-stats-table').eq(0)
        eq_(table.find('td').eq(0).text(), reviewers[0].email)
        eq_(table.find('td').eq(1).text(), u'2')
        eq_(table.find('td').eq(2).text(), reviewers[1].email)
        eq_(table.find('td').eq(3).text(), u'1')
        # Top Reviews this month.
        table = doc('#editors-stats .editor-stats-table').eq(1)
        eq_(table.find('td').eq(0).text(), reviewers[0].email)
        eq_(table.find('td').eq(1).text(), u'2')
        eq_(table.find('td').eq(2).text(), reviewers[1].email)
        eq_(table.find('td').eq(3).text(), u'1')
class FlagsMixin(object):
    """Shared tests for the per-row flag icons in the queue tables.

    Host classes must provide `self.apps` (first element is flagged) and
    `self.url` (the queue page under test).
    """

    def test_flag_packaged_app(self):
        self.apps[0].update(is_packaged=True)
        if self.uses_es():
            self.reindex(Webapp)
        eq_(self.apps[0].is_packaged, True)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        td = pq(res.content)('#addon-queue tbody tr td.flags').eq(0)
        flag = td('div.sprite-reviewer-packaged-app')
        eq_(flag.length, 1)

    def test_flag_premium_app(self):
        self.apps[0].update(premium_type=mkt.ADDON_PREMIUM)
        if self.uses_es():
            self.reindex(Webapp)
        eq_(self.apps[0].is_premium(), True)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        tds = pq(res.content)('#addon-queue tbody tr td.flags')
        flags = tds('div.sprite-reviewer-premium')
        eq_(flags.length, 1)

    def test_flag_free_inapp_app(self):
        self.apps[0].update(premium_type=mkt.ADDON_FREE_INAPP)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        tds = pq(res.content)('#addon-queue tbody tr td.flags')
        eq_(tds('div.sprite-reviewer-premium.inapp.free').length, 1)

    def test_flag_premium_inapp_app(self):
        self.apps[0].update(premium_type=mkt.ADDON_PREMIUM_INAPP)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        tds = pq(res.content)('#addon-queue tbody tr td.flags')
        eq_(tds('div.sprite-reviewer-premium.inapp').length, 1)

    def test_flag_info(self):
        # "More information requested" flag.
        self.apps[0].latest_version.update(has_info_request=True)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        tds = pq(res.content)('#addon-queue tbody tr td.flags')
        flags = tds('div.sprite-reviewer-info')
        eq_(flags.length, 1)

    def test_flag_comment(self):
        # "Has reviewer comment" flag.
        self.apps[0].latest_version.update(has_editor_comment=True)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        tds = pq(res.content)('#addon-queue tbody tr td.flags')
        flags = tds('div.sprite-reviewer-editor')
        eq_(flags.length, 1)
class XSSMixin(object):
    """Shared test: app names must be HTML-escaped in the queue table."""

    def test_xss_in_queue(self):
        a = self.apps[0]
        a.name = '<script>alert("xss")</script>'
        a.save()
        if self.uses_es():
            self.refresh(doctypes=('homescreen', 'webapp'))
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        tbody = pq(res.content)('#addon-queue tbody').html()
        # The name must appear escaped, never as live markup. (The original
        # pair of asserts compared the same raw string twice, which is
        # self-contradictory; the first must check the escaped form.)
        assert '&lt;script&gt;' in tbody
        assert '<script>' not in tbody
class TestAppQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin,
                   XSSMixin):
    """Tests for the pending-apps review queue."""

    def setUp(self):
        super(TestAppQueue, self).setUp()
        yesterday = self.days_ago(1)
        long_ago = self.days_ago(2)
        # Two pending apps (ordered by nomination) plus one public app that
        # only appears in the re-review queue.
        self.apps = [app_factory(name='XXX',
                                 status=mkt.STATUS_PENDING,
                                 version_kw={'nomination': long_ago},
                                 file_kw={'status': mkt.STATUS_PENDING}),
                     app_factory(name='YYY',
                                 status=mkt.STATUS_PENDING,
                                 version_kw={'nomination': yesterday},
                                 file_kw={'status': mkt.STATUS_PENDING}),
                     app_factory(name='ZZZ')]
        self.apps[0].update(created=self.days_ago(12))
        self.apps[1].update(created=self.days_ago(11))
        # Quick sanity check.
        eq_(self.apps[0].latest_version.nomination, long_ago)
        eq_(self.apps[1].latest_version.nomination, yesterday)
        RereviewQueue.objects.create(addon=self.apps[2])
        self.url = reverse('reviewers.apps.queue_pending')

    def tearDown(self):
        if self.uses_es():
            unindex_webapps([app.id for app in self.apps])
        super(TestAppQueue, self).tearDown()

    def review_url(self, app):
        return reverse('reviewers.apps.review', args=[app.app_slug])

    def test_queue_viewing_ping(self):
        eq_(self.client.post(reverse('reviewers.queue_viewing')).status_code,
            200)

    def test_template_links(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        links = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(2) a')
        apps = Webapp.objects.filter(
            status=mkt.STATUS_PENDING).order_by('created')
        expected = [
            (unicode(apps[0].name), self.review_url(apps[0])),
            (unicode(apps[1].name), self.review_url(apps[1])),
        ]
        check_links(expected, links, verify=False)

    def test_action_buttons_pending(self):
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Approve', 'public'),
            (u'Reject', 'reject'),
            (u'Request Re-review', 'manual_rereview'),
            (u'Escalate', 'escalate'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)

    def test_action_buttons_rejected(self):
        # Check action buttons for a previously rejected app.
        self.apps[0].update(status=mkt.STATUS_REJECTED)
        self.apps[0].latest_version.files.update(status=mkt.STATUS_DISABLED)
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Approve', 'public'),
            (u'Request Re-review', 'manual_rereview'),
            (u'Escalate', 'escalate'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)

    @mock.patch('mkt.versions.models.Version.is_privileged', True)
    def test_action_buttons_privileged_cantreview(self):
        # Plain reviewers cannot approve/reject privileged apps.
        self.apps[0].update(is_packaged=True)
        self.apps[0].latest_version.files.update(status=mkt.STATUS_PENDING)
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Request Re-review', 'manual_rereview'),
            (u'Escalate', 'escalate'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)

    @mock.patch('mkt.versions.models.Version.is_privileged', True)
    def test_action_buttons_privileged_canreview(self):
        # Senior reviewers get the full set of actions on privileged apps.
        self.login_as_senior_reviewer()
        self.apps[0].update(is_packaged=True)
        self.apps[0].latest_version.files.update(status=mkt.STATUS_PENDING)
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Approve', 'public'),
            (u'Reject', 'reject'),
            (u'Ban app', 'disable'),
            (u'Request Re-review', 'manual_rereview'),
            (u'Escalate', 'escalate'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)

    def test_devices(self):
        AddonDeviceType.objects.create(addon=self.apps[0], device_type=1)
        AddonDeviceType.objects.create(addon=self.apps[0], device_type=2)
        if self.uses_es():
            self.reindex(Webapp)
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        tds = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(5)')
        eq_(tds('ul li:not(.unavailable)').length, 2)

    def test_payments(self):
        self.apps[0].update(premium_type=mkt.ADDON_PREMIUM)
        self.apps[1].update(premium_type=mkt.ADDON_FREE_INAPP)
        if self.uses_es():
            self.reindex(Webapp)
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        tds = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(6)')
        eq_(tds.eq(0).text(),
            unicode(mkt.ADDON_PREMIUM_TYPES[mkt.ADDON_PREMIUM]))
        eq_(tds.eq(1).text(),
            unicode(mkt.ADDON_PREMIUM_TYPES[mkt.ADDON_FREE_INAPP]))

    def test_invalid_page(self):
        # An out-of-range page number falls back to page 1.
        r = self.client.get(self.url, {'page': 999})
        eq_(r.status_code, 200)
        eq_(r.context['pager'].number, 1)

    def test_queue_count(self):
        if self.uses_es():
            self.refresh(doctypes=('webapp', 'homescreen'))
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (2)')
        eq_(links[1].text, u'Re-reviews (1)')
        eq_(links[2].text, u'Updates (0)')
        eq_(links[4].text, u'Homescreens (0)')

    def test_homescreen_count(self):
        # Tagging an app as a homescreen moves it to the homescreen queue.
        Tag(tag_text='homescreen').save_tag(self.apps[1])
        self.apps[1].save()
        if self.uses_es():
            WebappIndexer.unindex(self.apps[1].id)
            self.refresh(('homescreen', 'webapp'))
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (1)')
        eq_(links[1].text, u'Re-reviews (1)')
        eq_(links[2].text, u'Updates (0)')
        eq_(links[4].text, u'Homescreens (1)')

    def test_queue_count_senior_reviewer(self):
        # Senior reviewers additionally see the Escalations tab.
        self.login_as_senior_reviewer()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (2)')
        eq_(links[1].text, u'Re-reviews (1)')
        eq_(links[2].text, u'Updates (0)')
        eq_(links[3].text, u'Escalations (0)')

    def test_escalated_not_in_queue(self):
        self.login_as_senior_reviewer()
        EscalationQueue.objects.create(addon=self.apps[0])
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        # self.apps[2] is not pending so doesn't show up either.
        eq_([a.app.id for a in res.context['addons']], [self.apps[1].id])
        doc = pq(res.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (1)')
        eq_(links[1].text, u'Re-reviews (1)')
        eq_(links[2].text, u'Updates (0)')
        eq_(links[3].text, u'Escalations (1)')

    def test_incomplete_no_in_queue(self):
        # Incomplete (STATUS_NULL) apps never appear in the queue.
        [app.update(status=mkt.STATUS_NULL) for app in self.apps]
        if self.uses_es():
            self.reindex(Webapp)
        req = req_factory_factory(
            self.url,
            user=UserProfile.objects.get(email='editor@mozilla.com'))
        doc = pq(queue_apps(req).content)
        assert not doc('#addon-queue tbody tr').length

    def test_waiting_time(self):
        """Check objects show queue objects' created."""
        res = self.client.get(self.url)
        waiting_times = [wait.attrib['isotime'] for wait in
                         pq(res.content)('td time')]
        expected_waiting_times = [isotime(app.latest_version.nomination)
                                  for app in self.apps[0:2]]
        self.assertSetEqual(expected_waiting_times, waiting_times)
class TestAppQueueES(mkt.site.tests.ESTestCase, TestAppQueue):
    """Re-run the app-queue tests against the Elasticsearch-backed views."""

    def setUp(self):
        super(TestAppQueueES, self).setUp()
        # Makes uses_es() return True in the inherited tests.
        self.create_switch('reviewer-tools-elasticsearch')
        self.reindex(Webapp)
@mock.patch('mkt.versions.models.Version.is_privileged', False)
class TestRereviewQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin,
                        XSSMixin):
    """Tests for the re-review queue (public apps flagged for re-review)."""

    def setUp(self):
        super(TestRereviewQueue, self).setUp()
        self.apps = [app_factory(name='XXX'),
                     app_factory(name='YYY'),
                     app_factory(name='ZZZ')]
        # Queue entries ordered oldest-first by creation date.
        RereviewQueue.objects.create(addon=self.apps[0]).update(
            created=self.days_ago(5))
        RereviewQueue.objects.create(addon=self.apps[1]).update(
            created=self.days_ago(3))
        RereviewQueue.objects.create(addon=self.apps[2]).update(
            created=self.days_ago(1))
        self.apps[0].update(created=self.days_ago(15))
        self.apps[1].update(created=self.days_ago(13))
        self.apps[2].update(created=self.days_ago(11))
        if self.uses_es():
            self.refresh(doctypes=('homescreen', 'webapp'))
        self.url = reverse('reviewers.apps.queue_rereview')

    def tearDown(self):
        if self.uses_es():
            unindex_webapps([app.id for app in self.apps])
        super(TestRereviewQueue, self).tearDown()

    def review_url(self, app):
        return reverse('reviewers.apps.review', args=[app.app_slug])

    def test_template_links(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        links = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(2) a')
        apps = [rq.addon for rq in
                RereviewQueue.objects.all().order_by('created')]
        expected = [
            (unicode(apps[0].name), self.review_url(apps[0])),
            (unicode(apps[1].name), self.review_url(apps[1])),
            (unicode(apps[2].name), self.review_url(apps[2])),
        ]
        check_links(expected, links, verify=False)

    def test_waiting_time(self):
        """Check objects show queue objects' created."""
        r = self.client.get(self.url)
        waiting_times = [wait.attrib['isotime'] for wait in
                         pq(r.content)('td time')]
        expected_waiting_times = [
            isotime(app.rereviewqueue_set.all()[0].created)
            for app in self.apps]
        self.assertSetEqual(expected_waiting_times, waiting_times)

    def test_action_buttons_public_senior_reviewer(self):
        self.login_as_senior_reviewer()
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Reject', 'reject'),
            (u'Ban app', 'disable'),
            (u'Clear Re-review', 'clear_rereview'),
            (u'Escalate', 'escalate'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)

    def test_action_buttons_public(self):
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Reject', 'reject'),
            (u'Clear Re-review', 'clear_rereview'),
            (u'Escalate', 'escalate'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)

    def test_action_buttons_reject(self):
        self.apps[0].update(status=mkt.STATUS_REJECTED)
        self.apps[0].latest_version.files.update(status=mkt.STATUS_DISABLED)
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Approve', 'public'),
            (u'Clear Re-review', 'clear_rereview'),
            (u'Escalate', 'escalate'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)

    def test_invalid_page(self):
        # An out-of-range page number falls back to page 1.
        r = self.client.get(self.url, {'page': 999})
        eq_(r.status_code, 200)
        eq_(r.context['pager'].number, 1)

    def test_queue_count(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (0)')
        eq_(links[1].text, u'Re-reviews (3)')
        eq_(links[2].text, u'Updates (0)')

    def test_queue_count_senior_reviewer(self):
        self.login_as_senior_reviewer()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (0)')
        eq_(links[1].text, u'Re-reviews (3)')
        eq_(links[2].text, u'Updates (0)')
        eq_(links[3].text, u'Escalations (0)')

    def test_escalated_not_in_queue(self):
        self.login_as_senior_reviewer()
        EscalationQueue.objects.create(addon=self.apps[0])
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        self.assertSetEqual([a.app.id for a in res.context['addons']],
                            [a.id for a in self.apps[1:]])
        doc = pq(res.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (0)')
        eq_(links[1].text, u'Re-reviews (2)')
        eq_(links[2].text, u'Updates (0)')
        eq_(links[3].text, u'Escalations (1)')

    def test_addon_deleted(self):
        # Deleting an app must remove its re-review queue entry.
        app = self.apps[0]
        app.delete()
        eq_(RereviewQueue.objects.filter(addon=app).exists(), False)
class TestRereviewQueueES(mkt.site.tests.ESTestCase, TestRereviewQueue):
    """Re-run the re-review queue tests against the Elasticsearch views."""

    def setUp(self):
        super(TestRereviewQueueES, self).setUp()
        # Makes uses_es() return True in the inherited tests.
        self.create_switch('reviewer-tools-elasticsearch')
        self.reindex(Webapp)
@mock.patch('mkt.versions.models.Version.is_privileged', False)
class TestUpdateQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin,
XSSMixin):
    # Prevent update_cached_manifests at setUp() since it gets called and tries
    # to access files when we add versions.
    # NOTE(review): the task is patched with the value False (not a Mock);
    # presumably it is only looked up, never invoked, while queuing is
    # active -- confirm before changing.
    @mock.patch('mkt.webapps.tasks.update_cached_manifests', False)
    def setUp(self):
        super(TestUpdateQueue, self).setUp()
        # Queue post-request tasks so version creation doesn't fire them
        # mid-setup; flushed below.
        post_request_task._start_queuing_tasks()
        app1 = app_factory(is_packaged=True, name='XXX',
                           version_kw={'version': '1.0',
                                       'created': self.days_ago(2),
                                       'nomination': self.days_ago(2)})
        app2 = app_factory(is_packaged=True, name='YYY',
                           version_kw={'version': '1.0',
                                       'created': self.days_ago(2),
                                       'nomination': self.days_ago(2)})
        version_factory(addon=app1, version='1.1', created=self.days_ago(1),
                        nomination=self.days_ago(1),
                        file_kw={'status': mkt.STATUS_PENDING})
        version_factory(addon=app2, version='1.1', created=self.days_ago(1),
                        nomination=self.days_ago(1),
                        file_kw={'status': mkt.STATUS_PENDING})
        post_request_task._send_tasks_and_stop_queuing()
        self.apps = list(Webapp.objects.order_by('id'))
        self.url = reverse('reviewers.apps.queue_updates')
self.url = reverse('reviewers.apps.queue_updates')
    def tearDown(self):
        # Keep the ES index clean between tests when running the ES variant.
        if self.uses_es():
            unindex_webapps([app.id for app in self.apps])
        super(TestUpdateQueue, self).tearDown()
def review_url(self, app):
return reverse('reviewers.apps.review', args=[app.app_slug])
    def test_template_links(self):
        # Rows must link to the review page, ordered by nomination date.
        self.apps[0].versions.latest().update(nomination=self.days_ago(2))
        self.apps[1].versions.latest().update(nomination=self.days_ago(1))
        if self.uses_es():
            self.reindex(Webapp)
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        links = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(2) a')
        expected = [
            (unicode(self.apps[0].name), self.review_url(self.apps[0])),
            (unicode(self.apps[1].name), self.review_url(self.apps[1])),
        ]
        check_links(expected, links, verify=False)
    def test_action_buttons_public_senior_reviewer(self):
        # Senior reviewers additionally get the "Ban app" action.
        self.apps[0].versions.latest().files.update(status=mkt.STATUS_PUBLIC)
        self.login_as_senior_reviewer()
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Reject', 'reject'),
            (u'Ban app', 'disable'),
            (u'Request Re-review', 'manual_rereview'),
            (u'Escalate', 'escalate'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)
def test_action_buttons_public(self):
    """Regular reviewers don't get the 'Ban app' action on public apps."""
    self.apps[0].versions.latest().files.update(status=mkt.STATUS_PUBLIC)
    response = self.client.get(self.review_url(self.apps[0]))
    eq_(response.status_code, 200)
    buttons = pq(response.content)('#review-actions input')
    self.check_actions([
        (u'Reject', 'reject'),
        (u'Request Re-review', 'manual_rereview'),
        (u'Escalate', 'escalate'),
        (u'Message developer', 'info'),
        (u'Private comment', 'comment'),
    ], buttons)
def test_action_buttons_reject(self):
    """A disabled file offers Approve instead of Reject."""
    self.apps[0].versions.latest().files.update(status=mkt.STATUS_DISABLED)
    response = self.client.get(self.review_url(self.apps[0]))
    eq_(response.status_code, 200)
    buttons = pq(response.content)('#review-actions input')
    self.check_actions([
        (u'Approve', 'public'),
        (u'Request Re-review', 'manual_rereview'),
        (u'Escalate', 'escalate'),
        (u'Message developer', 'info'),
        (u'Private comment', 'comment'),
    ], buttons)
def test_invalid_page(self):
    """An out-of-range ?page parameter falls back to the first page."""
    response = self.client.get(self.url, {'page': 999})
    eq_(response.status_code, 200)
    eq_(response.context['pager'].number, 1)
def test_queue_count(self):
    """Tab labels show per-queue totals; both apps sit in Updates."""
    response = self.client.get(self.url)
    eq_(response.status_code, 200)
    tabs = pq(response.content)('.tabnav li a')
    expected = [u'Apps (0)', u'Re-reviews (0)', u'Updates (2)']
    for index, label in enumerate(expected):
        eq_(tabs[index].text, label)
def test_homescreen(self):
    # Tagging an app as a homescreen moves it out of the Updates queue
    # and into the separate Homescreens queue.
    Tag(tag_text='homescreen').save_tag(self.apps[1])
    self.apps[1].save()
    if self.uses_es():
        # Drop the stale 'webapp' doc so the tagged app is only indexed
        # under the 'homescreen' doctype, then refresh both indexes.
        WebappIndexer.unindex(self.apps[1].id)
        self.refresh(doctypes=('homescreen', 'webapp'))
    r = self.client.get(self.url)
    eq_(r.status_code, 200)
    doc = pq(r.content)
    links = doc('.tabnav li a')
    # One app remains in Updates; the tagged one now counts under
    # Homescreens.
    eq_(links[0].text, u'Apps (0)')
    eq_(links[1].text, u'Re-reviews (0)')
    eq_(links[2].text, u'Updates (1)')
    eq_(links[3].text, u'Reviewing (0)')
    eq_(links[4].text, u'Homescreens (1)')
def test_queue_count_senior_reviewer(self):
    """Senior reviewers additionally see the Escalations tab."""
    self.login_as_senior_reviewer()
    response = self.client.get(self.url)
    eq_(response.status_code, 200)
    tabs = pq(response.content)('.tabnav li a')
    expected = [u'Apps (0)', u'Re-reviews (0)', u'Updates (2)',
                u'Escalations (0)']
    for index, label in enumerate(expected):
        eq_(tabs[index].text, label)
def test_escalated_not_in_queue(self):
    # An escalated app must disappear from the Updates queue and be
    # counted under Escalations instead.
    self.login_as_senior_reviewer()
    EscalationQueue.objects.create(addon=self.apps[0])
    if self.uses_es():
        self.reindex(Webapp)
    res = self.client.get(self.url)
    # Only the non-escalated app(s) remain in the listing.
    eq_([a.app.id for a in res.context['addons']],
        [app.id for app in self.apps[1:]])
    doc = pq(res.content)
    links = doc('.tabnav li a')
    eq_(links[0].text, u'Apps (0)')
    eq_(links[1].text, u'Re-reviews (0)')
    eq_(links[2].text, u'Updates (1)')
    eq_(links[3].text, u'Escalations (1)')
def test_order(self):
    """Updates are ordered by nomination date, not app creation date."""
    self.apps[0].update(created=self.days_ago(10))
    self.apps[1].update(created=self.days_ago(5))
    self.apps[0].versions.latest().update(nomination=self.days_ago(1))
    self.apps[1].versions.latest().update(nomination=self.days_ago(4))
    if self.uses_es():
        self.reindex(Webapp)
    response = self.client.get(self.url)
    queue = list(response.context['addons'])
    # apps[1] was nominated earlier, so it comes first even though
    # apps[0] is the older app.
    eq_(queue[0].app.id, self.apps[1].id)
    eq_(queue[1].app.id, self.apps[0].id)
def test_only_updates_in_queue(self):
    # Add new packaged app, which should only show up in the pending queue.
    app = app_factory(is_packaged=True, name='ZZZ',
                      status=mkt.STATUS_PENDING,
                      version_kw={'version': '1.0'},
                      file_kw={'status': mkt.STATUS_PENDING})
    self.apps.append(app)
    if self.uses_es():
        self.reindex(Webapp)
    res = self.client.get(self.url)
    apps = [a.app for a in res.context['addons']]
    assert app not in apps, (
        'Unexpected: Found a new packaged app in the updates queue.')
    # The tab count still only reflects the two apps from setUp().
    eq_(pq(res.content)('.tabnav li a')[2].text, u'Updates (2)')
def test_approved_update_in_queue(self):
    # A pending version on an already-approved app counts as an update.
    app = app_factory(is_packaged=True, name='YYY',
                      status=mkt.STATUS_APPROVED,
                      version_kw={'version': '1.0',
                                  'created': self.days_ago(2),
                                  'nomination': self.days_ago(2)})
    self.apps.append(app)
    # Align the existing files with the app's approved status.
    File.objects.filter(version__addon=app).update(status=app.status)
    version_factory(addon=app, version='1.1', created=self.days_ago(1),
                    nomination=self.days_ago(1),
                    file_kw={'status': mkt.STATUS_PENDING})
    if self.uses_es():
        self.reindex(Webapp)
    res = self.client.get(self.url)
    assert app.id in [a.app.id for a in res.context['addons']]
    eq_(pq(res.content)('.tabnav li a')[2].text, u'Updates (3)')
def test_update_queue_with_empty_nomination(self):
    # Apps whose latest version never got a nomination date must still
    # appear in the updates queue.
    app = app_factory(is_packaged=True, name='YYY',
                      status=mkt.STATUS_NULL,
                      version_kw={'version': '1.0',
                                  'created': self.days_ago(2),
                                  'nomination': None})
    self.apps.append(app)
    first_version = app.latest_version
    version_factory(addon=app, version='1.1', created=self.days_ago(1),
                    nomination=None,
                    file_kw={'status': mkt.STATUS_PENDING})
    # Now that we have a version with nomination=None, reset app status.
    app.update(status=mkt.STATUS_APPROVED)
    File.objects.filter(version=first_version).update(status=app.status)
    # Safeguard: we /really/ want to test with nomination=None.
    eq_(app.latest_version.reload().nomination, None)
    if self.uses_es():
        self.reindex(Webapp)
    res = self.client.get(self.url)
    assert app.id in [a.app.id for a in res.context['addons']]
    eq_(pq(res.content)('.tabnav li a')[2].text, u'Updates (3)')
def test_deleted_version_not_in_queue(self):
    """
    This tests that an app with a prior pending version that got
    deleted doesn't trigger the app to remain in the review queue.
    """
    app = self.apps[0]
    # File is PENDING and delete current version.
    old_ver = app.versions.order_by('id')[0]
    old_ver.files.latest().update(status=mkt.STATUS_PENDING)
    old_ver.delete()
    # "Approve" the app.
    app.versions.latest().files.latest().update(status=mkt.STATUS_PUBLIC)
    eq_(app.reload().status, mkt.STATUS_PUBLIC)
    if self.uses_es():
        self.reindex(Webapp)
    res = self.client.get(self.url)
    eq_(res.status_code, 200)
    # Verify that our app has 2 versions (the deleted one is still
    # present via the with_deleted manager).
    eq_(Version.with_deleted.filter(addon=app).count(), 2)
    # Verify the apps in the context are what we expect: only the
    # untouched second app remains in the Updates queue.
    doc = pq(res.content)
    eq_(doc('.tabnav li a')[2].text, u'Updates (1)')
    apps = [a.app.id for a in res.context['addons']]
    ok_(app.id not in apps)
    ok_(self.apps[1].id in apps)
def test_waiting_time(self):
    """The queue's <time> cells carry each version's nomination date."""
    response = self.client.get(self.url)
    shown = [node.attrib['isotime']
             for node in pq(response.content)('td time')]
    expected = [isotime(app.latest_version.nomination)
                for app in self.apps]
    self.assertSetEqual(expected, shown)
class TestUpdateQueueES(mkt.site.tests.ESTestCase, TestUpdateQueue):
    """Re-run the TestUpdateQueue suite against the Elasticsearch-backed
    queue implementation (enabled via the waffle switch below)."""

    def setUp(self):
        super(TestUpdateQueueES, self).setUp()
        self.create_switch('reviewer-tools-elasticsearch')
        self.refresh(doctypes=('homescreen', 'webapp'))
@mock.patch('mkt.versions.models.Version.is_privileged', False)
class TestEscalationQueue(AppReviewerTest, AccessMixin, FlagsMixin,
                          SearchMixin, XSSMixin):
    """Tests for the escalation queue listing (senior reviewers only)."""

    def setUp(self):
        super(TestEscalationQueue, self).setUp()
        # Three escalated apps with staggered escalation and creation
        # dates so ordering and waiting-time assertions are deterministic.
        self.apps = [app_factory(name='XXX'),
                     app_factory(name='YYY'),
                     app_factory(name='ZZZ')]
        EscalationQueue.objects.create(addon=self.apps[0]).update(
            created=self.days_ago(5))
        EscalationQueue.objects.create(addon=self.apps[1]).update(
            created=self.days_ago(3))
        EscalationQueue.objects.create(addon=self.apps[2]).update(
            created=self.days_ago(1))
        self.apps[0].update(created=self.days_ago(15))
        self.apps[1].update(created=self.days_ago(13))
        self.apps[2].update(created=self.days_ago(11))
        # Only senior reviewers have access to this queue.
        self.login_as_senior_reviewer()
        self.url = reverse('reviewers.apps.queue_escalated')

    def tearDown(self):
        # Clean up ES documents created by the ES subclass run.
        if self.uses_es():
            unindex_webapps([app.id for app in self.apps])
        super(TestEscalationQueue, self).tearDown()

    def review_url(self, app):
        """Return the reviewer detail-page URL for `app`."""
        return reverse('reviewers.apps.review', args=[app.app_slug])

    def test_flag_blocked(self):
        # Blocklisted apps should only be in the update queue, so this flag
        # check is here rather than in FlagsMixin.
        self.apps[0].update(status=mkt.STATUS_BLOCKED)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        tds = pq(res.content)('#addon-queue tbody tr td.flags')
        flags = tds('div.sprite-reviewer-blocked')
        eq_(flags.length, 1)

    def test_no_access_regular_reviewer(self):
        # Non-senior reviewers get a 403 on the escalation queue.
        self.login_as_editor()
        res = self.client.get(self.url)
        eq_(res.status_code, 403)

    def test_template_links(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        links = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(2) a')
        # Rows are listed by the app's creation date.
        apps = [rq.addon for rq in
                EscalationQueue.objects.all().order_by('addon__created')]
        expected = [
            (unicode(apps[0].name), self.review_url(apps[0])),
            (unicode(apps[1].name), self.review_url(apps[1])),
            (unicode(apps[2].name), self.review_url(apps[2])),
        ]
        check_links(expected, links, verify=False)

    def test_waiting_time(self):
        """Check objects show queue objects' created."""
        r = self.client.get(self.url)
        waiting_times = [wait.attrib['isotime'] for wait in
                         pq(r.content)('td time')]
        # Waiting time is measured from the escalation entry, not from
        # the app or version.
        expected_waiting_times = [
            isotime(app.escalationqueue_set.all()[0].created)
            for app in self.apps]
        self.assertSetEqual(expected_waiting_times, waiting_times)

    def test_action_buttons_public(self):
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Reject', 'reject'),
            (u'Ban app', 'disable'),
            (u'Request Re-review', 'manual_rereview'),
            (u'Clear Escalation', 'clear_escalation'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)

    def test_action_buttons_reject(self):
        # A rejected app offers Approve instead of Reject.
        self.apps[0].update(status=mkt.STATUS_REJECTED)
        self.apps[0].latest_version.files.update(status=mkt.STATUS_DISABLED)
        r = self.client.get(self.review_url(self.apps[0]))
        eq_(r.status_code, 200)
        actions = pq(r.content)('#review-actions input')
        expected = [
            (u'Approve', 'public'),
            (u'Ban app', 'disable'),
            (u'Request Re-review', 'manual_rereview'),
            (u'Clear Escalation', 'clear_escalation'),
            (u'Message developer', 'info'),
            (u'Private comment', 'comment'),
        ]
        self.check_actions(expected, actions)

    def test_invalid_page(self):
        # An out-of-range page number falls back to page 1.
        r = self.client.get(self.url, {'page': 999})
        eq_(r.status_code, 200)
        eq_(r.context['pager'].number, 1)

    def test_queue_count(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (0)')
        eq_(links[1].text, u'Re-reviews (0)')
        eq_(links[2].text, u'Updates (0)')
        eq_(links[3].text, u'Escalations (3)')

    def test_addon_deleted(self):
        # Deleting an app must also remove its escalation queue entry.
        app = self.apps[0]
        app.delete()
        eq_(EscalationQueue.objects.filter(addon=app).exists(), False)
class TestEscalationQueueES(mkt.site.tests.ESTestCase, TestEscalationQueue):
    """Re-run the TestEscalationQueue suite against the
    Elasticsearch-backed implementation."""

    def setUp(self):
        super(TestEscalationQueueES, self).setUp()
        self.create_switch('reviewer-tools-elasticsearch')
        self.reindex(Webapp)
class TestReviewTransaction(AttachmentManagementMixin,
                            mkt.site.tests.MockEsMixin,
                            mkt.site.tests.MockBrowserIdMixin,
                            test.TransactionTestCase,
                            TestedonManagementMixin):
    """Approval flows exercised with real transaction semantics
    (TransactionTestCase), e.g. signing during publish."""

    fixtures = fixture('webapp_337141')

    def setUp(self):
        super(TestReviewTransaction, self).setUp()
        mkt.site.tests.TestCase.grant_permission(
            user_factory(email='editor'), 'Apps:Review')
        self.mock_browser_id()

    def get_app(self):
        # Fresh DB read of the fixture app.
        return Webapp.objects.get(id=337141)

    @mock.patch('mkt.webapps.tasks.update_cached_manifests')
    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    @mock.patch('lib.crypto.packaged.sign_app')
    def test_public_sign(self, sign_mock, json_mock, update_cached_manifests):
        self.app = self.get_app()
        self.version = self.app.latest_version
        self.version.files.all().update(status=mkt.STATUS_PENDING)
        # Provide a dummy file on disk and drop any stale signed copy so
        # the signing step has something real to operate on.
        with private_storage.open(
                self.version.files.all()[0].file_path, 'w') as f:
            f.write('.')
        public_storage.delete(self.version.files.all()[0].signed_file_path)
        self.app.update(status=mkt.STATUS_PENDING, is_packaged=True,
                        _current_version=None, _signal=False)
        eq_(self.get_app().status, mkt.STATUS_PENDING)
        update_cached_manifests.reset_mock()
        sign_mock.return_value = None  # Didn't fail.
        json_mock.return_value = {'name': 'Something'}
        self.login('editor@mozilla.com')
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        resp = self.client.post(
            reverse('reviewers.apps.review', args=[self.app.app_slug]), data)
        eq_(resp.status_code, 302)
        # Successful signing publishes the app and refreshes manifests.
        eq_(self.get_app().status, mkt.STATUS_PUBLIC)
        eq_(update_cached_manifests.delay.call_count, 1)

    @mock.patch('mkt.webapps.tasks.update_cached_manifests')
    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    @mock.patch('lib.crypto.packaged.sign')
    def test_public_sign_failure(self, sign_mock, json_mock,
                                 update_cached_manifests):
        self.app = self.get_app()
        self.version = self.app.latest_version
        self.version.files.all().update(status=mkt.STATUS_PENDING)
        self.app.update(status=mkt.STATUS_PENDING, is_packaged=True,
                        _current_version=None, _signal=False)
        eq_(self.get_app().status, mkt.STATUS_PENDING)
        sign_mock.side_effect = packaged.SigningError
        json_mock.return_value = {'name': 'Something'}
        self.login('editor@mozilla.com')
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        resp = self.client.post(
            reverse('reviewers.apps.review', args=[self.app.app_slug]), data)
        eq_(resp.status_code, 302)
        # Signing failed, so the app must stay pending and the manifest
        # refresh must not have been queued.
        eq_(self.get_app().status, mkt.STATUS_PENDING)
        eq_(update_cached_manifests.delay.call_count, 0)
class TestReviewMixin(object):
    """Shared helpers for review-action tests: posting the review form
    and asserting on emails, comm threads, activity logs and scores."""

    # E.g commreply+12e0caffc4ca4174a6f62300c0ff180a@marketplace.firefox.com .
    COMM_REPLY_RE = r'^commreply\+[a-f0-9]+\@marketplace\.firefox\.com$'

    def post(self, data, queue='pending'):
        # Submit the review form; a successful action redirects back to
        # the named queue.
        res = self.client.post(self.url, data)
        self.assert3xx(res, reverse('reviewers.apps.queue_%s' % queue))

    def _check_email(self, msg, subject, to=None):
        # `to` defaults to the app's authors; when `subject` is None the
        # subject check is skipped.
        if to:
            eq_(msg.to, to)
        else:
            eq_(msg.to, list(self.app.authors.values_list('email', flat=True)))
        assert re.match(self.COMM_REPLY_RE, msg.extra_headers['Reply-To'])
        eq_(msg.cc, [])
        eq_(msg.from_email, settings.MKT_REVIEWERS_EMAIL)
        if subject:
            eq_(msg.subject, '%s: %s' % (subject, self.app.name))

    def _get_mail(self, email):
        # First outbox message whose recipient starts with `email`;
        # raises IndexError (failing the test) if none matched.
        return filter(lambda x: x.to[0].startswith(email), mail.outbox)[0]

    def _check_email_dev_and_contact(self, subject, outbox_len=2):
        """
        Helper for checking developer and Mozilla contact get emailed.
        """
        eq_(len(mail.outbox), outbox_len)
        # Developer.
        self._check_email(self._get_mail('steamcube'), subject)
        # Mozilla contact.
        self._check_email(self._get_mail('contact'), subject,
                          to=[self.mozilla_contact])

    def _check_thread(self):
        # The app has exactly one comm thread, readable by developer,
        # reviewer and staff.
        thread = self.app.threads
        eq_(thread.count(), 1)
        thread = thread.get()
        perms = ('developer', 'reviewer', 'staff')
        for key in perms:
            assert getattr(thread, 'read_permission_%s' % key)

    def _check_email_body(self, msg=None):
        # The notification body must link to the app's detail page.
        if not msg:
            msg = mail.outbox[0]
        body = msg.message().as_string()
        url = self.app.get_url_path()
        assert url in body, 'Could not find apps detail URL in %s' % msg

    def _check_log(self, action):
        assert AppLog.objects.filter(
            addon=self.app, activity_log__action=action.id).exists(), (
            "Didn't find `%s` action in logs." % action.short)

    def _check_score(self, reviewed_type):
        # The most recent ReviewerScore entry matches the reviewed type.
        scores = ReviewerScore.objects.all()
        assert len(scores) > 0
        eq_(scores[0].score, mkt.REVIEWED_SCORES[reviewed_type])
        eq_(scores[0].note_key, reviewed_type)
class TestReviewApp(SetupFilesMixin, AppReviewerTest, TestReviewMixin,
AccessMixin, AttachmentManagementMixin,
TestedonManagementMixin):
fixtures = fixture('webapp_337141')
def setUp(self):
    super(TestReviewApp, self).setUp()
    self.mozilla_contact = 'contact@mozilla.com'
    self.app = self.get_app()
    make_rated(self.app)
    # Reviews act on pending apps; pin the Mozilla contact so email
    # assertions can check the contact address.
    self.app.update(status=mkt.STATUS_PENDING,
                    mozilla_contact=self.mozilla_contact)
    self.version = self.app.latest_version
    self.version.files.all().update(status=mkt.STATUS_PENDING)
    self.file = self.version.all_files[0]
    self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
    self.setup_files()
def get_app(self):
    """Fetch a fresh copy of the fixture app (id 337141) from the DB."""
    return Webapp.objects.get(id=337141)
def test_review_viewing_ping(self):
    """The 'currently viewing' ping endpoint accepts a POST."""
    url = reverse('reviewers.review_viewing')
    response = self.client.post(url)
    eq_(response.status_code, 200)
@mock.patch('mkt.webapps.models.Webapp.in_rereview_queue')
def test_rereview(self, is_rereview_queue):
    """A re-review banner shows when the app is in the rereview queue."""
    is_rereview_queue.return_value = True
    page = pq(self.client.get(self.url).content)
    assert page('#queue-rereview').length
@mock.patch('mkt.webapps.models.Webapp.in_escalation_queue')
def test_escalated(self, in_escalation_queue):
    """An escalation banner shows when the app is escalated."""
    in_escalation_queue.return_value = True
    page = pq(self.client.get(self.url).content)
    assert page('#queue-escalation').length
def test_cannot_review_my_app(self):
    # Reviewers must not review apps they develop themselves (unless
    # ALLOW_SELF_REVIEWS is enabled, e.g. for local development).
    with self.settings(ALLOW_SELF_REVIEWS=False):
        self.app.addonuser_set.create(
            user=UserProfile.objects.get(email='editor@mozilla.com'))
        res = self.client.head(self.url)
        self.assert3xx(res, reverse('reviewers.home'))
        res = self.client.post(self.url)
        self.assert3xx(res, reverse('reviewers.home'))
def test_cannot_review_blocklisted_app(self):
    """Regular reviewers are redirected away from blocklisted apps."""
    self.app.update(status=mkt.STATUS_BLOCKED)
    # Both read and write requests bounce back to the reviewers home.
    for method in (self.client.get, self.client.post):
        self.assert3xx(method(self.url), reverse('reviewers.home'))
def test_review_no_latest_version(self):
    """With no versions at all the page renders, minus review actions."""
    self.app.versions.all().delete()
    self.app.reload()
    eq_(self.app.latest_version, None)
    eq_(self.app.current_version, None)
    response = self.client.get(self.url)
    eq_(response.status_code, 200)
    page = pq(response.content)
    for action in ('info', 'comment', 'public', 'reject'):
        assert not page('input[name=action][value=%s]' % action).length
    # Also try with a packaged app.
    self.app.update(is_packaged=True)
    response = self.client.get(self.url)
    eq_(response.status_code, 200)
def test_sr_can_review_blocklisted_app(self):
    """Senior reviewers may still view and act on blocklisted apps."""
    self.app.update(status=mkt.STATUS_BLOCKED)
    self.login_as_senior_reviewer()
    eq_(self.client.get(self.url).status_code, 200)
    data = {'action': 'public', 'comments': 'yo'}
    data.update(self._testedon_management_form())
    data.update(self._attachment_management_form(num=0))
    response = self.client.post(self.url, data)
    self.assert3xx(response, reverse('reviewers.apps.queue_pending'))
def test_pending_to_reject_w_device_overrides(self):
    # This shouldn't be possible unless there's form hacking.
    AddonDeviceType.objects.create(addon=self.app,
                                   device_type=mkt.DEVICE_DESKTOP.id)
    AddonDeviceType.objects.create(addon=self.app,
                                   device_type=mkt.DEVICE_TABLET.id)
    eq_(self.app.publish_type, mkt.PUBLISH_IMMEDIATE)
    data = {'action': 'reject', 'comments': 'something',
            'device_override': [mkt.DEVICE_DESKTOP.id]}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data)
    app = self.get_app()
    # The rejection goes through but the device override is ignored:
    # both original device types survive.
    eq_(app.publish_type, mkt.PUBLISH_IMMEDIATE)
    eq_(app.status, mkt.STATUS_REJECTED)
    eq_(set([o.id for o in app.device_types]),
        set([mkt.DEVICE_DESKTOP.id, mkt.DEVICE_TABLET.id]))
    self._check_email_dev_and_contact('Rejected')
    self._check_email_body()
def test_pending_to_public_w_requirements_overrides(self):
    # Approving while overriding feature requirements is allowed but
    # downgrades the publish type.
    data = {'action': 'public', 'comments': 'something',
            'has_packaged_apps': True}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    assert not self.app.latest_version.features.has_packaged_apps
    self.post(data)
    app = self.get_app()
    assert app.latest_version.features.has_packaged_apps
    # Since features have been changed by the reviewer, the app should not
    # be immediately published.
    eq_(app.publish_type, mkt.PUBLISH_PRIVATE)
    eq_(app.status, mkt.STATUS_APPROVED)
    self._check_log(mkt.LOG.REVIEW_FEATURES_OVERRIDE)
    # A reviewer changing features shouldn't generate a re-review.
    eq_(RereviewQueue.objects.count(), 0)
def test_pending_to_public_w_requirements_removed(self):
    # Same as the override test, but the reviewer removes a requirement
    # instead of adding one.
    self.app.latest_version.features.update(has_packaged_apps=True)
    data = {'action': 'public', 'comments': 'something',
            'has_packaged_apps': False}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    assert self.app.latest_version.features.has_packaged_apps
    self.post(data)
    app = self.get_app()
    assert not app.latest_version.features.has_packaged_apps
    # Since features have been changed by the reviewer, the app should not
    # be immediately published.
    eq_(app.publish_type, mkt.PUBLISH_PRIVATE)
    eq_(app.status, mkt.STATUS_APPROVED)
    self._check_log(mkt.LOG.REVIEW_FEATURES_OVERRIDE)
    # A reviewer changing features shouldn't generate a re-review.
    eq_(RereviewQueue.objects.count(), 0)
def test_pending_to_reject_w_requirements_overrides(self):
    # Rejecting an app doesn't let you override features requirements.
    data = {'action': 'reject', 'comments': 'something',
            'has_packaged_apps': True}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    assert not self.app.latest_version.features.has_packaged_apps
    self.post(data)
    app = self.get_app()
    # The override is ignored; the rejection itself still happens.
    assert not app.latest_version.features.has_packaged_apps
    eq_(app.publish_type, mkt.PUBLISH_IMMEDIATE)
    eq_(app.status, mkt.STATUS_REJECTED)
def test_pending_to_public_w_requirements_overrides_nothing_changed(self):
    # Submitting the same feature values is not an override: the app
    # publishes immediately and no override log entry is created.
    self.version.features.update(has_packaged_apps=True)
    data = {'action': 'public', 'comments': 'something',
            'has_packaged_apps': True}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    assert self.app.latest_version.features.has_packaged_apps
    self.post(data)
    app = self.get_app()
    assert app.latest_version.features.has_packaged_apps
    eq_(app.publish_type, mkt.PUBLISH_IMMEDIATE)
    eq_(app.status, mkt.STATUS_PUBLIC)
    action_id = mkt.LOG.REVIEW_FEATURES_OVERRIDE.id
    assert not AppLog.objects.filter(
        addon=self.app, activity_log__action=action_id).exists()
@mock.patch('mkt.reviewers.views.messages.success', new=mock.Mock)
def test_incomplete_cant_approve(self):
    """Approving an incomplete app is a no-op on its status."""
    self.app.update(status=mkt.STATUS_NULL)
    self.app.latest_version.files.update(status=mkt.STATUS_NULL)
    data = {'action': 'public', 'comments': 'something'}
    data.update(self._testedon_management_form())
    data.update(self._attachment_management_form(num=0))
    self.post(data)
    # Still incomplete.
    eq_(self.get_app().status, mkt.STATUS_NULL)
def test_notification_email_translation(self):
    """Test that the app name is translated with the app's default_locale
    and not the reviewer's when we are sending notification emails."""
    # Fix: the `raise SkipTest` used to come BEFORE the triple-quoted
    # string above, which made that string unreachable dead code instead
    # of a docstring. The test is still skipped, now with a reason.
    raise SkipTest('https://bugzilla.mozilla.org/show_bug.cgi?id=1127790')
    original_name = unicode(self.app.name)
    fr_translation = u'Mais allô quoi!'
    es_translation = u'¿Dónde está la biblioteca?'
    self.app.name = {
        'fr': fr_translation,
        'es': es_translation,
    }
    self.app.default_locale = 'fr'
    self.app.save()
    data = {'action': 'public', 'comments': 'something'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.client.post(self.url, data, HTTP_ACCEPT_LANGUAGE='es')
    eq_(translation.get_language(), 'es')
    # One mail to the developer, one to the Mozilla contact; both must
    # use the app's default locale (fr), not the reviewer's (es).
    eq_(len(mail.outbox), 2)
    msg = mail.outbox[0]
    assert original_name not in msg.subject
    assert es_translation not in msg.subject
    assert fr_translation in msg.subject
    assert original_name not in msg.body
    assert es_translation not in msg.body
    assert fr_translation in msg.body
@mock.patch('lib.crypto.packaged.sign')
def test_require_sig_for_public(self, sign):
    """If signing fails, a packaged app must stay pending."""
    sign.side_effect = packaged.SigningError
    self.get_app().update(is_packaged=True)
    data = {'action': 'public', 'comments': 'something'}
    data.update(self._testedon_management_form())
    data.update(self._attachment_management_form(num=0))
    self.client.post(self.url, data)
    eq_(self.get_app().status, mkt.STATUS_PENDING)
def _test_pending_to_public(self):
    # Clearing the Mozilla contact means only the developer is emailed.
    self.app.update(mozilla_contact='')
    data = {'action': 'public', 'comments': 'something'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data)
    app = self.get_app()
    eq_(app.status, mkt.STATUS_PUBLIC)
    eq_(app.current_version.files.all()[0].status, mkt.STATUS_PUBLIC)
    self._check_log(mkt.LOG.APPROVE_VERSION)
    eq_(len(mail.outbox), 1)
    self._check_email(mail.outbox[0], ('Approved'))
    self._check_email_body()
    self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
def test_pending_to_public(self):
    # Body lives in _test_pending_to_public(), presumably so it can be
    # reused with different setups elsewhere.
    self._test_pending_to_public()
@mock.patch('mkt.reviewers.views.messages.success')
def test_pending_to_escalation(self, messages):
    data = {'action': 'escalate', 'comments': 'soup her man'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data)
    eq_(EscalationQueue.objects.count(), 1)
    self._check_log(mkt.LOG.ESCALATE_MANUAL)
    # Test 2 emails: 1 to dev, 1 to admin.
    eq_(len(mail.outbox), 2)
    self._check_email(self._get_mail('steamcube'), 'Escalated')
    self._check_email(
        self._get_mail('snrreviewer'), 'Escalated',
        to=[self.snr_reviewer_user.email])
    # The reviewer also sees a success flash message.
    eq_(messages.call_args_list[0][0][1], 'Review successfully processed.')
def test_pending_to_disable_senior_reviewer(self):
    # Senior reviewers may ban (disable) a public app outright.
    self.login_as_senior_reviewer()
    self.app.update(status=mkt.STATUS_PUBLIC)
    self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
    data = {'action': 'disable', 'comments': 'banned ur app'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data)
    app = self.get_app()
    eq_(app.status, mkt.STATUS_DISABLED)
    eq_(app.latest_version.files.all()[0].status, mkt.STATUS_DISABLED)
    self._check_log(mkt.LOG.APP_DISABLED)
    self._check_email_dev_and_contact('Banned')
def test_pending_to_disable(self):
    """Only senior reviewers can ban apps; the form rejects 'disable'."""
    self.app.update(status=mkt.STATUS_PUBLIC)
    self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
    data = {'action': 'disable', 'comments': 'banned ur app'}
    data.update(self._testedon_management_form())
    data.update(self._attachment_management_form(num=0))
    response = self.client.post(self.url, data)
    # The form re-renders with an error on 'action'; nothing changes
    # and no mail is sent.
    eq_(response.status_code, 200)
    ok_('action' in response.context['form'].errors)
    eq_(self.get_app().status, mkt.STATUS_PUBLIC)
    eq_(len(mail.outbox), 0)
def _test_escalation_to_public(self):
    # Approving from the escalated queue publishes the app and clears
    # the escalation entry.
    EscalationQueue.objects.create(addon=self.app)
    eq_(self.app.status, mkt.STATUS_PENDING)
    data = {'action': 'public', 'comments': 'something'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data, queue='escalated')
    app = self.get_app()
    eq_(app.status, mkt.STATUS_PUBLIC)
    eq_(app.current_version.files.all()[0].status, mkt.STATUS_PUBLIC)
    self._check_log(mkt.LOG.APPROVE_VERSION)
    eq_(EscalationQueue.objects.count(), 0)
    self._check_email_dev_and_contact('Approved')
    self._check_email_body()
def test_escalation_to_public(self):
    # Body lives in _test_escalation_to_public(), presumably for reuse.
    self._test_escalation_to_public()
def test_escalation_to_reject(self):
    # Rejecting from the escalated queue rejects the files and clears
    # the escalation entry.
    EscalationQueue.objects.create(addon=self.app)
    eq_(self.app.status, mkt.STATUS_PENDING)
    # Snapshot file ids before the post so we can re-query them after.
    files = list(self.version.files.values_list('id', flat=True))
    data = {'action': 'reject', 'comments': 'something'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data, queue='escalated')
    app = self.get_app()
    eq_(app.status, mkt.STATUS_REJECTED)
    eq_(File.objects.filter(id__in=files)[0].status, mkt.STATUS_REJECTED)
    self._check_log(mkt.LOG.REJECT_VERSION)
    eq_(EscalationQueue.objects.count(), 0)
    self._check_email_dev_and_contact('Rejected')
    self._check_email_body()
    self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
def test_escalation_to_disable_senior_reviewer(self):
    # A senior reviewer can ban an escalated app; the escalation entry
    # is cleared as part of the action.
    self.login_as_senior_reviewer()
    EscalationQueue.objects.create(addon=self.app)
    self.app.update(status=mkt.STATUS_PUBLIC)
    self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
    data = {'action': 'disable', 'comments': 'banned ur app'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data, queue='escalated')
    app = self.get_app()
    eq_(app.status, mkt.STATUS_DISABLED)
    eq_(app.latest_version.files.all()[0].status, mkt.STATUS_DISABLED)
    self._check_log(mkt.LOG.APP_DISABLED)
    eq_(EscalationQueue.objects.count(), 0)
    self._check_email_dev_and_contact('Banned')
def test_escalation_to_disable(self):
    """A regular reviewer cannot ban an escalated app: the form errors,
    the escalation stays in place and no mail is sent."""
    EscalationQueue.objects.create(addon=self.app)
    self.app.update(status=mkt.STATUS_PUBLIC)
    self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
    data = {'action': 'disable', 'comments': 'banned ur app'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    # Post via the test client directly (not self.post): a disallowed
    # action re-renders the form with errors instead of redirecting.
    # Fix: dropped the bogus `queue='escalated'` kwarg — Django's test
    # client has no such parameter; it was silently stuffed into the
    # WSGI environ and did nothing.
    res = self.client.post(self.url, data)
    eq_(res.status_code, 200)
    ok_('action' in res.context['form'].errors)
    eq_(self.get_app().status, mkt.STATUS_PUBLIC)
    eq_(EscalationQueue.objects.count(), 1)
    eq_(len(mail.outbox), 0)
def test_clear_escalation(self):
    self.app.update(status=mkt.STATUS_PUBLIC)
    self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
    EscalationQueue.objects.create(addon=self.app)
    data = {'action': 'clear_escalation', 'comments': 'all clear'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data, queue='escalated')
    eq_(EscalationQueue.objects.count(), 0)
    self._check_log(mkt.LOG.ESCALATION_CLEARED)
    # Ensure we don't send email to developer on clearing escalations.
    eq_(len(mail.outbox), 1)
    # Only the Mozilla contact is notified; subject is not checked.
    self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
def test_rereview_to_reject(self):
    # Rejecting from the re-review queue clears the rereview entry and
    # awards the rereview score.
    RereviewQueue.objects.create(addon=self.app)
    self.app.update(status=mkt.STATUS_PUBLIC)
    self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
    data = {'action': 'reject', 'comments': 'something'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data, queue='rereview')
    eq_(self.get_app().status, mkt.STATUS_REJECTED)
    self._check_log(mkt.LOG.REJECT_VERSION)
    eq_(RereviewQueue.objects.count(), 0)
    self._check_email_dev_and_contact('Rejected')
    self._check_email_body()
    self._check_score(mkt.REVIEWED_WEBAPP_REREVIEW)
def test_rereview_to_disable_senior_reviewer(self):
    # A senior reviewer can ban an app from the re-review queue; the
    # rereview entry is cleared as part of the action.
    self.login_as_senior_reviewer()
    RereviewQueue.objects.create(addon=self.app)
    self.app.update(status=mkt.STATUS_PUBLIC)
    self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
    data = {'action': 'disable', 'comments': 'something'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data, queue='rereview')
    eq_(self.get_app().status, mkt.STATUS_DISABLED)
    self._check_log(mkt.LOG.APP_DISABLED)
    eq_(RereviewQueue.objects.filter(addon=self.app).count(), 0)
    self._check_email_dev_and_contact('Banned')
def test_rereview_to_disable(self):
    """A regular reviewer cannot ban an app from the re-review queue:
    the form errors, the rereview entry stays and no mail is sent."""
    RereviewQueue.objects.create(addon=self.app)
    self.app.update(status=mkt.STATUS_PUBLIC)
    self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
    data = {'action': 'disable', 'comments': 'banned ur app'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    # Post via the test client directly (not self.post): a disallowed
    # action re-renders the form with errors instead of redirecting.
    # Fix: dropped the bogus `queue='rereview'` kwarg — Django's test
    # client has no such parameter; it was silently stuffed into the
    # WSGI environ and did nothing.
    res = self.client.post(self.url, data)
    eq_(res.status_code, 200)
    ok_('action' in res.context['form'].errors)
    eq_(self.get_app().status, mkt.STATUS_PUBLIC)
    eq_(RereviewQueue.objects.filter(addon=self.app).count(), 1)
    eq_(len(mail.outbox), 0)
def test_manual_rereview(self):
    self.app.update(status=mkt.STATUS_PUBLIC)
    self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
    data = {'action': 'manual_rereview', 'comments': 'man dem'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data)
    # The app status shouldn't change.
    eq_(self.get_app().status, mkt.STATUS_PUBLIC)
    eq_(RereviewQueue.objects.count(), 1)
    self._check_log(mkt.LOG.REREVIEW_MANUAL)
    # Ensure we don't send email to developer on manual rereviews.
    eq_(len(mail.outbox), 1)
    # Only the Mozilla contact is notified; subject is not checked.
    self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
def test_clear_rereview(self):
    self.app.update(status=mkt.STATUS_PUBLIC)
    self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
    RereviewQueue.objects.create(addon=self.app)
    data = {'action': 'clear_rereview', 'comments': 'all clear'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data, queue='rereview')
    eq_(RereviewQueue.objects.count(), 0)
    self._check_log(mkt.LOG.REREVIEW_CLEARED)
    # Ensure we don't send emails to the developer on clearing re-reviews.
    eq_(len(mail.outbox), 1)
    self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
    # Clearing a re-review still awards the rereview score.
    self._check_score(mkt.REVIEWED_WEBAPP_REREVIEW)
def test_clear_rereview_unlisted(self):
    """Same as test_clear_rereview but for an UNLISTED app: clearing the
    re-review works identically."""
    self.app.update(status=mkt.STATUS_UNLISTED)
    self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
    RereviewQueue.objects.create(addon=self.app)
    data = {'action': 'clear_rereview', 'comments': 'all clear'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data, queue='rereview')
    eq_(RereviewQueue.objects.count(), 0)
    self._check_log(mkt.LOG.REREVIEW_CLEARED)
    # Ensure we don't send emails to the developer on clearing re-reviews.
    eq_(len(mail.outbox), 1)
    self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
    self._check_score(mkt.REVIEWED_WEBAPP_REREVIEW)
def test_rereview_to_escalation(self):
    """Escalating from the re-review queue creates an escalation entry,
    logs ESCALATE_MANUAL, and mails both the developer and the senior
    reviewer."""
    RereviewQueue.objects.create(addon=self.app)
    data = {'action': 'escalate', 'comments': 'soup her man'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data, queue='rereview')
    eq_(EscalationQueue.objects.count(), 1)
    self._check_log(mkt.LOG.ESCALATE_MANUAL)
    # Test 2 emails: 1 to dev, 1 to admin.
    eq_(len(mail.outbox), 2)
    self._check_email(self._get_mail('steamcube'), 'Escalated')
    self._check_email(
        self._get_mail('snrreviewer'), 'Escalated',
        to=[self.snr_reviewer_user.email])
def test_more_information(self):
    """Requesting more information keeps the app PENDING, flags the
    version with has_info_request, logs REQUEST_INFORMATION and mails
    developer and Mozilla contact."""
    # Test the same for all queues.
    data = {'action': 'info', 'comments': 'Knead moor in faux'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data)
    eq_(self.get_app().status, mkt.STATUS_PENDING)
    self._check_log(mkt.LOG.REQUEST_INFORMATION)
    vqs = self.get_app().versions.all()
    eq_(vqs.count(), 1)
    eq_(vqs.filter(has_info_request=True).count(), 1)
    self._check_email_dev_and_contact('Reviewer comment')
def test_multi_cc_email(self):
    """With several comma-separated mozilla_contact addresses, an 'info'
    request mails the developer plus each contact individually."""
    # Test multiple mozilla_contact emails via more information.
    contacts = [user_factory(email=u'á').email,
                user_factory(email=u'ç').email]
    self.mozilla_contact = ', '.join(contacts)
    self.app.update(mozilla_contact=self.mozilla_contact)
    data = {'action': 'info', 'comments': 'Knead moor in faux'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data)
    # 1 developer mail + 1 per contact address.
    eq_(len(mail.outbox), 3)
    subject = 'Reviewer comment'
    self._check_email(self._get_mail('steamcube'), subject)
    self._check_email(self._get_mail(contacts[0]), subject,
                      to=[contacts[0]])
    self._check_email(self._get_mail(contacts[1]), subject,
                      to=[contacts[1]])
def test_comment(self):
    """A reviewer 'comment' action mails only the Mozilla contact and
    logs COMMENT_VERSION (behavior is the same for all queues)."""
    payload = dict(self._attachment_management_form(num=0),
                   **self._testedon_management_form())
    payload.update({'action': 'comment', 'comments': 'mmm, nice app'})
    self.post(payload)
    eq_(len(mail.outbox), 1)
    self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
    self._check_log(mkt.LOG.COMMENT_VERSION)
def test_receipt_no_node(self):
    """No receipt-check widget is rendered for this (non-premium) app."""
    response = self.client.get(self.url)
    doc = pq(response.content)
    eq_(len(doc('#receipt-check-result')), 0)
def test_receipt_has_node(self):
    """A premium app renders the receipt-check widget once in each of the
    desktop and mobile reviewer views."""
    self.get_app().update(premium_type=mkt.ADDON_PREMIUM)
    doc = pq(self.client.get(self.url).content)
    eq_(len(doc('.reviewers-desktop #receipt-check-result')), 1)
    eq_(len(doc('.reviewers-mobile #receipt-check-result')), 1)
@mock.patch('mkt.reviewers.views.requests.get')
def test_manifest_json(self, mock_get):
    """The manifest view relays the fetched body and content-type header
    verbatim in its JSON response, with success=True and empty
    permissions."""
    m = mock.Mock()
    m.content = 'the manifest contents <script>'
    m.headers = CaseInsensitiveDict(
        {'content-type': 'application/x-web-app-manifest+json <script>'})
    mock_get.return_value = m
    expected = {
        'content': 'the manifest contents <script>',
        'headers': {'content-type':
                    'application/x-web-app-manifest+json <script>'},
        'success': True,
        'permissions': {}
    }
    r = self.client.get(reverse('reviewers.apps.review.manifest',
                                args=[self.app.app_slug]))
    eq_(r.status_code, 200)
    eq_(json.loads(r.content), expected)
@mock.patch('mkt.reviewers.views.requests.get')
def test_manifest_json_unicode(self, mock_get):
    """Non-ASCII manifest content round-trips intact through the view."""
    m = mock.Mock()
    m.content = u'كك some foreign ish'
    m.headers = CaseInsensitiveDict({})
    mock_get.return_value = m
    r = self.client.get(reverse('reviewers.apps.review.manifest',
                                args=[self.app.app_slug]))
    eq_(r.status_code, 200)
    eq_(json.loads(r.content), {'content': u'كك some foreign ish',
                                'headers': {}, 'success': True,
                                'permissions': {}})
@mock.patch('mkt.reviewers.views.requests.get')
def test_manifest_json_encoding(self, mock_get):
    """A non-UTF-8 manifest fixture is decoded so the returned 'content'
    contains the expected unicode name."""
    m = mock.Mock()
    m.content = open(self.manifest_path('non-utf8.webapp')).read()
    m.headers = CaseInsensitiveDict({})
    mock_get.return_value = m
    r = self.client.get(reverse('reviewers.apps.review.manifest',
                                args=[self.app.app_slug]))
    eq_(r.status_code, 200)
    data = json.loads(r.content)
    # \u017d is LATIN CAPITAL LETTER Z WITH CARON from the fixture name.
    assert u'"name": "W2MO\u017d"' in data['content']
@mock.patch('mkt.reviewers.views.requests.get')
def test_manifest_json_encoding_empty(self, mock_get):
    """An empty manifest body yields an empty 'content' but still
    success=True."""
    m = mock.Mock()
    m.content = ''
    m.headers = CaseInsensitiveDict({})
    mock_get.return_value = m
    r = self.client.get(reverse('reviewers.apps.review.manifest',
                                args=[self.app.app_slug]))
    eq_(r.status_code, 200)
    eq_(json.loads(r.content), {'content': u'', 'headers': {},
                                'success': True, 'permissions': {}})
@mock.patch('mkt.reviewers.views.requests.get')
def test_manifest_json_traceback_in_response(self, mock_get):
    """If fetching the manifest raises (SSLError here), the view responds
    200 with the traceback text in 'content' instead of a 500."""
    m = mock.Mock()
    m.content = {'name': 'Some name'}
    m.headers = CaseInsensitiveDict({})
    # side_effect takes precedence over return_value: the GET raises.
    mock_get.side_effect = requests.exceptions.SSLError
    mock_get.return_value = m
    # We should not 500 on a traceback.
    r = self.client.get(reverse('reviewers.apps.review.manifest',
                                args=[self.app.app_slug]))
    eq_(r.status_code, 200)
    data = json.loads(r.content)
    assert data['content'], 'There should be a content with the traceback'
    eq_(data['headers'], {})
@mock.patch('mkt.reviewers.views.json.dumps')
def test_manifest_json_packaged(self, mock_):
    """For a packaged app, the view serializes a mini-manifest via
    json.dumps instead of fetching a remote manifest."""
    # Test that when the app is packaged, _mini_manifest is called.
    mock_.return_value = '{}'
    self.get_app().update(is_packaged=True)
    res = self.client.get(reverse('reviewers.apps.review.manifest',
                                  args=[self.app.app_slug]))
    eq_(res.status_code, 200)
    assert mock_.called
@mock.patch('mkt.reviewers.views._get_manifest_json')
def test_manifest_json_perms(self, mock_):
    """Manifest permissions are annotated with their permission type
    ('web' vs 'priv') while descriptions pass through unmodified."""
    mock_.return_value = {
        'permissions': {
            "foo": {"description": "foo"},
            "camera": {"description": "<script>"}
        }
    }
    self.get_app().update(is_packaged=True)
    r = self.client.get(reverse('reviewers.apps.review.manifest',
                                args=[self.app.app_slug]))
    eq_(r.status_code, 200)
    eq_(json.loads(r.content)['permissions'],
        {'foo': {'description': 'foo', 'type': 'web'},
         'camera': {'description': '<script>', 'type': 'priv'}})
def test_abuse(self):
    """An abuse report shows a count of 1 linking to the abuse page, in
    both the desktop and mobile reviewer views."""
    AbuseReport.objects.create(addon=self.app, message='!@#$')
    doc = pq(self.client.get(self.url).content)
    abuse_url = reverse('reviewers.apps.review.abuse',
                        args=[self.app.app_slug])
    for selector in ('.reviewers-desktop #summary dd.abuse-reports',
                     '.reviewers-mobile #summary dd.abuse-reports'):
        cell = doc(selector)
        eq_(cell.text(), u'1')
        eq_(cell.find('a').attr('href'), abuse_url)
def _attachment_form_data(self, num=1, action='comment'):
    """Build review POST data carrying `num` attachments for `action`."""
    form_data = {'action': action,
                 'comments': 'mmm, nice app'}
    form_data.update(self._attachment_management_form(num=num))
    form_data.update(self._attachments(num))
    return form_data
@override_settings(REVIEWER_ATTACHMENTS_PATH=ATTACHMENTS_DIR)
@mock.patch('mkt.site.storage_utils.LocalFileStorage.save')
def test_no_attachments(self, save_mock):
    """ Test addition of no attachment """
    # With zero attachments in the management form, storage save must
    # never be called.
    data = self._attachment_form_data(num=0, action='public')
    data.update(self._testedon_management_form())
    self.post(data)
    eq_(save_mock.called, False, save_mock.call_args_list)
def test_idn_app_domain(self):
    """The IDN warning appears only once the app domain is
    internationalized."""
    assert 'IDN domain!' not in self.client.get(self.url).content
    self.get_app().update(app_domain=u'http://www.allïzom.org')
    assert 'IDN domain!' in self.client.get(self.url).content
def test_xss_domain(self):
    """A script tag in app_domain must never reach the page unescaped.

    BUG FIX: the original asserted that '<script>alert(42)</script>' is
    both NOT in and IN response.content — mutually contradictory, so the
    test could never pass. The second assertion must check for the
    HTML-escaped rendering of the payload instead.
    """
    # It shouldn't be possible to have this in app domain, it will never
    # validate, but better safe than sorry.
    self.get_app().update(app_domain=u'<script>alert(42)</script>')
    response = self.client.get(self.url)
    # Raw payload must be absent...
    assert '<script>alert(42)</script>' not in response.content
    # ...and only the escaped form may be displayed.
    assert '&lt;script&gt;alert(42)&lt;/script&gt;' in response.content
def test_priority_flag_cleared_for_public(self):
    """Approving an app for public clears its priority_review flag."""
    self.get_app().update(priority_review=True)
    payload = dict({'action': 'public', 'comments': 'something'},
                   **self._attachment_management_form(num=0))
    payload.update(self._testedon_management_form())
    self.post(payload)
    eq_(self.get_app().priority_review, False)
def test_priority_flag_uncleared_for_reject(self):
    """Rejecting an app leaves its priority_review flag set."""
    self.get_app().update(priority_review=True)
    payload = dict({'action': 'reject', 'comments': 'something'},
                   **self._attachment_management_form(num=0))
    payload.update(self._testedon_management_form())
    self.post(payload)
    eq_(self.get_app().priority_review, True)
def test_is_showcase_checkbox(self):
    """The is_showcase checkbox reflects whether the app carries the
    showcase tag."""
    res = self.client.get(self.url)
    eq_(pq(res.content)('#id_is_showcase:checked').length, 0)
    app = self.get_app()
    Tag(tag_text=SHOWCASE_TAG).save_tag(app)
    res = self.client.get(self.url)
    eq_(pq(res.content)('#id_is_showcase:checked').length, 1)
def test_is_showcase_on(self):
    """Checking is_showcase adds the showcase tag and mails the app
    curation board."""
    # Note: Using action=comment b/c it does less and keeps test faster.
    data = {'action': 'comment', 'comments': 'blah', 'is_showcase': 'on'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data)
    tags = self.get_app().tags.values_list('tag_text', flat=True)
    assert SHOWCASE_TAG in tags
    # Check email is sent to curation board.
    msg = self._get_mail('appcurationboard')
    eq_(msg.to, [settings.APP_CURATION_BOARD_EMAIL])
    eq_(msg.subject,
        u'App [%s] nominated to be featured' % self.get_app().name)
def test_is_showcase_off(self):
    """When is_showcase is not submitted, no showcase tag is added and no
    mail is sent."""
    # Clearing contact so we don't get a superflous email below.
    self.app.update(mozilla_contact='')
    # Note: Using action=comment b/c it does less and keeps test faster.
    # Note: `is_showcase` isn't passed b/c checkboxes.
    data = {'action': 'comment', 'comments': 'blah'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data)
    tags = self.get_app().tags.values_list('tag_text', flat=True)
    assert SHOWCASE_TAG not in tags
    # Check no email is sent.
    eq_(len(mail.outbox), 0)
def test_versions_history_pagination(self):
    """The version-history table paginates, with next/prev links
    anchored at #history."""
    self.app.update(is_packaged=True)
    version_factory(addon=self.app, version='2.0')
    version_factory(addon=self.app, version='3.0')
    # Mock paginate to paginate with only 2 versions to limit the
    # number of versions this test has to create.
    with mock.patch('mkt.reviewers.views.paginate',
                    lambda req, objs, limit: paginate(req, objs, 2)):
        content = pq(self.client.get(self.url).content)
    # Page 1: two rows, a next link, no prev link.
    eq_(len(content('#review-files tr.listing-body')), 2)
    eq_(len(content('#review-files-paginate a[rel=next]')), 1)
    eq_(len(content('#review-files-paginate a[rel=prev]')), 0)
    link = content('#review-files-paginate a[rel=next]')[0].attrib['href']
    eq_(link, '%s?page=2#history' % self.url)
    # Look at page 2.
    with mock.patch('mkt.reviewers.views.paginate',
                    lambda req, objs, limit: paginate(req, objs, 2)):
        content = pq(self.client.get(link).content)
    # Page 2: the remaining row, a prev link back to page 1, no next.
    eq_(len(content('#review-files tr.listing-body')), 1)
    eq_(len(content('#review-files-paginate a[rel=next]')), 0)
    eq_(len(content('#review-files-paginate a[rel=prev]')), 1)
    eq_(content('#review-files-paginate a[rel=prev]')[0].attrib['href'],
        '%s?page=1#history' % self.url)
class TestCannedResponses(AppReviewerTest):
    """Canned responses appear, grouped by sort_group, in the review
    form's canned_response choices."""

    def setUp(self):
        super(TestCannedResponses, self).setUp()
        self.login_as_editor()
        self.app = app_factory(name='XXX', status=mkt.STATUS_PENDING)
        self.cr = CannedResponse.objects.create(
            name=u'app reason', response=u'app reason body',
            sort_group=u'public')
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])

    def test_ok(self):
        """The canned response shows up in the first real choice group."""
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        form = r.context['form']
        choices = form.fields['canned_response'].choices[1][1]
        # choices is grouped by the sort_group, where choices[0] is the
        # default "Choose a response..." option.
        # Within that, it's paired by [group, [[response, name],...]].
        # So above, choices[1][1] gets the first real group's list of
        # responses.
        eq_(len(choices), 1)
        assert self.cr.response in choices[0]
@mock.patch('mkt.reviewers.views.messages.success')
@mock.patch('mkt.webapps.tasks.index_webapps')
@mock.patch('mkt.webapps.tasks.update_cached_manifests')
@mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
@mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
class TestApproveHostedApp(AppReviewerTest, TestReviewMixin,
                           AttachmentManagementMixin, TestedonManagementMixin):
    """
    A separate test class for apps going to an approved state. All other state
    transitions are tested above.
    We're doing this to make the mocks easier to handle.

    NOTE: mock.patch decorators apply bottom-up, so each test receives
    the mocks in the order (update_name, update_locales,
    update_cached_manifests, index_webapps, messages).
    """
    fixtures = fixture('webapp_337141')

    def setUp(self):
        """Put the fixture app and its latest file into PENDING state."""
        super(TestApproveHostedApp, self).setUp()
        self.mozilla_contact = 'contact@mozilla.com'
        self.app = self.get_app()
        self.file = self.app.latest_version.files.all()[0]
        self.file.update(status=mkt.STATUS_PENDING)
        self.app.update(status=mkt.STATUS_PENDING,
                        mozilla_contact=self.mozilla_contact,
                        _current_version=None)
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])

    def get_app(self):
        """Reload the fixture webapp from the database."""
        return Webapp.objects.get(id=337141)

    def _check_message(self, msg):
        """Assert the messages.success mock received the hosted-review
        score text."""
        eq_(msg.call_args_list[0][0][1],
            '"Web App Review" successfully processed (+60 points, 60 total).')

    def test_pending_to_public(self, update_name, update_locales,
                               update_cached_manifests,
                               index_webapps, messages):
        """Approving a pending hosted app makes app and file PUBLIC, logs
        and mails the approval, awards the hosted score and reindexes,
        without touching packaged-only tasks."""
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_PUBLIC)
        eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION)
        self._check_message(messages)
        self._check_email_dev_and_contact('Approved')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
        eq_(update_name.call_count, 0)  # Not a packaged app.
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        # App is not packaged, no need to call update_cached_manifests.
        eq_(update_cached_manifests.delay.call_count, 0)

    def test_pending_to_hidden(self, update_name, update_locales,
                               update_cached_manifests, index_webapps,
                               messages):
        """With publish_type=PUBLISH_HIDDEN, approval sets the app
        UNLISTED while its file still goes PUBLIC."""
        self.get_app().update(publish_type=mkt.PUBLISH_HIDDEN)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_UNLISTED)
        eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION)
        self._check_email_dev_and_contact('Approved')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
        self._check_message(messages)
        eq_(update_name.call_count, 0)  # Not a packaged app.
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        # App is not packaged, no need to call update_cached_manifests.
        eq_(update_cached_manifests.delay.call_count, 0)

    def test_pending_to_approved(self, update_name, update_locales,
                                 update_cached_manifests, index_webapps,
                                 messages):
        """With publish_type=PUBLISH_PRIVATE, approval sets the app
        APPROVED; the sole version's file still goes PUBLIC."""
        self.get_app().update(publish_type=mkt.PUBLISH_PRIVATE)
        index_webapps.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(index_webapps.delay.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_APPROVED)
        # File status is PUBLIC since it is the only version.
        eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
        self._check_message(messages)
        self._check_email_dev_and_contact('Approved but private')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
        # The app is not private but can still be installed by team members,
        # so we should call those:
        eq_(update_name.call_count, 0)  # Not a packaged app.
        eq_(update_locales.call_count, 1)
        # App is not packaged, no need to call update_cached_manifests.
        eq_(update_cached_manifests.delay.call_count, 0)
        # App is private so we don't send this yet.
        # NOTE(review): the comment above looks stale — the assertion below
        # expects one indexing call; confirm intent.
        eq_(index_webapps.delay.call_count, 1)

    def test_pending_to_reject(self, update_name, update_locales,
                               update_cached_manifests, index_webapps,
                               messages):
        """Rejecting a pending hosted app sets app and file REJECTED,
        mails the rejection and reindexes; no locale/manifest tasks."""
        index_webapps.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(index_webapps.delay.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'reject', 'comments': 'suxor'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        eq_(index_webapps.delay.call_count, 1)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_REJECTED)
        eq_(self.file.reload().status, mkt.STATUS_REJECTED)
        self._check_log(mkt.LOG.REJECT_VERSION)
        self._check_message(messages)
        self._check_email_dev_and_contact('Rejected')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
        eq_(update_name.call_count, 0)  # Not a packaged app.
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(index_webapps.delay.call_count, 1)
@mock.patch('lib.crypto.packaged.sign')
@mock.patch('mkt.reviewers.views.messages.success')
@mock.patch('mkt.webapps.tasks.index_webapps')
@mock.patch('mkt.webapps.tasks.update_cached_manifests')
@mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
@mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
class TestApprovePackagedApp(AppReviewerTest, TestReviewMixin,
                             AttachmentManagementMixin,
                             TestedonManagementMixin):
    """
    A separate test class for packaged apps going to an approved state.
    We're doing this to make the mocks easier to handle.

    NOTE: mock.patch decorators apply bottom-up, so each test receives
    the mocks in the order (update_name, update_locales,
    update_cached_manifests, index_webapps, messages, sign_mock).
    """
    fixtures = fixture('webapp_337141')

    def setUp(self):
        """Put the fixture app (marked packaged) and its latest file into
        PENDING state."""
        super(TestApprovePackagedApp, self).setUp()
        self.mozilla_contact = 'contact@mozilla.com'
        self.app = self.get_app()
        self.file = self.app.latest_version.files.all()[0]
        self.file.update(status=mkt.STATUS_PENDING)
        self.app.update(status=mkt.STATUS_PENDING,
                        mozilla_contact=self.mozilla_contact,
                        _current_version=None, is_packaged=True)
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])

    def get_app(self):
        """Reload the fixture webapp from the database."""
        return Webapp.objects.get(id=337141)

    def _check_message(self, msg):
        """Assert the messages.success mock received the packaged-review
        score text."""
        eq_(msg.call_args_list[0][0][1],
            '"Packaged App Review" successfully processed '
            '(+60 points, 60 total).')

    def test_pending_to_public(self, update_name, update_locales,
                               update_cached_manifests, index_webapps,
                               messages, sign_mock):
        """Approving a pending packaged app makes app and file PUBLIC and
        triggers name/locale updates, manifest caching, indexing and
        signing of the current version."""
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_PUBLIC)
        eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION)
        self._check_email_dev_and_contact('Approved')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
        self._check_message(messages)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(sign_mock.call_args[0][0], self.get_app().current_version.pk)

    def test_pending_to_hidden(self, update_name, update_locales,
                               update_cached_manifests, index_webapps,
                               messages, sign_mock):
        """With publish_type=PUBLISH_HIDDEN, approval sets the app
        UNLISTED; file goes PUBLIC and all packaged tasks still run."""
        self.get_app().update(publish_type=mkt.PUBLISH_HIDDEN)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_UNLISTED)
        eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION)
        self._check_email_dev_and_contact('Approved')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
        self._check_message(messages)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(sign_mock.call_args[0][0], self.get_app().current_version.pk)

    def test_pending_to_approved(self, update_name, update_locales,
                                 update_cached_manifests, index_webapps,
                                 messages, sign_mock):
        """With publish_type=PUBLISH_PRIVATE, approval sets the app
        APPROVED; the sole version's file goes PUBLIC and all packaged
        tasks still run."""
        self.get_app().update(publish_type=mkt.PUBLISH_PRIVATE)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_APPROVED)
        eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
        self._check_email_dev_and_contact('Approved but private')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
        self._check_message(messages)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(sign_mock.call_args[0][0], self.get_app().current_version.pk)

    def test_pending_to_rejected(self, update_name, update_locales,
                                 update_cached_manifests, index_webapps,
                                 messages, sign_mock):
        """Rejecting a pending packaged app sets app and file REJECTED;
        no name/locale/manifest/signing work happens, only reindexing."""
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'reject', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_REJECTED)
        eq_(self.file.reload().status, mkt.STATUS_REJECTED)
        self._check_email_dev_and_contact('Rejected')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
        self._check_message(messages)
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(sign_mock.call_count, 0)

    def test_pending_to_approved_app_private_prior_version_rejected(
            self, update_name, update_locales, update_cached_manifests,
            index_webapps, messages, sign_mock):
        """
        Test that everything works out ok when v1.0 was rejected and developer
        submitted v1.1 that is then approved. This should still be considered a
        packaged review (not an update) and set the approved version to PUBLIC
        since the prior version is DISABLED. See bug 1075042.
        """
        self.app.update(status=mkt.STATUS_REJECTED,
                        publish_type=mkt.PUBLISH_PRIVATE)
        self.file.update(status=mkt.STATUS_DISABLED)
        self.new_version = version_factory(
            addon=self.app, version='1.1',
            file_kw={'status': mkt.STATUS_PENDING})
        index_webapps.delay.reset_mock()
        update_cached_manifests.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(self.app.current_version, None)
        eq_(self.app.latest_version, self.new_version)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_APPROVED)
        eq_(app.latest_version, self.new_version)
        eq_(app.current_version, self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
        self._check_email_dev_and_contact('Approved but private')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
        self._check_message(messages)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(sign_mock.call_args[0][0], self.new_version.pk)
@mock.patch('lib.crypto.packaged.sign')
@mock.patch('mkt.reviewers.views.messages.success')
@mock.patch('mkt.webapps.tasks.index_webapps')
@mock.patch('mkt.webapps.tasks.update_cached_manifests')
@mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
@mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
class TestApprovePackagedVersions(AppReviewerTest, TestReviewMixin,
AttachmentManagementMixin,
TestedonManagementMixin):
"""
A separate test class for packaged apps with a 2nd version going to an
approved state.
We're doing this to make the mocks easier to handle.
"""
fixtures = fixture('webapp_337141')
def setUp(self):
    """Fixture app is a PUBLIC packaged app, plus an extra PENDING 2.0
    version awaiting review."""
    super(TestApprovePackagedVersions, self).setUp()
    self.mozilla_contact = 'contact@mozilla.com'
    self.app = self.get_app()
    self.file = self.app.latest_version.files.all()[0]
    self.app.update(status=mkt.STATUS_PUBLIC,
                    mozilla_contact=self.mozilla_contact,
                    is_packaged=True)
    self.new_version = version_factory(
        addon=self.app, version='2.0',
        file_kw={'status': mkt.STATUS_PENDING})
    self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
def get_app(self):
    """Reload the fixture webapp from the database."""
    return Webapp.objects.get(id=337141)
def _check_message(self, msg):
    """Assert the messages.success mock received the update-review score
    text (updates are worth 40 points)."""
    eq_(msg.call_args_list[0][0][1],
        '"Updated Packaged App Review" successfully processed '
        '(+40 points, 40 total).')
def test_version_pending_to_public(self, update_name, update_locales,
                                   update_cached_manifests, index_webapps,
                                   messages, sign_mock):
    """Approving the pending 2.0 version of a public app promotes it to
    current_version with a PUBLIC file, and runs name/locale/manifest/
    indexing/signing work."""
    index_webapps.delay.reset_mock()
    eq_(update_name.call_count, 0)
    eq_(update_locales.call_count, 0)
    eq_(update_cached_manifests.delay.call_count, 0)
    data = {'action': 'public', 'comments': 'something'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data)
    app = self.get_app()
    eq_(app.status, mkt.STATUS_PUBLIC)
    eq_(app.current_version, self.new_version)
    eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
    self._check_log(mkt.LOG.APPROVE_VERSION)
    self._check_email_dev_and_contact('Approved')
    self._check_email_body()
    self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
    self._check_message(messages)
    eq_(update_name.call_count, 1)
    eq_(update_locales.call_count, 1)
    eq_(index_webapps.delay.call_count, 1)
    eq_(update_cached_manifests.delay.call_count, 1)
    eq_(sign_mock.call_args[0][0], app.current_version.pk)
def test_version_pending_to_approved(self, update_name, update_locales,
                                     update_cached_manifests,
                                     index_webapps, messages,
                                     sign_mock):
    """With publish_type=PUBLISH_PRIVATE, approving the 2.0 version marks
    its file APPROVED; the old version remains current with its PUBLIC
    file."""
    self.app.update(publish_type=mkt.PUBLISH_PRIVATE)
    index_webapps.delay.reset_mock()
    eq_(update_name.call_count, 0)
    eq_(update_locales.call_count, 0)
    eq_(update_cached_manifests.delay.call_count, 0)
    data = {'action': 'public', 'comments': 'something'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data)
    app = self.get_app()
    eq_(app.status, mkt.STATUS_PUBLIC)
    ok_(app.current_version != self.new_version)
    eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
    eq_(self.new_version.all_files[0].status, mkt.STATUS_APPROVED)
    self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
    self._check_email_dev_and_contact('Approved but private')
    self._check_email_body()
    self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
    self._check_message(messages)
    eq_(update_name.call_count, 1)
    eq_(update_locales.call_count, 1)
    eq_(index_webapps.delay.call_count, 1)
    eq_(update_cached_manifests.delay.call_count, 1)
    eq_(sign_mock.call_args[0][0], self.new_version.pk)
def test_version_pending_to_public_app_unlisted(
        self, update_name, update_locales, update_cached_manifests,
        index_webapps, messages, sign_mock):
    """Approving the 2.0 version of an UNLISTED app promotes it to
    current_version with a PUBLIC file; the app stays UNLISTED."""
    self.app.update(status=mkt.STATUS_UNLISTED)
    index_webapps.delay.reset_mock()
    eq_(update_name.call_count, 0)
    eq_(update_locales.call_count, 0)
    eq_(update_cached_manifests.delay.call_count, 0)
    data = {'action': 'public', 'comments': 'something'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data)
    app = self.get_app()
    eq_(app.status, mkt.STATUS_UNLISTED)
    eq_(app.current_version, self.new_version)
    eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
    self._check_log(mkt.LOG.APPROVE_VERSION)
    self._check_email_dev_and_contact('Approved')
    self._check_email_body()
    self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
    self._check_message(messages)
    eq_(update_name.call_count, 1)
    eq_(update_locales.call_count, 1)
    eq_(index_webapps.delay.call_count, 1)
    eq_(update_cached_manifests.delay.call_count, 1)
    eq_(sign_mock.call_args[0][0], app.current_version.pk)
def test_version_pending_to_approved_app_unlisted(
        self, update_name, update_locales, update_cached_manifests,
        index_webapps, messages, sign_mock):
    """Private-publish approval on an UNLISTED app: the 2.0 file becomes
    APPROVED while the old version stays current with a PUBLIC file."""
    self.app.update(status=mkt.STATUS_UNLISTED,
                    publish_type=mkt.PUBLISH_PRIVATE)
    index_webapps.delay.reset_mock()
    eq_(update_name.call_count, 0)
    eq_(update_locales.call_count, 0)
    eq_(update_cached_manifests.delay.call_count, 0)
    data = {'action': 'public', 'comments': 'something'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data)
    app = self.get_app()
    eq_(app.status, mkt.STATUS_UNLISTED)
    ok_(app.current_version != self.new_version)
    eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
    eq_(self.new_version.all_files[0].status, mkt.STATUS_APPROVED)
    self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
    self._check_email_dev_and_contact('Approved but private')
    self._check_email_body()
    self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
    self._check_message(messages)
    eq_(update_name.call_count, 1)
    eq_(update_locales.call_count, 1)
    eq_(index_webapps.delay.call_count, 1)
    eq_(update_cached_manifests.delay.call_count, 1)
    eq_(sign_mock.call_args[0][0], self.new_version.pk)
def test_version_pending_to_public_app_private(
        self, update_name, update_locales, update_cached_manifests,
        index_webapps, messages, sign_mock):
    """Approving the 2.0 version of an APPROVED (private) app promotes it
    to current_version with a PUBLIC file; the app stays APPROVED."""
    self.app.update(status=mkt.STATUS_APPROVED)
    index_webapps.delay.reset_mock()
    eq_(update_name.call_count, 0)
    eq_(update_locales.call_count, 0)
    eq_(update_cached_manifests.delay.call_count, 0)
    data = {'action': 'public', 'comments': 'something'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data)
    app = self.get_app()
    eq_(app.status, mkt.STATUS_APPROVED)
    eq_(app.current_version, self.new_version)
    eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
    self._check_log(mkt.LOG.APPROVE_VERSION)
    self._check_email_dev_and_contact('Approved')
    self._check_email_body()
    self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
    self._check_message(messages)
    eq_(update_name.call_count, 1)
    eq_(update_locales.call_count, 1)
    eq_(index_webapps.delay.call_count, 1)
    eq_(update_cached_manifests.delay.call_count, 1)
    eq_(sign_mock.call_args[0][0], app.current_version.pk)
def test_version_pending_to_approved_app_private(
        self, update_name, update_locales, update_cached_manifests,
        index_webapps, messages, sign_mock):
    """Private-publish approval on an APPROVED app: the 2.0 file becomes
    APPROVED while the old version stays current with a PUBLIC file."""
    self.app.update(status=mkt.STATUS_APPROVED,
                    publish_type=mkt.PUBLISH_PRIVATE)
    index_webapps.delay.reset_mock()
    eq_(update_name.call_count, 0)
    eq_(update_locales.call_count, 0)
    eq_(update_cached_manifests.delay.call_count, 0)
    data = {'action': 'public', 'comments': 'something'}
    data.update(self._attachment_management_form(num=0))
    data.update(self._testedon_management_form())
    self.post(data)
    app = self.get_app()
    eq_(app.status, mkt.STATUS_APPROVED)
    ok_(app.current_version != self.new_version)
    eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
    eq_(self.new_version.all_files[0].status, mkt.STATUS_APPROVED)
    self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
    self._check_email_dev_and_contact('Approved but private')
    self._check_email_body()
    self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
    self._check_message(messages)
    eq_(update_name.call_count, 1)
    eq_(update_locales.call_count, 1)
    eq_(index_webapps.delay.call_count, 1)
    eq_(update_cached_manifests.delay.call_count, 1)
    eq_(sign_mock.call_args[0][0], self.new_version.pk)
    def test_version_pending_to_rejected_app_public(
            self, update_name, update_locales, update_cached_manifests,
            index_webapps, messages, sign_mock):
        """Rejecting a pending version of a public app leaves the app and
        its current public version untouched; only the new version's file
        is marked rejected and nothing is signed or regenerated.

        Mock arguments are presumably injected by class-level @mock.patch
        decorators not visible in this hunk -- TODO confirm.
        """
        self.app.update(status=mkt.STATUS_PUBLIC)
        index_webapps.delay.reset_mock()
        # Sanity-check that nothing has fired before the review POST.
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)

        data = {'action': 'reject', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)

        app = self.get_app()
        eq_(app.status, mkt.STATUS_PUBLIC)
        # The rejected version must not replace the current version.
        ok_(app.current_version != self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        eq_(self.new_version.all_files[0].status, mkt.STATUS_REJECTED)
        self._check_log(mkt.LOG.REJECT_VERSION)
        self._check_email_dev_and_contact('Rejected')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
        self._check_message(messages)

        # Rejection only reindexes; no name/locale/manifest updates and
        # no signing.
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(sign_mock.call_count, 0)
    def test_version_pending_to_rejected_app_unlisted(
            self, update_name, update_locales, update_cached_manifests,
            index_webapps, messages, sign_mock):
        """Rejecting a pending version of an unlisted app behaves like the
        public-app case: app status unchanged, new version rejected, no
        signing or manifest regeneration.

        Mock arguments are presumably injected by class-level @mock.patch
        decorators not visible in this hunk -- TODO confirm.
        """
        self.app.update(status=mkt.STATUS_UNLISTED)
        index_webapps.delay.reset_mock()
        # Sanity-check that nothing has fired before the review POST.
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)

        data = {'action': 'reject', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)

        app = self.get_app()
        eq_(app.status, mkt.STATUS_UNLISTED)
        # The rejected version must not replace the current version.
        ok_(app.current_version != self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        eq_(self.new_version.all_files[0].status, mkt.STATUS_REJECTED)
        self._check_log(mkt.LOG.REJECT_VERSION)
        self._check_email_dev_and_contact('Rejected')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
        self._check_message(messages)

        # Rejection only reindexes; no name/locale/manifest updates and
        # no signing.
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(sign_mock.call_count, 0)
    def test_version_pending_to_rejected_app_private(
            self, update_name, update_locales, update_cached_manifests,
            index_webapps, messages, sign_mock):
        """Rejecting a pending version of a privately-approved app: app
        status unchanged, new version rejected, nothing signed or
        regenerated.

        Mock arguments are presumably injected by class-level @mock.patch
        decorators not visible in this hunk -- TODO confirm.
        """
        self.app.update(status=mkt.STATUS_APPROVED)
        index_webapps.delay.reset_mock()
        # Sanity-check that nothing has fired before the review POST.
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)

        data = {'action': 'reject', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)

        app = self.get_app()
        eq_(app.status, mkt.STATUS_APPROVED)
        # The rejected version must not replace the current version.
        ok_(app.current_version != self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        eq_(self.new_version.all_files[0].status, mkt.STATUS_REJECTED)
        self._check_log(mkt.LOG.REJECT_VERSION)
        self._check_email_dev_and_contact('Rejected')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
        self._check_message(messages)

        # Rejection only reindexes; no name/locale/manifest updates and
        # no signing.
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(sign_mock.call_count, 0)
class TestReviewLog(AppReviewerTest, AccessMixin):
    """Tests for the reviewer activity-log listing and its filters."""

    def setUp(self):
        super(TestReviewLog, self).setUp()
        # Note: if `created` is not specified, `app_factory` uses a randomly
        # generated timestamp.
        self.apps = [app_factory(name='XXX', created=days_ago(3),
                                 status=mkt.STATUS_PENDING),
                     app_factory(name='YYY', created=days_ago(2),
                                 status=mkt.STATUS_PENDING)]
        self.url = reverse('reviewers.apps.logs')

        patcher = mock.patch.object(settings, 'TASK_USER_ID',
                                    self.admin_user.id)
        patcher.start()
        self.addCleanup(patcher.stop)

    def get_user(self):
        return self.reviewer_user

    def make_approvals(self):
        """Log one rejection per app (plus task-log noise to ignore)."""
        d = 1
        for app in self.apps:
            # Renamed local from `days_ago` to avoid shadowing the
            # module-level `days_ago()` helper used in setUp().
            created = self.days_ago(d)
            mkt.log(mkt.LOG.REJECT_VERSION, app, app.latest_version,
                    user=self.get_user(), details={'comments': 'youwin'},
                    created=created)
            # Throw in a few tasks logs that shouldn't get queried.
            mkt.log(mkt.LOG.REREVIEW_MANIFEST_CHANGE, app, app.latest_version,
                    user=self.admin_user, details={'comments': 'foo'},
                    created=created)
            d += 1

    def make_an_approval(self, action, comment='youwin', user=None, app=None):
        """Log a single `action` entry, defaulting to reviewer/first app."""
        if not user:
            user = self.get_user()
        if not app:
            app = self.apps[0]
        mkt.log(action, app, app.latest_version, user=user,
                details={'comments': comment})

    def test_basic(self):
        self.make_approvals()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        assert doc('#log-filter button'), 'No filters.'
        # Should have 2 showing.
        rows = doc('tbody tr')
        logs = rows.filter(':not(.hide)')
        eq_(logs.length, 2)
        # Ensure that the app links are valid.
        eq_(logs.find('.name .app-link').eq(0).attr('href'),
            self.apps[0].get_url_path())
        eq_(logs.find('.name .app-link').eq(1).attr('href'),
            self.apps[1].get_url_path())
        eq_(rows.filter('.hide').eq(0).text(), 'youwin')

    def test_search_app_soft_deleted(self):
        self.make_approvals()
        self.apps[0].update(status=mkt.STATUS_DELETED)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        doc = pq(res.content)
        all_reviews = [d.attrib.get('data-addonid')
                       for d in doc('#log-listing tbody tr')]
        assert str(self.apps[0].pk) in all_reviews, (
            'Soft deleted review did not show up in listing')

    def test_xss(self):
        a = self.apps[0]
        a.name = '<script>alert("xss")</script>'
        a.save()
        mkt.log(mkt.LOG.REJECT_VERSION, a, a.latest_version,
                user=self.get_user(), details={'comments': 'xss!'})

        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        inner_html = pq(r.content)('#log-listing tbody td').eq(1).html()

        # The app name must be rendered escaped in the HTML: the entity
        # form is present, the raw tag is not.  (Previously both
        # assertions checked for the raw tag, which is contradictory and
        # could never pass -- likely an HTML-entity mangling of the
        # original '&lt;script&gt;' check.)
        assert '&lt;script&gt;' in inner_html
        assert '<script>' not in inner_html

    def test_end_filter(self):
        """
        Let's use today as an end-day filter and make sure we see stuff if we
        filter.
        """
        self.make_approvals()
        # Make sure we show the stuff we just made.
        date = time.strftime('%Y-%m-%d')
        r = self.client.get(self.url, dict(end=date))
        eq_(r.status_code, 200)
        doc = pq(r.content)('#log-listing tbody')
        eq_(doc('tr:not(.hide)').length, 2)
        eq_(doc('tr.hide').eq(0).text(), 'youwin')

    def test_end_filter_wrong(self):
        """
        An invalid end-day filter value must be ignored, not crash.
        """
        self.make_approvals()
        r = self.client.get(self.url, dict(end='wrong!'))
        # If this is broken, we'll get a traceback.
        eq_(r.status_code, 200)
        eq_(pq(r.content)('#log-listing tr:not(.hide)').length, 3)

    def test_search_comment_exists(self):
        """Search by comment."""
        self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, comment='hello')
        r = self.client.get(self.url, dict(search='hello'))
        eq_(r.status_code, 200)
        eq_(pq(r.content)('#log-listing tbody tr.hide').eq(0).text(), 'hello')

    def test_search_comment_doesnt_exist(self):
        """Search by comment, with no results."""
        self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, comment='hello')
        r = self.client.get(self.url, dict(search='bye'))
        eq_(r.status_code, 200)
        eq_(pq(r.content)('.no-results').length, 1)

    def test_search_author_exists(self):
        """Search by author."""
        self.make_approvals()
        user = UserProfile.objects.get(email='regular@mozilla.com')
        self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, user=user, comment='hi')
        r = self.client.get(self.url, dict(search='regular'))
        eq_(r.status_code, 200)
        rows = pq(r.content)('#log-listing tbody tr')
        eq_(rows.filter(':not(.hide)').length, 1)
        eq_(rows.filter('.hide').eq(0).text(), 'hi')

    def test_search_author_doesnt_exist(self):
        """Search by author, with no results."""
        self.make_approvals()
        user = UserProfile.objects.get(email='editor@mozilla.com')
        self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, user=user)
        r = self.client.get(self.url, dict(search='wrong'))
        eq_(r.status_code, 200)
        eq_(pq(r.content)('.no-results').length, 1)

    def test_search_addon_exists(self):
        """Search by add-on name."""
        self.make_approvals()
        app = self.apps[0]
        r = self.client.get(self.url, dict(search=app.name))
        eq_(r.status_code, 200)
        tr = pq(r.content)('#log-listing tr[data-addonid="%s"]' % app.id)
        eq_(tr.length, 1)
        eq_(tr.siblings('.comments').text(), 'youwin')

    def test_search_addon_by_slug_exists(self):
        """Search by app slug."""
        app = self.apps[0]
        app.app_slug = 'a-fox-was-sly'
        app.save()
        self.make_approvals()
        r = self.client.get(self.url, dict(search='fox'))
        eq_(r.status_code, 200)
        tr = pq(r.content)('#log-listing tr[data-addonid="%s"]' % app.id)
        eq_(tr.length, 1)
        eq_(tr.siblings('.comments').text(), 'youwin')

    def test_search_addon_doesnt_exist(self):
        """Search by add-on name, with no results."""
        self.make_approvals()
        r = self.client.get(self.url, dict(search='zzz'))
        eq_(r.status_code, 200)
        eq_(pq(r.content)('.no-results').length, 1)

    @mock.patch('mkt.developers.models.ActivityLog.arguments', new=mock.Mock)
    def test_addon_missing(self):
        # A log row whose app can no longer be resolved shows a
        # placeholder message.
        self.make_approvals()
        r = self.client.get(self.url)
        eq_(pq(r.content)('#log-listing tr td').eq(1).text(),
            'App has been deleted.')

    def test_request_info_logs(self):
        self.make_an_approval(mkt.LOG.REQUEST_INFORMATION)
        r = self.client.get(self.url)
        eq_(pq(r.content)('#log-listing tr td a').eq(1).text(),
            'More information requested')

    def test_escalate_logs(self):
        self.make_an_approval(mkt.LOG.ESCALATE_MANUAL)
        r = self.client.get(self.url)
        eq_(pq(r.content)('#log-listing tr td a').eq(1).text(),
            'Reviewer escalation')

    def test_no_double_encode(self):
        # A version string containing markup must appear once-decoded in
        # the rendered text, i.e. not double-escaped.
        version = self.apps[0].latest_version
        version.update(version='<foo>')
        self.make_an_approval(mkt.LOG.ESCALATE_MANUAL)
        r = self.client.get(self.url)
        assert '<foo>' in pq(r.content)('#log-listing tr td').eq(1).text(), (
            'Double-encoded string was found in reviewer log.')
class TestMotd(AppReviewerTest, AccessMixin):
    """Exercises the reviewers message-of-the-day page and its permissions."""

    def setUp(self):
        super(TestMotd, self).setUp()
        self.url = reverse('reviewers.apps.motd')
        self.key = u'mkt_reviewers_motd'
        set_config(self.key, u'original value')

    def test_perms_not_editor(self):
        # Anonymous users get bounced to login; plain users get a 403.
        self.client.logout()
        res = self.client.get(self.url, follow=True)
        login_url = reverse('users.login')
        self.assert3xx(res, '%s?to=%s' % (login_url, self.url))
        self.client.login('regular@mozilla.com')
        eq_(self.client.get(self.url).status_code, 403)

    def test_perms_not_motd(self):
        # Any type of reviewer can see the MOTD, but without the edit
        # permission there is no form and POSTs save nothing.
        self.login_as_editor()
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        eq_(res.context['form'], None)
        # A 200 (no redirect) means the POST did not save.
        eq_(self.client.post(self.url, {'motd': 'motd'}).status_code, 200)
        eq_(get_config(self.key), u'original value')

    def test_motd_change(self):
        # Only members of the MOTD group may POST changes.
        self.grant_permission(self.reviewer_user, 'AppReviewerMOTD:Edit')
        self.login_as_editor()

        # GET renders the form pre-filled with the stored value.
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        eq_(res.context['form'].initial['motd'], u'original value')

        # An empty POST re-renders the page with a validation error.
        res = self.client.post(self.url, {'motd': ''})
        eq_(res.status_code, 200)  # Didn't redirect after save.
        eq_(pq(res.content)('#editor-motd .errorlist').text(),
            'This field is required.')

        # A valid POST redirects back and persists the new value.
        res = self.client.post(self.url, {'motd': 'new motd'})
        self.assert3xx(res, self.url)
        eq_(get_config(self.key), u'new motd')
class TestReviewAppComm(AppReviewerTest, AttachmentManagementMixin,
                        TestReviewMixin, TestedonManagementMixin):
    """
    Integration test that notes are created and that emails are
    sent to the right groups of people.
    """

    def setUp(self):
        super(TestReviewAppComm, self).setUp()
        self.app = app_factory(rated=True, status=mkt.STATUS_PENDING,
                               mozilla_contact='contact@mozilla.com')
        self.app.addonuser_set.create(user=user_factory(email='steamcube'))
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])

        self.mozilla_contact = 'contact@mozilla.com'

    def _post(self, data, queue='pending'):
        # POST the review form and expect a redirect back to the queue.
        res = self.client.post(self.url, data)
        self.assert3xx(res, reverse('reviewers.apps.queue_%s' % queue))

    def _get_note(self):
        # Each review action should create exactly one thread holding
        # exactly one note.
        eq_(self.app.threads.count(), 1)
        thread = self.app.threads.all()[0]
        eq_(thread.notes.count(), 1)
        return thread.notes.all()[0]

    def test_email_cc(self):
        """
        Emailed cc'ed people (those who have posted on the thread).
        """
        poster = user_factory()
        thread, note = create_comm_note(
            self.app, self.app.latest_version, poster, 'lgtm')

        data = {'action': 'public', 'comments': 'gud jerb'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self._post(data)

        # Test emails.
        self._check_email_dev_and_contact(None, outbox_len=5)

        # Some person who joined the thread.
        self._check_email(
            self._get_mail(poster.email), 'Approved', to=[poster.email])

    def test_approve(self):
        """
        On approval, send an email to [developer, mozilla contact].
        """
        data = {'action': 'public', 'comments': 'gud jerb'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self._post(data)

        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.APPROVAL)
        eq_(note.body, 'gud jerb')

        # Test emails.
        self._check_email_dev_and_contact(None)

    def test_reject(self):
        """
        On rejection, send an email to [developer, mozilla contact].
        """
        data = {'action': 'reject', 'comments': 'rubesh'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self._post(data)

        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.REJECTION)
        eq_(note.body, 'rubesh')

        # Test emails.
        self._check_email_dev_and_contact(None)

    def test_info(self):
        """
        On info request, send an email to [developer, mozilla contact].
        """
        data = {'action': 'info', 'comments': 'huh'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self._post(data)

        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.MORE_INFO_REQUIRED)
        eq_(note.body, 'huh')

        # Test emails.
        self._check_email_dev_and_contact(None)

    def test_escalate(self):
        """
        On escalation, send an email to senior reviewers and developer.
        """
        data = {'action': 'escalate', 'comments': 'soup her man'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self._post(data)

        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.ESCALATION)
        eq_(note.body, 'soup her man')

        # Test emails: senior reviewer + developer, but not the contact.
        eq_(len(mail.outbox), 2)
        self._check_email(  # Senior reviewer.
            self._get_mail(self.snr_reviewer_user.email), 'Escalated',
            to=[self.snr_reviewer_user.email])
        self._check_email(self._get_mail('steamcube'), 'Escalated')

    def test_comment(self):
        """
        On reviewer comment, send an email to those but developers.
        """
        data = {'action': 'comment', 'comments': 'huh'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self._post(data)

        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.REVIEWER_COMMENT)
        eq_(note.body, 'huh')

        # Test emails: only the mozilla contact is notified.
        eq_(len(mail.outbox), 1)
        self._check_email(mail.outbox[0], 'Private reviewer comment',
                          to=[self.mozilla_contact])

    def test_disable(self):
        """
        On banning, send an email to [developer, mozilla contact].
        """
        # Banning requires admin rights.
        self.login_as_admin()
        data = {'action': 'disable', 'comments': 'u dun it'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self._post(data)

        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.DISABLED)
        eq_(note.body, 'u dun it')

        # Test emails.
        self._check_email_dev_and_contact(None)

    def test_attachments(self):
        data = {'action': 'comment', 'comments': 'huh'}
        data.update(self._attachment_management_form(num=2))
        data.update(self._attachments(num=2))
        data.update(self._testedon_management_form())
        self._post(data)

        # Test attachments.
        note = self._get_note()
        eq_(note.attachments.count(), 2)

    def test_tested_on_one(self):
        """Test a single 'Tested on' message is appended to the note body."""
        data = {'action': 'reject', 'comments': 'rubesh'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form(num=1))
        data.update(self._platforms(1))
        self._post(data)

        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.REJECTION)
        # Expected text comes from _platforms(); '\xd0esktop' is the
        # fixture's non-ASCII platform name.
        eq_(note.body, u'rubesh\n\n'
            u'Tested on \xd0esktop platform on PC with version 34')

    def test_tested_on_two(self):
        """Test two 'Tested on' messages are appended to the note body."""
        data = {'action': 'reject', 'comments': 'rubesh'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form(num=2))
        data.update(self._platforms(2))
        self._post(data)

        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.REJECTION)
        # NOTE(review): the trailing '<' after '1.3' looks like it may be
        # a mangled '&lt;' escape from extraction -- confirm against the
        # _platforms() fixture data.
        eq_(note.body, u'rubesh\n\n'
            u'Tested on \xd0esktop platform on PC with version 34; '
            u'FirefoxOS platform on ZT\xc8 Open with version 1.3<')
class TestModeratedQueue(mkt.site.tests.TestCase, AccessMixin):
    """Tests for the moderated-reviews queue (flagged user ratings)."""

    def setUp(self):
        super(TestModeratedQueue, self).setUp()
        self.app = app_factory()
        self.moderator_user = user_factory(email='moderator')
        self.grant_permission(self.moderator_user, 'Apps:ModerateReview')
        user_factory(email='regular')
        user1 = user_factory()
        user2 = user_factory()
        self.url = reverse('reviewers.apps.queue_moderated')

        # Two reviews marked for editor review: one flagged as spam, one
        # flagged as a support request.
        self.review1 = Review.objects.create(addon=self.app, body='body',
                                             user=user1, rating=3,
                                             editorreview=True)
        ReviewFlag.objects.create(review=self.review1, flag=ReviewFlag.SPAM,
                                  user=user1)
        self.review2 = Review.objects.create(addon=self.app, body='body',
                                             user=user2, rating=4,
                                             editorreview=True)
        ReviewFlag.objects.create(review=self.review2, flag=ReviewFlag.SUPPORT,
                                  user=user2)
        self.login(self.moderator_user)

    def _post(self, action):
        # Submit the first form of the moderation formset with `action`,
        # expecting a redirect back to the queue.
        ctx = self.client.get(self.url).context
        data_formset = formset(initial(ctx['reviews_formset'].forms[0]))
        data_formset['form-0-action'] = action
        res = self.client.post(self.url, data_formset)
        self.assert3xx(res, self.url)

    def _get_logs(self, action):
        # Activity-log rows recorded for the given log action.
        return ActivityLog.objects.filter(action=action.id)

    def test_anonymous_flagger(self):
        ReviewFlag.objects.all()[0].update(user=None)
        ReviewFlag.objects.all()[1].delete()
        res = self.client.get(self.url)
        txt = pq(res.content)('.reviews-flagged-reasons li div span').text()
        teststring = u'Flagged by an anonymous user on'
        ok_(txt.startswith(teststring),
            '"%s" doesn\'t start with "%s"' % (txt, teststring))

    def test_setup(self):
        eq_(Review.objects.filter(editorreview=True).count(), 2)
        eq_(ReviewFlag.objects.filter(flag=ReviewFlag.SPAM).count(), 1)

        res = self.client.get(self.url)
        doc = pq(res.content)('#reviews-flagged')

        # Test the default action is "skip".
        eq_(doc('.reviewers-desktop #id_form-0-action_1:checked').length, 1)

    def test_skip(self):
        # Skip the first review, which still leaves two.
        self._post(mkt.ratings.REVIEW_MODERATE_SKIP)
        res = self.client.get(self.url)
        eq_(len(res.context['page'].object_list), 2)

    def test_delete(self):
        # Delete the first review, which leaves one.
        self._post(mkt.ratings.REVIEW_MODERATE_DELETE)
        res = self.client.get(self.url)
        eq_(len(res.context['page'].object_list), 1)
        eq_(self._get_logs(mkt.LOG.DELETE_REVIEW).count(), 1)

    def test_keep(self):
        # Keep the first review, which leaves one.
        self._post(mkt.ratings.REVIEW_MODERATE_KEEP)
        res = self.client.get(self.url)
        eq_(len(res.context['page'].object_list), 1)
        eq_(self._get_logs(mkt.LOG.APPROVE_REVIEW).count(), 1)

    def test_no_reviews(self):
        Review.objects.all().delete()
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        eq_(pq(res.content)('#reviews-flagged .no-results').length, 1)

    def test_queue_count(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('.tabnav li a')[0].text, u'Moderated Reviews (2)')

    def test_queue_count_reviewer_and_moderator(self):
        # With both permissions all queue tabs are shown, with counts.
        self.grant_permission(self.moderator_user, 'Apps:Review')
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (0)')
        eq_(links[1].text, u'Re-reviews (0)')
        eq_(links[2].text, u'Updates (0)')
        eq_(links[3].text, u'Reviewing (0)')
        eq_(links[4].text, u'Homescreens (0)')
        eq_(links[5].text, u'Moderated Reviews (2)')

    def test_deleted_app(self):
        "Test that a deleted app doesn't break the queue."
        self.app.delete()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)

    def test_queue_count_deleted_app(self):
        # Reviews of a deleted app no longer count toward the tab total.
        self.app.delete()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('.tabnav li a')[0].text, u'Moderated Reviews (0)')
class AbuseQueueMixin(object):
    """Shared tests for the app/website abuse-report queues.

    Subclasses supply `perm`, `view_name` and `log_const`, and create two
    reported objects (plus unreported extras) in setUpTestData.
    """

    def _setUp(self):
        self.abuseviewer_user = user_factory(email='abuser')
        self.grant_permission(self.abuseviewer_user, self.perm)
        self.login(self.abuseviewer_user)
        user_factory(email='regular')
        self.url = reverse(self.view_name)

    def _post(self, action, form_index=0):
        # Submit the abuse formset with `action` on one form, expecting a
        # redirect back to the queue.
        ctx = self.client.get(self.url).context
        data_formset = formset(initial(ctx['abuse_formset'].forms[0]))
        data_formset['form-%s-action' % (form_index)] = action
        res = self.client.post(self.url, data_formset)
        self.assert3xx(res, self.url)

    def _get_logs(self, action):
        # Activity-log rows recorded for the given log action.
        return ActivityLog.objects.filter(action=action.id)

    def test_anonymous_flagger(self):
        AbuseReport.objects.all()[0].update(reporter=None)
        res = self.client.get(self.url)
        txt = pq(res.content)('.abuse-reports-reports li div span').text()
        teststring = u'Submitted by an anonymous user on'
        ok_(txt.startswith(teststring),
            '"%s" doesn\'t start with "%s"' % (txt, teststring))

    def test_no_reviews(self):
        AbuseReport.objects.all().delete()
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        eq_(pq(res.content)('#abuse-reports .no-results').length, 1)

    def test_queue_count(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        txt = pq(r.content)('.tabnav li a')[0].text
        teststring = u'Abuse Reports (2)'
        # Fixed the assertion message: this check is endswith, but the
        # message previously claimed "doesn't start with".
        ok_(txt.endswith(teststring),
            '"%s" doesn\'t end with "%s"' % (txt, teststring))

    def test_skip(self):
        # Skip the first xxx's reports, which still leaves 2 apps/sites.
        self._post(mkt.abuse.forms.ABUSE_REPORT_SKIP)
        res = self.client.get(self.url)
        eq_(len(res.context['page'].object_list), 2)

    def test_first_read(self):
        # Mark read the first xxx's reports, which leaves one.
        self._post(mkt.abuse.forms.ABUSE_REPORT_READ)
        res = self.client.get(self.url)
        eq_(len(res.context['page'].object_list), 1)
        # There are two abuse reports for app1/website1, so two log entries.
        eq_(self._get_logs(self.log_const).count(), 2)
        # Check the remaining abuse report remains unread.
        eq_(AbuseReport.objects.filter(read=False).count(), 1)

    def test_first_flag(self):
        # Flag the first xxx's reports.
        self._post(mkt.abuse.forms.ABUSE_REPORT_FLAG)
        res = self.client.get(self.url)
        # Check one is left.
        eq_(len(res.context['page'].object_list), 1)
        # Check the object is flagged.
        eq_(RereviewQueue.objects.count(), 1)
        # As flagging marks read too, there should be 2 log entries.
        eq_(self._get_logs(self.log_const).count(), 2)
        # Check the remaining abuse report remains unread.
        eq_(AbuseReport.objects.filter(read=False).count(), 1)

    def test_xss(self):
        xss = '<script>alert("xss")</script>'
        AbuseReport.objects.all()[0].update(message=xss)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        tbody = pq(res.content)(
            '#abuse-reports .abuse-reports-reports').html()
        # The report message must be escaped in the rendered HTML: the
        # entity form is present, the raw tag is not.  (Previously both
        # assertions checked for the raw tag, which is contradictory and
        # could never pass -- likely an HTML-entity mangling of the
        # original '&lt;script&gt;' check.)
        assert '&lt;script&gt;' in tbody
        assert '<script>' not in tbody

    def test_deleted_website(self):
        "Test that a deleted app/website doesn't break the queue."
        AbuseReport.objects.all()[0].object.delete()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        txt = pq(r.content)('.tabnav li a')[0].text
        teststring = u'Abuse Reports (1)'
        # Fixed the assertion message: this check is endswith.
        ok_(txt.endswith(teststring),
            '"%s" doesn\'t end with "%s"' % (txt, teststring))
class TestAppAbuseQueue(mkt.site.tests.TestCase, AccessMixin,
                        AbuseQueueMixin):
    """App flavour of the shared abuse-report queue tests."""

    perm = 'Apps:ReadAbuse'
    view_name = 'reviewers.apps.queue_abuse'
    log_const = mkt.LOG.APP_ABUSE_MARKREAD

    def setUp(self):
        super(TestAppAbuseQueue, self).setUp()
        self._setUp()

    @classmethod
    def setUpTestData(cls):
        reported_app = app_factory()
        other_reported_app = app_factory()
        # Apps without abuse reports must never show up in the queue.
        app_factory()
        app_factory()
        reporter = user_factory()
        second_reporter = user_factory()
        # Two reports against the first app, one against the second.
        AbuseReport.objects.create(reporter=reporter,
                                   ip_address='123.45.67.89',
                                   addon=reported_app, message='bad')
        AbuseReport.objects.create(reporter=second_reporter,
                                   ip_address='123.01.67.89',
                                   addon=reported_app, message='terrible')
        AbuseReport.objects.create(reporter=reporter,
                                   ip_address='123.01.02.89',
                                   addon=other_reported_app,
                                   message='the worst')

    def test_setup(self):
        eq_(AbuseReport.objects.filter(read=False).count(), 3)
        eq_(AbuseReport.objects.filter(addon=Webapp.objects.all()[0]).count(),
            2)
        res = self.client.get(self.url)
        # Two distinct apps were reported, so two queue rows.
        eq_(len(res.context['page'].object_list), 2)

    def test_queue_count_reviewer_and_moderator(self):
        # With review rights too, every queue tab is shown with a count.
        self.grant_permission(self.abuseviewer_user, 'Apps:Review')
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        links = pq(res.content)('.tabnav li a')
        expected = [u'Apps (0)', u'Re-reviews (0)', u'Updates (0)',
                    u'Reviewing (0)', u'Homescreens (0)',
                    u'Abuse Reports (2)']
        for idx, text in enumerate(expected):
            eq_(links[idx].text, text)
class TestWebsiteAbuseQueue(mkt.site.tests.TestCase, AccessMixin,
                            AbuseQueueMixin):
    """Website flavour of the shared abuse-report queue tests."""

    perm = 'Websites:ReadAbuse'
    view_name = 'reviewers.websites.queue_abuse'
    log_const = mkt.LOG.WEBSITE_ABUSE_MARKREAD

    def setUp(self):
        super(TestWebsiteAbuseQueue, self).setUp()
        self._setUp()

    @classmethod
    def setUpTestData(cls):
        cls.website1 = website_factory()
        other_site = website_factory()
        # Sites without abuse reports must never show up in the queue.
        website_factory()
        website_factory()
        reporter = user_factory()
        second_reporter = user_factory()
        # Two reports against the first site, one against the second.
        AbuseReport.objects.create(reporter=reporter,
                                   ip_address='123.45.67.89',
                                   website=cls.website1, message='bad')
        AbuseReport.objects.create(reporter=second_reporter,
                                   ip_address='123.01.67.89',
                                   website=cls.website1, message='terrible')
        AbuseReport.objects.create(reporter=reporter,
                                   ip_address='123.01.02.89',
                                   website=other_site, message='the worst')

    def test_setup(self):
        eq_(AbuseReport.objects.filter(read=False).count(), 3)
        eq_(AbuseReport.objects.filter(website=self.website1).count(), 2)
        res = self.client.get(self.url)
        # Two distinct websites were reported, so two queue rows.
        eq_(len(res.context['page'].object_list), 2)

    def test_first_flag(self):
        # Websites have no re-review queue yet, so the mixin's flagging
        # test does not apply here.
        raise SkipTest()
class TestGetSigned(BasePackagedAppTest, mkt.site.tests.TestCase):
    """Tests for serving the reviewer-signed package of an app version."""

    def setUp(self):
        super(TestGetSigned, self).setUp()
        self.url = reverse('reviewers.signed', args=[self.app.app_slug,
                                                     self.version.pk])
        self.grant_permission(user_factory(email='editor'), 'Apps:Review')
        self.login('editor@mozilla.com')

    def test_not_logged_in(self):
        self.client.logout()
        self.assertLoginRequired(self.client.get(self.url))

    def test_not_reviewer(self):
        self.client.logout()
        self.login(user_factory())
        eq_(self.client.get(self.url).status_code, 403)

    @override_settings(
        DEFAULT_FILE_STORAGE='mkt.site.storage_utils.LocalFileStorage')
    @mock.patch('lib.crypto.packaged.sign')
    def test_reviewer_sign_arguments_local(self, sign_mock):
        # Local storage: the signed file is served directly (200).
        sign_mock.side_effect = mock_sign
        self.setup_files()
        res = self.client.get(self.url)
        sign_mock.assert_called_with(self.version.pk, reviewer=True)
        eq_(res.status_code, 200)

    @override_settings(
        DEFAULT_FILE_STORAGE='mkt.site.storage_utils.S3BotoPrivateStorage')
    @mock.patch('lib.crypto.packaged.sign')
    def test_reviewer_sign_arguments_storage(self, sign_mock):
        # Remote (S3) storage: the view redirects to the storage URL.
        sign_mock.side_effect = mock_sign
        self.setup_files()
        res = self.client.get(self.url)
        sign_mock.assert_called_with(self.version.pk, reviewer=True)
        self.assert3xx(res, private_storage.url(
            self.file.signed_reviewer_file_path))

    @mock.patch.object(packaged, 'sign', mock_sign)
    def test_reviewer(self):
        # x-sendfile delivery; only meaningful when XSENDFILE is enabled.
        if not settings.XSENDFILE:
            raise SkipTest()
        self.setup_files()
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        file_ = self.app.current_version.all_files[0]
        eq_(res['x-sendfile'], file_.signed_reviewer_file_path)
        eq_(res['etag'], '"%s"' % file_.hash.split(':')[-1])

    def test_not_packaged(self):
        self.app.update(is_packaged=False)
        res = self.client.get(self.url)
        eq_(res.status_code, 404)

    def test_wrong_version(self):
        self.url = reverse('reviewers.signed', args=[self.app.app_slug, 0])
        res = self.client.get(self.url)
        eq_(res.status_code, 404)

    def test_token_good(self):
        # A one-time token permits a single anonymous download.
        if not settings.XSENDFILE:
            raise SkipTest()
        token = Token(data={'app_id': self.app.id})
        token.save()
        self.setup_files()
        self.client.logout()

        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 200)
        file_ = self.app.current_version.all_files[0]
        eq_(res['x-sendfile'], file_.signed_reviewer_file_path)
        eq_(res['etag'], '"%s"' % file_.hash.split(':')[-1])

        # Test token doesn't work the 2nd time.
        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 403)

    def test_token_bad(self):
        # A token carrying bad data is rejected.
        token = Token(data={'app_id': 'abcdef'})
        token.save()
        self.setup_files()
        self.client.logout()
        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 403)
class TestMiniManifestView(BasePackagedAppTest):
    """Tests for the reviewer-facing mini-manifest of a packaged app."""

    def setUp(self):
        super(TestMiniManifestView, self).setUp()
        self.app = Webapp.objects.get(pk=337141)
        self.app.update(is_packaged=True)
        self.version = self.app.versions.latest()
        self.file = self.version.all_files[0]
        self.file.update(filename='mozball.zip')
        self.url = reverse('reviewers.mini_manifest', args=[self.app.app_slug,
                                                            self.version.pk])
        self.grant_permission(user_factory(email='editor'), 'Apps:Review')
        self.login('editor@mozilla.com')

    def test_not_logged_in(self):
        self.client.logout()
        self.assertLoginRequired(self.client.get(self.url))

    def test_not_reviewer(self):
        self.client.logout()
        self.login(user_factory())
        eq_(self.client.get(self.url).status_code, 403)

    def test_not_packaged(self):
        self.app.update(is_packaged=False)
        res = self.client.get(self.url)
        eq_(res.status_code, 404)

    def test_wrong_version(self):
        url = reverse('reviewers.mini_manifest', args=[self.app.app_slug, 0])
        res = self.client.get(url)
        eq_(res.status_code, 404)

    def test_reviewer(self):
        # The mini-manifest mirrors the app manifest and points its
        # package_path at the reviewer-signed package.
        self.setup_files()
        manifest = self.app.get_manifest_json(self.file)

        res = self.client.get(self.url)
        eq_(res['Content-type'], MANIFEST_CONTENT_TYPE)
        data = json.loads(res.content)
        eq_(data['name'], manifest['name'])
        eq_(data['developer']['name'], 'Mozilla Marketplace')
        eq_(data['package_path'],
            absolutify(reverse('reviewers.signed',
                               args=[self.app.app_slug, self.version.id])))

    def test_rejected(self):
        # Rejected sets file.status to DISABLED and moves to a guarded path.
        self.setup_files()
        self.app.update(status=mkt.STATUS_REJECTED)
        self.file.update(status=mkt.STATUS_DISABLED)

        manifest = self.app.get_manifest_json(self.file)

        res = self.client.get(self.url)
        eq_(res['Content-type'], MANIFEST_CONTENT_TYPE)
        data = json.loads(res.content)
        eq_(data['name'], manifest['name'])
        eq_(data['developer']['name'], 'Mozilla Marketplace')
        eq_(data['package_path'],
            absolutify(reverse('reviewers.signed',
                               args=[self.app.app_slug,
                                     self.version.id])))

    def test_minifest_name_matches_manifest_name(self):
        self.setup_files()
        self.app.name = 'XXX'
        self.app.save()

        manifest = self.app.get_manifest_json(self.file)

        res = self.client.get(self.url)
        data = json.loads(res.content)
        eq_(data['name'], manifest['name'])

    def test_token_good(self):
        # A one-time token permits a single anonymous fetch; the
        # package_path then carries the token through.
        token = Token(data={'app_id': self.app.id})
        token.save()
        self.setup_files()
        self.client.logout()

        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 200)
        eq_(res['Content-type'], MANIFEST_CONTENT_TYPE)
        data = json.loads(res.content)
        ok_('token=' in data['package_path'])

        # Test token doesn't work the 2nd time.
        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 403)

    def test_token_bad(self):
        # A token carrying bad data is rejected.
        token = Token(data={'app_id': 'abcdef'})
        token.save()
        self.setup_files()
        self.client.logout()
        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 403)
class TestReviewersScores(AppReviewerTest, AccessMixin):
    """Tests for the reviewer performance (review-points) page."""

    def setUp(self):
        super(TestReviewersScores, self).setUp()
        self.user = self.reviewer_user
        self.url = reverse('reviewers.performance', args=[self.user.email])

    def test_404(self):
        # An unknown email in the URL yields a 404.
        bad_url = reverse('reviewers.performance', args=['poop'])
        eq_(self.client.get(bad_url).status_code, 404)

    def test_with_email(self):
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        eq_(response.context['profile'].id, self.user.id)

    def test_without_email(self):
        # Without an email the page falls back to the logged-in reviewer.
        response = self.client.get(reverse('reviewers.performance'))
        eq_(response.status_code, 200)
        eq_(response.context['profile'].id, self.user.id)

    def test_no_reviews(self):
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        assert u'No review points awarded yet' in response.content
class TestQueueSort(AppReviewerTest):
    """Sorting of the reviewer queues (Webapp, Version and RereviewQueue)."""

    def setUp(self):
        """Create and set up apps for some filtering fun."""
        # NOTE(review): this string used to sit *after* the super() call,
        # where it was a no-op statement rather than a docstring.
        super(TestQueueSort, self).setUp()
        self.apps = [app_factory(name='Lillard',
                                 status=mkt.STATUS_PENDING,
                                 is_packaged=False,
                                 version_kw={'version': '1.0'},
                                 file_kw={'status': mkt.STATUS_PENDING},
                                 premium_type=mkt.ADDON_FREE),
                     app_factory(name='Batum',
                                 status=mkt.STATUS_PENDING,
                                 is_packaged=True,
                                 version_kw={'version': '1.0',
                                             'has_editor_comment': True,
                                             'has_info_request': True},
                                 file_kw={'status': mkt.STATUS_PENDING},
                                 premium_type=mkt.ADDON_PREMIUM)]
        # Set up app attributes.
        self.apps[0].update(created=self.days_ago(2))
        self.apps[1].update(created=self.days_ago(5))
        self.apps[0].addonuser_set.create(user=user_factory(email='XXX'))
        self.apps[1].addonuser_set.create(user=user_factory(email='illmatic'))
        self.apps[0].addondevicetype_set.create(
            device_type=mkt.DEVICE_DESKTOP.id)
        self.apps[1].addondevicetype_set.create(
            device_type=mkt.DEVICE_MOBILE.id)
        self.url = reverse('reviewers.apps.queue_pending')

    def test_do_sort_webapp(self):
        """
        Test that apps are sorted in order specified in GET params.
        """
        rf = RequestFactory()
        qs = Webapp.objects.all()
        # Test apps are sorted by created/asc by default.
        req = rf.get(self.url, {'sort': 'invalidsort', 'order': 'dontcare'})
        sorted_qs = ReviewersQueuesHelper(req).sort(qs)
        eq_(list(sorted_qs), [self.apps[1], self.apps[0]])
        # Test sorting by created, descending.
        req = rf.get(self.url, {'sort': 'created', 'order': 'desc'})
        sorted_qs = ReviewersQueuesHelper(req).sort(qs)
        eq_(list(sorted_qs), [self.apps[0], self.apps[1]])
        # Test sorting by app name.
        req = rf.get(self.url, {'sort': 'name', 'order': 'asc'})
        sorted_qs = ReviewersQueuesHelper(req).sort(qs)
        eq_(list(sorted_qs), [self.apps[1], self.apps[0]])
        req = rf.get(self.url, {'sort': 'name', 'order': 'desc'})
        sorted_qs = ReviewersQueuesHelper(req).sort(qs)
        eq_(list(sorted_qs), [self.apps[0], self.apps[1]])

    def test_do_sort_version_nom(self):
        """Tests version nomination sort order."""
        url = reverse('reviewers.apps.queue_pending')
        user = UserProfile.objects.get(email='editor@mozilla.com')
        version_0 = self.apps[0].versions.get()
        version_0.update(nomination=days_ago(1))
        version_1 = self.apps[1].versions.get()
        version_1.update(nomination=days_ago(2))
        # Throw in some disabled versions, they shouldn't affect order.
        version_factory({'status': mkt.STATUS_DISABLED}, addon=self.apps[0],
                        nomination=days_ago(10))
        version_factory({'status': mkt.STATUS_DISABLED}, addon=self.apps[1],
                        nomination=days_ago(1))
        version_factory({'status': mkt.STATUS_DISABLED}, addon=self.apps[1],
                        nomination=days_ago(20))
        req = mkt.site.tests.req_factory_factory(
            url, user=user, data={'sort': 'nomination'})
        res = queue_apps(req)
        doc = pq(res.content)
        # Desktop and mobile (hidden on desktop) alternate, so we jump by 2.
        eq_(doc('tbody tr')[0].get('data-addon'), str(version_1.addon.id))
        eq_(doc('tbody tr')[2].get('data-addon'), str(version_0.addon.id))
        req = mkt.site.tests.req_factory_factory(
            url, user=user, data={'sort': 'nomination', 'order': 'desc'})
        res = queue_apps(req)
        doc = pq(res.content)
        # Desktop and mobile (hidden on desktop) alternate, so we jump by 2.
        eq_(doc('tbody tr')[0].get('data-addon'), str(version_0.addon.id))
        eq_(doc('tbody tr')[2].get('data-addon'), str(version_1.addon.id))

    def test_do_sort_queue_object(self):
        """Tests sorting queue object."""
        rf = RequestFactory()
        url = reverse('reviewers.apps.queue_rereview')
        earlier_rrq = RereviewQueue.objects.create(addon=self.apps[0])
        later_rrq = RereviewQueue.objects.create(addon=self.apps[1])
        later_rrq.created += timedelta(days=1)
        later_rrq.save()
        request = rf.get(url, {'sort': 'created'})
        apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
        # Assert the order that RereviewQueue objects were created is
        # maintained.
        eq_([earlier_rrq.addon, later_rrq.addon], list(apps))
        request = rf.get(url, {'sort': 'created', 'order': 'desc'})
        apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
        eq_([later_rrq.addon, earlier_rrq.addon], list(apps))
        # Sorting by name uses the app names, not queue creation order.
        request = rf.get(url, {'sort': 'name', 'order': 'asc'})
        apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
        eq_([later_rrq.addon, earlier_rrq.addon], list(apps))
        request = rf.get(url, {'sort': 'name', 'order': 'desc'})
        apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
        eq_([earlier_rrq.addon, later_rrq.addon], list(apps))

    def test_sort_with_priority_review(self):
        """Tests the sorts are correct with a priority review flagged app."""
        # Set up the priority review flagged app.
        self.apps.append(app_factory(name='Foxkeh',
                                     status=mkt.STATUS_PENDING,
                                     is_packaged=False,
                                     version_kw={'version': '1.0'},
                                     file_kw={'status': mkt.STATUS_PENDING},
                                     premium_type=mkt.ADDON_FREE,
                                     priority_review=True))
        # Set up app attributes.
        self.apps[2].update(created=self.days_ago(1))
        self.apps[2].addonuser_set.create(
            user=user_factory(email='redpanda@mozilla.com'))
        self.apps[2].addondevicetype_set.create(
            device_type=mkt.DEVICE_DESKTOP.id)
        # And check it also comes out top of waiting time with Webapp model.
        rf = RequestFactory()
        qs = Webapp.objects.all()
        # Test apps are sorted by created/asc by default.
        req = rf.get(self.url, {'sort': 'invalidsort', 'order': 'dontcare'})
        sorted_qs = ReviewersQueuesHelper(req).sort(qs)
        eq_(list(sorted_qs), [self.apps[2], self.apps[1], self.apps[0]])
        # Test sorting by created, descending.
        req = rf.get(self.url, {'sort': 'created', 'order': 'desc'})
        sorted_qs = ReviewersQueuesHelper(req).sort(qs)
        eq_(list(sorted_qs), [self.apps[2], self.apps[0], self.apps[1]])
        # And with Version model.
        version_0 = self.apps[0].versions.get()
        version_0.update(nomination=days_ago(1))
        version_1 = self.apps[1].versions.get()
        version_1.update(nomination=days_ago(2))
        qs = (Version.objects.filter(
            files__status=mkt.STATUS_PENDING,
            addon__disabled_by_user=False,
            addon__status=mkt.STATUS_PENDING)
            .order_by('nomination', 'created')
            .select_related('addon', 'files').no_transforms())
        req = rf.get(self.url, {'sort': 'nomination'})
        sorted_qs = ReviewersQueuesHelper(req).sort(qs, date_sort='nomination')
        eq_(list(sorted_qs), [self.apps[2], self.apps[1], self.apps[0]])
        req = rf.get(self.url, {'sort': 'nomination', 'order': 'desc'})
        sorted_qs = ReviewersQueuesHelper(req).sort(qs, date_sort='nomination')
        eq_(list(sorted_qs), [self.apps[2], self.apps[0], self.apps[1]])
        # And with Rereview model.
        url = reverse('reviewers.apps.queue_rereview')
        earlier_rrq = RereviewQueue.objects.create(addon=self.apps[0])
        earlier_rrq.created += timedelta(days=1)
        earlier_rrq.save()
        later_rrq = RereviewQueue.objects.create(addon=self.apps[1])
        later_rrq.created += timedelta(days=2)
        later_rrq.save()
        pri_rrq = RereviewQueue.objects.create(addon=self.apps[2])
        pri_rrq.save()
        request = rf.get(url, {'sort': 'created'})
        apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
        eq_([pri_rrq.addon, earlier_rrq.addon, later_rrq.addon], list(apps))
        request = rf.get(url, {'sort': 'created', 'order': 'desc'})
        apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
        eq_([pri_rrq.addon, later_rrq.addon, earlier_rrq.addon], list(apps))
class TestAppsReviewing(AppReviewerTest, AccessMixin):
    """The 'apps being reviewed' page tracks per-reviewer viewing state."""

    def setUp(self):
        super(TestAppsReviewing, self).setUp()
        self.url = reverse('reviewers.apps.apps_reviewing')
        self.apps = [app_factory(name='Antelope', status=mkt.STATUS_PENDING),
                     app_factory(name='Bear', status=mkt.STATUS_PENDING),
                     app_factory(name='Cougar', status=mkt.STATUS_PENDING)]

    def _view_app(self, app_id):
        # Simulate the current user opening an app's review page, which
        # records the app as "being viewed" by that user.
        self.client.post(reverse('reviewers.review_viewing'),
                         {'addon_id': app_id})

    def test_no_apps_reviewing(self):
        response = self.client.get(self.url)
        eq_(len(response.context['apps']), 0)

    def test_apps_reviewing(self):
        self._view_app(self.apps[0].id)
        response = self.client.get(self.url)
        eq_(len(response.context['apps']), 1)

    def test_multiple_reviewers_no_cross_streams(self):
        self._view_app(self.apps[0].id)
        self._view_app(self.apps[1].id)
        response = self.client.get(self.url)
        eq_(len(response.context['apps']), 2)
        # Another reviewer's viewing state must not leak into this listing.
        self.login('admin@mozilla.com')
        self._view_app(self.apps[2].id)
        response = self.client.get(self.url)
        eq_(len(response.context['apps']), 1)
        # And the original reviewer's list stays unchanged.
        self.login_as_editor()
        response = self.client.get(self.url)
        eq_(len(response.context['apps']), 2)
class TestLeaderboard(AppReviewerTest):
    """Ordering and level markers on the reviewer points leaderboard."""

    def setUp(self):
        super(TestLeaderboard, self).setUp()
        self.url = reverse('reviewers.leaderboard')
        mkt.set_user(self.reviewer_user)

    def _award_points(self, user, score):
        # Record manual-review points for *user*; a negative score
        # effectively revokes points.
        ReviewerScore.objects.create(user=user, note_key=mkt.REVIEWED_MANUAL,
                                     score=score, note='Thing.')

    def test_leaderboard_ranks(self):
        users = (self.reviewer_user,
                 self.regular_user,
                 user_factory(email='clouserw'))
        # Put users[1] and users[2] just above the first level threshold
        # and users[0] just below it.
        self._award_points(users[0], mkt.REVIEWED_LEVELS[0]['points'] - 1)
        self._award_points(users[1], mkt.REVIEWED_LEVELS[0]['points'] + 1)
        self._award_points(users[2], mkt.REVIEWED_LEVELS[0]['points'] + 2)

        def get_cells():
            # Collect name and level cells top-to-bottom; level rows act as
            # dividers between users of different levels.
            doc = pq(self.client.get(self.url).content.decode('utf-8'))
            cells = doc('#leaderboard > tbody > tr > .name, '
                        '#leaderboard > tbody > tr > .level')
            return [cells.eq(i).text() for i in range(0, cells.length)]
        eq_(get_cells(),
            [users[2].display_name,
             users[1].display_name,
             mkt.REVIEWED_LEVELS[0]['name'],
             users[0].display_name])
        # One more point pushes users[0] over the threshold, moving the
        # level divider below them.
        self._award_points(users[0], 1)
        eq_(get_cells(),
            [users[2].display_name,
             users[1].display_name,
             users[0].display_name,
             mkt.REVIEWED_LEVELS[0]['name']])
        # Drop users[0] back down and promote users[2] to the next level.
        self._award_points(users[0], -1)
        self._award_points(users[2], (mkt.REVIEWED_LEVELS[1]['points'] -
                                      mkt.REVIEWED_LEVELS[0]['points']))
        eq_(get_cells(),
            [users[2].display_name,
             mkt.REVIEWED_LEVELS[1]['name'],
             users[1].display_name,
             mkt.REVIEWED_LEVELS[0]['name'],
             users[0].display_name])
class TestReviewPage(mkt.site.tests.TestCase):
    """Rendering of the review-page action buttons for edge-case statuses."""

    def setUp(self):
        super(TestReviewPage, self).setUp()
        self.app = app_factory(status=mkt.STATUS_PENDING)
        self.reviewer = user_factory(email='editor')
        self.grant_permission(self.reviewer, 'Apps:Review')
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])

    def test_status_null_disable_approve_btn(self):
        # An incomplete (STATUS_NULL) app can be rejected but not approved.
        self.app.update(status=mkt.STATUS_NULL)
        request = req_factory_factory(self.url, user=self.reviewer)
        response = app_review(request, app_slug=self.app.app_slug)
        doc = pq(response.content)
        approve_input = doc('#review-actions input[value=public]')
        reject_input = doc('#review-actions input[value=reject]')
        assert approve_input.parents('li').hasClass('disabled')
        assert not reject_input.parents('li').hasClass('disabled')
class TestAbusePage(AppReviewerTest):
    """Abuse-report listing must escape attacker-controlled app names."""

    def setUp(self):
        super(TestAbusePage, self).setUp()
        # App name deliberately embeds markup to exercise output escaping.
        self.app = app_factory(name=u'My app é <script>alert(5)</script>')
        self.url = reverse('reviewers.apps.review.abuse',
                           args=[self.app.app_slug])
        AbuseReport.objects.create(addon=self.app, message=self.app.name)

    def testXSS(self):
        # Local imports keep these helpers out of the module namespace.
        from django.utils.encoding import smart_unicode
        from jinja2.utils import escape
        content = smart_unicode(self.client.get(self.url).content)
        # The raw name must never appear; only its escaped form may.
        ok_(not unicode(self.app.name) in content)
        ok_(unicode(escape(self.app.name)) in content)
class TestReviewTranslate(RestOAuth):
    """The review-translate endpoint: redirect fallback and AJAX JSON path."""

    def setUp(self):
        super(TestReviewTranslate, self).setUp()
        self.grant_permission(self.profile, 'Apps:ModerateReview')
        self.create_switch('reviews-translate')
        user = user_factory(email='diego')
        # Slug includes URL-ish punctuation to exercise routing.
        app = app_factory(app_slug='myapp~-_')
        self.review = app.reviews.create(title=u'yes', body=u'oui',
                                         addon=app, user=user,
                                         editorreview=True, rating=4)

    def test_regular_call(self):
        # Non-AJAX requests are redirected to Google Translate.
        res = self.client.get(reverse('reviewers.review_translate',
                                      args=[self.review.addon.app_slug,
                                            self.review.id, 'fr']))
        self.assert3xx(res, 'https://translate.google.com/#auto/fr/oui', 302)

    @mock.patch('mkt.reviewers.views.requests')
    def test_ajax_call(self, requests):
        # Mock requests.
        response = mock.Mock(status_code=200)
        response.json.return_value = {
            u'data': {
                u'translations': [{
                    u'translatedText': u'oui',
                    u'detectedSourceLanguage': u'fr'
                }]
            }
        }
        requests.get.return_value = response
        # Call translation.
        review = self.review
        url = reverse('reviewers.review_translate',
                      args=[review.addon.app_slug, review.id, 'fr'])
        res = self.client.get(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        eq_(res.status_code, 200)
        # Both title and body come back translated as JSON.
        eq_(res.content, '{"body": "oui", "title": "oui"}')

    @mock.patch('mkt.reviewers.views.requests')
    def test_invalid_api_key(self, requests):
        # Mock requests.
        # A 400 error from the translation API is surfaced to the client.
        response = mock.Mock(status_code=400)
        response.json.return_value = {
            'error': {
                'code': 400,
                'errors': [
                    {'domain': 'usageLimits',
                     'message': 'Bad Request',
                     'reason': 'keyInvalid'}
                ],
                'message': 'Bad Request'
            }
        }
        requests.get.return_value = response
        # Call translation.
        review = self.review
        res = self.client.get(
            reverse('reviewers.review_translate',
                    args=[review.addon.app_slug, review.id, 'fr']),
            HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        eq_(res.status_code, 400)
class TestReviewHistory(mkt.site.tests.TestCase, CommTestMixin):
    """The review page's history widget must point at the comm threads API
    with a limit matching the number of threads."""

    def setUp(self):
        super(TestReviewHistory, self).setUp()
        self.app = self.addon = app_factory()
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
        self.grant_permission(user_factory(email='editor'), 'Apps:Review')
        self.login('editor@mozilla.com')
        self._thread_factory()

    def test_comm_url(self):
        # One thread exists from setUp, so the widget URL carries limit=1.
        r = self.client.get(self.url)
        doc = pq(r.content)
        eq_(doc('#history .item-history').attr('data-comm-app-url'),
            reverse('api-v2:comm-app-list', args=[self.addon.app_slug]) +
            '?limit=1&serializer=simple')

    def test_comm_url_multiple_thread(self):
        # A second thread bumps the limit parameter to 2.
        self._thread_factory()
        r = self.client.get(self.url)
        doc = pq(r.content)
        eq_(doc('#history .item-history').attr('data-comm-app-url'),
            reverse('api-v2:comm-app-list', args=[self.addon.app_slug]) +
            '?limit=2&serializer=simple')

    def test_comm_url_no_encode(self):
        # Non-ASCII slugs must round-trip without being mangled in the URL.
        self.addon = app_factory(app_slug='台北')
        self._thread_factory()
        url = reverse('reviewers.apps.review', args=[self.addon.app_slug])
        r = self.client.get(url)
        doc = pq(r.content)
        eq_(doc('#history .item-history').attr('data-comm-app-url'),
            reverse('api-v2:comm-app-list', args=[self.addon.app_slug]) +
            '?limit=1&serializer=simple')
class ModerateLogTest(mkt.site.tests.TestCase):
    """Shared fixtures for moderation-log views: a flagged review plus a
    moderator (logged in), an admin and a plain user."""

    def setUp(self):
        super(ModerateLogTest, self).setUp()
        # A review flagged for editor moderation.
        self.review = Review.objects.create(addon=app_factory(), body='body',
                                            user=user_factory(), rating=4,
                                            editorreview=True)
        self.moderator_user = user_factory(email='moderator')
        self.grant_permission(self.moderator_user, 'Apps:ModerateReview')
        mkt.set_user(self.moderator_user)
        self.login(self.moderator_user)
        self.admin_user = user_factory(email='admin')
        self.grant_permission(self.admin_user, '*:*')
        # An unprivileged user, used by AccessMixin's 403 check.
        user_factory(email='regular')
class TestModerateLog(ModerateLogTest, AccessMixin):
    """Filtering and rendering of the review-moderation log listing."""

    def setUp(self):
        super(TestModerateLog, self).setUp()
        self.url = reverse('reviewers.apps.moderatelog')

    def test_log(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)

    def test_start_filter(self):
        r = self.client.get(self.url, dict(start='2011-01-01'))
        eq_(r.status_code, 200)

    def test_enddate_filter(self):
        """
        Make sure that if our end date is 1/1/2011, that we include items from
        1/1/2011.
        """
        mkt.log(mkt.LOG.APPROVE_REVIEW, self.review, self.review.addon,
                created=datetime(2011, 1, 1))
        r = self.client.get(self.url, dict(end='2011-01-01'))
        eq_(r.status_code, 200)
        eq_(pq(r.content)('tbody td').eq(0).text(), 'Jan 1, 2011, 12:00:00 AM')

    def test_action_filter(self):
        """
        Based on setup we should see only two items if we filter for deleted
        reviews.
        """
        for i in xrange(2):
            mkt.log(mkt.LOG.APPROVE_REVIEW, self.review.addon, self.review)
            mkt.log(mkt.LOG.DELETE_REVIEW, self.review.addon, self.review)
        r = self.client.get(self.url, dict(search='deleted'))
        eq_(pq(r.content)('tbody tr').length, 2)

    def test_no_results(self):
        r = self.client.get(self.url, dict(end='2004-01-01'))
        no_results = 'No events found for this period.'
        assert no_results in r.content, 'Expected no results to be found.'

    def test_display_name_xss(self):
        mkt.log(mkt.LOG.APPROVE_REVIEW, self.review, self.review.addon,
                user=self.admin_user)
        self.admin_user.display_name = '<script>alert("xss")</script>'
        self.admin_user.save()
        assert '<script>' in self.admin_user.display_name, (
            'Expected <script> to be in display name')
        r = self.client.get(self.url)
        pq(r.content)('#log-listing tbody td').eq(1).html()
        # The raw markup must never reach the page unescaped...
        assert '<script>' not in r.content
        # ...but its HTML-escaped rendering must be present.  (This line had
        # lost its HTML entities, making it contradict the assertion above.)
        assert '&lt;script&gt;' in r.content
class TestModerateLogDetail(ModerateLogTest, AccessMixin):
    """Detail page for a moderation-log event, including review undeletion
    permissions (self-moderator yes, admin yes, other reviewer no)."""

    def setUp(self):
        super(TestModerateLogDetail, self).setUp()
        # AccessMixin needs a url property.
        self.url = self._url(0)

    def _url(self, id):
        # Detail URL for a given ActivityLog event id.
        return reverse('reviewers.apps.moderatelog.detail', args=[id])

    def test_detail_page(self):
        mkt.log(mkt.LOG.APPROVE_REVIEW, self.review.addon, self.review)
        e_id = ActivityLog.objects.editor_events()[0].id
        r = self.client.get(self._url(e_id))
        eq_(r.status_code, 200)

    def test_undelete_selfmoderation(self):
        # The moderator who deleted the review may undelete it.
        e_id = mkt.log(
            mkt.LOG.DELETE_REVIEW, self.review.addon, self.review).id
        self.review.delete()
        r = self.client.post(self._url(e_id), {'action': 'undelete'})
        eq_(r.status_code, 302)
        self.review = Review.objects.get(id=self.review.id)
        assert not self.review.deleted, 'Review should be undeleted now.'

    def test_undelete_admin(self):
        # Admins may undelete regardless of who performed the deletion.
        e_id = mkt.log(
            mkt.LOG.DELETE_REVIEW, self.review.addon, self.review).id
        self.review.delete()
        self.client.logout()
        self.login(self.admin_user)
        r = self.client.post(self._url(e_id), {'action': 'undelete'})
        eq_(r.status_code, 302)
        self.review = Review.objects.get(id=self.review.id)
        assert not self.review.deleted, 'Review should be undeleted now.'

    def test_undelete_unauthorized(self):
        # Delete as admin (or any other user than the reviewer).
        e_id = mkt.log(mkt.LOG.DELETE_REVIEW, self.review.addon, self.review,
                       user=self.admin_user).id
        self.review.delete()
        # Try to undelete as normal reviewer.
        r = self.client.post(self._url(e_id), {'action': 'undelete'})
        eq_(r.status_code, 403)
        self.review = Review.with_deleted.get(id=self.review.id)
        assert self.review.deleted, 'Review shouldn`t have been undeleted.'
| 40.79454 | 79 | 0.620518 |
import json
import re
import time
from datetime import datetime, timedelta
from itertools import cycle
from os import path
from django import test
from django.conf import settings
from django.core import mail
from django.core.urlresolvers import reverse
from django.test.client import RequestFactory
from django.test.utils import override_settings
from django.utils import translation
import mock
import requests
import waffle
from cache_nuggets.lib import Token
from jingo.helpers import urlparams
from nose import SkipTest
from nose.tools import eq_, ok_
from post_request_task import task as post_request_task
from pyquery import PyQuery as pq
from requests.structures import CaseInsensitiveDict
import mkt
import mkt.ratings
import mkt.site.tests
from lib.crypto import packaged
from lib.crypto.tests import mock_sign
from mkt.abuse.models import AbuseReport
from mkt.api.tests.test_oauth import RestOAuth
from mkt.comm.tests.test_views import CommTestMixin
from mkt.comm.utils import create_comm_note
from mkt.constants import MANIFEST_CONTENT_TYPE, comm
from mkt.developers.models import ActivityLog, AppLog
from mkt.files.models import File
from mkt.ratings.models import Review, ReviewFlag
from mkt.reviewers.models import (SHOWCASE_TAG, CannedResponse,
EscalationQueue, RereviewQueue,
ReviewerScore)
from mkt.reviewers.utils import ReviewersQueuesHelper
from mkt.reviewers.views import (_progress, app_review, queue_apps,
route_reviewer)
from mkt.site.fixtures import fixture
from mkt.site.helpers import absolutify, isotime
from mkt.site.storage_utils import private_storage, public_storage
from mkt.site.tests import (check_links, days_ago, formset, initial,
req_factory_factory, user_factory)
from mkt.site.utils import app_factory, make_rated, paginate, version_factory
from mkt.submit.tests.test_views import BasePackagedAppTest, SetupFilesMixin
from mkt.tags.models import Tag
from mkt.users.models import UserProfile
from mkt.versions.models import Version
from mkt.webapps.indexers import WebappIndexer
from mkt.webapps.models import AddonDeviceType, Webapp
from mkt.webapps.tasks import unindex_webapps
from mkt.websites.utils import website_factory
from mkt.zadmin.models import get_config, set_config
# strftime/strptime pattern for ISO-8601 style UTC timestamps.
TIME_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
# Directory containing this test module.
TEST_PATH = path.dirname(path.abspath(__file__))
# Sample attachment files, shared with the comm app's test fixtures.
ATTACHMENTS_DIR = path.abspath(path.join(TEST_PATH, '..', '..', 'comm',
                                         'tests', 'attachments'))
class AttachmentManagementMixin(object):
    """Helpers that build attachment-formset POST data for tests."""

    def _attachment_management_form(self, num=1):
        # Django management-form fields; TOTAL_FORMS is floored at 1 so
        # the formset always validates.
        return {'attachment-TOTAL_FORMS': max(1, num),
                'attachment-INITIAL_FORMS': 0,
                'attachment-MAX_NUM_FORMS': 1000}

    def _attachments(self, num):
        """Return POST data with *num* attachment uploads, alternating
        between an image and a text fixture."""
        payload = {}
        filenames = ['bacon.jpg', 'bacon.txt']
        captions = ['mmm, bacon', '']
        for idx in xrange(num):
            pick = 0 if idx % 2 else 1
            upload = open(path.join(ATTACHMENTS_DIR, filenames[pick]), 'r+')
            payload['attachment-%d-attachment' % idx] = upload
            payload['attachment-%d-description' % idx] = captions[pick]
        return payload
class TestedonManagementMixin(object):
    """Helpers that build 'tested on' formset POST data for review forms."""

    def _testedon_management_form(self, num=0):
        # Django management-form fields; TOTAL_FORMS is floored at 1 so
        # the formset always validates.
        return {'testedon-TOTAL_FORMS': max(1, num),
                'testedon-INITIAL_FORMS': 0,
                'testedon-MAX_NUM_FORMS': 1000}

    def _platforms(self, num, device_types=(u'\xd0esktop', u'FirefoxOS'),
                   devices=(u'PC ', u'ZT\xc8 Open'),
                   versions=(u'34', u'1.3<')):
        """Return POST data for *num* tested-on rows, cycling through the
        given device types / devices / versions.

        The defaults are immutable tuples (previously mutable lists — an
        unsafe default-argument pattern); any indexable sequence works.
        """
        data = {}
        # range() handles num <= 0 the same way the old `if num > 0` guard
        # around xrange() did, and works on both Python 2 and 3.
        for n in range(num):
            i = n % len(device_types)
            data.update({
                'testedon-%d-device_type' % n: device_types[i],
                'testedon-%d-device' % n: devices[i],
                'testedon-%d-version' % n: versions[i],
            })
        return data
class AppReviewerTest(mkt.site.tests.TestCase):
    """Base class creating the standard cast for reviewer tests: a reviewer
    (logged in by default), a senior reviewer, an admin, a regular user and
    an app contact."""

    def setUp(self):
        super(AppReviewerTest, self).setUp()
        self.reviewer_user = user_factory(email='editor')
        self.grant_permission(self.reviewer_user, 'Apps:Review')
        self.snr_reviewer_user = user_factory(email='snrreviewer')
        self.grant_permission(self.snr_reviewer_user, 'Apps:Review,Apps:Edit,'
                              'Apps:ReviewEscalated,Apps:ReviewPrivileged',
                              name='Senior App Reviewers')
        self.admin_user = user_factory(email='admin')
        self.grant_permission(self.admin_user, '*:*')
        self.regular_user = user_factory(email='regular')
        self.contact_user = user_factory(email='contact')
        self.login_as_editor()

    def login_as_admin(self):
        self.login(self.admin_user)

    def login_as_editor(self):
        self.login(self.reviewer_user)

    def login_as_senior_reviewer(self):
        self.login(self.snr_reviewer_user)

    def check_actions(self, expected, elements):
        # Assert the review-action inputs match *expected*: a list of
        # (label text, form value) pairs in display order.
        for idx, item in enumerate(expected):
            text, form_value = item
            e = elements.eq(idx)
            eq_(e.parent().text(), text)
            eq_(e.attr('name'), 'action')
            eq_(e.val(), form_value)

    def uses_es(self):
        # True when the reviewer tools are backed by Elasticsearch.
        return waffle.switch_is_active('reviewer-tools-elasticsearch')
class AccessMixin(object):
    """Shared checks: reviewer pages reject non-editors and anonymous users.

    Host classes must provide ``self.url``.
    """

    def test_403_for_non_editor(self, *args, **kwargs):
        # A plain logged-in user must be forbidden.
        self.login('regular@mozilla.com')
        response = self.client.head(self.url)
        eq_(response.status_code, 403)

    def test_302_for_anonymous(self, *args, **kwargs):
        # Anonymous users get redirected instead.
        self.client.logout()
        response = self.client.head(self.url)
        eq_(response.status_code, 302)
class SearchMixin(object):
    """Host classes provide ``self.url``; a text query must render OK."""

    def test_search_query(self):
        response = self.client.get(self.url, {'text_query': 'test'})
        eq_(response.status_code, 200)
@mock.patch('mkt.webapps.models.Webapp.get_cached_manifest', mock.Mock)
class TestReviewersHome(AppReviewerTest, AccessMixin):
    """The reviewers dashboard: queue counts, progress buckets and reviewer
    leaderboards."""

    def setUp(self):
        super(TestReviewersHome, self).setUp()
        self.url = reverse('reviewers.home')
        # Three plain pending apps.
        self.apps = [app_factory(name='Antelope',
                                 status=mkt.STATUS_PENDING,
                                 file_kw={'status': mkt.STATUS_PENDING}),
                     app_factory(name='Bear',
                                 status=mkt.STATUS_PENDING,
                                 file_kw={'status': mkt.STATUS_PENDING}),
                     app_factory(name='Cougar',
                                 status=mkt.STATUS_PENDING,
                                 file_kw={'status': mkt.STATUS_PENDING})]
        # A public packaged app with a pending update.
        self.packaged_app = app_factory(name='Dinosaur',
                                        status=mkt.STATUS_PUBLIC,
                                        is_packaged=True)
        version_factory(addon=self.packaged_app,
                        file_kw={'status': mkt.STATUS_PENDING})
        # A pending app disabled by its developer.
        app_factory(name='Elephant', disabled_by_user=True,
                    status=mkt.STATUS_PENDING)
        # A pending app sitting in the escalation queue.
        escalated = app_factory(name='Eyelash Pit Viper',
                                status=mkt.STATUS_PENDING)
        EscalationQueue.objects.create(addon=escalated)
        # Add a public app under re-review.
        rereviewed = app_factory(name='Finch', status=mkt.STATUS_PUBLIC)
        rq = RereviewQueue.objects.create(addon=rereviewed)
        rq.update(created=self.days_ago(1))
        # Add an app with latest update deleted. It shouldn't affect anything.
        app = app_factory(name='Great White Shark',
                          status=mkt.STATUS_PUBLIC,
                          version_kw={'version': '1.0'},
                          is_packaged=True)
        v = version_factory(addon=app,
                            version='2.1',
                            file_kw={'status': mkt.STATUS_PENDING})
        v.update(deleted=True)

    def test_route_reviewer(self):
        # The bare /reviewers URL redirects editors to the dashboard.
        req = mkt.site.tests.req_factory_factory(
            reverse('reviewers'),
            user=UserProfile.objects.get(email='editor@mozilla.com'))
        r = route_reviewer(req)
        self.assert3xx(r, reverse('reviewers.home'))

    def test_progress_pending(self):
        # One nomination per age bucket (new / med / old) of the pending
        # progress bar.
        self.apps[0].latest_version.update(nomination=self.days_ago(1))
        self.apps[1].latest_version.update(nomination=self.days_ago(8))
        self.apps[2].latest_version.update(nomination=self.days_ago(15))
        counts, percentages = _progress()
        eq_(counts['pending']['week'], 1)
        eq_(counts['pending']['new'], 1)
        eq_(counts['pending']['old'], 1)
        eq_(counts['pending']['med'], 1)
        self.assertAlmostEqual(percentages['pending']['new'], 33.333333333333)
        self.assertAlmostEqual(percentages['pending']['old'], 33.333333333333)
        self.assertAlmostEqual(percentages['pending']['med'], 33.333333333333)

    def test_progress_rereview(self):
        # Together with the day-old entry from setUp, the re-review queue
        # covers all three age buckets.
        rq = RereviewQueue.objects.create(addon=self.apps[0])
        rq.update(created=self.days_ago(8))
        rq = RereviewQueue.objects.create(addon=self.apps[1])
        rq.update(created=self.days_ago(15))
        counts, percentages = _progress()
        eq_(counts['rereview']['week'], 1)
        eq_(counts['rereview']['new'], 1)
        eq_(counts['rereview']['old'], 1)
        eq_(counts['rereview']['med'], 1)
        self.assertAlmostEqual(percentages['rereview']['new'], 33.333333333333)
        self.assertAlmostEqual(percentages['rereview']['old'], 33.333333333333)
        self.assertAlmostEqual(percentages['rereview']['med'], 33.333333333333)

    def test_progress_updated(self):
        # Two more public apps with pending updates of different nomination
        # ages; with setUp's packaged_app that fills all three buckets.
        extra_app = app_factory(name='Jackalope',
                                status=mkt.STATUS_PUBLIC,
                                is_packaged=True,
                                created=self.days_ago(35))
        version_factory(addon=extra_app,
                        file_kw={'status': mkt.STATUS_PENDING},
                        created=self.days_ago(25),
                        nomination=self.days_ago(8))
        extra_app = app_factory(name='Jackrabbit',
                                status=mkt.STATUS_PUBLIC,
                                is_packaged=True,
                                created=self.days_ago(35))
        version_factory(addon=extra_app,
                        file_kw={'status': mkt.STATUS_PENDING},
                        created=self.days_ago(25),
                        nomination=self.days_ago(25))
        counts, percentages = _progress()
        eq_(counts['updates']['week'], 1)
        eq_(counts['updates']['new'], 1)
        eq_(counts['updates']['old'], 1)
        eq_(counts['updates']['med'], 1)
        self.assertAlmostEqual(percentages['updates']['new'], 33.333333333333)
        self.assertAlmostEqual(percentages['updates']['old'], 33.333333333333)
        self.assertAlmostEqual(percentages['updates']['med'], 33.333333333333)

    def test_stats_waiting(self):
        self.apps[0].latest_version.update(nomination=self.days_ago(1))
        self.apps[1].latest_version.update(nomination=self.days_ago(5))
        self.apps[2].latest_version.update(nomination=self.days_ago(15))
        self.packaged_app.update(created=self.days_ago(1))
        doc = pq(self.client.get(self.url).content)
        # Headline links for each of the three queues.
        anchors = doc('.editor-stats-title a')
        eq_(anchors.eq(0).text(), '3 Pending App Reviews')
        eq_(anchors.eq(1).text(), '1 Re-review')
        eq_(anchors.eq(2).text(), '1 Update Review')
        divs = doc('.editor-stats-table > div')
        eq_(divs.eq(0).text(), '2 unreviewed app submissions this week.')
        eq_(divs.eq(2).text(), '1 unreviewed app submission this week.')
        eq_(divs.eq(4).text(), '1 unreviewed app submission this week.')
        # Progress-bar segment widths, read off the title attribute suffix.
        eq_(doc('.waiting_new').eq(0).attr('title')[-3:], '33%')
        eq_(doc('.waiting_med').eq(0).attr('title')[-3:], '33%')
        eq_(doc('.waiting_old').eq(0).attr('title')[-3:], '33%')
        eq_(doc('.waiting_new').eq(1).attr('title')[-4:], '100%')
        eq_(doc('.waiting_med').eq(1).attr('title')[-3:], ' 0%')
        eq_(doc('.waiting_old').eq(1).attr('title')[-3:], ' 0%')
        eq_(doc('.waiting_new').eq(2).attr('title')[-4:], '100%')
        eq_(doc('.waiting_med').eq(2).attr('title')[-3:], ' 0%')
        eq_(doc('.waiting_old').eq(2).attr('title')[-3:], ' 0%')

    def test_reviewer_leaders(self):
        # Three approvals alternating between two reviewers -> counts 2 / 1,
        # shown identically in both leaderboard tables.
        reviewers = UserProfile.objects.all()[:2]
        users = cycle(reviewers)
        for app in self.apps:
            mkt.log(mkt.LOG.APPROVE_VERSION, app, app.latest_version,
                    user=users.next(), details={'comments': 'hawt'})
        doc = pq(self.client.get(self.url).content.decode('utf-8'))
        table = doc('#editors-stats .editor-stats-table').eq(0)
        eq_(table.find('td').eq(0).text(), reviewers[0].email)
        eq_(table.find('td').eq(1).text(), u'2')
        eq_(table.find('td').eq(2).text(), reviewers[1].email)
        eq_(table.find('td').eq(3).text(), u'1')
        table = doc('#editors-stats .editor-stats-table').eq(1)
        eq_(table.find('td').eq(0).text(), reviewers[0].email)
        eq_(table.find('td').eq(1).text(), u'2')
        eq_(table.find('td').eq(2).text(), reviewers[1].email)
        eq_(table.find('td').eq(3).text(), u'1')
class FlagsMixin(object):
    """Asserts queue rows render the right flag icon per app property
    (packaged, premium, in-app payments, info request, editor comment).

    Host classes provide ``self.apps``, ``self.url`` and ``uses_es()``.
    """

    def test_flag_packaged_app(self):
        self.apps[0].update(is_packaged=True)
        if self.uses_es():
            self.reindex(Webapp)
        eq_(self.apps[0].is_packaged, True)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        td = pq(res.content)('#addon-queue tbody tr td.flags').eq(0)
        flag = td('div.sprite-reviewer-packaged-app')
        eq_(flag.length, 1)

    def test_flag_premium_app(self):
        self.apps[0].update(premium_type=mkt.ADDON_PREMIUM)
        if self.uses_es():
            self.reindex(Webapp)
        eq_(self.apps[0].is_premium(), True)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        tds = pq(res.content)('#addon-queue tbody tr td.flags')
        flags = tds('div.sprite-reviewer-premium')
        eq_(flags.length, 1)

    def test_flag_free_inapp_app(self):
        self.apps[0].update(premium_type=mkt.ADDON_FREE_INAPP)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        tds = pq(res.content)('#addon-queue tbody tr td.flags')
        eq_(tds('div.sprite-reviewer-premium.inapp.free').length, 1)

    def test_flag_premium_inapp_app(self):
        self.apps[0].update(premium_type=mkt.ADDON_PREMIUM_INAPP)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        tds = pq(res.content)('#addon-queue tbody tr td.flags')
        eq_(tds('div.sprite-reviewer-premium.inapp').length, 1)

    def test_flag_info(self):
        # A pending info request on the latest version shows the info flag.
        self.apps[0].latest_version.update(has_info_request=True)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        tds = pq(res.content)('#addon-queue tbody tr td.flags')
        flags = tds('div.sprite-reviewer-info')
        eq_(flags.length, 1)

    def test_flag_comment(self):
        # An editor comment on the latest version shows the editor flag.
        self.apps[0].latest_version.update(has_editor_comment=True)
        if self.uses_es():
            self.reindex(Webapp)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        tds = pq(res.content)('#addon-queue tbody tr td.flags')
        flags = tds('div.sprite-reviewer-editor')
        eq_(flags.length, 1)
class XSSMixin(object):
    """Checks that attacker-controlled app names are escaped in queues."""

    def test_xss_in_queue(self):
        a = self.apps[0]
        a.name = '<script>alert("xss")</script>'
        a.save()
        if self.uses_es():
            self.refresh(doctypes=('homescreen', 'webapp'))
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        tbody = pq(res.content)('#addon-queue tbody').html()
        # The name must appear HTML-escaped, never raw.  (The first assert
        # had lost its HTML entities, contradicting the second.)
        assert '&lt;script&gt;' in tbody
        assert '<script>' not in tbody
class TestAppQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin,
XSSMixin):
    def setUp(self):
        super(TestAppQueue, self).setUp()
        yesterday = self.days_ago(1)
        long_ago = self.days_ago(2)
        # Two pending apps with distinct nomination dates (used by the
        # sorting assertions) plus a third app that is only re-review.
        self.apps = [app_factory(name='XXX',
                                 status=mkt.STATUS_PENDING,
                                 version_kw={'nomination': long_ago},
                                 file_kw={'status': mkt.STATUS_PENDING}),
                     app_factory(name='YYY',
                                 status=mkt.STATUS_PENDING,
                                 version_kw={'nomination': yesterday},
                                 file_kw={'status': mkt.STATUS_PENDING}),
                     app_factory(name='ZZZ')]
        self.apps[0].update(created=self.days_ago(12))
        self.apps[1].update(created=self.days_ago(11))
        # Sanity-check the nominations stuck.
        eq_(self.apps[0].latest_version.nomination, long_ago)
        eq_(self.apps[1].latest_version.nomination, yesterday)
        RereviewQueue.objects.create(addon=self.apps[2])
        self.url = reverse('reviewers.apps.queue_pending')
def tearDown(self):
if self.uses_es():
unindex_webapps([app.id for app in self.apps])
super(TestAppQueue, self).tearDown()
def review_url(self, app):
return reverse('reviewers.apps.review', args=[app.app_slug])
def test_queue_viewing_ping(self):
eq_(self.client.post(reverse('reviewers.queue_viewing')).status_code,
200)
def test_template_links(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
links = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(2) a')
apps = Webapp.objects.filter(
status=mkt.STATUS_PENDING).order_by('created')
expected = [
(unicode(apps[0].name), self.review_url(apps[0])),
(unicode(apps[1].name), self.review_url(apps[1])),
]
check_links(expected, links, verify=False)
def test_action_buttons_pending(self):
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('#review-actions input')
expected = [
(u'Approve', 'public'),
(u'Reject', 'reject'),
(u'Request Re-review', 'manual_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_action_buttons_rejected(self):
self.apps[0].update(status=mkt.STATUS_REJECTED)
self.apps[0].latest_version.files.update(status=mkt.STATUS_DISABLED)
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('#review-actions input')
expected = [
(u'Approve', 'public'),
(u'Request Re-review', 'manual_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
@mock.patch('mkt.versions.models.Version.is_privileged', True)
def test_action_buttons_privileged_cantreview(self):
self.apps[0].update(is_packaged=True)
self.apps[0].latest_version.files.update(status=mkt.STATUS_PENDING)
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('#review-actions input')
expected = [
(u'Request Re-review', 'manual_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
@mock.patch('mkt.versions.models.Version.is_privileged', True)
def test_action_buttons_privileged_canreview(self):
self.login_as_senior_reviewer()
self.apps[0].update(is_packaged=True)
self.apps[0].latest_version.files.update(status=mkt.STATUS_PENDING)
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('#review-actions input')
expected = [
(u'Approve', 'public'),
(u'Reject', 'reject'),
(u'Ban app', 'disable'),
(u'Request Re-review', 'manual_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_devices(self):
AddonDeviceType.objects.create(addon=self.apps[0], device_type=1)
AddonDeviceType.objects.create(addon=self.apps[0], device_type=2)
if self.uses_es():
self.reindex(Webapp)
r = self.client.get(self.url)
eq_(r.status_code, 200)
tds = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(5)')
eq_(tds('ul li:not(.unavailable)').length, 2)
def test_payments(self):
self.apps[0].update(premium_type=mkt.ADDON_PREMIUM)
self.apps[1].update(premium_type=mkt.ADDON_FREE_INAPP)
if self.uses_es():
self.reindex(Webapp)
r = self.client.get(self.url)
eq_(r.status_code, 200)
tds = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(6)')
eq_(tds.eq(0).text(),
unicode(mkt.ADDON_PREMIUM_TYPES[mkt.ADDON_PREMIUM]))
eq_(tds.eq(1).text(),
unicode(mkt.ADDON_PREMIUM_TYPES[mkt.ADDON_FREE_INAPP]))
def test_invalid_page(self):
r = self.client.get(self.url, {'page': 999})
eq_(r.status_code, 200)
eq_(r.context['pager'].number, 1)
def test_queue_count(self):
if self.uses_es():
self.refresh(doctypes=('webapp', 'homescreen'))
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (2)')
eq_(links[1].text, u'Re-reviews (1)')
eq_(links[2].text, u'Updates (0)')
eq_(links[4].text, u'Homescreens (0)')
def test_homescreen_count(self):
Tag(tag_text='homescreen').save_tag(self.apps[1])
self.apps[1].save()
if self.uses_es():
WebappIndexer.unindex(self.apps[1].id)
self.refresh(('homescreen', 'webapp'))
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (1)')
eq_(links[1].text, u'Re-reviews (1)')
eq_(links[2].text, u'Updates (0)')
eq_(links[4].text, u'Homescreens (1)')
def test_queue_count_senior_reviewer(self):
self.login_as_senior_reviewer()
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (2)')
eq_(links[1].text, u'Re-reviews (1)')
eq_(links[2].text, u'Updates (0)')
eq_(links[3].text, u'Escalations (0)')
def test_escalated_not_in_queue(self):
self.login_as_senior_reviewer()
EscalationQueue.objects.create(addon=self.apps[0])
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
eq_([a.app.id for a in res.context['addons']], [self.apps[1].id])
doc = pq(res.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (1)')
eq_(links[1].text, u'Re-reviews (1)')
eq_(links[2].text, u'Updates (0)')
eq_(links[3].text, u'Escalations (1)')
def test_incomplete_no_in_queue(self):
[app.update(status=mkt.STATUS_NULL) for app in self.apps]
if self.uses_es():
self.reindex(Webapp)
req = req_factory_factory(
self.url,
user=UserProfile.objects.get(email='editor@mozilla.com'))
doc = pq(queue_apps(req).content)
assert not doc('
def test_waiting_time(self):
res = self.client.get(self.url)
waiting_times = [wait.attrib['isotime'] for wait in
pq(res.content)('td time')]
expected_waiting_times = [isotime(app.latest_version.nomination)
for app in self.apps[0:2]]
self.assertSetEqual(expected_waiting_times, waiting_times)
class TestAppQueueES(mkt.site.tests.ESTestCase, TestAppQueue):
    # Re-runs every TestAppQueue test against the Elasticsearch-backed
    # reviewer queues (enabled via the 'reviewer-tools-elasticsearch'
    # waffle switch).
    def setUp(self):
        super(TestAppQueueES, self).setUp()
        self.create_switch('reviewer-tools-elasticsearch')
        self.reindex(Webapp)
@mock.patch('mkt.versions.models.Version.is_privileged', False)
class TestRereviewQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin,
XSSMixin):
def setUp(self):
super(TestRereviewQueue, self).setUp()
self.apps = [app_factory(name='XXX'),
app_factory(name='YYY'),
app_factory(name='ZZZ')]
RereviewQueue.objects.create(addon=self.apps[0]).update(
created=self.days_ago(5))
RereviewQueue.objects.create(addon=self.apps[1]).update(
created=self.days_ago(3))
RereviewQueue.objects.create(addon=self.apps[2]).update(
created=self.days_ago(1))
self.apps[0].update(created=self.days_ago(15))
self.apps[1].update(created=self.days_ago(13))
self.apps[2].update(created=self.days_ago(11))
if self.uses_es():
self.refresh(doctypes=('homescreen', 'webapp'))
self.url = reverse('reviewers.apps.queue_rereview')
def tearDown(self):
if self.uses_es():
unindex_webapps([app.id for app in self.apps])
super(TestRereviewQueue, self).tearDown()
def review_url(self, app):
return reverse('reviewers.apps.review', args=[app.app_slug])
def test_template_links(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
links = pq(r.content)('
apps = [rq.addon for rq in
RereviewQueue.objects.all().order_by('created')]
expected = [
(unicode(apps[0].name), self.review_url(apps[0])),
(unicode(apps[1].name), self.review_url(apps[1])),
(unicode(apps[2].name), self.review_url(apps[2])),
]
check_links(expected, links, verify=False)
def test_waiting_time(self):
r = self.client.get(self.url)
waiting_times = [wait.attrib['isotime'] for wait in
pq(r.content)('td time')]
expected_waiting_times = [
isotime(app.rereviewqueue_set.all()[0].created)
for app in self.apps]
self.assertSetEqual(expected_waiting_times, waiting_times)
def test_action_buttons_public_senior_reviewer(self):
self.login_as_senior_reviewer()
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('
expected = [
(u'Reject', 'reject'),
(u'Ban app', 'disable'),
(u'Clear Re-review', 'clear_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_action_buttons_public(self):
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('
expected = [
(u'Reject', 'reject'),
(u'Clear Re-review', 'clear_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_action_buttons_reject(self):
self.apps[0].update(status=mkt.STATUS_REJECTED)
self.apps[0].latest_version.files.update(status=mkt.STATUS_DISABLED)
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('
expected = [
(u'Approve', 'public'),
(u'Clear Re-review', 'clear_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_invalid_page(self):
r = self.client.get(self.url, {'page': 999})
eq_(r.status_code, 200)
eq_(r.context['pager'].number, 1)
def test_queue_count(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (0)')
eq_(links[1].text, u'Re-reviews (3)')
eq_(links[2].text, u'Updates (0)')
def test_queue_count_senior_reviewer(self):
self.login_as_senior_reviewer()
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (0)')
eq_(links[1].text, u'Re-reviews (3)')
eq_(links[2].text, u'Updates (0)')
eq_(links[3].text, u'Escalations (0)')
def test_escalated_not_in_queue(self):
self.login_as_senior_reviewer()
EscalationQueue.objects.create(addon=self.apps[0])
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
self.assertSetEqual([a.app.id for a in res.context['addons']],
[a.id for a in self.apps[1:]])
doc = pq(res.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (0)')
eq_(links[1].text, u'Re-reviews (2)')
eq_(links[2].text, u'Updates (0)')
eq_(links[3].text, u'Escalations (1)')
def test_addon_deleted(self):
app = self.apps[0]
app.delete()
eq_(RereviewQueue.objects.filter(addon=app).exists(), False)
class TestRereviewQueueES(mkt.site.tests.ESTestCase, TestRereviewQueue):
    # Re-runs every TestRereviewQueue test against the Elasticsearch-backed
    # reviewer queues (enabled via the 'reviewer-tools-elasticsearch'
    # waffle switch).
    def setUp(self):
        super(TestRereviewQueueES, self).setUp()
        self.create_switch('reviewer-tools-elasticsearch')
        self.reindex(Webapp)
@mock.patch('mkt.versions.models.Version.is_privileged', False)
class TestUpdateQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin,
XSSMixin):
# Prevent update_cached_manifests at setUp() since it gets called and tries
# to access files when we add versions.
@mock.patch('mkt.webapps.tasks.update_cached_manifests', False)
def setUp(self):
super(TestUpdateQueue, self).setUp()
post_request_task._start_queuing_tasks()
app1 = app_factory(is_packaged=True, name='XXX',
version_kw={'version': '1.0',
'created': self.days_ago(2),
'nomination': self.days_ago(2)})
app2 = app_factory(is_packaged=True, name='YYY',
version_kw={'version': '1.0',
'created': self.days_ago(2),
'nomination': self.days_ago(2)})
version_factory(addon=app1, version='1.1', created=self.days_ago(1),
nomination=self.days_ago(1),
file_kw={'status': mkt.STATUS_PENDING})
version_factory(addon=app2, version='1.1', created=self.days_ago(1),
nomination=self.days_ago(1),
file_kw={'status': mkt.STATUS_PENDING})
post_request_task._send_tasks_and_stop_queuing()
self.apps = list(Webapp.objects.order_by('id'))
self.url = reverse('reviewers.apps.queue_updates')
def tearDown(self):
if self.uses_es():
unindex_webapps([app.id for app in self.apps])
super(TestUpdateQueue, self).tearDown()
def review_url(self, app):
return reverse('reviewers.apps.review', args=[app.app_slug])
def test_template_links(self):
self.apps[0].versions.latest().update(nomination=self.days_ago(2))
self.apps[1].versions.latest().update(nomination=self.days_ago(1))
if self.uses_es():
self.reindex(Webapp)
r = self.client.get(self.url)
eq_(r.status_code, 200)
links = pq(r.content)('
expected = [
(unicode(self.apps[0].name), self.review_url(self.apps[0])),
(unicode(self.apps[1].name), self.review_url(self.apps[1])),
]
check_links(expected, links, verify=False)
def test_action_buttons_public_senior_reviewer(self):
self.apps[0].versions.latest().files.update(status=mkt.STATUS_PUBLIC)
self.login_as_senior_reviewer()
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('
expected = [
(u'Reject', 'reject'),
(u'Ban app', 'disable'),
(u'Request Re-review', 'manual_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_action_buttons_public(self):
self.apps[0].versions.latest().files.update(status=mkt.STATUS_PUBLIC)
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('
expected = [
(u'Reject', 'reject'),
(u'Request Re-review', 'manual_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_action_buttons_reject(self):
self.apps[0].versions.latest().files.update(status=mkt.STATUS_DISABLED)
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('
expected = [
(u'Approve', 'public'),
(u'Request Re-review', 'manual_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_invalid_page(self):
r = self.client.get(self.url, {'page': 999})
eq_(r.status_code, 200)
eq_(r.context['pager'].number, 1)
def test_queue_count(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (0)')
eq_(links[1].text, u'Re-reviews (0)')
eq_(links[2].text, u'Updates (2)')
def test_homescreen(self):
Tag(tag_text='homescreen').save_tag(self.apps[1])
self.apps[1].save()
if self.uses_es():
WebappIndexer.unindex(self.apps[1].id)
self.refresh(doctypes=('homescreen', 'webapp'))
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (0)')
eq_(links[1].text, u'Re-reviews (0)')
eq_(links[2].text, u'Updates (1)')
eq_(links[3].text, u'Reviewing (0)')
eq_(links[4].text, u'Homescreens (1)')
def test_queue_count_senior_reviewer(self):
self.login_as_senior_reviewer()
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (0)')
eq_(links[1].text, u'Re-reviews (0)')
eq_(links[2].text, u'Updates (2)')
eq_(links[3].text, u'Escalations (0)')
def test_escalated_not_in_queue(self):
self.login_as_senior_reviewer()
EscalationQueue.objects.create(addon=self.apps[0])
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
eq_([a.app.id for a in res.context['addons']],
[app.id for app in self.apps[1:]])
doc = pq(res.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (0)')
eq_(links[1].text, u'Re-reviews (0)')
eq_(links[2].text, u'Updates (1)')
eq_(links[3].text, u'Escalations (1)')
def test_order(self):
self.apps[0].update(created=self.days_ago(10))
self.apps[1].update(created=self.days_ago(5))
self.apps[0].versions.latest().update(nomination=self.days_ago(1))
self.apps[1].versions.latest().update(nomination=self.days_ago(4))
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
apps = list(res.context['addons'])
eq_(apps[0].app.id, self.apps[1].id)
eq_(apps[1].app.id, self.apps[0].id)
def test_only_updates_in_queue(self):
# Add new packaged app, which should only show up in the pending queue.
app = app_factory(is_packaged=True, name='ZZZ',
status=mkt.STATUS_PENDING,
version_kw={'version': '1.0'},
file_kw={'status': mkt.STATUS_PENDING})
self.apps.append(app)
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
apps = [a.app for a in res.context['addons']]
assert app not in apps, (
'Unexpected: Found a new packaged app in the updates queue.')
eq_(pq(res.content)('.tabnav li a')[2].text, u'Updates (2)')
def test_approved_update_in_queue(self):
app = app_factory(is_packaged=True, name='YYY',
status=mkt.STATUS_APPROVED,
version_kw={'version': '1.0',
'created': self.days_ago(2),
'nomination': self.days_ago(2)})
self.apps.append(app)
File.objects.filter(version__addon=app).update(status=app.status)
version_factory(addon=app, version='1.1', created=self.days_ago(1),
nomination=self.days_ago(1),
file_kw={'status': mkt.STATUS_PENDING})
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
assert app.id in [a.app.id for a in res.context['addons']]
eq_(pq(res.content)('.tabnav li a')[2].text, u'Updates (3)')
def test_update_queue_with_empty_nomination(self):
app = app_factory(is_packaged=True, name='YYY',
status=mkt.STATUS_NULL,
version_kw={'version': '1.0',
'created': self.days_ago(2),
'nomination': None})
self.apps.append(app)
first_version = app.latest_version
version_factory(addon=app, version='1.1', created=self.days_ago(1),
nomination=None,
file_kw={'status': mkt.STATUS_PENDING})
# Now that we have a version with nomination=None, reset app status.
app.update(status=mkt.STATUS_APPROVED)
File.objects.filter(version=first_version).update(status=app.status)
# Safeguard: we /really/ want to test with nomination=None.
eq_(app.latest_version.reload().nomination, None)
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
assert app.id in [a.app.id for a in res.context['addons']]
eq_(pq(res.content)('.tabnav li a')[2].text, u'Updates (3)')
def test_deleted_version_not_in_queue(self):
app = self.apps[0]
# File is PENDING and delete current version.
old_ver = app.versions.order_by('id')[0]
old_ver.files.latest().update(status=mkt.STATUS_PENDING)
old_ver.delete()
# "Approve" the app.
app.versions.latest().files.latest().update(status=mkt.STATUS_PUBLIC)
eq_(app.reload().status, mkt.STATUS_PUBLIC)
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
eq_(res.status_code, 200)
# Verify that our app has 2 versions.
eq_(Version.with_deleted.filter(addon=app).count(), 2)
# Verify the apps in the context are what we expect.
doc = pq(res.content)
eq_(doc('.tabnav li a')[2].text, u'Updates (1)')
apps = [a.app.id for a in res.context['addons']]
ok_(app.id not in apps)
ok_(self.apps[1].id in apps)
def test_waiting_time(self):
r = self.client.get(self.url)
waiting_times = [wait.attrib['isotime'] for wait in
pq(r.content)('td time')]
expected_waiting_times = [isotime(app.latest_version.nomination)
for app in self.apps]
self.assertSetEqual(expected_waiting_times, waiting_times)
class TestUpdateQueueES(mkt.site.tests.ESTestCase, TestUpdateQueue):
    # Re-runs every TestUpdateQueue test against the Elasticsearch-backed
    # reviewer queues (enabled via the 'reviewer-tools-elasticsearch'
    # waffle switch).
    def setUp(self):
        super(TestUpdateQueueES, self).setUp()
        self.create_switch('reviewer-tools-elasticsearch')
        self.refresh(doctypes=('homescreen', 'webapp'))
@mock.patch('mkt.versions.models.Version.is_privileged', False)
class TestEscalationQueue(AppReviewerTest, AccessMixin, FlagsMixin,
SearchMixin, XSSMixin):
def setUp(self):
super(TestEscalationQueue, self).setUp()
self.apps = [app_factory(name='XXX'),
app_factory(name='YYY'),
app_factory(name='ZZZ')]
EscalationQueue.objects.create(addon=self.apps[0]).update(
created=self.days_ago(5))
EscalationQueue.objects.create(addon=self.apps[1]).update(
created=self.days_ago(3))
EscalationQueue.objects.create(addon=self.apps[2]).update(
created=self.days_ago(1))
self.apps[0].update(created=self.days_ago(15))
self.apps[1].update(created=self.days_ago(13))
self.apps[2].update(created=self.days_ago(11))
self.login_as_senior_reviewer()
self.url = reverse('reviewers.apps.queue_escalated')
def tearDown(self):
if self.uses_es():
unindex_webapps([app.id for app in self.apps])
super(TestEscalationQueue, self).tearDown()
def review_url(self, app):
return reverse('reviewers.apps.review', args=[app.app_slug])
def test_flag_blocked(self):
# Blocklisted apps should only be in the update queue, so this flag
# check is here rather than in FlagsMixin.
self.apps[0].update(status=mkt.STATUS_BLOCKED)
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
eq_(res.status_code, 200)
tds = pq(res.content)('
flags = tds('div.sprite-reviewer-blocked')
eq_(flags.length, 1)
def test_no_access_regular_reviewer(self):
self.login_as_editor()
res = self.client.get(self.url)
eq_(res.status_code, 403)
def test_template_links(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
links = pq(r.content)('
apps = [rq.addon for rq in
EscalationQueue.objects.all().order_by('addon__created')]
expected = [
(unicode(apps[0].name), self.review_url(apps[0])),
(unicode(apps[1].name), self.review_url(apps[1])),
(unicode(apps[2].name), self.review_url(apps[2])),
]
check_links(expected, links, verify=False)
def test_waiting_time(self):
r = self.client.get(self.url)
waiting_times = [wait.attrib['isotime'] for wait in
pq(r.content)('td time')]
expected_waiting_times = [
isotime(app.escalationqueue_set.all()[0].created)
for app in self.apps]
self.assertSetEqual(expected_waiting_times, waiting_times)
def test_action_buttons_public(self):
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('
expected = [
(u'Reject', 'reject'),
(u'Ban app', 'disable'),
(u'Request Re-review', 'manual_rereview'),
(u'Clear Escalation', 'clear_escalation'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_action_buttons_reject(self):
self.apps[0].update(status=mkt.STATUS_REJECTED)
self.apps[0].latest_version.files.update(status=mkt.STATUS_DISABLED)
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('
expected = [
(u'Approve', 'public'),
(u'Ban app', 'disable'),
(u'Request Re-review', 'manual_rereview'),
(u'Clear Escalation', 'clear_escalation'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_invalid_page(self):
r = self.client.get(self.url, {'page': 999})
eq_(r.status_code, 200)
eq_(r.context['pager'].number, 1)
def test_queue_count(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (0)')
eq_(links[1].text, u'Re-reviews (0)')
eq_(links[2].text, u'Updates (0)')
eq_(links[3].text, u'Escalations (3)')
def test_addon_deleted(self):
app = self.apps[0]
app.delete()
eq_(EscalationQueue.objects.filter(addon=app).exists(), False)
class TestEscalationQueueES(mkt.site.tests.ESTestCase, TestEscalationQueue):
    # Re-runs every TestEscalationQueue test against the
    # Elasticsearch-backed reviewer queues (enabled via the
    # 'reviewer-tools-elasticsearch' waffle switch).
    def setUp(self):
        super(TestEscalationQueueES, self).setUp()
        self.create_switch('reviewer-tools-elasticsearch')
        self.reindex(Webapp)
class TestReviewTransaction(AttachmentManagementMixin,
                            mkt.site.tests.MockEsMixin,
                            mkt.site.tests.MockBrowserIdMixin,
                            test.TransactionTestCase,
                            TestedonManagementMixin):
    """Approval tests that exercise real DB transactions.

    Uses TransactionTestCase so commits/rollbacks around app signing
    behave as in production (a SigningError must roll the status back).
    """
    fixtures = fixture('webapp_337141')

    def setUp(self):
        super(TestReviewTransaction, self).setUp()
        # Grant a fresh user review permission; browser-id auth is mocked.
        mkt.site.tests.TestCase.grant_permission(
            user_factory(email='editor'), 'Apps:Review')
        self.mock_browser_id()

    def get_app(self):
        # Re-fetch so we observe the committed DB state, not a cached one.
        return Webapp.objects.get(id=337141)

    @mock.patch('mkt.webapps.tasks.update_cached_manifests')
    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    @mock.patch('lib.crypto.packaged.sign_app')
    def test_public_sign(self, sign_mock, json_mock, update_cached_manifests):
        """Approving a pending packaged app signs it, makes it public and
        schedules exactly one cached-manifest update."""
        self.app = self.get_app()
        self.version = self.app.latest_version
        self.version.files.all().update(status=mkt.STATUS_PENDING)
        # Provide a file on disk to sign and remove any stale signed copy.
        with private_storage.open(
                self.version.files.all()[0].file_path, 'w') as f:
            f.write('.')
        public_storage.delete(self.version.files.all()[0].signed_file_path)
        self.app.update(status=mkt.STATUS_PENDING, is_packaged=True,
                        _current_version=None, _signal=False)
        eq_(self.get_app().status, mkt.STATUS_PENDING)
        update_cached_manifests.reset_mock()

        sign_mock.return_value = None  # Didn't fail.
        json_mock.return_value = {'name': 'Something'}

        self.login('editor@mozilla.com')
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        resp = self.client.post(
            reverse('reviewers.apps.review', args=[self.app.app_slug]), data)
        eq_(resp.status_code, 302)

        eq_(self.get_app().status, mkt.STATUS_PUBLIC)
        eq_(update_cached_manifests.delay.call_count, 1)

    @mock.patch('mkt.webapps.tasks.update_cached_manifests')
    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    @mock.patch('lib.crypto.packaged.sign')
    def test_public_sign_failure(self, sign_mock, json_mock,
                                 update_cached_manifests):
        """A SigningError during approval leaves the app pending and does
        not schedule a manifest update (transaction rolled back)."""
        self.app = self.get_app()
        self.version = self.app.latest_version
        self.version.files.all().update(status=mkt.STATUS_PENDING)
        self.app.update(status=mkt.STATUS_PENDING, is_packaged=True,
                        _current_version=None, _signal=False)
        eq_(self.get_app().status, mkt.STATUS_PENDING)

        sign_mock.side_effect = packaged.SigningError
        json_mock.return_value = {'name': 'Something'}

        self.login('editor@mozilla.com')
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        resp = self.client.post(
            reverse('reviewers.apps.review', args=[self.app.app_slug]), data)
        eq_(resp.status_code, 302)

        eq_(self.get_app().status, mkt.STATUS_PENDING)
        eq_(update_cached_manifests.delay.call_count, 0)
class TestReviewMixin(object):
    """Shared helpers for review-posting tests: posting review actions and
    asserting on emails, comm threads, activity logs and reviewer scores."""

    # Shape of the Reply-To addresses generated for communication threads.
    COMM_REPLY_RE = r'^commreply\+[a-f0-9]+\@marketplace\.firefox\.com$'

    def post(self, data, queue='pending'):
        # Posting a review action redirects back to the originating queue.
        res = self.client.post(self.url, data)
        self.assert3xx(res, reverse('reviewers.apps.queue_%s' % queue))

    def _check_email(self, msg, subject, to=None):
        # Default recipients are the app's authors.
        if to:
            eq_(msg.to, to)
        else:
            eq_(msg.to, list(self.app.authors.values_list('email', flat=True)))
        assert re.match(self.COMM_REPLY_RE, msg.extra_headers['Reply-To'])

        eq_(msg.cc, [])
        eq_(msg.from_email, settings.MKT_REVIEWERS_EMAIL)

        if subject:
            eq_(msg.subject, '%s: %s' % (subject, self.app.name))

    def _get_mail(self, email):
        # First outbox message whose primary recipient starts with `email`.
        return filter(lambda x: x.to[0].startswith(email), mail.outbox)[0]

    def _check_email_dev_and_contact(self, subject, outbox_len=2):
        # Expect one mail to the developer and one to the Mozilla contact.
        eq_(len(mail.outbox), outbox_len)
        self._check_email(self._get_mail('steamcube'), subject)
        self._check_email(self._get_mail('contact'), subject,
                          to=[self.mozilla_contact])

    def _check_thread(self):
        # Exactly one comm thread, readable by developer/reviewer/staff.
        thread = self.app.threads
        eq_(thread.count(), 1)

        thread = thread.get()
        perms = ('developer', 'reviewer', 'staff')

        for key in perms:
            assert getattr(thread, 'read_permission_%s' % key)

    def _check_email_body(self, msg=None):
        # The app's detail-page URL must appear in the email body.
        if not msg:
            msg = mail.outbox[0]
        body = msg.message().as_string()
        url = self.app.get_url_path()
        assert url in body, 'Could not find apps detail URL in %s' % msg

    def _check_log(self, action):
        assert AppLog.objects.filter(
            addon=self.app, activity_log__action=action.id).exists(), (
            "Didn't find `%s` action in logs." % action.short)

    def _check_score(self, reviewed_type):
        # Exactly the configured score for this review type was awarded.
        scores = ReviewerScore.objects.all()
        assert len(scores) > 0
        eq_(scores[0].score, mkt.REVIEWED_SCORES[reviewed_type])
        eq_(scores[0].note_key, reviewed_type)
class TestReviewApp(SetupFilesMixin, AppReviewerTest, TestReviewMixin,
AccessMixin, AttachmentManagementMixin,
TestedonManagementMixin):
fixtures = fixture('webapp_337141')
def setUp(self):
super(TestReviewApp, self).setUp()
self.mozilla_contact = 'contact@mozilla.com'
self.app = self.get_app()
make_rated(self.app)
self.app.update(status=mkt.STATUS_PENDING,
mozilla_contact=self.mozilla_contact)
self.version = self.app.latest_version
self.version.files.all().update(status=mkt.STATUS_PENDING)
self.file = self.version.all_files[0]
self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
self.setup_files()
def get_app(self):
return Webapp.objects.get(id=337141)
def test_review_viewing_ping(self):
eq_(self.client.post(reverse('reviewers.review_viewing')).status_code,
200)
@mock.patch('mkt.webapps.models.Webapp.in_rereview_queue')
def test_rereview(self, is_rereview_queue):
is_rereview_queue.return_value = True
content = pq(self.client.get(self.url).content)
assert content('
@mock.patch('mkt.webapps.models.Webapp.in_escalation_queue')
def test_escalated(self, in_escalation_queue):
in_escalation_queue.return_value = True
content = pq(self.client.get(self.url).content)
assert content('
def test_cannot_review_my_app(self):
with self.settings(ALLOW_SELF_REVIEWS=False):
self.app.addonuser_set.create(
user=UserProfile.objects.get(email='editor@mozilla.com'))
res = self.client.head(self.url)
self.assert3xx(res, reverse('reviewers.home'))
res = self.client.post(self.url)
self.assert3xx(res, reverse('reviewers.home'))
def test_cannot_review_blocklisted_app(self):
self.app.update(status=mkt.STATUS_BLOCKED)
res = self.client.get(self.url)
self.assert3xx(res, reverse('reviewers.home'))
res = self.client.post(self.url)
self.assert3xx(res, reverse('reviewers.home'))
def test_review_no_latest_version(self):
self.app.versions.all().delete()
self.app.reload()
eq_(self.app.latest_version, None)
eq_(self.app.current_version, None)
response = self.client.get(self.url)
eq_(response.status_code, 200)
doc = pq(response.content)
assert not doc('input[name=action][value=info]').length
assert not doc('input[name=action][value=comment]').length
assert not doc('input[name=action][value=public]').length
assert not doc('input[name=action][value=reject]').length
# Also try with a packaged app.
self.app.update(is_packaged=True)
response = self.client.get(self.url)
eq_(response.status_code, 200)
def test_sr_can_review_blocklisted_app(self):
self.app.update(status=mkt.STATUS_BLOCKED)
self.login_as_senior_reviewer()
eq_(self.client.get(self.url).status_code, 200)
data = {'action': 'public', 'comments': 'yo'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
res = self.client.post(self.url, data)
self.assert3xx(res, reverse('reviewers.apps.queue_pending'))
def test_pending_to_reject_w_device_overrides(self):
# This shouldn't be possible unless there's form hacking.
AddonDeviceType.objects.create(addon=self.app,
device_type=mkt.DEVICE_DESKTOP.id)
AddonDeviceType.objects.create(addon=self.app,
device_type=mkt.DEVICE_TABLET.id)
eq_(self.app.publish_type, mkt.PUBLISH_IMMEDIATE)
data = {'action': 'reject', 'comments': 'something',
'device_override': [mkt.DEVICE_DESKTOP.id]}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.publish_type, mkt.PUBLISH_IMMEDIATE)
eq_(app.status, mkt.STATUS_REJECTED)
eq_(set([o.id for o in app.device_types]),
set([mkt.DEVICE_DESKTOP.id, mkt.DEVICE_TABLET.id]))
self._check_email_dev_and_contact('Rejected')
self._check_email_body()
def test_pending_to_public_w_requirements_overrides(self):
data = {'action': 'public', 'comments': 'something',
'has_packaged_apps': True}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
assert not self.app.latest_version.features.has_packaged_apps
self.post(data)
app = self.get_app()
assert app.latest_version.features.has_packaged_apps
# Since features have been changed by the reviewer, the app should not
# be immediately published.
eq_(app.publish_type, mkt.PUBLISH_PRIVATE)
eq_(app.status, mkt.STATUS_APPROVED)
self._check_log(mkt.LOG.REVIEW_FEATURES_OVERRIDE)
# A reviewer changing features shouldn't generate a re-review.
eq_(RereviewQueue.objects.count(), 0)
def test_pending_to_public_w_requirements_removed(self):
self.app.latest_version.features.update(has_packaged_apps=True)
data = {'action': 'public', 'comments': 'something',
'has_packaged_apps': False}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
assert self.app.latest_version.features.has_packaged_apps
self.post(data)
app = self.get_app()
assert not app.latest_version.features.has_packaged_apps
eq_(app.publish_type, mkt.PUBLISH_PRIVATE)
eq_(app.status, mkt.STATUS_APPROVED)
self._check_log(mkt.LOG.REVIEW_FEATURES_OVERRIDE)
eq_(RereviewQueue.objects.count(), 0)
def test_pending_to_reject_w_requirements_overrides(self):
# Rejecting an app doesn't let you override features requirements.
data = {'action': 'reject', 'comments': 'something',
'has_packaged_apps': True}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
assert not self.app.latest_version.features.has_packaged_apps
self.post(data)
app = self.get_app()
assert not app.latest_version.features.has_packaged_apps
eq_(app.publish_type, mkt.PUBLISH_IMMEDIATE)
eq_(app.status, mkt.STATUS_REJECTED)
def test_pending_to_public_w_requirements_overrides_nothing_changed(self):
self.version.features.update(has_packaged_apps=True)
data = {'action': 'public', 'comments': 'something',
'has_packaged_apps': True}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
assert self.app.latest_version.features.has_packaged_apps
self.post(data)
app = self.get_app()
assert app.latest_version.features.has_packaged_apps
eq_(app.publish_type, mkt.PUBLISH_IMMEDIATE)
eq_(app.status, mkt.STATUS_PUBLIC)
action_id = mkt.LOG.REVIEW_FEATURES_OVERRIDE.id
assert not AppLog.objects.filter(
addon=self.app, activity_log__action=action_id).exists()
@mock.patch('mkt.reviewers.views.messages.success', new=mock.Mock)
def test_incomplete_cant_approve(self):
self.app.update(status=mkt.STATUS_NULL)
self.app.latest_version.files.update(status=mkt.STATUS_NULL)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
eq_(self.get_app().status, mkt.STATUS_NULL)
    def test_notification_email_translation(self):
        """Notification emails should use the app's default locale (fr),
        not the requester's Accept-Language (es)."""
        # NOTE(review): permanently skipped; everything below the raise is
        # currently dead code kept for when this test is revived.
        raise SkipTest
        original_name = unicode(self.app.name)
        fr_translation = u'Mais allô quoi!'
        es_translation = u'¿Dónde está la biblioteca?'
        self.app.name = {
            'fr': fr_translation,
            'es': es_translation,
        }
        self.app.default_locale = 'fr'
        self.app.save()
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.client.post(self.url, data, HTTP_ACCEPT_LANGUAGE='es')
        eq_(translation.get_language(), 'es')
        eq_(len(mail.outbox), 2)
        msg = mail.outbox[0]
        # Subject and body must carry only the default-locale (fr) name.
        assert original_name not in msg.subject
        assert es_translation not in msg.subject
        assert fr_translation in msg.subject
        assert original_name not in msg.body
        assert es_translation not in msg.body
        assert fr_translation in msg.body
@mock.patch('lib.crypto.packaged.sign')
def test_require_sig_for_public(self, sign):
sign.side_effect = packaged.SigningError
self.get_app().update(is_packaged=True)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.client.post(self.url, data)
eq_(self.get_app().status, mkt.STATUS_PENDING)
def _test_pending_to_public(self):
self.app.update(mozilla_contact='')
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_PUBLIC)
eq_(app.current_version.files.all()[0].status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION)
eq_(len(mail.outbox), 1)
self._check_email(mail.outbox[0], ('Approved'))
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
    def test_pending_to_public(self):
        # Thin wrapper so subclasses can reuse the shared driver.
        self._test_pending_to_public()
@mock.patch('mkt.reviewers.views.messages.success')
def test_pending_to_escalation(self, messages):
data = {'action': 'escalate', 'comments': 'soup her man'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
eq_(EscalationQueue.objects.count(), 1)
self._check_log(mkt.LOG.ESCALATE_MANUAL)
eq_(len(mail.outbox), 2)
self._check_email(self._get_mail('steamcube'), 'Escalated')
self._check_email(
self._get_mail('snrreviewer'), 'Escalated',
to=[self.snr_reviewer_user.email])
eq_(messages.call_args_list[0][0][1], 'Review successfully processed.')
def test_pending_to_disable_senior_reviewer(self):
self.login_as_senior_reviewer()
self.app.update(status=mkt.STATUS_PUBLIC)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
data = {'action': 'disable', 'comments': 'banned ur app'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_DISABLED)
eq_(app.latest_version.files.all()[0].status, mkt.STATUS_DISABLED)
self._check_log(mkt.LOG.APP_DISABLED)
self._check_email_dev_and_contact('Banned')
def test_pending_to_disable(self):
self.app.update(status=mkt.STATUS_PUBLIC)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
data = {'action': 'disable', 'comments': 'banned ur app'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
res = self.client.post(self.url, data)
eq_(res.status_code, 200)
ok_('action' in res.context['form'].errors)
eq_(self.get_app().status, mkt.STATUS_PUBLIC)
eq_(len(mail.outbox), 0)
def _test_escalation_to_public(self):
EscalationQueue.objects.create(addon=self.app)
eq_(self.app.status, mkt.STATUS_PENDING)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data, queue='escalated')
app = self.get_app()
eq_(app.status, mkt.STATUS_PUBLIC)
eq_(app.current_version.files.all()[0].status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION)
eq_(EscalationQueue.objects.count(), 0)
self._check_email_dev_and_contact('Approved')
self._check_email_body()
    def test_escalation_to_public(self):
        # Thin wrapper so subclasses can reuse the shared driver.
        self._test_escalation_to_public()
def test_escalation_to_reject(self):
EscalationQueue.objects.create(addon=self.app)
eq_(self.app.status, mkt.STATUS_PENDING)
files = list(self.version.files.values_list('id', flat=True))
data = {'action': 'reject', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data, queue='escalated')
app = self.get_app()
eq_(app.status, mkt.STATUS_REJECTED)
eq_(File.objects.filter(id__in=files)[0].status, mkt.STATUS_REJECTED)
self._check_log(mkt.LOG.REJECT_VERSION)
eq_(EscalationQueue.objects.count(), 0)
self._check_email_dev_and_contact('Rejected')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
def test_escalation_to_disable_senior_reviewer(self):
self.login_as_senior_reviewer()
EscalationQueue.objects.create(addon=self.app)
self.app.update(status=mkt.STATUS_PUBLIC)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
data = {'action': 'disable', 'comments': 'banned ur app'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data, queue='escalated')
app = self.get_app()
eq_(app.status, mkt.STATUS_DISABLED)
eq_(app.latest_version.files.all()[0].status, mkt.STATUS_DISABLED)
self._check_log(mkt.LOG.APP_DISABLED)
eq_(EscalationQueue.objects.count(), 0)
self._check_email_dev_and_contact('Banned')
def test_escalation_to_disable(self):
EscalationQueue.objects.create(addon=self.app)
self.app.update(status=mkt.STATUS_PUBLIC)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
data = {'action': 'disable', 'comments': 'banned ur app'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
res = self.client.post(self.url, data, queue='escalated')
eq_(res.status_code, 200)
ok_('action' in res.context['form'].errors)
eq_(self.get_app().status, mkt.STATUS_PUBLIC)
eq_(EscalationQueue.objects.count(), 1)
eq_(len(mail.outbox), 0)
def test_clear_escalation(self):
self.app.update(status=mkt.STATUS_PUBLIC)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
EscalationQueue.objects.create(addon=self.app)
data = {'action': 'clear_escalation', 'comments': 'all clear'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data, queue='escalated')
eq_(EscalationQueue.objects.count(), 0)
self._check_log(mkt.LOG.ESCALATION_CLEARED)
eq_(len(mail.outbox), 1)
self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
def test_rereview_to_reject(self):
RereviewQueue.objects.create(addon=self.app)
self.app.update(status=mkt.STATUS_PUBLIC)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
data = {'action': 'reject', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data, queue='rereview')
eq_(self.get_app().status, mkt.STATUS_REJECTED)
self._check_log(mkt.LOG.REJECT_VERSION)
eq_(RereviewQueue.objects.count(), 0)
self._check_email_dev_and_contact('Rejected')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_REREVIEW)
def test_rereview_to_disable_senior_reviewer(self):
self.login_as_senior_reviewer()
RereviewQueue.objects.create(addon=self.app)
self.app.update(status=mkt.STATUS_PUBLIC)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
data = {'action': 'disable', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data, queue='rereview')
eq_(self.get_app().status, mkt.STATUS_DISABLED)
self._check_log(mkt.LOG.APP_DISABLED)
eq_(RereviewQueue.objects.filter(addon=self.app).count(), 0)
self._check_email_dev_and_contact('Banned')
def test_rereview_to_disable(self):
RereviewQueue.objects.create(addon=self.app)
self.app.update(status=mkt.STATUS_PUBLIC)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
data = {'action': 'disable', 'comments': 'banned ur app'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
res = self.client.post(self.url, data, queue='rereview')
eq_(res.status_code, 200)
ok_('action' in res.context['form'].errors)
eq_(self.get_app().status, mkt.STATUS_PUBLIC)
eq_(RereviewQueue.objects.filter(addon=self.app).count(), 1)
eq_(len(mail.outbox), 0)
def test_manual_rereview(self):
self.app.update(status=mkt.STATUS_PUBLIC)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
data = {'action': 'manual_rereview', 'comments': 'man dem'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
# The app status shouldn't change.
eq_(self.get_app().status, mkt.STATUS_PUBLIC)
eq_(RereviewQueue.objects.count(), 1)
self._check_log(mkt.LOG.REREVIEW_MANUAL)
eq_(len(mail.outbox), 1)
self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
def test_clear_rereview(self):
self.app.update(status=mkt.STATUS_PUBLIC)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
RereviewQueue.objects.create(addon=self.app)
data = {'action': 'clear_rereview', 'comments': 'all clear'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data, queue='rereview')
eq_(RereviewQueue.objects.count(), 0)
self._check_log(mkt.LOG.REREVIEW_CLEARED)
# Ensure we don't send emails to the developer on clearing re-reviews.
eq_(len(mail.outbox), 1)
self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
self._check_score(mkt.REVIEWED_WEBAPP_REREVIEW)
def test_clear_rereview_unlisted(self):
self.app.update(status=mkt.STATUS_UNLISTED)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
RereviewQueue.objects.create(addon=self.app)
data = {'action': 'clear_rereview', 'comments': 'all clear'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data, queue='rereview')
eq_(RereviewQueue.objects.count(), 0)
self._check_log(mkt.LOG.REREVIEW_CLEARED)
eq_(len(mail.outbox), 1)
self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
self._check_score(mkt.REVIEWED_WEBAPP_REREVIEW)
def test_rereview_to_escalation(self):
RereviewQueue.objects.create(addon=self.app)
data = {'action': 'escalate', 'comments': 'soup her man'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data, queue='rereview')
eq_(EscalationQueue.objects.count(), 1)
self._check_log(mkt.LOG.ESCALATE_MANUAL)
# Test 2 emails: 1 to dev, 1 to admin.
eq_(len(mail.outbox), 2)
self._check_email(self._get_mail('steamcube'), 'Escalated')
self._check_email(
self._get_mail('snrreviewer'), 'Escalated',
to=[self.snr_reviewer_user.email])
def test_more_information(self):
# Test the same for all queues.
data = {'action': 'info', 'comments': 'Knead moor in faux'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
eq_(self.get_app().status, mkt.STATUS_PENDING)
self._check_log(mkt.LOG.REQUEST_INFORMATION)
vqs = self.get_app().versions.all()
eq_(vqs.count(), 1)
eq_(vqs.filter(has_info_request=True).count(), 1)
self._check_email_dev_and_contact('Reviewer comment')
def test_multi_cc_email(self):
# Test multiple mozilla_contact emails via more information.
contacts = [user_factory(email=u'á').email,
user_factory(email=u'ç').email]
self.mozilla_contact = ', '.join(contacts)
self.app.update(mozilla_contact=self.mozilla_contact)
data = {'action': 'info', 'comments': 'Knead moor in faux'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
eq_(len(mail.outbox), 3)
subject = 'Reviewer comment'
self._check_email(self._get_mail('steamcube'), subject)
self._check_email(self._get_mail(contacts[0]), subject,
to=[contacts[0]])
self._check_email(self._get_mail(contacts[1]), subject,
to=[contacts[1]])
def test_comment(self):
# Test the same for all queues.
data = {'action': 'comment', 'comments': 'mmm, nice app'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
eq_(len(mail.outbox), 1)
self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
self._check_log(mkt.LOG.COMMENT_VERSION)
def test_receipt_no_node(self):
res = self.client.get(self.url)
eq_(len(pq(res.content)('
def test_receipt_has_node(self):
self.get_app().update(premium_type=mkt.ADDON_PREMIUM)
res = self.client.get(self.url)
eq_(len(pq(res.content)('.reviewers-desktop
1)
eq_(len(pq(res.content)('.reviewers-mobile
1)
@mock.patch('mkt.reviewers.views.requests.get')
def test_manifest_json(self, mock_get):
m = mock.Mock()
m.content = 'the manifest contents <script>'
m.headers = CaseInsensitiveDict(
{'content-type': 'application/x-web-app-manifest+json <script>'})
mock_get.return_value = m
expected = {
'content': 'the manifest contents <script>',
'headers': {'content-type':
'application/x-web-app-manifest+json <script>'},
'success': True,
'permissions': {}
}
r = self.client.get(reverse('reviewers.apps.review.manifest',
args=[self.app.app_slug]))
eq_(r.status_code, 200)
eq_(json.loads(r.content), expected)
@mock.patch('mkt.reviewers.views.requests.get')
def test_manifest_json_unicode(self, mock_get):
m = mock.Mock()
m.content = u'كك some foreign ish'
m.headers = CaseInsensitiveDict({})
mock_get.return_value = m
r = self.client.get(reverse('reviewers.apps.review.manifest',
args=[self.app.app_slug]))
eq_(r.status_code, 200)
eq_(json.loads(r.content), {'content': u'كك some foreign ish',
'headers': {}, 'success': True,
'permissions': {}})
@mock.patch('mkt.reviewers.views.requests.get')
def test_manifest_json_encoding(self, mock_get):
m = mock.Mock()
m.content = open(self.manifest_path('non-utf8.webapp')).read()
m.headers = CaseInsensitiveDict({})
mock_get.return_value = m
r = self.client.get(reverse('reviewers.apps.review.manifest',
args=[self.app.app_slug]))
eq_(r.status_code, 200)
data = json.loads(r.content)
assert u'&t):
m = mock.Mock()
m.content = ''
m.headers = CaseInsensitiveDict({})
mock_get.return_value = m
r = self.client.get(reverse('reviewers.apps.review.manifest',
args=[self.app.app_slug]))
eq_(r.status_code, 200)
eq_(json.loads(r.content), {'content': u'', 'headers': {},
'success': True, 'permissions': {}})
@mock.patch('mkt.reviewers.views.requests.get')
def test_manifest_json_traceback_in_response(self, mock_get):
m = mock.Mock()
m.content = {'name': 'Some name'}
m.headers = CaseInsensitiveDict({})
mock_get.side_effect = requests.exceptions.SSLError
mock_get.return_value = m
# We should not 500 on a traceback.
r = self.client.get(reverse('reviewers.apps.review.manifest',
args=[self.app.app_slug]))
eq_(r.status_code, 200)
data = json.loads(r.content)
assert data['content'], 'There should be a content with the traceback'
eq_(data['headers'], {})
@mock.patch('mkt.reviewers.views.json.dumps')
def test_manifest_json_packaged(self, mock_):
# Test that when the app is packaged, _mini_manifest is called.
mock_.return_value = '{}'
self.get_app().update(is_packaged=True)
res = self.client.get(reverse('reviewers.apps.review.manifest',
args=[self.app.app_slug]))
eq_(res.status_code, 200)
assert mock_.called
@mock.patch('mkt.reviewers.views._get_manifest_json')
def test_manifest_json_perms(self, mock_):
mock_.return_value = {
'permissions': {
"foo": {"description": "foo"},
"camera": {"description": "<script>"}
}
}
self.get_app().update(is_packaged=True)
r = self.client.get(reverse('reviewers.apps.review.manifest',
args=[self.app.app_slug]))
eq_(r.status_code, 200)
eq_(json.loads(r.content)['permissions'],
{'foo': {'description': 'foo', 'type': 'web'},
'camera': {'description': '<script>', 'type': 'priv'}})
def test_abuse(self):
AbuseReport.objects.create(addon=self.app, message='!@
res = self.client.get(self.url)
doc = pq(res.content)
dd = doc('.reviewers-desktop
eq_(dd.text(), u'1')
eq_(dd.find('a').attr('href'), reverse('reviewers.apps.review.abuse',
args=[self.app.app_slug]))
dd = doc('.reviewers-mobile
eq_(dd.text(), u'1')
eq_(dd.find('a').attr('href'), reverse('reviewers.apps.review.abuse',
args=[self.app.app_slug]))
def _attachment_form_data(self, num=1, action='comment'):
data = {'action': action,
'comments': 'mmm, nice app'}
data.update(self._attachment_management_form(num=num))
data.update(self._attachments(num))
return data
@override_settings(REVIEWER_ATTACHMENTS_PATH=ATTACHMENTS_DIR)
@mock.patch('mkt.site.storage_utils.LocalFileStorage.save')
def test_no_attachments(self, save_mock):
data = self._attachment_form_data(num=0, action='public')
data.update(self._testedon_management_form())
self.post(data)
eq_(save_mock.called, False, save_mock.call_args_list)
def test_idn_app_domain(self):
response = self.client.get(self.url)
assert 'IDN domain!' not in response.content
self.get_app().update(app_domain=u'http://www.allïzom.org')
response = self.client.get(self.url)
assert 'IDN domain!' in response.content
def test_xss_domain(self):
# It shouldn't be possible to have this in app domain, it will never
self.get_app().update(app_domain=u'<script>alert(42)</script>')
response = self.client.get(self.url)
assert '<script>alert(42)</script>' not in response.content
assert '<script>alert(42)</script>' in response.content
def test_priority_flag_cleared_for_public(self):
self.get_app().update(priority_review=True)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
eq_(self.get_app().priority_review, False)
def test_priority_flag_uncleared_for_reject(self):
self.get_app().update(priority_review=True)
data = {'action': 'reject', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
eq_(self.get_app().priority_review, True)
def test_is_showcase_checkbox(self):
res = self.client.get(self.url)
eq_(pq(res.content)('#id_is_showcase:checked').length, 0)
app = self.get_app()
Tag(tag_text=SHOWCASE_TAG).save_tag(app)
res = self.client.get(self.url)
eq_(pq(res.content)('#id_is_showcase:checked').length, 1)
def test_is_showcase_on(self):
data = {'action': 'comment', 'comments': 'blah', 'is_showcase': 'on'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
tags = self.get_app().tags.values_list('tag_text', flat=True)
assert SHOWCASE_TAG in tags
msg = self._get_mail('appcurationboard')
eq_(msg.to, [settings.APP_CURATION_BOARD_EMAIL])
eq_(msg.subject,
u'App [%s] nominated to be featured' % self.get_app().name)
def test_is_showcase_off(self):
self.app.update(mozilla_contact='')
# Note: Using action=comment b/c it does less and keeps test faster.
# Note: `is_showcase` isn't passed b/c checkboxes.
data = {'action': 'comment', 'comments': 'blah'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
tags = self.get_app().tags.values_list('tag_text', flat=True)
assert SHOWCASE_TAG not in tags
eq_(len(mail.outbox), 0)
    def test_versions_history_pagination(self):
        """With the page size patched down to 2, three versions paginate
        into two pages with working next/prev links."""
        self.app.update(is_packaged=True)
        version_factory(addon=self.app, version='2.0')
        version_factory(addon=self.app, version='3.0')
        # Force a page size of 2 so the 3 versions span two pages.
        with mock.patch('mkt.reviewers.views.paginate',
                        lambda req, objs, limit: paginate(req, objs, 2)):
            content = pq(self.client.get(self.url).content)
        # Page 1: two rows, a next link, no prev link.
        eq_(len(content('#review-files tr.listing-body')), 2)
        eq_(len(content('#review-files-paginate a[rel=next]')), 1)
        eq_(len(content('#review-files-paginate a[rel=prev]')), 0)
        link = content('#review-files-paginate a[rel=next]')[0].attrib['href']
        eq_(link, '%s?page=2#history' % self.url)
        with mock.patch('mkt.reviewers.views.paginate',
                        lambda req, objs, limit: paginate(req, objs, 2)):
            content = pq(self.client.get(link).content)
        # Page 2: one row, only a prev link pointing back to page 1.
        eq_(len(content('#review-files tr.listing-body')), 1)
        eq_(len(content('#review-files-paginate a[rel=next]')), 0)
        eq_(len(content('#review-files-paginate a[rel=prev]')), 1)
        eq_(content('#review-files-paginate a[rel=prev]')[0].attrib['href'],
            '%s?page=1#history' % self.url)
class TestCannedResponses(AppReviewerTest):
    """Canned responses in the 'public' sort group appear as choices in
    the review form."""

    def setUp(self):
        super(TestCannedResponses, self).setUp()
        self.login_as_editor()
        self.app = app_factory(name='XXX', status=mkt.STATUS_PENDING)
        self.cr = CannedResponse.objects.create(
            name=u'app reason', response=u'app reason body',
            sort_group=u'public')
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])

    def test_ok(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        form = r.context['form']
        # choices[0] is the blank option; choices[1] is the first real
        # group, and [1] of that pair is the group's list of responses.
        choices = form.fields['canned_response'].choices[1][1]
        eq_(len(choices), 1)
        assert self.cr.response in choices[0]
@mock.patch('mkt.reviewers.views.messages.success')
@mock.patch('mkt.webapps.tasks.index_webapps')
@mock.patch('mkt.webapps.tasks.update_cached_manifests')
@mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
@mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
class TestApproveHostedApp(AppReviewerTest, TestReviewMixin,
                           AttachmentManagementMixin, TestedonManagementMixin):
    """Review flows for a *hosted* app.

    The stacked patches are injected bottom-up into each test as
    (update_name, update_locales, update_cached_manifests, index_webapps,
    messages). Packaged-only tasks (name update from package manifest,
    cached-manifest regeneration) must never run for hosted apps.
    """
    fixtures = fixture('webapp_337141')

    def setUp(self):
        # Put the fixture app and its file into the pending state a
        # reviewer would actually see.
        super(TestApproveHostedApp, self).setUp()
        self.mozilla_contact = 'contact@mozilla.com'
        self.app = self.get_app()
        self.file = self.app.latest_version.files.all()[0]
        self.file.update(status=mkt.STATUS_PENDING)
        self.app.update(status=mkt.STATUS_PENDING,
                        mozilla_contact=self.mozilla_contact,
                        _current_version=None)
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])

    def get_app(self):
        # Always reload the fixture app from the database.
        return Webapp.objects.get(id=337141)

    def _check_message(self, msg):
        # `msg` is the mocked messages.success; verify the flash text.
        eq_(msg.call_args_list[0][0][1],
            '"Web App Review" successfully processed (+60 points, 60 total).')

    def test_pending_to_public(self, update_name, update_locales,
                               update_cached_manifests,
                               index_webapps, messages):
        """Approval publishes the app; no packaged-only tasks fire."""
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_PUBLIC)
        eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION)
        self._check_message(messages)
        self._check_email_dev_and_contact('Approved')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 0)

    def test_pending_to_hidden(self, update_name, update_locales,
                               update_cached_manifests, index_webapps,
                               messages):
        """PUBLISH_HIDDEN apps are approved into STATUS_UNLISTED."""
        self.get_app().update(publish_type=mkt.PUBLISH_HIDDEN)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_UNLISTED)
        eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION)
        self._check_email_dev_and_contact('Approved')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
        self._check_message(messages)
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 0)

    def test_pending_to_approved(self, update_name, update_locales,
                                 update_cached_manifests, index_webapps,
                                 messages):
        """PUBLISH_PRIVATE apps are approved into STATUS_APPROVED."""
        self.get_app().update(publish_type=mkt.PUBLISH_PRIVATE)
        index_webapps.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(index_webapps.delay.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_APPROVED)
        eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
        self._check_message(messages)
        self._check_email_dev_and_contact('Approved but private')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(index_webapps.delay.call_count, 1)

    def test_pending_to_reject(self, update_name, update_locales,
                               update_cached_manifests, index_webapps,
                               messages):
        """Rejection only reindexes; no locale or manifest work runs."""
        index_webapps.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(index_webapps.delay.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'reject', 'comments': 'suxor'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        eq_(index_webapps.delay.call_count, 1)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_REJECTED)
        eq_(self.file.reload().status, mkt.STATUS_REJECTED)
        self._check_log(mkt.LOG.REJECT_VERSION)
        self._check_message(messages)
        self._check_email_dev_and_contact('Rejected')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
        eq_(update_name.call_count, 0)  # Not a packaged app.
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(index_webapps.delay.call_count, 1)
@mock.patch('lib.crypto.packaged.sign')
@mock.patch('mkt.reviewers.views.messages.success')
@mock.patch('mkt.webapps.tasks.index_webapps')
@mock.patch('mkt.webapps.tasks.update_cached_manifests')
@mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
@mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
class TestApprovePackagedApp(AppReviewerTest, TestReviewMixin,
AttachmentManagementMixin,
TestedonManagementMixin):
fixtures = fixture('webapp_337141')
def setUp(self):
super(TestApprovePackagedApp, self).setUp()
self.mozilla_contact = 'contact@mozilla.com'
self.app = self.get_app()
self.file = self.app.latest_version.files.all()[0]
self.file.update(status=mkt.STATUS_PENDING)
self.app.update(status=mkt.STATUS_PENDING,
mozilla_contact=self.mozilla_contact,
_current_version=None, is_packaged=True)
self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
def get_app(self):
return Webapp.objects.get(id=337141)
def _check_message(self, msg):
eq_(msg.call_args_list[0][0][1],
'"Packaged App Review" successfully processed '
'(+60 points, 60 total).')
def test_pending_to_public(self, update_name, update_locales,
update_cached_manifests, index_webapps,
messages, sign_mock):
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_PUBLIC)
eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION)
self._check_email_dev_and_contact('Approved')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
self._check_message(messages)
eq_(update_name.call_count, 1)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 1)
eq_(sign_mock.call_args[0][0], self.get_app().current_version.pk)
def test_pending_to_hidden(self, update_name, update_locales,
update_cached_manifests, index_webapps,
messages, sign_mock):
self.get_app().update(publish_type=mkt.PUBLISH_HIDDEN)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_UNLISTED)
eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION)
self._check_email_dev_and_contact('Approved')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
self._check_message(messages)
eq_(update_name.call_count, 1)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 1)
eq_(sign_mock.call_args[0][0], self.get_app().current_version.pk)
def test_pending_to_approved(self, update_name, update_locales,
update_cached_manifests, index_webapps,
messages, sign_mock):
self.get_app().update(publish_type=mkt.PUBLISH_PRIVATE)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_APPROVED)
eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
self._check_email_dev_and_contact('Approved but private')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
self._check_message(messages)
eq_(update_name.call_count, 1)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 1)
eq_(sign_mock.call_args[0][0], self.get_app().current_version.pk)
def test_pending_to_rejected(self, update_name, update_locales,
update_cached_manifests, index_webapps,
messages, sign_mock):
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'reject', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_REJECTED)
eq_(self.file.reload().status, mkt.STATUS_REJECTED)
self._check_email_dev_and_contact('Rejected')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
self._check_message(messages)
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(sign_mock.call_count, 0)
    def test_pending_to_approved_app_private_prior_version_rejected(
            self, update_name, update_locales, update_cached_manifests,
            index_webapps, messages, sign_mock):
        """A previously-rejected private app with a fresh pending version
        gets fully approved: the new version becomes current and its file
        goes public."""
        self.app.update(status=mkt.STATUS_REJECTED,
                        publish_type=mkt.PUBLISH_PRIVATE)
        self.file.update(status=mkt.STATUS_DISABLED)
        self.new_version = version_factory(
            addon=self.app, version='1.1',
            file_kw={'status': mkt.STATUS_PENDING})
        index_webapps.delay.reset_mock()
        update_cached_manifests.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        # Rejected app: no current version yet, only the pending one.
        eq_(self.app.current_version, None)
        eq_(self.app.latest_version, self.new_version)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_APPROVED)
        eq_(app.latest_version, self.new_version)
        eq_(app.current_version, self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
        self._check_email_dev_and_contact('Approved but private')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
        self._check_message(messages)
        # Post-approval side effects run once; the new version is signed.
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(sign_mock.call_args[0][0], self.new_version.pk)
@mock.patch('lib.crypto.packaged.sign')
@mock.patch('mkt.reviewers.views.messages.success')
@mock.patch('mkt.webapps.tasks.index_webapps')
@mock.patch('mkt.webapps.tasks.update_cached_manifests')
@mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
@mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
class TestApprovePackagedVersions(AppReviewerTest, TestReviewMixin,
                                  AttachmentManagementMixin,
                                  TestedonManagementMixin):
    """Review actions on a pending *new version* of an already-published
    packaged app, across app statuses (public / unlisted / approved-
    private) and both publish types.

    The class decorators patch all post-review side effects; mock
    arguments are injected into each test bottom-up (``update_name``
    first, ``sign_mock`` last).
    """
    fixtures = fixture('webapp_337141')

    def setUp(self):
        super(TestApprovePackagedVersions, self).setUp()
        self.mozilla_contact = 'contact@mozilla.com'
        self.app = self.get_app()
        self.file = self.app.latest_version.files.all()[0]
        # Start from a published packaged app, then add the pending
        # version 2.0 that each test reviews.
        self.app.update(status=mkt.STATUS_PUBLIC,
                        mozilla_contact=self.mozilla_contact,
                        is_packaged=True)
        self.new_version = version_factory(
            addon=self.app, version='2.0',
            file_kw={'status': mkt.STATUS_PENDING})
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])

    def get_app(self):
        # Always re-fetch the fixture app to avoid stale cached state.
        return Webapp.objects.get(id=337141)

    def _check_message(self, msg):
        # Reviewer-score toast shown after processing a version update.
        eq_(msg.call_args_list[0][0][1],
            '"Updated Packaged App Review" successfully processed '
            '(+40 points, 40 total).')

    def test_version_pending_to_public(self, update_name, update_locales,
                                       update_cached_manifests, index_webapps,
                                       messages, sign_mock):
        """Public app + publish-immediately: new version goes live."""
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_PUBLIC)
        eq_(app.current_version, self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION)
        self._check_email_dev_and_contact('Approved')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
        self._check_message(messages)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(sign_mock.call_args[0][0], app.current_version.pk)

    def test_version_pending_to_approved(self, update_name, update_locales,
                                         update_cached_manifests,
                                         index_webapps, messages,
                                         sign_mock):
        """Public app + private publish type: new version is approved but
        does not become the current version."""
        self.app.update(publish_type=mkt.PUBLISH_PRIVATE)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_PUBLIC)
        # Old version stays current; the new one is merely APPROVED.
        ok_(app.current_version != self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        eq_(self.new_version.all_files[0].status, mkt.STATUS_APPROVED)
        self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
        self._check_email_dev_and_contact('Approved but private')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
        self._check_message(messages)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(sign_mock.call_args[0][0], self.new_version.pk)

    def test_version_pending_to_public_app_unlisted(
            self, update_name, update_locales, update_cached_manifests,
            index_webapps, messages, sign_mock):
        """Unlisted app keeps STATUS_UNLISTED but the new version still
        goes public and becomes current."""
        self.app.update(status=mkt.STATUS_UNLISTED)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_UNLISTED)
        eq_(app.current_version, self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION)
        self._check_email_dev_and_contact('Approved')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
        self._check_message(messages)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(sign_mock.call_args[0][0], app.current_version.pk)

    def test_version_pending_to_approved_app_unlisted(
            self, update_name, update_locales, update_cached_manifests,
            index_webapps, messages, sign_mock):
        """Unlisted app + private publish type: new version approved but
        not made current."""
        self.app.update(status=mkt.STATUS_UNLISTED,
                        publish_type=mkt.PUBLISH_PRIVATE)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_UNLISTED)
        ok_(app.current_version != self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        eq_(self.new_version.all_files[0].status, mkt.STATUS_APPROVED)
        self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
        self._check_email_dev_and_contact('Approved but private')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
        self._check_message(messages)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(sign_mock.call_args[0][0], self.new_version.pk)

    def test_version_pending_to_public_app_private(
            self, update_name, update_locales, update_cached_manifests,
            index_webapps, messages, sign_mock):
        """Private (APPROVED) app: new version goes public and current,
        app status unchanged."""
        self.app.update(status=mkt.STATUS_APPROVED)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_APPROVED)
        eq_(app.current_version, self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION)
        self._check_email_dev_and_contact('Approved')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
        self._check_message(messages)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(sign_mock.call_args[0][0], app.current_version.pk)

    def test_version_pending_to_approved_app_private(
            self, update_name, update_locales, update_cached_manifests,
            index_webapps, messages, sign_mock):
        """Private app + private publish type: new version approved but
        not made current."""
        self.app.update(status=mkt.STATUS_APPROVED,
                        publish_type=mkt.PUBLISH_PRIVATE)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_APPROVED)
        ok_(app.current_version != self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        eq_(self.new_version.all_files[0].status, mkt.STATUS_APPROVED)
        self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
        self._check_email_dev_and_contact('Approved but private')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
        self._check_message(messages)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(sign_mock.call_args[0][0], self.new_version.pk)

    def test_version_pending_to_rejected_app_public(
            self, update_name, update_locales, update_cached_manifests,
            index_webapps, messages, sign_mock):
        """Rejecting a new version of a public app leaves the app and its
        current version untouched."""
        self.app.update(status=mkt.STATUS_PUBLIC)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'reject', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_PUBLIC)
        ok_(app.current_version != self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        eq_(self.new_version.all_files[0].status, mkt.STATUS_REJECTED)
        self._check_log(mkt.LOG.REJECT_VERSION)
        self._check_email_dev_and_contact('Rejected')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
        self._check_message(messages)
        # Rejection only reindexes; no signing or refresh tasks.
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(sign_mock.call_count, 0)

    def test_version_pending_to_rejected_app_unlisted(
            self, update_name, update_locales, update_cached_manifests,
            index_webapps, messages, sign_mock):
        """Rejecting a new version of an unlisted app: same as public."""
        self.app.update(status=mkt.STATUS_UNLISTED)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'reject', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_UNLISTED)
        ok_(app.current_version != self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        eq_(self.new_version.all_files[0].status, mkt.STATUS_REJECTED)
        self._check_log(mkt.LOG.REJECT_VERSION)
        self._check_email_dev_and_contact('Rejected')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
        self._check_message(messages)
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(sign_mock.call_count, 0)

    def test_version_pending_to_rejected_app_private(
            self, update_name, update_locales, update_cached_manifests,
            index_webapps, messages, sign_mock):
        """Rejecting a new version of a private app: same as public."""
        self.app.update(status=mkt.STATUS_APPROVED)
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        data = {'action': 'reject', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        app = self.get_app()
        eq_(app.status, mkt.STATUS_APPROVED)
        ok_(app.current_version != self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        eq_(self.new_version.all_files[0].status, mkt.STATUS_REJECTED)
        self._check_log(mkt.LOG.REJECT_VERSION)
        self._check_email_dev_and_contact('Rejected')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
        self._check_message(messages)
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(sign_mock.call_count, 0)
def test_version_pending_to_rejected_app_public(
self, update_name, update_locales, update_cached_manifests,
index_webapps, messages, sign_mock):
self.app.update(status=mkt.STATUS_PUBLIC)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'reject', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_PUBLIC)
ok_(app.current_version != self.new_version)
eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
eq_(self.new_version.all_files[0].status, mkt.STATUS_REJECTED)
self._check_log(mkt.LOG.REJECT_VERSION)
self._check_email_dev_and_contact('Rejected')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
self._check_message(messages)
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(sign_mock.call_count, 0)
def test_version_pending_to_rejected_app_unlisted(
self, update_name, update_locales, update_cached_manifests,
index_webapps, messages, sign_mock):
self.app.update(status=mkt.STATUS_UNLISTED)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'reject', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_UNLISTED)
ok_(app.current_version != self.new_version)
eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
eq_(self.new_version.all_files[0].status, mkt.STATUS_REJECTED)
self._check_log(mkt.LOG.REJECT_VERSION)
self._check_email_dev_and_contact('Rejected')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
self._check_message(messages)
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(sign_mock.call_count, 0)
def test_version_pending_to_rejected_app_private(
self, update_name, update_locales, update_cached_manifests,
index_webapps, messages, sign_mock):
self.app.update(status=mkt.STATUS_APPROVED)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'reject', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_APPROVED)
ok_(app.current_version != self.new_version)
eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
eq_(self.new_version.all_files[0].status, mkt.STATUS_REJECTED)
self._check_log(mkt.LOG.REJECT_VERSION)
self._check_email_dev_and_contact('Rejected')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
self._check_message(messages)
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(sign_mock.call_count, 0)
class TestReviewLog(AppReviewerTest, AccessMixin):
def setUp(self):
super(TestReviewLog, self).setUp()
# Note: if `created` is not specified, `app_factory` uses a randomly
# generated timestamp.
self.apps = [app_factory(name='XXX', created=days_ago(3),
status=mkt.STATUS_PENDING),
app_factory(name='YYY', created=days_ago(2),
status=mkt.STATUS_PENDING)]
self.url = reverse('reviewers.apps.logs')
patcher = mock.patch.object(settings, 'TASK_USER_ID',
self.admin_user.id)
patcher.start()
self.addCleanup(patcher.stop)
def get_user(self):
return self.reviewer_user
def make_approvals(self):
d = 1
for app in self.apps:
days_ago = self.days_ago(d)
mkt.log(mkt.LOG.REJECT_VERSION, app, app.latest_version,
user=self.get_user(), details={'comments': 'youwin'},
created=days_ago)
# Throw in a few tasks logs that shouldn't get queried.
mkt.log(mkt.LOG.REREVIEW_MANIFEST_CHANGE, app, app.latest_version,
user=self.admin_user, details={'comments': 'foo'},
created=days_ago)
d += 1
def make_an_approval(self, action, comment='youwin', user=None, app=None):
if not user:
user = self.get_user()
if not app:
app = self.apps[0]
mkt.log(action, app, app.latest_version, user=user,
details={'comments': comment})
def test_basic(self):
self.make_approvals()
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
assert doc('#log-filter button'), 'No filters.'
rows = doc('tbody tr')
logs = rows.filter(':not(.hide)')
eq_(logs.length, 2)
eq_(logs.find('.name .app-link').eq(0).attr('href'),
self.apps[0].get_url_path())
eq_(logs.find('.name .app-link').eq(1).attr('href'),
self.apps[1].get_url_path())
eq_(rows.filter('.hide').eq(0).text(), 'youwin')
def test_search_app_soft_deleted(self):
self.make_approvals()
self.apps[0].update(status=mkt.STATUS_DELETED)
res = self.client.get(self.url)
eq_(res.status_code, 200)
doc = pq(res.content)
all_reviews = [d.attrib.get('data-addonid')
for d in doc('#log-listing tbody tr')]
assert str(self.apps[0].pk) in all_reviews, (
'Soft deleted review did not show up in listing')
def test_xss(self):
a = self.apps[0]
a.name = '<script>alert("xss")</script>'
a.save()
mkt.log(mkt.LOG.REJECT_VERSION, a, a.latest_version,
user=self.get_user(), details={'comments': 'xss!'})
r = self.client.get(self.url)
eq_(r.status_code, 200)
inner_html = pq(r.content)('#log-listing tbody td').eq(1).html()
assert '<script>' in inner_html
assert '<script>' not in inner_html
def test_end_filter(self):
self.make_approvals()
date = time.strftime('%Y-%m-%d')
r = self.client.get(self.url, dict(end=date))
eq_(r.status_code, 200)
doc = pq(r.content)('#log-listing tbody')
eq_(doc('tr:not(.hide)').length, 2)
eq_(doc('tr.hide').eq(0).text(), 'youwin')
def test_end_filter_wrong(self):
self.make_approvals()
r = self.client.get(self.url, dict(end='wrong!'))
eq_(r.status_code, 200)
eq_(pq(r.content)('
def test_search_comment_exists(self):
self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, comment='hello')
r = self.client.get(self.url, dict(search='hello'))
eq_(r.status_code, 200)
eq_(pq(r.content)('
def test_search_comment_doesnt_exist(self):
self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, comment='hello')
r = self.client.get(self.url, dict(search='bye'))
eq_(r.status_code, 200)
eq_(pq(r.content)('.no-results').length, 1)
def test_search_author_exists(self):
self.make_approvals()
user = UserProfile.objects.get(email='regular@mozilla.com')
self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, user=user, comment='hi')
r = self.client.get(self.url, dict(search='regular'))
eq_(r.status_code, 200)
rows = pq(r.content)('
eq_(rows.filter(':not(.hide)').length, 1)
eq_(rows.filter('.hide').eq(0).text(), 'hi')
def test_search_author_doesnt_exist(self):
self.make_approvals()
user = UserProfile.objects.get(email='editor@mozilla.com')
self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, user=user)
r = self.client.get(self.url, dict(search='wrong'))
eq_(r.status_code, 200)
eq_(pq(r.content)('.no-results').length, 1)
def test_search_addon_exists(self):
self.make_approvals()
app = self.apps[0]
r = self.client.get(self.url, dict(search=app.name))
eq_(r.status_code, 200)
tr = pq(r.content)('
eq_(tr.length, 1)
eq_(tr.siblings('.comments').text(), 'youwin')
def test_search_addon_by_slug_exists(self):
app = self.apps[0]
app.app_slug = 'a-fox-was-sly'
app.save()
self.make_approvals()
r = self.client.get(self.url, dict(search='fox'))
eq_(r.status_code, 200)
tr = pq(r.content)('
eq_(tr.length, 1)
eq_(tr.siblings('.comments').text(), 'youwin')
def test_search_addon_doesnt_exist(self):
self.make_approvals()
r = self.client.get(self.url, dict(search='zzz'))
eq_(r.status_code, 200)
eq_(pq(r.content)('.no-results').length, 1)
@mock.patch('mkt.developers.models.ActivityLog.arguments', new=mock.Mock)
def test_addon_missing(self):
self.make_approvals()
r = self.client.get(self.url)
eq_(pq(r.content)('
'App has been deleted.')
def test_request_info_logs(self):
self.make_an_approval(mkt.LOG.REQUEST_INFORMATION)
r = self.client.get(self.url)
eq_(pq(r.content)('
'More information requested')
def test_escalate_logs(self):
self.make_an_approval(mkt.LOG.ESCALATE_MANUAL)
r = self.client.get(self.url)
eq_(pq(r.content)('
'Reviewer escalation')
def test_no_double_encode(self):
version = self.apps[0].latest_version
version.update(version='<foo>')
self.make_an_approval(mkt.LOG.ESCALATE_MANUAL)
r = self.client.get(self.url)
assert '<foo>' in pq(r.content)('
'Double-encoded string was found in reviewer log.')
class TestMotd(AppReviewerTest, AccessMixin):
    """The reviewer message-of-the-day page: read access for reviewers,
    write access only with the AppReviewerMOTD:Edit permission."""

    def setUp(self):
        super(TestMotd, self).setUp()
        self.url = reverse('reviewers.apps.motd')
        self.key = u'mkt_reviewers_motd'
        set_config(self.key, u'original value')

    def test_perms_not_editor(self):
        """Anonymous users are redirected to login; plain users get 403."""
        self.client.logout()
        req = self.client.get(self.url, follow=True)
        self.assert3xx(req, '%s?to=%s' % (reverse('users.login'), self.url))
        self.client.login('regular@mozilla.com')
        eq_(self.client.get(self.url).status_code, 403)

    def test_perms_not_motd(self):
        # Any type of reviewer can see the MOTD.
        self.login_as_editor()
        req = self.client.get(self.url)
        eq_(req.status_code, 200)
        # Without the edit permission there is no form to post.
        eq_(req.context['form'], None)
        # No redirect means it didn't save.
        eq_(self.client.post(self.url, dict(motd='motd')).status_code, 200)
        eq_(get_config(self.key), u'original value')

    def test_motd_change(self):
        """With AppReviewerMOTD:Edit the MOTD can be updated."""
        user = self.reviewer_user
        self.grant_permission(user, 'AppReviewerMOTD:Edit')
        self.login_as_editor()
        req = self.client.get(self.url)
        eq_(req.status_code, 200)
        eq_(req.context['form'].initial['motd'], u'original value')
        # An empty MOTD is rejected by form validation.
        req = self.client.post(self.url, dict(motd=''))
        eq_(req.status_code, 200)
        # NOTE(review): the selector below is truncated in this copy of
        # the file (unterminated string literal) — restore from VCS.
        eq_(pq(req.content)('
            'This field is required.')
        # A real post now.
        req = self.client.post(self.url, dict(motd='new motd'))
        self.assert3xx(req, self.url)
        eq_(get_config(self.key), u'new motd')
class TestReviewAppComm(AppReviewerTest, AttachmentManagementMixin,
                        TestReviewMixin, TestedonManagementMixin):
    """Communication-dashboard integration: each review action should
    create the matching comm-thread note type and send the expected
    emails."""

    def setUp(self):
        super(TestReviewAppComm, self).setUp()
        self.app = app_factory(rated=True, status=mkt.STATUS_PENDING,
                               mozilla_contact='contact@mozilla.com')
        self.app.addonuser_set.create(user=user_factory(email='steamcube'))
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
        self.mozilla_contact = 'contact@mozilla.com'

    def _post(self, data, queue='pending'):
        # Post a review action and expect a redirect back to the queue.
        res = self.client.post(self.url, data)
        self.assert3xx(res, reverse('reviewers.apps.queue_%s' % queue))

    def _get_note(self):
        # Exactly one thread with exactly one note is expected.
        eq_(self.app.threads.count(), 1)
        thread = self.app.threads.all()[0]
        eq_(thread.notes.count(), 1)
        return thread.notes.all()[0]

    def test_email_cc(self):
        """Everyone already on the thread is CC'd on review emails."""
        poster = user_factory()
        thread, note = create_comm_note(
            self.app, self.app.latest_version, poster, 'lgtm')
        data = {'action': 'public', 'comments': 'gud jerb'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self._post(data)
        # Test emails.
        self._check_email_dev_and_contact(None, outbox_len=5)
        # Some person who joined the thread.
        self._check_email(
            self._get_mail(poster.email), 'Approved', to=[poster.email])

    def test_approve(self):
        """Approval creates an APPROVAL note and notifies dev/contact."""
        data = {'action': 'public', 'comments': 'gud jerb'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self._post(data)
        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.APPROVAL)
        eq_(note.body, 'gud jerb')
        # Test emails.
        self._check_email_dev_and_contact(None)

    def test_reject(self):
        """Rejection creates a REJECTION note and notifies dev/contact."""
        data = {'action': 'reject', 'comments': 'rubesh'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self._post(data)
        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.REJECTION)
        eq_(note.body, 'rubesh')
        # Test emails.
        self._check_email_dev_and_contact(None)

    def test_info(self):
        """Requesting info creates a MORE_INFO_REQUIRED note."""
        data = {'action': 'info', 'comments': 'huh'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self._post(data)
        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.MORE_INFO_REQUIRED)
        eq_(note.body, 'huh')
        # Test emails.
        self._check_email_dev_and_contact(None)

    def test_escalate(self):
        """Escalation notifies the senior reviewer and the developer."""
        data = {'action': 'escalate', 'comments': 'soup her man'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self._post(data)
        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.ESCALATION)
        eq_(note.body, 'soup her man')
        # Test emails.
        eq_(len(mail.outbox), 2)
        self._check_email(  # Senior reviewer.
            self._get_mail(self.snr_reviewer_user.email), 'Escalated',
            to=[self.snr_reviewer_user.email])
        self._check_email(self._get_mail('steamcube'), 'Escalated')

    def test_comment(self):
        """A private reviewer comment emails only the Mozilla contact."""
        data = {'action': 'comment', 'comments': 'huh'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self._post(data)
        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.REVIEWER_COMMENT)
        eq_(note.body, 'huh')
        # Test emails.
        eq_(len(mail.outbox), 1)
        self._check_email(mail.outbox[0], 'Private reviewer comment',
                          to=[self.mozilla_contact])

    def test_disable(self):
        """Disabling (admin only) creates a DISABLED note."""
        self.login_as_admin()
        data = {'action': 'disable', 'comments': 'u dun it'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self._post(data)
        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.DISABLED)
        eq_(note.body, 'u dun it')
        # Test emails.
        self._check_email_dev_and_contact(None)

    def test_attachments(self):
        """Uploaded attachments end up on the created note."""
        data = {'action': 'comment', 'comments': 'huh'}
        data.update(self._attachment_management_form(num=2))
        data.update(self._attachments(num=2))
        data.update(self._testedon_management_form())
        self._post(data)
        # Test attachments.
        note = self._get_note()
        eq_(note.attachments.count(), 2)

    def test_tested_on_one(self):
        """One tested-on row is appended to the note body."""
        data = {'action': 'reject', 'comments': 'rubesh'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form(num=1))
        data.update(self._platforms(1))
        self._post(data)
        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.REJECTION)
        eq_(note.body, u'rubesh\n\n'
                       u'Tested on \xd0esktop platform on PC with version 34')

    def test_tested_on_two(self):
        """Multiple tested-on rows are joined with semicolons."""
        data = {'action': 'reject', 'comments': 'rubesh'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form(num=2))
        data.update(self._platforms(2))
        self._post(data)
        # Test notes.
        note = self._get_note()
        eq_(note.note_type, comm.REJECTION)
        eq_(note.body, u'rubesh\n\n'
                       u'Tested on \xd0esktop platform on PC with version 34; '
                       u'FirefoxOS platform on ZT\xc8 Open with version 1.3<')
class TestModeratedQueue(mkt.site.tests.TestCase, AccessMixin):
    """Moderation queue for flagged user reviews: skip/keep/delete
    actions, counts and deleted-app handling."""

    def setUp(self):
        super(TestModeratedQueue, self).setUp()
        self.app = app_factory()
        self.moderator_user = user_factory(email='moderator')
        self.grant_permission(self.moderator_user, 'Apps:ModerateReview')
        user_factory(email='regular')
        user1 = user_factory()
        user2 = user_factory()
        self.url = reverse('reviewers.apps.queue_moderated')
        # Two flagged reviews: one flagged as spam, one as support.
        self.review1 = Review.objects.create(addon=self.app, body='body',
                                             user=user1, rating=3,
                                             editorreview=True)
        ReviewFlag.objects.create(review=self.review1, flag=ReviewFlag.SPAM,
                                  user=user1)
        self.review2 = Review.objects.create(addon=self.app, body='body',
                                             user=user2, rating=4,
                                             editorreview=True)
        ReviewFlag.objects.create(review=self.review2, flag=ReviewFlag.SUPPORT,
                                  user=user2)
        self.login(self.moderator_user)

    def _post(self, action):
        # Submit the moderation formset with `action` on the first form.
        ctx = self.client.get(self.url).context
        data_formset = formset(initial(ctx['reviews_formset'].forms[0]))
        data_formset['form-0-action'] = action
        res = self.client.post(self.url, data_formset)
        self.assert3xx(res, self.url)

    def _get_logs(self, action):
        return ActivityLog.objects.filter(action=action.id)

    def test_anonymous_flagger(self):
        """Flags without a user render as 'anonymous'."""
        ReviewFlag.objects.all()[0].update(user=None)
        ReviewFlag.objects.all()[1].delete()
        res = self.client.get(self.url)
        txt = pq(res.content)('.reviews-flagged-reasons li div span').text()
        teststring = u'Flagged by an anonymous user on'
        ok_(txt.startswith(teststring),
            '"%s" doesn\'t start with "%s"' % (txt, teststring))

    def test_setup(self):
        eq_(Review.objects.filter(editorreview=True).count(), 2)
        eq_(ReviewFlag.objects.filter(flag=ReviewFlag.SPAM).count(), 1)
        res = self.client.get(self.url)
        doc = pq(res.content)('#reviews-flagged')
        # The 'skip' radio is checked by default.
        eq_(doc('.reviewers-desktop #id_form-0-action_1:checked').length, 1)

    def test_skip(self):
        """Skipping leaves both reviews in the queue."""
        self._post(mkt.ratings.REVIEW_MODERATE_SKIP)
        res = self.client.get(self.url)
        eq_(len(res.context['page'].object_list), 2)

    def test_delete(self):
        """Deleting removes the review and logs DELETE_REVIEW."""
        self._post(mkt.ratings.REVIEW_MODERATE_DELETE)
        res = self.client.get(self.url)
        eq_(len(res.context['page'].object_list), 1)
        eq_(self._get_logs(mkt.LOG.DELETE_REVIEW).count(), 1)

    def test_keep(self):
        """Keeping clears the flag and logs APPROVE_REVIEW."""
        self._post(mkt.ratings.REVIEW_MODERATE_KEEP)
        res = self.client.get(self.url)
        eq_(len(res.context['page'].object_list), 1)
        eq_(self._get_logs(mkt.LOG.APPROVE_REVIEW).count(), 1)

    def test_no_reviews(self):
        Review.objects.all().delete()
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        eq_(pq(res.content)('#reviews-flagged .no-results').length, 1)

    def test_queue_count(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('.tabnav li a')[0].text, u'Moderated Reviews (2)')

    def test_queue_count_reviewer_and_moderator(self):
        # With Apps:Review too, all reviewer tabs are shown.
        self.grant_permission(self.moderator_user, 'Apps:Review')
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (0)')
        eq_(links[1].text, u'Re-reviews (0)')
        eq_(links[2].text, u'Updates (0)')
        eq_(links[3].text, u'Reviewing (0)')
        eq_(links[4].text, u'Homescreens (0)')
        eq_(links[5].text, u'Moderated Reviews (2)')

    def test_deleted_app(self):
        "Test that a deleted app doesn't break the queue."
        self.app.delete()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)

    def test_queue_count_deleted_app(self):
        """Reviews of deleted apps drop out of the count."""
        self.app.delete()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('.tabnav li a')[0].text, u'Moderated Reviews (0)')
class AbuseQueueMixin(object):
def _setUp(self):
self.abuseviewer_user = user_factory(email='abuser')
self.grant_permission(self.abuseviewer_user, self.perm)
self.login(self.abuseviewer_user)
user_factory(email='regular')
self.url = reverse(self.view_name)
def _post(self, action, form_index=0):
ctx = self.client.get(self.url).context
data_formset = formset(initial(ctx['abuse_formset'].forms[0]))
data_formset['form-%s-action' % (form_index)] = action
res = self.client.post(self.url, data_formset)
self.assert3xx(res, self.url)
def _get_logs(self, action):
return ActivityLog.objects.filter(action=action.id)
def test_anonymous_flagger(self):
AbuseReport.objects.all()[0].update(reporter=None)
res = self.client.get(self.url)
txt = pq(res.content)('.abuse-reports-reports li div span').text()
teststring = u'Submitted by an anonymous user on'
ok_(txt.startswith(teststring),
'"%s" doesn\'t start with "%s"' % (txt, teststring))
def test_no_reviews(self):
AbuseReport.objects.all().delete()
res = self.client.get(self.url)
eq_(res.status_code, 200)
eq_(pq(res.content)('
def test_queue_count(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
txt = pq(r.content)('.tabnav li a')[0].text
teststring = u'Abuse Reports (2)'
ok_(txt.endswith(teststring),
'"%s" doesn\'t start with "%s"' % (txt, teststring))
def test_skip(self):
self._post(mkt.abuse.forms.ABUSE_REPORT_SKIP)
res = self.client.get(self.url)
eq_(len(res.context['page'].object_list), 2)
def test_first_read(self):
# Mark read the first xxx's reports, which leaves one.
self._post(mkt.abuse.forms.ABUSE_REPORT_READ)
res = self.client.get(self.url)
eq_(len(res.context['page'].object_list), 1)
eq_(self._get_logs(self.log_const).count(), 2)
eq_(AbuseReport.objects.filter(read=False).count(), 1)
def test_first_flag(self):
self._post(mkt.abuse.forms.ABUSE_REPORT_FLAG)
res = self.client.get(self.url)
# Check one is left.
eq_(len(res.context['page'].object_list), 1)
# Check the object is flagged.
eq_(RereviewQueue.objects.count(), 1)
# As flagging marks read too, there should be 2 log entries.
eq_(self._get_logs(self.log_const).count(), 2)
# Check the remaining abuse report remains unread.
eq_(AbuseReport.objects.filter(read=False).count(), 1)
def test_xss(self):
xss = '<script>alert("xss")</script>'
AbuseReport.objects.all()[0].update(message=xss)
res = self.client.get(self.url)
eq_(res.status_code, 200)
tbody = pq(res.content)(
'
assert '<script>' in tbody
assert '<script>' not in tbody
def test_deleted_website(self):
AbuseReport.objects.all()[0].object.delete()
r = self.client.get(self.url)
eq_(r.status_code, 200)
txt = pq(r.content)('.tabnav li a')[0].text
teststring = u'Abuse Reports (1)'
ok_(txt.endswith(teststring),
'"%s" doesn\'t start with "%s"' % (txt, teststring))
class TestAppAbuseQueue(mkt.site.tests.TestCase, AccessMixin,
                        AbuseQueueMixin):
    """Abuse-report queue for apps, driven by the shared mixin tests."""
    perm = 'Apps:ReadAbuse'
    view_name = 'reviewers.apps.queue_abuse'
    log_const = mkt.LOG.APP_ABUSE_MARKREAD

    def setUp(self):
        super(TestAppAbuseQueue, self).setUp()
        self._setUp()

    @classmethod
    def setUpTestData(cls):
        # Two reported apps plus two decoys that must stay out of the
        # queue.
        reported, other_reported = app_factory(), app_factory()
        app_factory()
        app_factory()
        reporter_a, reporter_b = user_factory(), user_factory()
        for reporter, ip, target, text in (
                (reporter_a, '123.45.67.89', reported, 'bad'),
                (reporter_b, '123.01.67.89', reported, 'terrible'),
                (reporter_a, '123.01.02.89', other_reported, 'the worst')):
            AbuseReport.objects.create(reporter=reporter, ip_address=ip,
                                       addon=target, message=text)

    def test_setup(self):
        eq_(AbuseReport.objects.filter(read=False).count(), 3)
        eq_(AbuseReport.objects.filter(addon=Webapp.objects.all()[0]).count(),
            2)
        response = self.client.get(self.url)
        # Two distinct apps show up in the queue.
        eq_(len(response.context['page'].object_list), 2)

    def test_queue_count_reviewer_and_moderator(self):
        # With Apps:Review too, every reviewer tab is shown.
        self.grant_permission(self.abuseviewer_user, 'Apps:Review')
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        links = pq(response.content)('.tabnav li a')
        for position, label in enumerate([u'Apps (0)', u'Re-reviews (0)',
                                          u'Updates (0)', u'Reviewing (0)',
                                          u'Homescreens (0)',
                                          u'Abuse Reports (2)']):
            eq_(links[position].text, label)
class TestWebsiteAbuseQueue(mkt.site.tests.TestCase, AccessMixin,
                            AbuseQueueMixin):
    """Abuse-report queue for websites, driven by the shared mixin."""
    perm = 'Websites:ReadAbuse'
    view_name = 'reviewers.websites.queue_abuse'
    log_const = mkt.LOG.WEBSITE_ABUSE_MARKREAD

    def setUp(self):
        super(TestWebsiteAbuseQueue, self).setUp()
        self._setUp()

    @classmethod
    def setUpTestData(cls):
        # Two reported sites plus two decoys that must stay out of the
        # queue.
        reported, other_reported = website_factory(), website_factory()
        website_factory()
        website_factory()
        reporter_a, reporter_b = user_factory(), user_factory()
        for reporter, ip, target, text in (
                (reporter_a, '123.45.67.89', reported, 'bad'),
                (reporter_b, '123.01.67.89', reported, 'terrible'),
                (reporter_a, '123.01.02.89', other_reported, 'the worst')):
            AbuseReport.objects.create(reporter=reporter, ip_address=ip,
                                       website=target, message=text)
        cls.website1 = reported

    def test_setup(self):
        eq_(AbuseReport.objects.filter(read=False).count(), 3)
        eq_(AbuseReport.objects.filter(website=self.website1).count(), 2)
        response = self.client.get(self.url)
        # Two distinct websites show up in the queue.
        eq_(len(response.context['page'].object_list), 2)

    def test_first_flag(self):
        # Flagging for re-review does not apply to websites; skip the
        # mixin's version of this test.
        raise SkipTest()
class TestGetSigned(BasePackagedAppTest, mkt.site.tests.TestCase):
    """Serving the reviewer-signed package of a packaged app's version."""
    def setUp(self):
        super(TestGetSigned, self).setUp()
        self.url = reverse('reviewers.signed', args=[self.app.app_slug,
                                                     self.version.pk])
        self.grant_permission(user_factory(email='editor'), 'Apps:Review')
        self.login('editor@mozilla.com')
    def test_not_logged_in(self):
        self.client.logout()
        self.assertLoginRequired(self.client.get(self.url))
    def test_not_reviewer(self):
        self.client.logout()
        self.login(user_factory())
        eq_(self.client.get(self.url).status_code, 403)
    @override_settings(
        DEFAULT_FILE_STORAGE='mkt.site.storage_utils.LocalFileStorage')
    @mock.patch('lib.crypto.packaged.sign')
    def test_reviewer_sign_arguments_local(self, sign_mock):
        # With local storage the signed file is served directly (HTTP 200).
        sign_mock.side_effect = mock_sign
        self.setup_files()
        res = self.client.get(self.url)
        sign_mock.assert_called_with(self.version.pk, reviewer=True)
        eq_(res.status_code, 200)
    @override_settings(
        DEFAULT_FILE_STORAGE='mkt.site.storage_utils.S3BotoPrivateStorage')
    @mock.patch('lib.crypto.packaged.sign')
    def test_reviewer_sign_arguments_storage(self, sign_mock):
        # With remote (S3) storage we redirect to the private storage URL.
        sign_mock.side_effect = mock_sign
        self.setup_files()
        res = self.client.get(self.url)
        sign_mock.assert_called_with(self.version.pk, reviewer=True)
        self.assert3xx(res, private_storage.url(
            self.file.signed_reviewer_file_path))
    @mock.patch.object(packaged, 'sign', mock_sign)
    def test_reviewer(self):
        # x-sendfile headers are only produced when XSENDFILE is enabled.
        if not settings.XSENDFILE:
            raise SkipTest()
        self.setup_files()
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        file_ = self.app.current_version.all_files[0]
        eq_(res['x-sendfile'], file_.signed_reviewer_file_path)
        eq_(res['etag'], '"%s"' % file_.hash.split(':')[-1])
    def test_not_packaged(self):
        self.app.update(is_packaged=False)
        res = self.client.get(self.url)
        eq_(res.status_code, 404)
    def test_wrong_version(self):
        self.url = reverse('reviewers.signed', args=[self.app.app_slug, 0])
        res = self.client.get(self.url)
        eq_(res.status_code, 404)
    def test_token_good(self):
        if not settings.XSENDFILE:
            raise SkipTest()
        token = Token(data={'app_id': self.app.id})
        token.save()
        self.setup_files()
        self.client.logout()
        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 200)
        file_ = self.app.current_version.all_files[0]
        eq_(res['x-sendfile'], file_.signed_reviewer_file_path)
        eq_(res['etag'], '"%s"' % file_.hash.split(':')[-1])
        # Tokens are single-use: a second request must be refused.
        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 403)
    def test_token_bad(self):
        # A token whose payload doesn't match the app is rejected.
        token = Token(data={'app_id': 'abcdef'})
        token.save()
        self.setup_files()
        self.client.logout()
        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 403)
class TestMiniManifestView(BasePackagedAppTest):
    """Reviewer-facing mini-manifest for a packaged app's version."""
    def setUp(self):
        super(TestMiniManifestView, self).setUp()
        self.app = Webapp.objects.get(pk=337141)
        self.app.update(is_packaged=True)
        self.version = self.app.versions.latest()
        self.file = self.version.all_files[0]
        self.file.update(filename='mozball.zip')
        self.url = reverse('reviewers.mini_manifest', args=[self.app.app_slug,
                                                            self.version.pk])
        self.grant_permission(user_factory(email='editor'), 'Apps:Review')
        self.login('editor@mozilla.com')
    def test_not_logged_in(self):
        self.client.logout()
        self.assertLoginRequired(self.client.get(self.url))
    def test_not_reviewer(self):
        self.client.logout()
        self.login(user_factory())
        eq_(self.client.get(self.url).status_code, 403)
    def test_not_packaged(self):
        self.app.update(is_packaged=False)
        res = self.client.get(self.url)
        eq_(res.status_code, 404)
    def test_wrong_version(self):
        url = reverse('reviewers.mini_manifest', args=[self.app.app_slug, 0])
        res = self.client.get(url)
        eq_(res.status_code, 404)
    def test_reviewer(self):
        # The mini-manifest mirrors the real manifest but points
        # package_path at the reviewer-signed package.
        self.setup_files()
        manifest = self.app.get_manifest_json(self.file)
        res = self.client.get(self.url)
        eq_(res['Content-type'], MANIFEST_CONTENT_TYPE)
        data = json.loads(res.content)
        eq_(data['name'], manifest['name'])
        eq_(data['developer']['name'], 'Mozilla Marketplace')
        eq_(data['package_path'],
            absolutify(reverse('reviewers.signed',
                               args=[self.app.app_slug, self.version.id])))
    def test_rejected(self):
        # Rejected sets file.status to DISABLED and moves to a guarded path.
        self.setup_files()
        self.app.update(status=mkt.STATUS_REJECTED)
        self.file.update(status=mkt.STATUS_DISABLED)
        manifest = self.app.get_manifest_json(self.file)
        res = self.client.get(self.url)
        eq_(res['Content-type'], MANIFEST_CONTENT_TYPE)
        data = json.loads(res.content)
        eq_(data['name'], manifest['name'])
        eq_(data['developer']['name'], 'Mozilla Marketplace')
        eq_(data['package_path'],
            absolutify(reverse('reviewers.signed',
                               args=[self.app.app_slug,
                                     self.version.id])))
    def test_minifest_name_matches_manifest_name(self):
        # Renaming the app must not desynchronize the mini-manifest name.
        self.setup_files()
        self.app.name = 'XXX'
        self.app.save()
        manifest = self.app.get_manifest_json(self.file)
        res = self.client.get(self.url)
        data = json.loads(res.content)
        eq_(data['name'], manifest['name'])
    def test_token_good(self):
        token = Token(data={'app_id': self.app.id})
        token.save()
        self.setup_files()
        self.client.logout()
        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 200)
        eq_(res['Content-type'], MANIFEST_CONTENT_TYPE)
        data = json.loads(res.content)
        ok_('token=' in data['package_path'])
        # Test token doesn't work the 2nd time.
        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 403)
    def test_token_bad(self):
        token = Token(data={'app_id': 'abcdef'})
        token.save()
        self.setup_files()
        self.client.logout()
        res = self.client.get(urlparams(self.url, token=token.token))
        eq_(res.status_code, 403)
class TestReviewersScores(AppReviewerTest, AccessMixin):
    """Reviewer performance page: lookups by email and the empty state."""
    def setUp(self):
        super(TestReviewersScores, self).setUp()
        self.user = self.reviewer_user
        self.url = reverse('reviewers.performance', args=[self.user.email])
    def test_404(self):
        # An email that matches no profile yields a 404.
        bogus_url = reverse('reviewers.performance', args=['poop'])
        response = self.client.get(bogus_url)
        eq_(response.status_code, 404)
    def test_with_email(self):
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        eq_(response.context['profile'].id, self.user.id)
    def test_without_email(self):
        # Without an email the page defaults to the logged-in reviewer.
        response = self.client.get(reverse('reviewers.performance'))
        eq_(response.status_code, 200)
        eq_(response.context['profile'].id, self.user.id)
    def test_no_reviews(self):
        # A reviewer with no score history sees the empty-state message.
        response = self.client.get(self.url)
        eq_(response.status_code, 200)
        assert u'No review points awarded yet' in response.content
class TestQueueSort(AppReviewerTest):
    """Sorting of reviewer queues across Webapp, Version, and queue models."""
    def setUp(self):
        super(TestQueueSort, self).setUp()
        # Two pending apps with distinct ages, names, owners, device types.
        self.apps = [app_factory(name='Lillard',
                                 status=mkt.STATUS_PENDING,
                                 is_packaged=False,
                                 version_kw={'version': '1.0'},
                                 file_kw={'status': mkt.STATUS_PENDING},
                                 premium_type=mkt.ADDON_FREE),
                     app_factory(name='Batum',
                                 status=mkt.STATUS_PENDING,
                                 is_packaged=True,
                                 version_kw={'version': '1.0',
                                             'has_editor_comment': True,
                                             'has_info_request': True},
                                 file_kw={'status': mkt.STATUS_PENDING},
                                 premium_type=mkt.ADDON_PREMIUM)]
        # apps[1] is older than apps[0], so default (created/asc) puts it first.
        self.apps[0].update(created=self.days_ago(2))
        self.apps[1].update(created=self.days_ago(5))
        self.apps[0].addonuser_set.create(user=user_factory(email='XXX'))
        self.apps[1].addonuser_set.create(user=user_factory(email='illmatic'))
        self.apps[0].addondevicetype_set.create(
            device_type=mkt.DEVICE_DESKTOP.id)
        self.apps[1].addondevicetype_set.create(
            device_type=mkt.DEVICE_MOBILE.id)
        self.url = reverse('reviewers.apps.queue_pending')
    def test_do_sort_webapp(self):
        rf = RequestFactory()
        qs = Webapp.objects.all()
        # An invalid sort key falls back to the default (created, ascending).
        req = rf.get(self.url, {'sort': 'invalidsort', 'order': 'dontcare'})
        sorted_qs = ReviewersQueuesHelper(req).sort(qs)
        eq_(list(sorted_qs), [self.apps[1], self.apps[0]])
        req = rf.get(self.url, {'sort': 'created', 'order': 'desc'})
        sorted_qs = ReviewersQueuesHelper(req).sort(qs)
        eq_(list(sorted_qs), [self.apps[0], self.apps[1]])
        # 'Batum' < 'Lillard' alphabetically.
        req = rf.get(self.url, {'sort': 'name', 'order': 'asc'})
        sorted_qs = ReviewersQueuesHelper(req).sort(qs)
        eq_(list(sorted_qs), [self.apps[1], self.apps[0]])
        req = rf.get(self.url, {'sort': 'name', 'order': 'desc'})
        sorted_qs = ReviewersQueuesHelper(req).sort(qs)
        eq_(list(sorted_qs), [self.apps[0], self.apps[1]])
    def test_do_sort_version_nom(self):
        url = reverse('reviewers.apps.queue_pending')
        user = UserProfile.objects.get(email='editor@mozilla.com')
        version_0 = self.apps[0].versions.get()
        version_0.update(nomination=days_ago(1))
        version_1 = self.apps[1].versions.get()
        version_1.update(nomination=days_ago(2))
        # Disabled versions must be ignored by the nomination sort.
        version_factory({'status': mkt.STATUS_DISABLED}, addon=self.apps[0],
                        nomination=days_ago(10))
        version_factory({'status': mkt.STATUS_DISABLED}, addon=self.apps[1],
                        nomination=days_ago(1))
        version_factory({'status': mkt.STATUS_DISABLED}, addon=self.apps[1],
                        nomination=days_ago(20))
        req = mkt.site.tests.req_factory_factory(
            url, user=user, data={'sort': 'nomination'})
        res = queue_apps(req)
        doc = pq(res.content)
        # Desktop and mobile (hidden on desktop) alternate, so we jump by 2.
        eq_(doc('tbody tr')[0].get('data-addon'), str(version_1.addon.id))
        eq_(doc('tbody tr')[2].get('data-addon'), str(version_0.addon.id))
        req = mkt.site.tests.req_factory_factory(
            url, user=user, data={'sort': 'nomination', 'order': 'desc'})
        res = queue_apps(req)
        doc = pq(res.content)
        # Desktop and mobile (hidden on desktop) alternate, so we jump by 2.
        eq_(doc('tbody tr')[0].get('data-addon'), str(version_0.addon.id))
        eq_(doc('tbody tr')[2].get('data-addon'), str(version_1.addon.id))
    def test_do_sort_queue_object(self):
        rf = RequestFactory()
        url = reverse('reviewers.apps.queue_rereview')
        earlier_rrq = RereviewQueue.objects.create(addon=self.apps[0])
        later_rrq = RereviewQueue.objects.create(addon=self.apps[1])
        later_rrq.created += timedelta(days=1)
        later_rrq.save()
        request = rf.get(url, {'sort': 'created'})
        apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
        # Assert the order that RereviewQueue objects were created is
        # maintained.
        eq_([earlier_rrq.addon, later_rrq.addon], list(apps))
        request = rf.get(url, {'sort': 'created', 'order': 'desc'})
        apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
        eq_([later_rrq.addon, earlier_rrq.addon], list(apps))
        request = rf.get(url, {'sort': 'name', 'order': 'asc'})
        apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
        eq_([later_rrq.addon, earlier_rrq.addon], list(apps))
        request = rf.get(url, {'sort': 'name', 'order': 'desc'})
        apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
        eq_([earlier_rrq.addon, later_rrq.addon], list(apps))
    def test_sort_with_priority_review(self):
        # Set up the priority review flagged app.
        self.apps.append(app_factory(name='Foxkeh',
                                     status=mkt.STATUS_PENDING,
                                     is_packaged=False,
                                     version_kw={'version': '1.0'},
                                     file_kw={'status': mkt.STATUS_PENDING},
                                     premium_type=mkt.ADDON_FREE,
                                     priority_review=True))
        # Set up app attributes.
        self.apps[2].update(created=self.days_ago(1))
        self.apps[2].addonuser_set.create(
            user=user_factory(email='redpanda@mozilla.com'))
        self.apps[2].addondevicetype_set.create(
            device_type=mkt.DEVICE_DESKTOP.id)
        # And check it also comes out top of waiting time with Webapp model.
        rf = RequestFactory()
        qs = Webapp.objects.all()
        # Test apps are sorted by created/asc by default.
        req = rf.get(self.url, {'sort': 'invalidsort', 'order': 'dontcare'})
        sorted_qs = ReviewersQueuesHelper(req).sort(qs)
        eq_(list(sorted_qs), [self.apps[2], self.apps[1], self.apps[0]])
        # Test sorting by created, descending.
        req = rf.get(self.url, {'sort': 'created', 'order': 'desc'})
        sorted_qs = ReviewersQueuesHelper(req).sort(qs)
        eq_(list(sorted_qs), [self.apps[2], self.apps[0], self.apps[1]])
        # And with Version model.
        version_0 = self.apps[0].versions.get()
        version_0.update(nomination=days_ago(1))
        version_1 = self.apps[1].versions.get()
        version_1.update(nomination=days_ago(2))
        qs = (Version.objects.filter(
              files__status=mkt.STATUS_PENDING,
              addon__disabled_by_user=False,
              addon__status=mkt.STATUS_PENDING)
              .order_by('nomination', 'created')
              .select_related('addon', 'files').no_transforms())
        req = rf.get(self.url, {'sort': 'nomination'})
        sorted_qs = ReviewersQueuesHelper(req).sort(qs, date_sort='nomination')
        eq_(list(sorted_qs), [self.apps[2], self.apps[1], self.apps[0]])
        req = rf.get(self.url, {'sort': 'nomination', 'order': 'desc'})
        sorted_qs = ReviewersQueuesHelper(req).sort(qs, date_sort='nomination')
        eq_(list(sorted_qs), [self.apps[2], self.apps[0], self.apps[1]])
        # And with Rereview model.
        url = reverse('reviewers.apps.queue_rereview')
        earlier_rrq = RereviewQueue.objects.create(addon=self.apps[0])
        earlier_rrq.created += timedelta(days=1)
        earlier_rrq.save()
        later_rrq = RereviewQueue.objects.create(addon=self.apps[1])
        later_rrq.created += timedelta(days=2)
        later_rrq.save()
        pri_rrq = RereviewQueue.objects.create(addon=self.apps[2])
        pri_rrq.save()
        # The priority-flagged entry leads regardless of the sort order.
        request = rf.get(url, {'sort': 'created'})
        apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
        eq_([pri_rrq.addon, earlier_rrq.addon, later_rrq.addon], list(apps))
        request = rf.get(url, {'sort': 'created', 'order': 'desc'})
        apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
        eq_([pri_rrq.addon, later_rrq.addon, earlier_rrq.addon], list(apps))
class TestAppsReviewing(AppReviewerTest, AccessMixin):
    """The 'apps being reviewed' page is scoped to the current reviewer."""
    def setUp(self):
        super(TestAppsReviewing, self).setUp()
        self.url = reverse('reviewers.apps.apps_reviewing')
        self.apps = [app_factory(name='Antelope',
                                 status=mkt.STATUS_PENDING),
                     app_factory(name='Bear',
                                 status=mkt.STATUS_PENDING),
                     app_factory(name='Cougar',
                                 status=mkt.STATUS_PENDING)]
    def _view_app(self, app_id):
        # Mark an app as currently viewed by the logged-in user.
        self.client.post(reverse('reviewers.review_viewing'), {
            'addon_id': app_id})
    def test_no_apps_reviewing(self):
        res = self.client.get(self.url)
        eq_(len(res.context['apps']), 0)
    def test_apps_reviewing(self):
        self._view_app(self.apps[0].id)
        res = self.client.get(self.url)
        eq_(len(res.context['apps']), 1)
    def test_multiple_reviewers_no_cross_streams(self):
        self._view_app(self.apps[0].id)
        self._view_app(self.apps[1].id)
        res = self.client.get(self.url)
        eq_(len(res.context['apps']), 2)
        # Now view an app as another user and verify app.
        self.login('admin@mozilla.com')
        self._view_app(self.apps[2].id)
        res = self.client.get(self.url)
        eq_(len(res.context['apps']), 1)
        # Check original user again to make sure app list didn't increment.
        self.login_as_editor()
        res = self.client.get(self.url)
        eq_(len(res.context['apps']), 2)
class TestLeaderboard(AppReviewerTest):
    """Reviewer points leaderboard: ordering and level-boundary labels."""
    def setUp(self):
        super(TestLeaderboard, self).setUp()
        self.url = reverse('reviewers.leaderboard')
        mkt.set_user(self.reviewer_user)
    def _award_points(self, user, score):
        # Grant (or revoke, with a negative score) manual-review points.
        ReviewerScore.objects.create(user=user, note_key=mkt.REVIEWED_MANUAL,
                                     score=score, note='Thing.')
    def test_leaderboard_ranks(self):
        users = (self.reviewer_user,
                 self.regular_user,
                 user_factory(email='clouserw'))
        # users[0] sits just below the first level; the others just above.
        self._award_points(users[0], mkt.REVIEWED_LEVELS[0]['points'] - 1)
        self._award_points(users[1], mkt.REVIEWED_LEVELS[0]['points'] + 1)
        self._award_points(users[2], mkt.REVIEWED_LEVELS[0]['points'] + 2)
        def get_cells():
            # Flattened rows: user names interleaved with level labels.
            doc = pq(self.client.get(self.url).content.decode('utf-8'))
            cells = doc('#leaderboard > tbody > tr > .name, '
                        '#leaderboard > tbody > tr > .level')
            return [cells.eq(i).text() for i in range(0, cells.length)]
        eq_(get_cells(),
            [users[2].display_name,
             users[1].display_name,
             mkt.REVIEWED_LEVELS[0]['name'],
             users[0].display_name])
        # One more point moves users[0] above the level boundary.
        self._award_points(users[0], 1)
        eq_(get_cells(),
            [users[2].display_name,
             users[1].display_name,
             users[0].display_name,
             mkt.REVIEWED_LEVELS[0]['name']])
        # Drop users[0] back below and push users[2] into the second level.
        self._award_points(users[0], -1)
        self._award_points(users[2], (mkt.REVIEWED_LEVELS[1]['points'] -
                                      mkt.REVIEWED_LEVELS[0]['points']))
        eq_(get_cells(),
            [users[2].display_name,
             mkt.REVIEWED_LEVELS[1]['name'],
             users[1].display_name,
             mkt.REVIEWED_LEVELS[0]['name'],
             users[0].display_name])
class TestReviewPage(mkt.site.tests.TestCase):
    """Rendering of the review page's action buttons."""
    def setUp(self):
        super(TestReviewPage, self).setUp()
        self.app = app_factory(status=mkt.STATUS_PENDING)
        self.reviewer = user_factory(email='editor')
        self.grant_permission(self.reviewer, 'Apps:Review')
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
    def test_status_null_disable_approve_btn(self):
        # An incomplete (STATUS_NULL) app can be rejected but not approved.
        self.app.update(status=mkt.STATUS_NULL)
        req = req_factory_factory(self.url, user=self.reviewer)
        res = app_review(req, app_slug=self.app.app_slug)
        doc = pq(res.content)
        assert (doc('#review-actions input[value=public]')
                .parents('li').hasClass('disabled'))
        assert not (doc('#review-actions input[value=reject]')
                    .parents('li').hasClass('disabled'))
class TestAbusePage(AppReviewerTest):
    """The abuse page must HTML-escape attacker-controlled app names."""
    def setUp(self):
        super(TestAbusePage, self).setUp()
        self.app = app_factory(name=u'My app é <script>alert(5)</script>')
        self.url = reverse('reviewers.apps.review.abuse',
                           args=[self.app.app_slug])
        AbuseReport.objects.create(addon=self.app, message=self.app.name)
    def testXSS(self):
        from django.utils.encoding import smart_unicode
        from jinja2.utils import escape
        content = smart_unicode(self.client.get(self.url).content)
        # The raw name must not appear; its HTML-escaped form must.
        ok_(not unicode(self.app.name) in content)
        ok_(unicode(escape(self.app.name)) in content)
class TestReviewTranslate(RestOAuth):
    """Review-translation endpoint: redirect path and AJAX (API) path."""
    def setUp(self):
        super(TestReviewTranslate, self).setUp()
        self.grant_permission(self.profile, 'Apps:ModerateReview')
        self.create_switch('reviews-translate')
        user = user_factory(email='diego')
        app = app_factory(app_slug='myapp~-_')
        self.review = app.reviews.create(title=u'yes', body=u'oui',
                                         addon=app, user=user,
                                         editorreview=True, rating=4)
    def test_regular_call(self):
        # Non-AJAX requests redirect to the Google Translate site.
        res = self.client.get(reverse('reviewers.review_translate',
                                      args=[self.review.addon.app_slug,
                                            self.review.id, 'fr']))
        self.assert3xx(res, 'https://translate.google.com/#auto/fr/oui', 302)
    @mock.patch('mkt.reviewers.views.requests')
    def test_ajax_call(self, requests):
        # Mock a successful translation-API response.
        response = mock.Mock(status_code=200)
        response.json.return_value = {
            u'data': {
                u'translations': [{
                    u'translatedText': u'oui',
                    u'detectedSourceLanguage': u'fr'
                }]
            }
        }
        requests.get.return_value = response
        review = self.review
        url = reverse('reviewers.review_translate',
                      args=[review.addon.app_slug, review.id, 'fr'])
        res = self.client.get(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        eq_(res.status_code, 200)
        eq_(res.content, '{"body": "oui", "title": "oui"}')
    @mock.patch('mkt.reviewers.views.requests')
    def test_invalid_api_key(self, requests):
        # Mock the translation API rejecting our key; the error propagates.
        response = mock.Mock(status_code=400)
        response.json.return_value = {
            'error': {
                'code': 400,
                'errors': [
                    {'domain': 'usageLimits',
                     'message': 'Bad Request',
                     'reason': 'keyInvalid'}
                ],
                'message': 'Bad Request'
            }
        }
        requests.get.return_value = response
        review = self.review
        res = self.client.get(
            reverse('reviewers.review_translate',
                    args=[review.addon.app_slug, review.id, 'fr']),
            HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        eq_(res.status_code, 400)
class TestReviewHistory(mkt.site.tests.TestCase, CommTestMixin):
    """The review-history widget points at the correct comm API URL."""
    def setUp(self):
        super(TestReviewHistory, self).setUp()
        self.app = self.addon = app_factory()
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
        self.grant_permission(user_factory(email='editor'), 'Apps:Review')
        self.login('editor@mozilla.com')
        self._thread_factory()
    def test_comm_url(self):
        r = self.client.get(self.url)
        doc = pq(r.content)
        eq_(doc('#history .item-history').attr('data-comm-app-url'),
            reverse('api-v2:comm-app-list', args=[self.addon.app_slug]) +
            '?limit=1&serializer=simple')
    def test_comm_url_multiple_thread(self):
        # A second thread bumps the 'limit' query parameter.
        self._thread_factory()
        r = self.client.get(self.url)
        doc = pq(r.content)
        eq_(doc('#history .item-history').attr('data-comm-app-url'),
            reverse('api-v2:comm-app-list', args=[self.addon.app_slug]) +
            '?limit=2&serializer=simple')
    def test_comm_url_no_encode(self):
        # Non-ASCII slugs must not get double-encoded in the URL.
        self.addon = app_factory(app_slug='台北')
        self._thread_factory()
        url = reverse('reviewers.apps.review', args=[self.addon.app_slug])
        r = self.client.get(url)
        doc = pq(r.content)
        eq_(doc('#history .item-history').attr('data-comm-app-url'),
            reverse('api-v2:comm-app-list', args=[self.addon.app_slug]) +
            '?limit=1&serializer=simple')
class ModerateLogTest(mkt.site.tests.TestCase):
    """Shared fixture: a flagged review plus moderator/admin/regular users."""
    def setUp(self):
        super(ModerateLogTest, self).setUp()
        self.review = Review.objects.create(addon=app_factory(), body='body',
                                            user=user_factory(), rating=4,
                                            editorreview=True)
        # Tests run logged in as the moderator by default.
        self.moderator_user = user_factory(email='moderator')
        self.grant_permission(self.moderator_user, 'Apps:ModerateReview')
        mkt.set_user(self.moderator_user)
        self.login(self.moderator_user)
        self.admin_user = user_factory(email='admin')
        self.grant_permission(self.admin_user, '*:*')
        user_factory(email='regular')
class TestModerateLog(ModerateLogTest, AccessMixin):
    """Moderation event log: date/action filters, empty state, XSS escaping."""
    def setUp(self):
        super(TestModerateLog, self).setUp()
        self.url = reverse('reviewers.apps.moderatelog')
    def test_log(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
    def test_start_filter(self):
        r = self.client.get(self.url, dict(start='2011-01-01'))
        eq_(r.status_code, 200)
    def test_enddate_filter(self):
        mkt.log(mkt.LOG.APPROVE_REVIEW, self.review, self.review.addon,
                created=datetime(2011, 1, 1))
        r = self.client.get(self.url, dict(end='2011-01-01'))
        eq_(r.status_code, 200)
        eq_(pq(r.content)('tbody td').eq(0).text(), 'Jan 1, 2011, 12:00:00 AM')
    def test_action_filter(self):
        for i in xrange(2):
            mkt.log(mkt.LOG.APPROVE_REVIEW, self.review.addon, self.review)
            mkt.log(mkt.LOG.DELETE_REVIEW, self.review.addon, self.review)
        r = self.client.get(self.url, dict(search='deleted'))
        # Only the two deletion events match the 'deleted' filter.
        eq_(pq(r.content)('tbody tr').length, 2)
    def test_no_results(self):
        r = self.client.get(self.url, dict(end='2004-01-01'))
        no_results = 'No events found for this period.'
        assert no_results in r.content, 'Expected no results to be found.'
    def test_display_name_xss(self):
        mkt.log(mkt.LOG.APPROVE_REVIEW, self.review, self.review.addon,
                user=self.admin_user)
        self.admin_user.display_name = '<script>alert("xss")</script>'
        self.admin_user.save()
        assert '<script>' in self.admin_user.display_name, (
            'Expected <script> to be in display name')
        r = self.client.get(self.url)
        pq(r.content)('#log-listing tbody td').eq(1).html()
        # Fix: the two assertions were identical but negated, so they could
        # never both pass.  The raw tag must be absent and its HTML-escaped
        # form present.
        assert '<script>' not in r.content
        assert '&lt;script&gt;' in r.content
class TestModerateLogDetail(ModerateLogTest, AccessMixin):
    """Moderation log detail page and the review 'undelete' action."""
    def setUp(self):
        super(TestModerateLogDetail, self).setUp()
        self.url = self._url(0)
    def _url(self, id):
        return reverse('reviewers.apps.moderatelog.detail', args=[id])
    def test_detail_page(self):
        mkt.log(mkt.LOG.APPROVE_REVIEW, self.review.addon, self.review)
        e_id = ActivityLog.objects.editor_events()[0].id
        r = self.client.get(self._url(e_id))
        eq_(r.status_code, 200)
    def test_undelete_selfmoderation(self):
        # The moderator who deleted a review may undelete it.
        e_id = mkt.log(
            mkt.LOG.DELETE_REVIEW, self.review.addon, self.review).id
        self.review.delete()
        r = self.client.post(self._url(e_id), {'action': 'undelete'})
        eq_(r.status_code, 302)
        self.review = Review.objects.get(id=self.review.id)
        assert not self.review.deleted, 'Review should be undeleted now.'
    def test_undelete_admin(self):
        # Admins may undelete regardless of who performed the deletion.
        e_id = mkt.log(
            mkt.LOG.DELETE_REVIEW, self.review.addon, self.review).id
        self.review.delete()
        self.client.logout()
        self.login(self.admin_user)
        r = self.client.post(self._url(e_id), {'action': 'undelete'})
        eq_(r.status_code, 302)
        self.review = Review.objects.get(id=self.review.id)
        assert not self.review.deleted, 'Review should be undeleted now.'
    def test_undelete_unauthorized(self):
        # A plain moderator cannot undo an admin's deletion.
        e_id = mkt.log(mkt.LOG.DELETE_REVIEW, self.review.addon, self.review,
                       user=self.admin_user).id
        self.review.delete()
        r = self.client.post(self._url(e_id), {'action': 'undelete'})
        eq_(r.status_code, 403)
        self.review = Review.with_deleted.get(id=self.review.id)
        assert self.review.deleted, 'Review shouldn`t have been undeleted.'
| true | true |
f72bd40ce088f4120389f93fcd804f18f5fdfbf5 | 7,929 | py | Python | tests/rastrigin_accept_action.py | LiyrAstroph/CDNest | afb6b869ce1c4ebd76662b20310f1d9d3db4e26e | [
"MIT"
] | 6 | 2019-09-11T03:34:45.000Z | 2020-10-16T12:14:05.000Z | tests/rastrigin_accept_action.py | LiyrAstroph/DNest_C | afb6b869ce1c4ebd76662b20310f1d9d3db4e26e | [
"MIT"
] | 2 | 2020-05-14T10:04:48.000Z | 2021-01-06T02:04:19.000Z | tests/rastrigin_accept_action.py | LiyrAstroph/DNest_C | afb6b869ce1c4ebd76662b20310f1d9d3db4e26e | [
"MIT"
] | null | null | null | #
# sample from a Rastrigin test function
# this is to illustrate how to use accept_action in CDNest to avoid repeat calculations.
#
# A 2D Rastrigin function looks
#
# logL=-(10.0*2 + (coords[0]**2 - 10*np.cos(2.0*np.pi*coords[0])) + (coords[1]**2 - 10*np.cos(2.0*np.pi*coords[1])) )
#
# Every perturb, only one parameter is updated, so that the terms related to the rest parameters
# do not need to recalculate, just use the values in the previous step.
#
# In this example, we use an array to record values of the term "(coords[0]**2 - 10*np.cos(2.0*np.pi*coords[0]))"
# in every accepted perturb.
#
from mpi4py import MPI
import numpy as np
import cydnest
import matplotlib.pyplot as plt
from matplotlib import cm
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
def randh():
    """Draw one step from the heavy-tailed proposal distribution.

    A standard normal deviate is scaled by a magnitude 10**(1.5 - 3*|t|)
    that spans several decades, where t is the ratio of a Gaussian deviate
    to the square root of an exponential deviate.
    """
    tail = np.abs(np.random.randn() / np.sqrt(-np.log(np.random.rand())))
    magnitude = 10.0 ** (1.5 - 3 * tail)
    return magnitude * np.random.randn()
def wrap(x, a, b):
    """Map x periodically into the half-open interval [a, b)."""
    assert b > a
    period = b - a
    return a + (x - a) % period
class Model(object):
    """Rastrigin model for CDNest with a cached per-particle likelihood term.

    The log-likelihood is the negative Rastrigin function.  Because each
    perturb updates a single parameter, the term contributed by parameter 0
    is cached per particle (value_accept / value_perturb) and recomputed only
    when parameter 0 itself changes; accept_action() and kill_action() keep
    the cache consistent with the sampler's particle bookkeeping.

    NOTE: log_likelihood reads coords[0] and coords[1] explicitly, so the
    likelihood assumes num_params == 2 even though priors are generic.
    """
    def __init__(self, num_params=1, num_particles=1):
        """
        intialize the model
        """
        # number of particles each core holds
        self.num_particles = num_particles
        # number of parameters
        self.num_params = num_params
        # parameter ranges, a list
        # (independent inner lists so editing one range cannot silently
        # change the others; [[..]]*n would alias a single list)
        self.param_range = [[-5.12, 5.12] for _ in range(num_params)]
        # parameter prior type.
        # three types: Uniform, Gaussian, Log
        self.prior_type = ["Uniform"]*num_params
        # parameter prior information. used when the prior is Gaussian
        # indicate the mean and standard deviation of the Gaussian prior
        self.prior_info = [[0.0, 1.0] for _ in range(num_params)]
        # which parameter being perturbed
        # which particle being perturbed
        self.which_param_update = 0
        self.which_particle_update = 0
        # perturbed values and accepted values for all particles
        self.value_perturb = [0.0]*self.num_particles
        self.value_accept = [0.0]*self.num_particles
    def accept_action(self):
        """
        action taken when a perturb is accepted:
        promote the perturbed cached term to the accepted slot.
        """
        # "which_particle_update" and "which_param_update" were set by the
        # sampler before this call; only parameter 0 is cached.
        if self.which_param_update < 1:
            self.value_accept[self.which_particle_update] = self.value_perturb[self.which_particle_update]
    def kill_action(self, i, i_copy):
        """
        cdnest kills a particle when it is not updated for a long time.
        Particle i is replaced by a copy of particle i_copy, so the cached
        term must be copied along as well.
        """
        self.value_accept[i] = self.value_accept[i_copy]
        return
    def from_prior(self):
        """
        generate initial values of model parameters from priors
        """
        coords = np.zeros(self.num_params)
        for i in range(self.num_params):
            if self.prior_type[i] == "Uniform":
                coords[i] = np.random.uniform(self.param_range[i][0], self.param_range[i][1])
            elif self.prior_type[i] == "Gaussian":
                # bug fix: use this parameter's mean prior_info[i][0] (the
                # original read prior_info[0], the whole [mean, std] pair)
                # and keep wrap()'s return value (it was being discarded).
                coords[i] = np.random.randn() * self.prior_info[i][1] + self.prior_info[i][0]
                coords[i] = wrap(coords[i], self.param_range[i][0], self.param_range[i][1])
            elif self.prior_type[i] == "Log":  # LOG prior
                coords[i] = np.random.uniform(np.log(self.param_range[i][0]), np.log(self.param_range[i][1]))
                coords[i] = np.exp(coords[i])
        return coords
    def perturb(self, coords):
        """
        perturb one randomly chosen parameter in place; return
        ln(prior(new)/prior(old)) for the Metropolis acceptance ratio.
        """
        i = np.random.randint(self.num_params)
        # record which parameter is updated
        self.which_param_update = i
        LogH = 0.0  # prior ratio: ln(prior(new)/prior(old))
        width = (self.param_range[i][1]-self.param_range[i][0])
        if self.prior_type[i] == "Uniform":
            # flat prior: the ratio stays 0
            coords[i] += width*randh()
            coords[i] = wrap(coords[i], self.param_range[i][0], self.param_range[i][1])
        elif self.prior_type[i] == "Gaussian":
            LogH -= ( -0.5* (coords[i] - self.prior_info[i][0])**2/self.prior_info[i][1]**2 )  # ln(Gaussian)
            coords[i] += width*randh()
            coords[i] = wrap(coords[i], self.param_range[i][0], self.param_range[i][1])
            LogH += ( -0.5* (coords[i] - self.prior_info[i][0])**2/self.prior_info[i][1]**2 )
        elif self.prior_type[i] == "Log":
            LogH -= ( -np.log(coords[i]) )  # ln(1/x) = -ln(x)
            coords[i] += width*randh()
            coords[i] = wrap(coords[i], self.param_range[i][0], self.param_range[i][1])
            LogH += ( -np.log(coords[i]) )
        return LogH
    def log_likelihood_initial(self, coords):
        """
        calculate likelihood at initial start, filling the cache for the
        particle being initialized
        """
        self.which_particle_update = cydnest.get_which_particle_update()
        self.value_accept[self.which_particle_update] = coords[0]**2 - 10*np.cos(2.0*np.pi*coords[0])
        value = self.value_accept[self.which_particle_update]
        return -(10.0*2 + (value) + (coords[1]**2 - 10*np.cos(2.0*np.pi*coords[1])) )
    def log_likelihood(self, coords):
        """
        calculate likelihood, reusing the cached parameter-0 term unless
        parameter 0 is the one being perturbed
        """
        # get which particle is being updated, and save it to self model
        self.which_particle_update = cydnest.get_which_particle_update()
        value = 0.0
        if self.which_param_update < 1:  # when 0-th parameter update, recalculate
            self.value_perturb[self.which_particle_update] = coords[0]**2 - 10*np.cos(2.0*np.pi*coords[0])
            value = self.value_perturb[self.which_particle_update]
        else:  # otherwise, use the accepted value
            value = self.value_accept[self.which_particle_update]
        return -(10.0*2 + (value) + (coords[1]**2 - 10*np.cos(2.0*np.pi*coords[1])) )
# create a model (2 parameters, 2 particles per core)
model = Model(num_params=2, num_particles=2)

# create a dnest sampler
# max_num_saves is the number of samples to generate
# max_num_levels is the number of levels
# ptol is the likelihood tolerance in loge()
sampler = cydnest.sampler(model, sample_dir="./", max_num_saves = 10000, ptol=0.1, num_particles=model.num_particles)
#
# The full argument lists look like:
# sampler = cydnest.sampler(model, sample_dir="./", max_num_saves = 10000, ptol=0.1,
#           num_particles=1, thread_steps_factor = 10,
#           max_num_levels = 0, lam = 10, beta = 100
#           new_level_interval_factor = 2, save_interval_factor = 2)
#

# run sampler; returns the log-evidence
logz = sampler.run()
comm.Barrier()

# output evidence and plot the posterior on the master process only
if rank == 0:
  print("Evidence:", logz)

  psample = np.loadtxt(sampler.get_sample_dir() + "/posterior_sample" + sampler.get_sample_tag() + ".txt")
  psample_info = np.loadtxt(sampler.get_sample_dir() + "/posterior_sample_info" + sampler.get_sample_tag() + ".txt")

  fig = plt.figure(figsize=(15, 12))
  ax = fig.add_subplot(111, projection='3d')

  # surface of the (negative) Rastrigin function over [-1.5, 1.5]^2
  X = np.arange(-1.5, 1.5, 0.01)
  Y = np.arange(-1.5, 1.5, 0.01)
  X, Y = np.meshgrid(X, Y)
  Z = -(10.0*2 + (X**2 - 10*np.cos(2.0*np.pi*X)) + (Y**2 - 10*np.cos(2.0*np.pi*Y)) )
  ax.plot_surface(X, Y, Z, cmap=cm.ocean, rstride=2, cstride=2, linewidth=0, antialiased=False, zorder=0)

  # overlay the posterior samples that fall inside the plotted window
  idx = np.where((np.abs(psample[:, 0]) < 1.4) & (np.abs(psample[:, 1]) < 1.4))
  ax.plot(psample[idx[0], 0], psample[idx[0], 1], psample_info[idx[0]], ls='none', marker='+', zorder=10)
  ax.set_xlim(-1.5, 1.5)
  ax.set_ylim(-1.5, 1.5)
  ax.set_xlabel(r'$\theta_1$')
  ax.set_ylabel(r'$\theta_2$')
  ax.set_zlabel(r'$\log L$')
  fig.savefig("fig_rastrigin.jpg", bbox_inches='tight')
  plt.show()

# do postprocess, plot, show the properties of sampling
# (fix: trailing dataset-metadata residue fused onto this line made the
# file a SyntaxError)
cydnest.postprocess(sampler.get_sample_dir(), sampler.get_sample_tag(), temperature=1.0, doplot=True)
from mpi4py import MPI
import numpy as np
import cydnest
import matplotlib.pyplot as plt
from matplotlib import cm
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
def randh():
    """Draw a random step whose magnitude spans several orders of magnitude.

    Uses a 10**exponent scaling of a standard-normal draw, so proposals
    range from tiny refinements to large jumps.
    """
    # NOTE: the three RNG draws below happen in the same order as the
    # original one-liner (randn, rand, randn), so the random stream is
    # consumed identically.
    tail_draw = np.random.randn()
    uniform_draw = np.random.rand()
    log10_scale = 1.5 - 3 * np.abs(tail_draw / np.sqrt(-np.log(uniform_draw)))
    return (10.0 ** log10_scale) * np.random.randn()
def wrap(x, a, b):
    """Periodically wrap ``x`` into the half-open interval [a, b).

    Parameters
    ----------
    x : float
        Value to wrap.
    a, b : float
        Interval bounds; ``b`` must be strictly greater than ``a``.

    Returns
    -------
    float
        The wrapped value, congruent to ``x`` modulo ``b - a``.

    Raises
    ------
    ValueError
        If ``b <= a``.
    """
    # Validate explicitly instead of with ``assert``, which is silently
    # stripped when Python runs with optimizations (-O).
    if b <= a:
        raise ValueError('wrap() requires b > a, got a={}, b={}'.format(a, b))
    return (x - a) % (b - a) + a
class Model(object):
    """Two-dimensional Rastrigin test model for cydnest diffusive nested sampling.

    The negative 2-D Rastrigin function serves as the log-likelihood.  The
    Rastrigin term of parameter 0 is cached per particle
    (``value_accept`` / ``value_perturb``) so it is only recomputed when
    parameter 0 is perturbed.

    Relies on the module-level helpers ``wrap`` and ``randh`` and on
    ``cydnest.get_which_particle_update()``.
    """

    def __init__(self, num_params=1, num_particles=1):
        self.num_particles = num_particles  # particles evolved per process
        self.num_params = num_params        # dimension of parameter space
        # Rastrigin is conventionally defined on [-5.12, 5.12] per dimension.
        self.param_range = [[-5.12, 5.12]] * num_params
        self.prior_type = ["Uniform"] * num_params
        self.prior_info = [[0.0, 1.0]] * num_params  # [mean, std] per param
        self.which_param_update = 0     # index of parameter last perturbed
        self.which_particle_update = 0  # index of particle last updated
        # Per-particle cache of the parameter-0 Rastrigin term.
        self.value_perturb = [0.0] * self.num_particles
        self.value_accept = [0.0] * self.num_particles

    def accept_action(self):
        """Commit the cached term when a perturbation of parameter 0 is accepted."""
        if self.which_param_update < 1:
            self.value_accept[self.which_particle_update] = \
                self.value_perturb[self.which_particle_update]

    def kill_action(self, i, i_copy):
        """Copy the cached term when particle ``i`` is replaced by ``i_copy``."""
        self.value_accept[i] = self.value_accept[i_copy]
        return

    def from_prior(self):
        """Draw an initial parameter vector from the prior."""
        coords = np.zeros(self.num_params)
        for i in range(self.num_params):
            if self.prior_type[i] == "Uniform":
                coords[i] = np.random.uniform(self.param_range[i][0],
                                              self.param_range[i][1])
            elif self.prior_type[i] == "Gaussian":
                # Bug fix: use this parameter's mean, prior_info[i][0]; the
                # original added the whole prior_info[0] list (a TypeError).
                coords[i] = np.random.randn() * self.prior_info[i][1] \
                    + self.prior_info[i][0]
                # Bug fix: wrap() returns the wrapped value; the original
                # discarded the result.
                coords[i] = wrap(coords[i], self.param_range[i][0],
                                 self.param_range[i][1])
            elif self.prior_type[i] == "Log":
                coords[i] = np.random.uniform(np.log(self.param_range[i][0]),
                                              np.log(self.param_range[i][1]))
                coords[i] = np.exp(coords[i])
        return coords

    def perturb(self, coords):
        """Perturb one randomly chosen parameter in place.

        Returns the log-Hastings factor contributed by the prior so that
        detailed balance is preserved for non-uniform priors.
        """
        i = np.random.randint(self.num_params)
        self.which_param_update = i
        LogH = 0.0
        width = (self.param_range[i][1] - self.param_range[i][0])
        if self.prior_type[i] == "Uniform":
            coords[i] += width * randh()
            coords[i] = wrap(coords[i],
                             self.param_range[i][0], self.param_range[i][1])
        elif self.prior_type[i] == "Gaussian":
            LogH -= (-0.5 * (coords[i] - self.prior_info[i][0]) ** 2
                     / self.prior_info[i][1] ** 2)
            coords[i] += width * randh()
            coords[i] = wrap(coords[i],
                             self.param_range[i][0], self.param_range[i][1])
            LogH += (-0.5 * (coords[i] - self.prior_info[i][0]) ** 2
                     / self.prior_info[i][1] ** 2)
        elif self.prior_type[i] == "Log":
            LogH -= (-np.log(coords[i]))
            coords[i] += width * randh()
            coords[i] = wrap(coords[i],
                             self.param_range[i][0], self.param_range[i][1])
            LogH += (-np.log(coords[i]))
        return LogH

    def log_likelihood_initial(self, coords):
        """Log-likelihood of an initial draw; also primes the per-particle cache."""
        self.which_particle_update = cydnest.get_which_particle_update()
        self.value_accept[self.which_particle_update] = \
            coords[0] ** 2 - 10 * np.cos(2.0 * np.pi * coords[0])
        value = self.value_accept[self.which_particle_update]
        return -(10.0 * 2 + (value)
                 + (coords[1] ** 2 - 10 * np.cos(2.0 * np.pi * coords[1])))

    def log_likelihood(self, coords):
        """Negative 2-D Rastrigin log-likelihood, reusing the cached first term."""
        self.which_particle_update = cydnest.get_which_particle_update()
        value = 0.0
        if self.which_param_update < 1:
            # Parameter 0 moved: recompute its term into the perturb cache
            # (committed by accept_action on acceptance).
            self.value_perturb[self.which_particle_update] = \
                coords[0] ** 2 - 10 * np.cos(2.0 * np.pi * coords[0])
            value = self.value_perturb[self.which_particle_update]
        else:
            value = self.value_accept[self.which_particle_update]
        return -(10.0 * 2 + (value)
                 + (coords[1] ** 2 - 10 * np.cos(2.0 * np.pi * coords[1])))
# Instantiate the Rastrigin model: 2 parameters, 2 particles.
model = Model(num_params=2, num_particles=2)
# Diffusive nested sampler: save 10000 samples, ln-likelihood tolerance 0.1.
sampler = cydnest.sampler(model, sample_dir="./", max_num_saves = 10000, ptol=0.1, num_particles=model.num_particles)
# Run the sampler; it returns the log-evidence estimate.
logz = sampler.run()
comm.Barrier()
# Only the root MPI rank reports the evidence and renders the figure.
if rank == 0:
    print("Evidence:", logz)
    psample = np.loadtxt(sampler.get_sample_dir() +"/posterior_sample" + sampler.get_sample_tag() + ".txt")
    psample_info = np.loadtxt(sampler.get_sample_dir() +"/posterior_sample_info" + sampler.get_sample_tag() + ".txt")

    fig = plt.figure(figsize=(15, 12))
    ax = fig.add_subplot(111, projection='3d')

    # Surface of the negative 2-D Rastrigin log-likelihood.
    X = np.arange(-1.5, 1.5, 0.01)
    Y = np.arange(-1.5, 1.5, 0.01)
    X, Y = np.meshgrid(X, Y)
    Z = -(10.0*2 + (X**2 - 10*np.cos(2.0*np.pi*X)) + (Y**2 - 10*np.cos(2.0*np.pi*Y)) )
    ax.plot_surface(X, Y, Z, cmap=cm.ocean, rstride=2, cstride=2, linewidth=0, antialiased=False, zorder=0)

    # Overlay posterior samples that fall near the central peaks.
    idx = np.where((np.abs(psample[:, 0]) <1.4) & (np.abs(psample[:, 1]) <1.4))
    ax.plot(psample[idx[0], 0], psample[idx[0], 1], psample_info[idx[0]], ls='none', marker='+', zorder=10)

    ax.set_xlim(-1.5, 1.5)
    ax.set_ylim(-1.5, 1.5)
    ax.set_xlabel(r'$\theta_1$')
    ax.set_ylabel(r'$\theta_2$')
    ax.set_zlabel(r'$\log L$')
    fig.savefig("fig_rastrigin.jpg", bbox_inches='tight')
    plt.show()
cydnest.postprocess(sampler.get_sample_dir(), sampler.get_sample_tag(), temperature=1.0, doplot=True) | true | true |
f72bd6613a23fb41a75040d39b206992b03cd8d2 | 27,303 | py | Python | catalyst/marketplace/marketplace.py | echo-ray/catalyst | 8b4274d17f0a42ee4d1d5e09d30fb0919aea2a51 | [
"Apache-2.0"
] | null | null | null | catalyst/marketplace/marketplace.py | echo-ray/catalyst | 8b4274d17f0a42ee4d1d5e09d30fb0919aea2a51 | [
"Apache-2.0"
] | null | null | null | catalyst/marketplace/marketplace.py | echo-ray/catalyst | 8b4274d17f0a42ee4d1d5e09d30fb0919aea2a51 | [
"Apache-2.0"
] | null | null | null | from __future__ import print_function
import glob
import json
import os
import re
import shutil
import sys
import time
import webbrowser
import bcolz
import logbook
import pandas as pd
import requests
from requests_toolbelt import MultipartDecoder
from requests_toolbelt.multipart.decoder import \
NonMultipartContentTypeException
from catalyst.constants import (
LOG_LEVEL, AUTH_SERVER, ETH_REMOTE_NODE, MARKETPLACE_CONTRACT,
MARKETPLACE_CONTRACT_ABI, ENIGMA_CONTRACT, ENIGMA_CONTRACT_ABI)
from catalyst.exchange.utils.stats_utils import set_print_settings
from catalyst.marketplace.marketplace_errors import (
MarketplacePubAddressEmpty, MarketplaceDatasetNotFound,
MarketplaceNoAddressMatch, MarketplaceHTTPRequest,
MarketplaceNoCSVFiles, MarketplaceRequiresPython3)
from catalyst.marketplace.utils.auth_utils import get_key_secret, \
get_signed_headers
from catalyst.marketplace.utils.bundle_utils import merge_bundles
from catalyst.marketplace.utils.eth_utils import bin_hex, from_grains, \
to_grains
from catalyst.marketplace.utils.path_utils import get_bundle_folder, \
get_data_source_folder, get_marketplace_folder, \
get_user_pubaddr, get_temp_bundles_folder, extract_bundle
from catalyst.utils.paths import ensure_directory
if sys.version_info.major < 3:
import urllib
else:
import urllib.request as urllib
log = logbook.Logger('Marketplace', level=LOG_LEVEL)
class Marketplace:
def __init__(self):
global Web3
try:
from web3 import Web3, HTTPProvider
except ImportError:
raise MarketplaceRequiresPython3()
self.addresses = get_user_pubaddr()
if self.addresses[0]['pubAddr'] == '':
raise MarketplacePubAddressEmpty(
filename=os.path.join(
get_marketplace_folder(), 'addresses.json')
)
self.default_account = self.addresses[0]['pubAddr']
self.web3 = Web3(HTTPProvider(ETH_REMOTE_NODE))
contract_url = urllib.urlopen(MARKETPLACE_CONTRACT)
self.mkt_contract_address = Web3.toChecksumAddress(
contract_url.readline().decode(
contract_url.info().get_content_charset()).strip())
abi_url = urllib.urlopen(MARKETPLACE_CONTRACT_ABI)
abi = json.load(abi_url)
self.mkt_contract = self.web3.eth.contract(
self.mkt_contract_address,
abi=abi,
)
contract_url = urllib.urlopen(ENIGMA_CONTRACT)
self.eng_contract_address = Web3.toChecksumAddress(
contract_url.readline().decode(
contract_url.info().get_content_charset()).strip())
abi_url = urllib.urlopen(ENIGMA_CONTRACT_ABI)
abi = json.load(abi_url)
self.eng_contract = self.web3.eth.contract(
self.eng_contract_address,
abi=abi,
)
# def get_data_sources_map(self):
# return [
# dict(
# name='Marketcap',
# desc='The marketcap value in USD.',
# start_date=pd.to_datetime('2017-01-01'),
# end_date=pd.to_datetime('2018-01-15'),
# data_frequencies=['daily'],
# ),
# dict(
# name='GitHub',
# desc='The rate of development activity on GitHub.',
# start_date=pd.to_datetime('2017-01-01'),
# end_date=pd.to_datetime('2018-01-15'),
# data_frequencies=['daily', 'hour'],
# ),
# dict(
# name='Influencers',
# desc='Tweets & related sentiments by selected influencers.',
# start_date=pd.to_datetime('2017-01-01'),
# end_date=pd.to_datetime('2018-01-15'),
# data_frequencies=['daily', 'hour', 'minute'],
# ),
# ]
def to_text(self, hex):
return Web3.toText(hex).rstrip('\0')
    def choose_pubaddr(self):
        """Interactively pick one of the configured public addresses.

        When only one address is configured, it is used without prompting.

        Returns
        -------
        (str, int)
            The chosen public address and its index into ``self.addresses``.
            NOTE(review): only the interactive branch checksums the address;
            the single-address branch returns it as configured.
        """
        if len(self.addresses) == 1:
            address = self.addresses[0]['pubAddr']
            address_i = 0
            print('Using {} for this transaction.'.format(address))
        else:
            while True:
                # Show every configured address with its description.
                for i in range(0, len(self.addresses)):
                    print('{}\t{}\t{}'.format(
                        i,
                        self.addresses[i]['pubAddr'],
                        self.addresses[i]['desc'])
                    )
                address_i = int(input('Choose your address associated with '
                                      'this transaction: [default: 0] ') or 0)

                if not (0 <= address_i < len(self.addresses)):
                    print('Please choose a number between 0 and {}\n'.format(
                        len(self.addresses) - 1))
                else:
                    address = Web3.toChecksumAddress(
                        self.addresses[address_i]['pubAddr'])
                    break

        return address, address_i
    def sign_transaction(self, tx):
        """Have the user sign ``tx`` offline through MyEtherWallet.

        Prints the transaction fields, opens the MyEtherWallet offline
        transaction page in a browser, and prompts for the resulting
        signed-transaction blob.

        Parameters
        ----------
        tx : dict
            Unsigned transaction with keys: from, to, value, gas, nonce, data.

        Returns
        -------
        str
            The signed transaction hex string, without a '0x' prefix.
        """
        url = 'https://www.myetherwallet.com/#offline-transaction'
        print('\nVisit {url} and enter the following parameters:\n\n'
              'From Address:\t\t{_from}\n'
              '\n\tClick the "Generate Information" button\n\n'
              'To Address:\t\t{to}\n'
              'Value / Amount to Send:\t{value}\n'
              'Gas Limit:\t\t{gas}\n'
              'Gas Price:\t\t[Accept the default value]\n'
              'Nonce:\t\t\t{nonce}\n'
              'Data:\t\t\t{data}\n'.format(
                  url=url,
                  _from=tx['from'],
                  to=tx['to'],
                  value=tx['value'],
                  gas=tx['gas'],
                  nonce=tx['nonce'],
                  data=tx['data'], )
              )
        webbrowser.open_new(url)

        signed_tx = input('Copy and Paste the "Signed Transaction" '
                          'field here:\n')

        # Strip a leading '0x' so downstream code can add it consistently.
        if signed_tx.startswith('0x'):
            signed_tx = signed_tx[2:]

        return signed_tx
def check_transaction(self, tx_hash):
if 'ropsten' in ETH_REMOTE_NODE:
etherscan = 'https://ropsten.etherscan.io/tx/'
elif 'rinkeby' in ETH_REMOTE_NODE:
etherscan = 'https://rinkeby.etherscan.io/tx/'
else:
etherscan = 'https://etherscan.io/tx/'
etherscan = '{}{}'.format(etherscan, tx_hash)
print('\nYou can check the outcome of your transaction here:\n'
'{}\n\n'.format(etherscan))
def _list(self):
data_sources = self.mkt_contract.functions.getAllProviders().call()
data = []
for index, data_source in enumerate(data_sources):
if index > 0:
if 'test' not in Web3.toText(data_source).lower():
data.append(
dict(
dataset=self.to_text(data_source)
)
)
return pd.DataFrame(data)
def list(self):
df = self._list()
set_print_settings()
if df.empty:
print('There are no datasets available yet.')
else:
print(df)
def subscribe(self, dataset=None):
if dataset is None:
df_sets = self._list()
if df_sets.empty:
print('There are no datasets available yet.')
return
set_print_settings()
while True:
print(df_sets)
dataset_num = input('Choose the dataset you want to '
'subscribe to [0..{}]: '.format(
df_sets.size - 1))
try:
dataset_num = int(dataset_num)
except ValueError:
print('Enter a number between 0 and {}'.format(
df_sets.size - 1))
else:
if dataset_num not in range(0, df_sets.size):
print('Enter a number between 0 and {}'.format(
df_sets.size - 1))
else:
dataset = df_sets.iloc[dataset_num]['dataset']
break
dataset = dataset.lower()
address = self.choose_pubaddr()[0]
provider_info = self.mkt_contract.functions.getDataProviderInfo(
Web3.toHex(dataset)
).call()
if not provider_info[4]:
print('The requested "{}" dataset is not registered in '
'the Data Marketplace.'.format(dataset))
return
grains = provider_info[1]
price = from_grains(grains)
subscribed = self.mkt_contract.functions.checkAddressSubscription(
address, Web3.toHex(dataset)
).call()
if subscribed[5]:
print(
'\nYou are already subscribed to the "{}" dataset.\n'
'Your subscription started on {} UTC, and is valid until '
'{} UTC.'.format(
dataset,
pd.to_datetime(subscribed[3], unit='s', utc=True),
pd.to_datetime(subscribed[4], unit='s', utc=True)
)
)
return
print('\nThe price for a monthly subscription to this dataset is'
' {} ENG'.format(price))
print(
'Checking that the ENG balance in {} is greater than {} '
'ENG... '.format(address, price), end=''
)
wallet_address = address[2:]
balance = self.web3.eth.call({
'from': address,
'to': self.eng_contract_address,
'data': '0x70a08231000000000000000000000000{}'.format(
wallet_address
)
})
try:
balance = Web3.toInt(balance) # web3 >= 4.0.0b7
except TypeError:
balance = Web3.toInt(hexstr=balance) # web3 <= 4.0.0b6
if balance > grains:
print('OK.')
else:
print('FAIL.\n\nAddress {} balance is {} ENG,\nwhich is lower '
'than the price of the dataset that you are trying to\n'
'buy: {} ENG. Get enough ENG to cover the costs of the '
'monthly\nsubscription for what you are trying to buy, '
'and try again.'.format(
address, from_grains(balance), price))
return
while True:
agree_pay = input('Please confirm that you agree to pay {} ENG '
'for a monthly subscription to the dataset "{}" '
'starting today. [default: Y] '.format(
price, dataset)) or 'y'
if agree_pay.lower() not in ('y', 'n'):
print("Please answer Y or N.")
else:
if agree_pay.lower() == 'y':
break
else:
return
print('Ready to subscribe to dataset {}.\n'.format(dataset))
print('In order to execute the subscription, you will need to sign '
'two different transactions:\n'
'1. First transaction is to authorize the Marketplace contract '
'to spend {} ENG on your behalf.\n'
'2. Second transaction is the actual subscription for the '
'desired dataset'.format(price))
tx = self.eng_contract.functions.approve(
self.mkt_contract_address,
grains,
).buildTransaction(
{'from': address,
'nonce': self.web3.eth.getTransactionCount(address)}
)
signed_tx = self.sign_transaction(tx)
try:
tx_hash = '0x{}'.format(
bin_hex(self.web3.eth.sendRawTransaction(signed_tx))
)
print(
'\nThis is the TxHash for this transaction: {}'.format(tx_hash)
)
except Exception as e:
print('Unable to subscribe to data source: {}'.format(e))
return
self.check_transaction(tx_hash)
print('Waiting for the first transaction to succeed...')
while True:
try:
if self.web3.eth.getTransactionReceipt(tx_hash).status:
break
else:
print('\nTransaction failed. Aborting...')
return
except AttributeError:
pass
for i in range(0, 10):
print('.', end='', flush=True)
time.sleep(1)
print('\nFirst transaction successful!\n'
'Now processing second transaction.')
tx = self.mkt_contract.functions.subscribe(
Web3.toHex(dataset),
).buildTransaction({
'from': address,
'nonce': self.web3.eth.getTransactionCount(address)})
signed_tx = self.sign_transaction(tx)
try:
tx_hash = '0x{}'.format(bin_hex(
self.web3.eth.sendRawTransaction(signed_tx)))
print('\nThis is the TxHash for this transaction: '
'{}'.format(tx_hash))
except Exception as e:
print('Unable to subscribe to data source: {}'.format(e))
return
self.check_transaction(tx_hash)
print('Waiting for the second transaction to succeed...')
while True:
try:
if self.web3.eth.getTransactionReceipt(tx_hash).status:
break
else:
print('\nTransaction failed. Aborting...')
return
except AttributeError:
pass
for i in range(0, 10):
print('.', end='', flush=True)
time.sleep(1)
print('\nSecond transaction successful!\n'
'You have successfully subscribed to dataset {} with'
'address {}.\n'
'You can now ingest this dataset anytime during the '
'next month by running the following command:\n'
'catalyst marketplace ingest --dataset={}'.format(
dataset, address, dataset))
def process_temp_bundle(self, ds_name, path):
"""
Merge the temp bundle into the main bundle for the specified
data source.
Parameters
----------
ds_name
path
Returns
-------
"""
tmp_bundle = extract_bundle(path)
bundle_folder = get_data_source_folder(ds_name)
ensure_directory(bundle_folder)
if os.listdir(bundle_folder):
zsource = bcolz.ctable(rootdir=tmp_bundle, mode='r')
ztarget = bcolz.ctable(rootdir=bundle_folder, mode='r')
merge_bundles(zsource, ztarget)
else:
os.rename(tmp_bundle, bundle_folder)
pass
def ingest(self, ds_name=None, start=None, end=None, force_download=False):
if ds_name is None:
df_sets = self._list()
if df_sets.empty:
print('There are no datasets available yet.')
return
set_print_settings()
while True:
print(df_sets)
dataset_num = input('Choose the dataset you want to '
'ingest [0..{}]: '.format(
df_sets.size - 1))
try:
dataset_num = int(dataset_num)
except ValueError:
print('Enter a number between 0 and {}'.format(
df_sets.size - 1))
else:
if dataset_num not in range(0, df_sets.size):
print('Enter a number between 0 and {}'.format(
df_sets.size - 1))
else:
ds_name = df_sets.iloc[dataset_num]['dataset']
break
# ds_name = ds_name.lower()
# TODO: catch error conditions
provider_info = self.mkt_contract.functions.getDataProviderInfo(
Web3.toHex(ds_name)
).call()
if not provider_info[4]:
print('The requested "{}" dataset is not registered in '
'the Data Marketplace.'.format(ds_name))
return
address, address_i = self.choose_pubaddr()
fns = self.mkt_contract.functions
check_sub = fns.checkAddressSubscription(
address, Web3.toHex(ds_name)
).call()
if check_sub[0] != address or self.to_text(check_sub[1]) != ds_name:
print('You are not subscribed to dataset "{}" with address {}. '
'Plese subscribe first.'.format(ds_name, address))
return
if not check_sub[5]:
print('Your subscription to dataset "{}" expired on {} UTC.'
'Please renew your subscription by running:\n'
'catalyst marketplace subscribe --dataset={}'.format(
ds_name,
pd.to_datetime(check_sub[4], unit='s', utc=True),
ds_name)
)
if 'key' in self.addresses[address_i]:
key = self.addresses[address_i]['key']
secret = self.addresses[address_i]['secret']
else:
key, secret = get_key_secret(address)
headers = get_signed_headers(ds_name, key, secret)
log.debug('Starting download of dataset for ingestion...')
r = requests.post(
'{}/marketplace/ingest'.format(AUTH_SERVER),
headers=headers,
stream=True,
)
if r.status_code == 200:
target_path = get_temp_bundles_folder()
try:
decoder = MultipartDecoder.from_response(r)
for part in decoder.parts:
h = part.headers[b'Content-Disposition'].decode('utf-8')
# Extracting the filename from the header
name = re.search(r'filename="(.*)"', h).group(1)
filename = os.path.join(target_path, name)
with open(filename, 'wb') as f:
# for chunk in part.content.iter_content(
# chunk_size=1024):
# if chunk: # filter out keep-alive new chunks
# f.write(chunk)
f.write(part.content)
self.process_temp_bundle(ds_name, filename)
except NonMultipartContentTypeException:
response = r.json()
raise MarketplaceHTTPRequest(
request='ingest dataset',
error=response,
)
else:
raise MarketplaceHTTPRequest(
request='ingest dataset',
error=r.status_code,
)
log.info('{} ingested successfully'.format(ds_name))
    def get_dataset(self, ds_name, start=None, end=None):
        """Load a previously ingested dataset into a DataFrame.

        Parameters
        ----------
        ds_name : str
            Dataset name; lower-cased before lookup.
        start, end : optional
            Intended date filters; not applied yet (see TODOs below).

        Returns
        -------
        pd.DataFrame
            The dataset, indexed by (date, symbol).
        """
        ds_name = ds_name.lower()

        # TODO: filter ctable by start and end date
        bundle_folder = get_data_source_folder(ds_name)
        z = bcolz.ctable(rootdir=bundle_folder, mode='r')

        df = z.todataframe()  # type: pd.DataFrame
        df.set_index(['date', 'symbol'], drop=True, inplace=True)

        # TODO: implement the filter more carefully
        # if start and end is None:
        #     df = df.xs(start, level=0)

        return df
def clean(self, ds_name=None, data_frequency=None):
if ds_name is None:
mktplace_root = get_marketplace_folder()
folders = [os.path.basename(f.rstrip('/'))
for f in glob.glob('{}/*/'.format(mktplace_root))
if 'temp_bundles' not in f]
while True:
for idx, f in enumerate(folders):
print('{}\t{}'.format(idx, f))
dataset_num = input('Choose the dataset you want to '
'clean [0..{}]: '.format(
len(folders) - 1))
try:
dataset_num = int(dataset_num)
except ValueError:
print('Enter a number between 0 and {}'.format(
len(folders) - 1))
else:
if dataset_num not in range(0, len(folders)):
print('Enter a number between 0 and {}'.format(
len(folders) - 1))
else:
ds_name = folders[dataset_num]
break
ds_name = ds_name.lower()
if data_frequency is None:
folder = get_data_source_folder(ds_name)
else:
folder = get_bundle_folder(ds_name, data_frequency)
shutil.rmtree(folder)
pass
    def create_metadata(self, key, secret, ds_name, data_frequency, desc,
                        has_history=True, has_live=True):
        """Register the dataset's metadata with the authentication server.

        Parameters
        ----------
        key, secret : str
            API credentials for the signing wallet.
        ds_name : str
            Dataset name.
        data_frequency : str
            Data frequency label (e.g. 'daily', 'hourly', 'minute').
        desc : str
            Human-readable dataset description.
        has_history : bool, optional
            Whether the dataset ships historical data.
        has_live : bool, optional
            Whether the dataset ships live data.

        Raises
        ------
        MarketplaceHTTPRequest
            If the registration request fails or returns an error payload.
        """
        headers = get_signed_headers(ds_name, key, secret)
        r = requests.post(
            '{}/marketplace/register'.format(AUTH_SERVER),
            json=dict(
                ds_name=ds_name,
                desc=desc,
                data_frequency=data_frequency,
                has_history=has_history,
                has_live=has_live,
            ),
            headers=headers,
        )

        if r.status_code != 200:
            raise MarketplaceHTTPRequest(
                request='register', error=r.status_code
            )

        if 'error' in r.json():
            raise MarketplaceHTTPRequest(
                request='upload file', error=r.json()['error']
            )
def register(self):
while True:
desc = input('Enter the name of the dataset to register: ')
dataset = desc.lower()
provider_info = self.mkt_contract.functions.getDataProviderInfo(
Web3.toHex(dataset)
).call()
if provider_info[4]:
print('There is already a dataset registered under '
'the name "{}". Please choose a different '
'name.'.format(dataset))
else:
break
price = int(
input(
'Enter the price for a monthly subscription to '
'this dataset in ENG: '
)
)
while True:
freq = input('Enter the data frequency [daily, hourly, minute]: ')
if freq.lower() not in ('daily', 'hourly', 'minute'):
print('Not a valid frequency.')
else:
break
while True:
reg_pub = input(
'Does it include historical data? [default: Y]: '
) or 'y'
if reg_pub.lower() not in ('y', 'n'):
print('Please answer Y or N.')
else:
if reg_pub.lower() == 'y':
has_history = True
else:
has_history = False
break
while True:
reg_pub = input(
'Doest it include live data? [default: Y]: '
) or 'y'
if reg_pub.lower() not in ('y', 'n'):
print('Please answer Y or N.')
else:
if reg_pub.lower() == 'y':
has_live = True
else:
has_live = False
break
address, address_i = self.choose_pubaddr()
if 'key' in self.addresses[address_i]:
key = self.addresses[address_i]['key']
secret = self.addresses[address_i]['secret']
else:
key, secret = get_key_secret(address)
grains = to_grains(price)
tx = self.mkt_contract.functions.register(
Web3.toHex(dataset),
grains,
address,
).buildTransaction(
{'from': address,
'nonce': self.web3.eth.getTransactionCount(address)}
)
signed_tx = self.sign_transaction(tx)
try:
tx_hash = '0x{}'.format(
bin_hex(self.web3.eth.sendRawTransaction(signed_tx))
)
print(
'\nThis is the TxHash for this transaction: {}'.format(tx_hash)
)
except Exception as e:
print('Unable to register the requested dataset: {}'.format(e))
return
self.check_transaction(tx_hash)
print('Waiting for the transaction to succeed...')
while True:
try:
if self.web3.eth.getTransactionReceipt(tx_hash).status:
break
else:
print('\nTransaction failed. Aborting...')
return
except AttributeError:
pass
for i in range(0, 10):
print('.', end='', flush=True)
time.sleep(1)
print('\nWarming up the {} dataset'.format(dataset))
self.create_metadata(
key=key,
secret=secret,
ds_name=dataset,
data_frequency=freq,
desc=desc,
has_history=has_history,
has_live=has_live,
)
print('\n{} registered successfully'.format(dataset))
def publish(self, dataset, datadir, watch):
dataset = dataset.lower()
provider_info = self.mkt_contract.functions.getDataProviderInfo(
Web3.toHex(dataset)
).call()
if not provider_info[4]:
raise MarketplaceDatasetNotFound(dataset=dataset)
match = next(
(l for l in self.addresses if l['pubAddr'] == provider_info[0]),
None
)
if not match:
raise MarketplaceNoAddressMatch(
dataset=dataset,
address=provider_info[0])
print('Using address: {} to publish this dataset.'.format(
provider_info[0]))
if 'key' in match:
key = match['key']
secret = match['secret']
else:
key, secret = get_key_secret(provider_info[0])
headers = get_signed_headers(dataset, key, secret)
filenames = glob.glob(os.path.join(datadir, '*.csv'))
if not filenames:
raise MarketplaceNoCSVFiles(datadir=datadir)
files = []
for file in filenames:
files.append(('file', open(file, 'rb')))
r = requests.post('{}/marketplace/publish'.format(AUTH_SERVER),
files=files,
headers=headers)
if r.status_code != 200:
raise MarketplaceHTTPRequest(request='upload file',
error=r.status_code)
if 'error' in r.json():
raise MarketplaceHTTPRequest(request='upload file',
error=r.json()['error'])
print('Dataset {} uploaded successfully.'.format(dataset))
| 34.430013 | 79 | 0.517892 | from __future__ import print_function
import glob
import json
import os
import re
import shutil
import sys
import time
import webbrowser
import bcolz
import logbook
import pandas as pd
import requests
from requests_toolbelt import MultipartDecoder
from requests_toolbelt.multipart.decoder import \
NonMultipartContentTypeException
from catalyst.constants import (
LOG_LEVEL, AUTH_SERVER, ETH_REMOTE_NODE, MARKETPLACE_CONTRACT,
MARKETPLACE_CONTRACT_ABI, ENIGMA_CONTRACT, ENIGMA_CONTRACT_ABI)
from catalyst.exchange.utils.stats_utils import set_print_settings
from catalyst.marketplace.marketplace_errors import (
MarketplacePubAddressEmpty, MarketplaceDatasetNotFound,
MarketplaceNoAddressMatch, MarketplaceHTTPRequest,
MarketplaceNoCSVFiles, MarketplaceRequiresPython3)
from catalyst.marketplace.utils.auth_utils import get_key_secret, \
get_signed_headers
from catalyst.marketplace.utils.bundle_utils import merge_bundles
from catalyst.marketplace.utils.eth_utils import bin_hex, from_grains, \
to_grains
from catalyst.marketplace.utils.path_utils import get_bundle_folder, \
get_data_source_folder, get_marketplace_folder, \
get_user_pubaddr, get_temp_bundles_folder, extract_bundle
from catalyst.utils.paths import ensure_directory
if sys.version_info.major < 3:
import urllib
else:
import urllib.request as urllib
log = logbook.Logger('Marketplace', level=LOG_LEVEL)
class Marketplace:
def __init__(self):
global Web3
try:
from web3 import Web3, HTTPProvider
except ImportError:
raise MarketplaceRequiresPython3()
self.addresses = get_user_pubaddr()
if self.addresses[0]['pubAddr'] == '':
raise MarketplacePubAddressEmpty(
filename=os.path.join(
get_marketplace_folder(), 'addresses.json')
)
self.default_account = self.addresses[0]['pubAddr']
self.web3 = Web3(HTTPProvider(ETH_REMOTE_NODE))
contract_url = urllib.urlopen(MARKETPLACE_CONTRACT)
self.mkt_contract_address = Web3.toChecksumAddress(
contract_url.readline().decode(
contract_url.info().get_content_charset()).strip())
abi_url = urllib.urlopen(MARKETPLACE_CONTRACT_ABI)
abi = json.load(abi_url)
self.mkt_contract = self.web3.eth.contract(
self.mkt_contract_address,
abi=abi,
)
contract_url = urllib.urlopen(ENIGMA_CONTRACT)
self.eng_contract_address = Web3.toChecksumAddress(
contract_url.readline().decode(
contract_url.info().get_content_charset()).strip())
abi_url = urllib.urlopen(ENIGMA_CONTRACT_ABI)
abi = json.load(abi_url)
self.eng_contract = self.web3.eth.contract(
self.eng_contract_address,
abi=abi,
)
def to_text(self, hex):
return Web3.toText(hex).rstrip('\0')
def choose_pubaddr(self):
if len(self.addresses) == 1:
address = self.addresses[0]['pubAddr']
address_i = 0
print('Using {} for this transaction.'.format(address))
else:
while True:
for i in range(0, len(self.addresses)):
print('{}\t{}\t{}'.format(
i,
self.addresses[i]['pubAddr'],
self.addresses[i]['desc'])
)
address_i = int(input('Choose your address associated with '
'this transaction: [default: 0] ') or 0)
if not (0 <= address_i < len(self.addresses)):
print('Please choose a number between 0 and {}\n'.format(
len(self.addresses) - 1))
else:
address = Web3.toChecksumAddress(
self.addresses[address_i]['pubAddr'])
break
return address, address_i
def sign_transaction(self, tx):
url = 'https://www.myetherwallet.com/#offline-transaction'
print('\nVisit {url} and enter the following parameters:\n\n'
'From Address:\t\t{_from}\n'
'\n\tClick the "Generate Information" button\n\n'
'To Address:\t\t{to}\n'
'Value / Amount to Send:\t{value}\n'
'Gas Limit:\t\t{gas}\n'
'Gas Price:\t\t[Accept the default value]\n'
'Nonce:\t\t\t{nonce}\n'
'Data:\t\t\t{data}\n'.format(
url=url,
_from=tx['from'],
to=tx['to'],
value=tx['value'],
gas=tx['gas'],
nonce=tx['nonce'],
data=tx['data'], )
)
webbrowser.open_new(url)
signed_tx = input('Copy and Paste the "Signed Transaction" '
'field here:\n')
if signed_tx.startswith('0x'):
signed_tx = signed_tx[2:]
return signed_tx
def check_transaction(self, tx_hash):
if 'ropsten' in ETH_REMOTE_NODE:
etherscan = 'https://ropsten.etherscan.io/tx/'
elif 'rinkeby' in ETH_REMOTE_NODE:
etherscan = 'https://rinkeby.etherscan.io/tx/'
else:
etherscan = 'https://etherscan.io/tx/'
etherscan = '{}{}'.format(etherscan, tx_hash)
print('\nYou can check the outcome of your transaction here:\n'
'{}\n\n'.format(etherscan))
def _list(self):
data_sources = self.mkt_contract.functions.getAllProviders().call()
data = []
for index, data_source in enumerate(data_sources):
if index > 0:
if 'test' not in Web3.toText(data_source).lower():
data.append(
dict(
dataset=self.to_text(data_source)
)
)
return pd.DataFrame(data)
def list(self):
df = self._list()
set_print_settings()
if df.empty:
print('There are no datasets available yet.')
else:
print(df)
def subscribe(self, dataset=None):
if dataset is None:
df_sets = self._list()
if df_sets.empty:
print('There are no datasets available yet.')
return
set_print_settings()
while True:
print(df_sets)
dataset_num = input('Choose the dataset you want to '
'subscribe to [0..{}]: '.format(
df_sets.size - 1))
try:
dataset_num = int(dataset_num)
except ValueError:
print('Enter a number between 0 and {}'.format(
df_sets.size - 1))
else:
if dataset_num not in range(0, df_sets.size):
print('Enter a number between 0 and {}'.format(
df_sets.size - 1))
else:
dataset = df_sets.iloc[dataset_num]['dataset']
break
dataset = dataset.lower()
address = self.choose_pubaddr()[0]
provider_info = self.mkt_contract.functions.getDataProviderInfo(
Web3.toHex(dataset)
).call()
if not provider_info[4]:
print('The requested "{}" dataset is not registered in '
'the Data Marketplace.'.format(dataset))
return
grains = provider_info[1]
price = from_grains(grains)
subscribed = self.mkt_contract.functions.checkAddressSubscription(
address, Web3.toHex(dataset)
).call()
if subscribed[5]:
print(
'\nYou are already subscribed to the "{}" dataset.\n'
'Your subscription started on {} UTC, and is valid until '
'{} UTC.'.format(
dataset,
pd.to_datetime(subscribed[3], unit='s', utc=True),
pd.to_datetime(subscribed[4], unit='s', utc=True)
)
)
return
print('\nThe price for a monthly subscription to this dataset is'
' {} ENG'.format(price))
print(
'Checking that the ENG balance in {} is greater than {} '
'ENG... '.format(address, price), end=''
)
wallet_address = address[2:]
balance = self.web3.eth.call({
'from': address,
'to': self.eng_contract_address,
'data': '0x70a08231000000000000000000000000{}'.format(
wallet_address
)
})
try:
balance = Web3.toInt(balance)
except TypeError:
balance = Web3.toInt(hexstr=balance)
if balance > grains:
print('OK.')
else:
print('FAIL.\n\nAddress {} balance is {} ENG,\nwhich is lower '
'than the price of the dataset that you are trying to\n'
'buy: {} ENG. Get enough ENG to cover the costs of the '
'monthly\nsubscription for what you are trying to buy, '
'and try again.'.format(
address, from_grains(balance), price))
return
while True:
agree_pay = input('Please confirm that you agree to pay {} ENG '
'for a monthly subscription to the dataset "{}" '
'starting today. [default: Y] '.format(
price, dataset)) or 'y'
if agree_pay.lower() not in ('y', 'n'):
print("Please answer Y or N.")
else:
if agree_pay.lower() == 'y':
break
else:
return
print('Ready to subscribe to dataset {}.\n'.format(dataset))
print('In order to execute the subscription, you will need to sign '
'two different transactions:\n'
'1. First transaction is to authorize the Marketplace contract '
'to spend {} ENG on your behalf.\n'
'2. Second transaction is the actual subscription for the '
'desired dataset'.format(price))
tx = self.eng_contract.functions.approve(
self.mkt_contract_address,
grains,
).buildTransaction(
{'from': address,
'nonce': self.web3.eth.getTransactionCount(address)}
)
signed_tx = self.sign_transaction(tx)
try:
tx_hash = '0x{}'.format(
bin_hex(self.web3.eth.sendRawTransaction(signed_tx))
)
print(
'\nThis is the TxHash for this transaction: {}'.format(tx_hash)
)
except Exception as e:
print('Unable to subscribe to data source: {}'.format(e))
return
self.check_transaction(tx_hash)
print('Waiting for the first transaction to succeed...')
while True:
try:
if self.web3.eth.getTransactionReceipt(tx_hash).status:
break
else:
print('\nTransaction failed. Aborting...')
return
except AttributeError:
pass
for i in range(0, 10):
print('.', end='', flush=True)
time.sleep(1)
print('\nFirst transaction successful!\n'
'Now processing second transaction.')
tx = self.mkt_contract.functions.subscribe(
Web3.toHex(dataset),
).buildTransaction({
'from': address,
'nonce': self.web3.eth.getTransactionCount(address)})
signed_tx = self.sign_transaction(tx)
try:
tx_hash = '0x{}'.format(bin_hex(
self.web3.eth.sendRawTransaction(signed_tx)))
print('\nThis is the TxHash for this transaction: '
'{}'.format(tx_hash))
except Exception as e:
print('Unable to subscribe to data source: {}'.format(e))
return
self.check_transaction(tx_hash)
print('Waiting for the second transaction to succeed...')
while True:
try:
if self.web3.eth.getTransactionReceipt(tx_hash).status:
break
else:
print('\nTransaction failed. Aborting...')
return
except AttributeError:
pass
for i in range(0, 10):
print('.', end='', flush=True)
time.sleep(1)
print('\nSecond transaction successful!\n'
'You have successfully subscribed to dataset {} with'
'address {}.\n'
'You can now ingest this dataset anytime during the '
'next month by running the following command:\n'
'catalyst marketplace ingest --dataset={}'.format(
dataset, address, dataset))
def process_temp_bundle(self, ds_name, path):
tmp_bundle = extract_bundle(path)
bundle_folder = get_data_source_folder(ds_name)
ensure_directory(bundle_folder)
if os.listdir(bundle_folder):
zsource = bcolz.ctable(rootdir=tmp_bundle, mode='r')
ztarget = bcolz.ctable(rootdir=bundle_folder, mode='r')
merge_bundles(zsource, ztarget)
else:
os.rename(tmp_bundle, bundle_folder)
pass
def ingest(self, ds_name=None, start=None, end=None, force_download=False):
if ds_name is None:
df_sets = self._list()
if df_sets.empty:
print('There are no datasets available yet.')
return
set_print_settings()
while True:
print(df_sets)
dataset_num = input('Choose the dataset you want to '
'ingest [0..{}]: '.format(
df_sets.size - 1))
try:
dataset_num = int(dataset_num)
except ValueError:
print('Enter a number between 0 and {}'.format(
df_sets.size - 1))
else:
if dataset_num not in range(0, df_sets.size):
print('Enter a number between 0 and {}'.format(
df_sets.size - 1))
else:
ds_name = df_sets.iloc[dataset_num]['dataset']
break
provider_info = self.mkt_contract.functions.getDataProviderInfo(
Web3.toHex(ds_name)
).call()
if not provider_info[4]:
print('The requested "{}" dataset is not registered in '
'the Data Marketplace.'.format(ds_name))
return
address, address_i = self.choose_pubaddr()
fns = self.mkt_contract.functions
check_sub = fns.checkAddressSubscription(
address, Web3.toHex(ds_name)
).call()
if check_sub[0] != address or self.to_text(check_sub[1]) != ds_name:
print('You are not subscribed to dataset "{}" with address {}. '
'Plese subscribe first.'.format(ds_name, address))
return
if not check_sub[5]:
print('Your subscription to dataset "{}" expired on {} UTC.'
'Please renew your subscription by running:\n'
'catalyst marketplace subscribe --dataset={}'.format(
ds_name,
pd.to_datetime(check_sub[4], unit='s', utc=True),
ds_name)
)
if 'key' in self.addresses[address_i]:
key = self.addresses[address_i]['key']
secret = self.addresses[address_i]['secret']
else:
key, secret = get_key_secret(address)
headers = get_signed_headers(ds_name, key, secret)
log.debug('Starting download of dataset for ingestion...')
r = requests.post(
'{}/marketplace/ingest'.format(AUTH_SERVER),
headers=headers,
stream=True,
)
if r.status_code == 200:
target_path = get_temp_bundles_folder()
try:
decoder = MultipartDecoder.from_response(r)
for part in decoder.parts:
h = part.headers[b'Content-Disposition'].decode('utf-8')
name = re.search(r'filename="(.*)"', h).group(1)
filename = os.path.join(target_path, name)
with open(filename, 'wb') as f:
f.write(part.content)
self.process_temp_bundle(ds_name, filename)
except NonMultipartContentTypeException:
response = r.json()
raise MarketplaceHTTPRequest(
request='ingest dataset',
error=response,
)
else:
raise MarketplaceHTTPRequest(
request='ingest dataset',
error=r.status_code,
)
log.info('{} ingested successfully'.format(ds_name))
def get_dataset(self, ds_name, start=None, end=None):
ds_name = ds_name.lower()
bundle_folder = get_data_source_folder(ds_name)
z = bcolz.ctable(rootdir=bundle_folder, mode='r')
df = z.todataframe()
df.set_index(['date', 'symbol'], drop=True, inplace=True)
return df
def clean(self, ds_name=None, data_frequency=None):
if ds_name is None:
mktplace_root = get_marketplace_folder()
folders = [os.path.basename(f.rstrip('/'))
for f in glob.glob('{}/*/'.format(mktplace_root))
if 'temp_bundles' not in f]
while True:
for idx, f in enumerate(folders):
print('{}\t{}'.format(idx, f))
dataset_num = input('Choose the dataset you want to '
'clean [0..{}]: '.format(
len(folders) - 1))
try:
dataset_num = int(dataset_num)
except ValueError:
print('Enter a number between 0 and {}'.format(
len(folders) - 1))
else:
if dataset_num not in range(0, len(folders)):
print('Enter a number between 0 and {}'.format(
len(folders) - 1))
else:
ds_name = folders[dataset_num]
break
ds_name = ds_name.lower()
if data_frequency is None:
folder = get_data_source_folder(ds_name)
else:
folder = get_bundle_folder(ds_name, data_frequency)
shutil.rmtree(folder)
pass
def create_metadata(self, key, secret, ds_name, data_frequency, desc,
has_history=True, has_live=True):
headers = get_signed_headers(ds_name, key, secret)
r = requests.post(
'{}/marketplace/register'.format(AUTH_SERVER),
json=dict(
ds_name=ds_name,
desc=desc,
data_frequency=data_frequency,
has_history=has_history,
has_live=has_live,
),
headers=headers,
)
if r.status_code != 200:
raise MarketplaceHTTPRequest(
request='register', error=r.status_code
)
if 'error' in r.json():
raise MarketplaceHTTPRequest(
request='upload file', error=r.json()['error']
)
def register(self):
while True:
desc = input('Enter the name of the dataset to register: ')
dataset = desc.lower()
provider_info = self.mkt_contract.functions.getDataProviderInfo(
Web3.toHex(dataset)
).call()
if provider_info[4]:
print('There is already a dataset registered under '
'the name "{}". Please choose a different '
'name.'.format(dataset))
else:
break
price = int(
input(
'Enter the price for a monthly subscription to '
'this dataset in ENG: '
)
)
while True:
freq = input('Enter the data frequency [daily, hourly, minute]: ')
if freq.lower() not in ('daily', 'hourly', 'minute'):
print('Not a valid frequency.')
else:
break
while True:
reg_pub = input(
'Does it include historical data? [default: Y]: '
) or 'y'
if reg_pub.lower() not in ('y', 'n'):
print('Please answer Y or N.')
else:
if reg_pub.lower() == 'y':
has_history = True
else:
has_history = False
break
while True:
reg_pub = input(
'Doest it include live data? [default: Y]: '
) or 'y'
if reg_pub.lower() not in ('y', 'n'):
print('Please answer Y or N.')
else:
if reg_pub.lower() == 'y':
has_live = True
else:
has_live = False
break
address, address_i = self.choose_pubaddr()
if 'key' in self.addresses[address_i]:
key = self.addresses[address_i]['key']
secret = self.addresses[address_i]['secret']
else:
key, secret = get_key_secret(address)
grains = to_grains(price)
tx = self.mkt_contract.functions.register(
Web3.toHex(dataset),
grains,
address,
).buildTransaction(
{'from': address,
'nonce': self.web3.eth.getTransactionCount(address)}
)
signed_tx = self.sign_transaction(tx)
try:
tx_hash = '0x{}'.format(
bin_hex(self.web3.eth.sendRawTransaction(signed_tx))
)
print(
'\nThis is the TxHash for this transaction: {}'.format(tx_hash)
)
except Exception as e:
print('Unable to register the requested dataset: {}'.format(e))
return
self.check_transaction(tx_hash)
print('Waiting for the transaction to succeed...')
while True:
try:
if self.web3.eth.getTransactionReceipt(tx_hash).status:
break
else:
print('\nTransaction failed. Aborting...')
return
except AttributeError:
pass
for i in range(0, 10):
print('.', end='', flush=True)
time.sleep(1)
print('\nWarming up the {} dataset'.format(dataset))
self.create_metadata(
key=key,
secret=secret,
ds_name=dataset,
data_frequency=freq,
desc=desc,
has_history=has_history,
has_live=has_live,
)
print('\n{} registered successfully'.format(dataset))
def publish(self, dataset, datadir, watch):
dataset = dataset.lower()
provider_info = self.mkt_contract.functions.getDataProviderInfo(
Web3.toHex(dataset)
).call()
if not provider_info[4]:
raise MarketplaceDatasetNotFound(dataset=dataset)
match = next(
(l for l in self.addresses if l['pubAddr'] == provider_info[0]),
None
)
if not match:
raise MarketplaceNoAddressMatch(
dataset=dataset,
address=provider_info[0])
print('Using address: {} to publish this dataset.'.format(
provider_info[0]))
if 'key' in match:
key = match['key']
secret = match['secret']
else:
key, secret = get_key_secret(provider_info[0])
headers = get_signed_headers(dataset, key, secret)
filenames = glob.glob(os.path.join(datadir, '*.csv'))
if not filenames:
raise MarketplaceNoCSVFiles(datadir=datadir)
files = []
for file in filenames:
files.append(('file', open(file, 'rb')))
r = requests.post('{}/marketplace/publish'.format(AUTH_SERVER),
files=files,
headers=headers)
if r.status_code != 200:
raise MarketplaceHTTPRequest(request='upload file',
error=r.status_code)
if 'error' in r.json():
raise MarketplaceHTTPRequest(request='upload file',
error=r.json()['error'])
print('Dataset {} uploaded successfully.'.format(dataset))
| true | true |
f72bd87c0f6308b1597913764a10b8ad4d63132c | 2,172 | py | Python | coevolution_transformer/model/msa_embeddings.py | microsoft/Protein-Folding | f534b2dd1e3f192fbcdadf234f25828c7f458a58 | [
"MIT"
] | 1 | 2021-11-12T03:00:28.000Z | 2021-11-12T03:00:28.000Z | coevolution_transformer/model/msa_embeddings.py | microsoft/Protein-Folding | f534b2dd1e3f192fbcdadf234f25828c7f458a58 | [
"MIT"
] | 1 | 2022-01-11T17:09:26.000Z | 2022-01-11T17:09:26.000Z | coevolution_transformer/model/msa_embeddings.py | microsoft/Protein-Folding | f534b2dd1e3f192fbcdadf234f25828c7f458a58 | [
"MIT"
] | 2 | 2021-11-10T11:42:11.000Z | 2021-12-09T05:37:09.000Z | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import math
import torch
from torch import nn
import torch.nn.functional as F
class PositionalEncoding(nn.Module):
    """Fixed sinusoidal positional-encoding lookup table.

    Builds the standard sin/cos table once and serves rows by index.
    """

    def __init__(self, d_model, max_len=1 << 13):
        super(PositionalEncoding, self).__init__()
        self.ninp = d_model
        positions = torch.arange(0, max_len, dtype=torch.float).unsqueeze(1)
        # Geometric frequency ladder: 10000^(-2i/d_model) for channel pair i.
        freqs = torch.exp(
            torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model)
        )
        table = torch.zeros(max_len, d_model)
        table[:, 0::2] = torch.sin(positions * freqs)
        table[:, 1::2] = torch.cos(positions * freqs)  # (L, C)
        # Buffer (not a Parameter): moves with .to()/.cuda() but is not trained.
        self.register_buffer("pe", table)

    def forward(self, idx):
        """Look up encodings for integer positions.

        idx: (B, L) position indices
        return: (B, L, C) positional encodings
        """
        return self.pe[idx]
class MSAEmbeddings(nn.Module):
    """Embed MSA windows together with the query sequence and add
    sinusoidal positional information."""

    def __init__(self, msa_gap, embed_dim, dropout):
        super(MSAEmbeddings, self).__init__()
        self.embed_dim = embed_dim
        # Frozen identity embedding: lookup acts as a one-hot encoder
        # over the 24 residue symbols.
        self.onehot = nn.Embedding(24, 24)
        self.onehot.weight.data = torch.eye(24)
        self.onehot.weight.requires_grad = False
        self.msa_embeddings = nn.Linear((msa_gap * 2 + 2) * 24 + 2, embed_dim)
        self.position_embeddings = PositionalEncoding(embed_dim)
        self.layer_norm = nn.LayerNorm(embed_dim)
        self.dropout = nn.Dropout(dropout)

    def forward(self, seq_ids, msa_ids, position_ids):
        """
        seq_ids: (B, L)
        msa_ids: (B, K, *, L)
        position_ids: (B, L)
        return: (B, K, L, C)
        """
        num_batch, num_align, _, seq_len = msa_ids.shape
        query_onehot = self.onehot(seq_ids)
        # Move the residue axis last so each position carries its features.
        columns = msa_ids.transpose(-2, -1)
        # Final two channels are boundary flags, kept as raw floats.
        boundary_feats = columns[..., -2:].float()
        align_onehot = self.onehot(columns[..., :-2]).reshape(
            num_batch, num_align, seq_len, -1)
        # Broadcast the query one-hot across all K alignments and stack
        # query / alignment / boundary features per position.
        features = torch.cat(
            [query_onehot[:, None].repeat(1, align_onehot.shape[1], 1, 1),
             align_onehot,
             boundary_feats],
            dim=-1)
        projected = self.msa_embeddings(features)
        positional = self.position_embeddings(position_ids)
        out = projected * math.sqrt(self.embed_dim) + positional[:, None]
        out = self.layer_norm(out)
        out = self.dropout(out)
        return out
| 36.2 | 98 | 0.603131 |
import math
import torch
from torch import nn
import torch.nn.functional as F
class PositionalEncoding(nn.Module):
def __init__(self, d_model, max_len=1 << 13):
super(PositionalEncoding, self).__init__()
self.ninp = d_model
pe = torch.zeros(max_len, d_model)
position = torch.arange(0, max_len, dtype=torch.float).unsqueeze(1)
div_term = torch.exp(torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model))
pe[:, 0::2] = torch.sin(position * div_term)
pe[:, 1::2] = torch.cos(position * div_term)
self.register_buffer("pe", pe)
def forward(self, idx):
return self.pe[idx]
class MSAEmbeddings(nn.Module):
def __init__(self, msa_gap, embed_dim, dropout):
super(MSAEmbeddings, self).__init__()
self.embed_dim = embed_dim
self.onehot = nn.Embedding(24, 24)
self.onehot.weight.data = torch.eye(24)
self.onehot.weight.requires_grad = False
self.msa_embeddings = nn.Linear((msa_gap * 2 + 2) * 24 + 2, embed_dim)
self.position_embeddings = PositionalEncoding(embed_dim)
self.layer_norm = nn.LayerNorm(embed_dim)
self.dropout = nn.Dropout(dropout)
def forward(self, seq_ids, msa_ids, position_ids):
B, K, _, L = msa_ids.shape
seq = self.onehot(seq_ids)
msa_ids = msa_ids.transpose(-2, -1)
boundary = msa_ids[..., -2:].float()
msa = self.onehot(msa_ids[..., :-2]).reshape(B, K, L, -1)
msa = torch.cat([seq[:, None].repeat(1, msa.shape[1], 1, 1), msa, boundary], dim=-1)
msa_emb = self.msa_embeddings(msa)
pos_emb = self.position_embeddings(position_ids)
embeddings = msa_emb * math.sqrt(self.embed_dim) + pos_emb[:, None]
embeddings = self.layer_norm(embeddings)
embeddings = self.dropout(embeddings)
return embeddings
| true | true |
f72bd8ae65fb7e4bb2f7694fe3f07aad90af90c7 | 5,513 | py | Python | pywavefront/material.py | elgrandt/ShooterInc | 0552e563d9ea81b0e0a0c4be1648412aa52c56e6 | [
"MIT"
] | null | null | null | pywavefront/material.py | elgrandt/ShooterInc | 0552e563d9ea81b0e0a0c4be1648412aa52c56e6 | [
"MIT"
] | null | null | null | pywavefront/material.py | elgrandt/ShooterInc | 0552e563d9ea81b0e0a0c4be1648412aa52c56e6 | [
"MIT"
] | null | null | null | # ----------------------------------------------------------------------------
# PyWavefront
# Copyright (c) 2013 Kurt Yoder
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of PyWavefront nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
from OpenGL.GL import *
import pywavefront.parser as parser
import pywavefront.texture as texture
class Material(object):
    """A wavefront material: lighting components, optional texture, and the
    interleaved vertex data (GL_T2F_N3F_V3F) rendered with it."""

    def __init__(self, name):
        self.name = name
        self.diffuse = [.8, .8, .8, 1.]
        self.ambient = [.2, .2, .2, 1.]
        self.specular = [0., 0., 0., 1.]
        self.emissive = [0., 0., 0., 1.]
        self.shininess = 0.
        self.texture = None
        # Interleaved array of floats in GL_T2F_N3F_V3F format
        self.vertices = []
        self.gl_floats = None

    def pad_light(self, values):
        """Accept a sequence of up to 4 values and return a list of 4
        floats, zero-padded.

        Bug fix: the original appended to *values* in place, mutating the
        caller's list (and, combined with the ``values=[]`` defaults on
        the setters below, a shared mutable default). Work on a copy.
        """
        padded = list(values)
        while len(padded) < 4:
            padded.append(0.)
        return list(map(float, padded))

    def set_alpha(self, alpha):
        """Set alpha/last value on all four lighting attributes."""
        alpha = float(alpha)
        self.diffuse[3] = alpha
        self.ambient[3] = alpha
        self.specular[3] = alpha
        self.emissive[3] = alpha

    # Immutable () defaults replace the original mutable [] defaults.
    def set_diffuse(self, values=()):
        self.diffuse = self.pad_light(values)

    def set_ambient(self, values=()):
        self.ambient = self.pad_light(values)

    def set_specular(self, values=()):
        self.specular = self.pad_light(values)

    def set_emissive(self, values=()):
        self.emissive = self.pad_light(values)

    def set_texture(self, path):
        self.texture = texture.Texture(path)

    def unset_texture(self):
        self.texture = None

    def gl_light(self, lighting):
        """Return a GLfloat array of length 4 for the 4 lighting values."""
        return (GLfloat * 4)(*(lighting))

    def draw(self, face=GL_FRONT_AND_BACK):
        """Apply this material's GL state and draw its vertex array."""
        glEnable(GL_TEXTURE_2D)
        glColor4f(1, 1, 1, 1)
        glMaterialfv(face, GL_DIFFUSE, self.gl_light(self.diffuse))
        glMaterialfv(face, GL_AMBIENT, self.gl_light(self.ambient))
        glMaterialfv(face, GL_SPECULAR, self.gl_light(self.specular))
        glMaterialfv(face, GL_EMISSION, self.gl_light(self.emissive))
        glMaterialf(face, GL_SHININESS, self.shininess)
        if self.texture:
            self.texture.draw()
        if self.gl_floats is None:
            # Lazily convert the Python list to a C float array; 8 floats
            # per vertex in the T2F_N3F_V3F layout.
            self.gl_floats = (GLfloat * len(self.vertices))(*self.vertices)
            self.triangle_count = len(self.vertices) / 8
        glInterleavedArrays(GL_T2F_N3F_V3F, 0, self.gl_floats)
        glDrawArrays(GL_TRIANGLES, 0, int(self.triangle_count))
        glDisable(GL_TEXTURE_2D)
class MaterialParser(parser.Parser):
    """Parses the lines of a .mtl materials definition file."""

    def __init__(self, file_path, path):
        self.materials = {}
        self.path = path
        self.this_material = None
        self.read_file(file_path)

    def parse_newmtl(self, args):
        # A new material scopes all subsequent attribute lines.
        (name,) = args
        material = Material(name)
        self.this_material = material
        self.materials[material.name] = material

    def parse_Kd(self, args):
        self.this_material.set_diffuse(args)

    def parse_Ka(self, args):
        self.this_material.set_ambient(args)

    def parse_Ks(self, args):
        self.this_material.set_specular(args)

    def parse_Ke(self, args):
        self.this_material.set_emissive(args)

    def parse_Ns(self, args):
        (shininess,) = args
        self.this_material.shininess = float(shininess)

    def parse_d(self, args):
        (alpha,) = args
        self.this_material.set_alpha(alpha)

    def parse_map_Kd(self, args):
        (texture_file,) = args
        self.this_material.set_texture(self.path + "textures/" + texture_file)

    def parse_Ni(self, args):
        # optical density: unimplemented
        return

    def parse_illum(self, args):
        # illumination model: unimplemented
        return
| 35.339744 | 79 | 0.649737 |
from OpenGL.GL import *
import pywavefront.parser as parser
import pywavefront.texture as texture
class Material(object):
def __init__(self, name):
self.name = name
self.diffuse = [.8, .8, .8, 1.]
self.ambient = [.2, .2, .2, 1.]
self.specular = [0., 0., 0., 1.]
self.emissive = [0., 0., 0., 1.]
self.shininess = 0.
self.texture = None
self.vertices = []
self.gl_floats = None
def pad_light(self, values):
while len(values) < 4:
values.append(0.)
return list(map(float, values))
def set_alpha(self, alpha):
alpha = float(alpha)
self.diffuse[3] = alpha
self.ambient[3] = alpha
self.specular[3] = alpha
self.emissive[3] = alpha
def set_diffuse(self, values=[]):
self.diffuse = self.pad_light(values)
def set_ambient(self, values=[]):
self.ambient = self.pad_light(values)
def set_specular(self, values=[]):
self.specular = self.pad_light(values)
def set_emissive(self, values=[]):
self.emissive = self.pad_light(values)
def set_texture(self, path):
self.texture = texture.Texture(path)
def unset_texture(self):
self.texture = None
def gl_light(self, lighting):
return (GLfloat * 4)(*(lighting))
def draw(self, face=GL_FRONT_AND_BACK):
glEnable(GL_TEXTURE_2D)
glColor4f(1,1,1,1)
glMaterialfv(face, GL_DIFFUSE, self.gl_light(self.diffuse) )
glMaterialfv(face, GL_AMBIENT, self.gl_light(self.ambient) )
glMaterialfv(face, GL_SPECULAR, self.gl_light(self.specular) )
glMaterialfv(face, GL_EMISSION, self.gl_light(self.emissive) )
glMaterialf(face, GL_SHININESS, self.shininess)
if self.texture:
self.texture.draw()
if self.gl_floats is None:
self.gl_floats = (GLfloat * len(self.vertices))(*self.vertices)
self.triangle_count = len(self.vertices) / 8
glInterleavedArrays(GL_T2F_N3F_V3F, 0, self.gl_floats)
glDrawArrays(GL_TRIANGLES, 0, int(self.triangle_count))
glDisable(GL_TEXTURE_2D)
class MaterialParser(parser.Parser):
def __init__(self, file_path, path):
self.materials = {}
self.path = path
self.this_material = None
self.read_file(file_path)
def parse_newmtl(self, args):
[newmtl] = args
self.this_material = Material(newmtl)
self.materials[self.this_material.name] = self.this_material
def parse_Kd(self, args):
self.this_material.set_diffuse(args)
def parse_Ka(self, args):
self.this_material.set_ambient(args)
def parse_Ks(self, args):
self.this_material.set_specular(args)
def parse_Ke(self, args):
self.this_material.set_emissive(args)
def parse_Ns(self, args):
[Ns] = args
self.this_material.shininess = float(Ns)
def parse_d(self, args):
[d] = args
self.this_material.set_alpha(d)
def parse_map_Kd(self, args):
[Kd] = args
self.this_material.set_texture(self.path + "textures/" + Kd)
def parse_Ni(self, args):
return
def parse_illum(self, args):
return
| true | true |
f72bd8c09290fd4650b93bed148f010025633ae2 | 5,361 | py | Python | hla_hed.py | sunhuaibo/HLA-HED | bb0672e62a20baad80f5f154c9220bf8e5b8b28c | [
"MIT"
] | null | null | null | hla_hed.py | sunhuaibo/HLA-HED | bb0672e62a20baad80f5f154c9220bf8e5b8b28c | [
"MIT"
] | null | null | null | hla_hed.py | sunhuaibo/HLA-HED | bb0672e62a20baad80f5f154c9220bf8e5b8b28c | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding=utf-8 -*-
# =====================================
# Author: Huaibo Sun
# E-mail: huaibo_sun@foxmail.com
# date: 2022-03-31
# =====================================
import os
import pandas as pd
from Bio import SeqIO
from pathlib import Path
from itertools import combinations
from argparse import ArgumentParser, RawDescriptionHelpFormatter
def get_opt():
    """
    Input HLA file format
    Sample A1 A2 B1 B2 C1 C2
    p1 A*01:01 A*01:03 B*07:01 B*07:02 C*01:01 C*01:02
    p2 A*01:01 A*01:03 B*07:01 B*07:02 C*01:01 C*01:02
    If you use this tool, please cite the following three papers.
    Grantham R. Amino acid difference formula to help explain protein evolution. Science. 1974 Sep 6;185(4154):862-4. doi: 10.1126/science.185.4154.862. PMID: 4843792.
    Pierini F, Lenz TL. Divergent Allele Advantage at Human MHC Genes: Signatures of Past and Ongoing Selection. Mol Biol Evol. 2018 Sep 1;35(9):2145-2158. doi: 10.1093/molbev/msy116. PMID: 29893875; PMCID: PMC6106954.
    Chowell D, Krishna C, Pierini F, Makarov V, Rizvi NA, Kuo F, Morris LGT, Riaz N, Lenz TL, Chan TA. Evolutionary divergence of HLA class I genotype impacts efficacy of cancer immunotherapy. Nat Med. 2019 Nov;25(11):1715-1720. doi: 10.1038/s41591-019-0639-4. Epub 2019 Nov 7. PMID: 31700181; PMCID: PMC7938381.
    """
    # NOTE: the docstring above is user-facing — it is passed verbatim as the
    # argparse epilog below, so edit it with care.
    # Directory containing this script; used to locate bundled database files.
    script = os.path.dirname(os.path.abspath(__file__))
    # RawDescriptionHelpFormatter preserves the epilog's line breaks in --help.
    parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter, epilog=get_opt.__doc__)
    parser.add_argument("-d", default=f"{script}/database/grantham_matrix.txt", help="Distance matrix for all amino acids, default: database/grantham_matrix.txt. (reference: DOI: 10.1126/science.185.4154.862)")
    parser.add_argument("-f", default=f"{script}/database/ABC_prot.fa", help="Amino acid sequences in fasta format, default: database/ABC_prot.fa.")
    parser.add_argument("-i", required=True, help="Input file of tab-delimited with individual HLA typing.")
    # -p switches from per-sample HED to pairwise HED across all alleles seen.
    parser.add_argument("-p", action="store_true", help="Paired HED score.")
    parser.add_argument("-o", required=True, help="Output file name.")
    parse = parser.parse_args()
    return(parse)
def check_file(infile):
    """Raise if *infile* does not exist on disk.

    Bug fix: the original wrote ``if not infile.exists:`` — testing the
    bound method object itself, which is always truthy, so the check
    never fired. The method must be called.
    """
    if not infile.exists():
        raise Exception(f"{str(infile)} file does not exist")
def read_fasta(infile):
    """Read an aligned protein FASTA file.

    :param infile: Path to the fasta file.
    :return: dict mapping sequence id -> amino-acid string.
    :raises Exception: if the file is missing, or the sequences are not
        all the same length (an alignment is required for the
        position-by-position distance calculation).
    """
    infile = Path(infile)
    check_file(infile)
    records = SeqIO.parse(infile, "fasta")
    seq_array = {record.id: str(record.seq) for record in records}
    # Collapse all lengths into a set: exactly one distinct length means
    # the sequences are aligned. (Error message grammar fixed.)
    if len({len(seq) for seq in seq_array.values()}) != 1:
        raise Exception("Input sequences are not all of equal length")
    return seq_array
def read_aa(infile):
    """Load the tab-delimited amino-acid distance matrix.

    :param infile: Path to the matrix file (first row and column are
        amino-acid labels).
    :return: nested dict so that result[aa1][aa2] is the distance.
    """
    matrix_path = Path(infile)
    check_file(matrix_path)
    matrix = pd.read_csv(matrix_path, header=0, sep="\t", index_col=0)
    return matrix.to_dict()
def calculate_distange(hla1, hla2, sequence, distance):
    """Mean per-residue distance between two alleles' aligned sequences.

    (Note: the name's "distange" typo is kept — callers depend on it.)

    :param hla1: First allele name.
    :param hla2: Second allele name.
    :param sequence: dict allele -> aligned amino-acid string.
    :param distance: nested dict, distance[aa1][aa2] -> numeric distance.
    :return: mean distance, or the string "NA" when either allele has no
        sequence available.
    """
    seq1 = sequence.get(hla1, False)
    seq2 = sequence.get(hla2, False)
    if not seq1 or not seq2:
        return "NA"
    length = len(seq1)
    total = sum(distance[seq1[i]][seq2[i]] for i in range(length))
    return total / length
def main():
    """Compute HLA evolutionary divergence (HED) scores and write a TSV.

    With -p: one row per unordered pair of distinct alleles observed.
    Without -p: one row per sample with per-locus (A/B/C) and mean HED.
    """
    opt = get_opt()
    # Aligned allele sequences and the amino-acid distance matrix.
    seq_array = read_fasta(opt.f)
    aa_pairwise_dis = read_aa(opt.d)
    infile = Path(opt.i)
    outfile = Path(opt.o)
    check_file(infile)
    df = pd.read_csv(infile, header=0, sep="\t")
    if opt.p:
        # Pairwise mode: pool every allele seen across all samples/loci
        # and score each unordered pair once.
        df2 = pd.melt(df, id_vars=["Sample"], value_vars=["A1", "A2", "B1","B2", "C1","C2"])
        alleles = set(df2["value"].values.tolist())
        alleles_pair = combinations(alleles, 2)
        outheader = ["Allele1","Allele2","HED"]
        with open(outfile, "w") as fw:
            fw.write("\t".join(outheader) + "\n")
            for allele1, allele2 in alleles_pair:
                dis_hla_pair = calculate_distange(allele1, allele2, seq_array, aa_pairwise_dis)
                outline = [allele1, allele2, dis_hla_pair]
                # str() also turns a numeric HED (or the "NA" marker) into text.
                outline = [str(x) for x in outline]
                fw.write("\t".join(outline) + "\n")
    else:
        # Per-sample mode: HED per locus plus the mean across A, B and C.
        outheader = ["Sample","HED_A","HED_B","HED_C","Mean_HE"]
        with open(outfile, "w") as fw:
            fw.write("\t".join(outheader) + "\n")
            for _, line in df.iterrows():
                hla_a1 = line["A1"]
                hla_a2 = line["A2"]
                dis_hla_a = calculate_distange(hla_a1, hla_a2, seq_array, aa_pairwise_dis)
                hla_b1 = line["B1"]
                hla_b2 = line["B2"]
                dis_hla_b = calculate_distange(hla_b1, hla_b2, seq_array, aa_pairwise_dis)
                hla_c1 = line["C1"]
                hla_c2 = line["C2"]
                dis_hla_c = calculate_distange(hla_c1, hla_c2, seq_array, aa_pairwise_dis)
                # Any missing allele sequence poisons the mean to "NA".
                if dis_hla_a == "NA" or dis_hla_b == "NA" or dis_hla_c == "NA":
                    dis_mean = "NA"
                else:
                    dis_mean = (dis_hla_a + dis_hla_b + dis_hla_c) / 3
                outline = [line["Sample"], dis_hla_a, dis_hla_b, dis_hla_c, dis_mean]
                outline = [str(x) for x in outline]
                fw.write("\t".join(outline) + "\n")
if __name__ == "__main__":
main() | 39.711111 | 312 | 0.610707 |
import os
import pandas as pd
from Bio import SeqIO
from pathlib import Path
from itertools import combinations
from argparse import ArgumentParser, RawDescriptionHelpFormatter
def get_opt():
script = os.path.dirname(os.path.abspath(__file__))
parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter, epilog=get_opt.__doc__)
parser.add_argument("-d", default=f"{script}/database/grantham_matrix.txt", help="Distance matrix for all amino acids, default: database/grantham_matrix.txt. (reference: DOI: 10.1126/science.185.4154.862)")
parser.add_argument("-f", default=f"{script}/database/ABC_prot.fa", help="Amino acid sequences in fasta format, default: database/ABC_prot.fa.")
parser.add_argument("-i", required=True, help="Input file of tab-delimited with individual HLA typing.")
parser.add_argument("-p", action="store_true", help="Paired HED score.")
parser.add_argument("-o", required=True, help="Output file name.")
parse = parser.parse_args()
return(parse)
def check_file(infile):
if not infile.exists:
raise Exception(f"{str(infile)} file is not exist")
def read_fasta(infile):
infile = Path(infile)
check_file(infile)
record = SeqIO.parse(infile, "fasta")
seq_array = {seq.id: str(seq.seq) for seq in record}
seq_len = [len(value) for value in seq_array.values()]
if len(set(seq_len)) != 1:
raise Exception("Input sequences length is not equality")
return(seq_array)
def read_aa(infile):
infile = Path(infile)
check_file(infile)
df = pd.read_csv(infile, header=0, sep="\t", index_col=0)
aa_pairwise_dis = df.to_dict()
return(aa_pairwise_dis)
def calculate_distange(hla1, hla2, sequence, distance):
seq_hla1 = sequence.get(hla1, False)
seq_hla2 = sequence.get(hla2, False)
if not seq_hla1 or not seq_hla2:
return("NA")
else:
seq_len = len(seq_hla1)
dis = 0
for i in range(seq_len):
aa1 = seq_hla1[i]
aa2 = seq_hla2[i]
dis += distance[aa1][aa2]
dis = dis / seq_len
return(dis)
def main():
opt = get_opt()
seq_array = read_fasta(opt.f)
aa_pairwise_dis = read_aa(opt.d)
infile = Path(opt.i)
outfile = Path(opt.o)
check_file(infile)
df = pd.read_csv(infile, header=0, sep="\t")
if opt.p:
df2 = pd.melt(df, id_vars=["Sample"], value_vars=["A1", "A2", "B1","B2", "C1","C2"])
alleles = set(df2["value"].values.tolist())
alleles_pair = combinations(alleles, 2)
outheader = ["Allele1","Allele2","HED"]
with open(outfile, "w") as fw:
fw.write("\t".join(outheader) + "\n")
for allele1, allele2 in alleles_pair:
dis_hla_pair = calculate_distange(allele1, allele2, seq_array, aa_pairwise_dis)
outline = [allele1, allele2, dis_hla_pair]
outline = [str(x) for x in outline]
fw.write("\t".join(outline) + "\n")
else:
outheader = ["Sample","HED_A","HED_B","HED_C","Mean_HE"]
with open(outfile, "w") as fw:
fw.write("\t".join(outheader) + "\n")
for _, line in df.iterrows():
hla_a1 = line["A1"]
hla_a2 = line["A2"]
dis_hla_a = calculate_distange(hla_a1, hla_a2, seq_array, aa_pairwise_dis)
hla_b1 = line["B1"]
hla_b2 = line["B2"]
dis_hla_b = calculate_distange(hla_b1, hla_b2, seq_array, aa_pairwise_dis)
hla_c1 = line["C1"]
hla_c2 = line["C2"]
dis_hla_c = calculate_distange(hla_c1, hla_c2, seq_array, aa_pairwise_dis)
if dis_hla_a == "NA" or dis_hla_b == "NA" or dis_hla_c == "NA":
dis_mean = "NA"
else:
dis_mean = (dis_hla_a + dis_hla_b + dis_hla_c) / 3
outline = [line["Sample"], dis_hla_a, dis_hla_b, dis_hla_c, dis_mean]
outline = [str(x) for x in outline]
fw.write("\t".join(outline) + "\n")
if __name__ == "__main__":
main() | true | true |
f72bd93c4d17e05aa0d2db88c1b2ffb816c8ad18 | 2,936 | py | Python | sdk/monitor/azure-monitor-query/setup.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | 1 | 2022-03-09T08:59:13.000Z | 2022-03-09T08:59:13.000Z | sdk/monitor/azure-monitor-query/setup.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | null | null | null | sdk/monitor/azure-monitor-query/setup.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import re
import os.path
from io import open
from setuptools import find_packages, setup
# Change the PACKAGE_NAME only to change folder and different name
PACKAGE_NAME = "azure-monitor-query"
PACKAGE_PPRINT_NAME = "Azure Monitor Query"

# Derive the on-disk folder ("a/b/c") and dotted namespace ("a.b.c").
package_folder_path = PACKAGE_NAME.replace('-', '/')
namespace_name = PACKAGE_NAME.replace('-', '.')

# The legacy monolithic 'azure' package (v0.x) is incompatible with this
# package; only those old versions exposed a __version__ attribute, so its
# presence identifies them.
try:
    import azure
    try:
        ver = azure.__version__
        raise Exception(
            'This package is incompatible with azure=={}. '.format(ver) +
            'Uninstall it with "pip uninstall azure".'
        )
    except AttributeError:
        pass
except ImportError:
    pass

# Extract VERSION from version.py (or _version.py) without importing the
# package — the same trick 'requests' uses.
version_file = os.path.join(package_folder_path, 'version.py')
if not os.path.exists(version_file):
    version_file = os.path.join(package_folder_path, '_version.py')
with open(version_file, 'r') as handle:
    version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
                        handle.read(), re.MULTILINE).group(1)

if not version:
    raise RuntimeError('Cannot find version information')

with open('README.md', encoding='utf-8') as handle:
    readme = handle.read()
with open('CHANGELOG.md', encoding='utf-8') as handle:
    changelog = handle.read()

setup(
    name=PACKAGE_NAME,
    version=version,
    description='Microsoft {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
    long_description=readme + '\n\n' + changelog,
    long_description_content_type='text/markdown',
    license='MIT License',
    author='Microsoft Corporation',
    author_email='azpysdkhelp@microsoft.com',
    url='https://github.com/Azure/azure-sdk-for-python',
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        'Programming Language :: Python',
        'Programming Language :: Python :: 3 :: Only',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: 3.10',
        'License :: OSI Approved :: MIT License',
    ],
    python_requires=">=3.6",
    zip_safe=False,
    packages=find_packages(exclude=[
        'tests',
        'samples',
        # Exclude packages that will be covered by PEP420 or nspkg
        'azure',
        'azure.monitor',
    ]),
    install_requires=[
        'msrest>=0.6.19',
        'azure-core<2.0.0,>=1.12.0',
    ]
)
| 33.363636 | 85 | 0.609673 |
import re
import os.path
from io import open
from setuptools import find_packages, setup
# Package identity; the pretty-print name is used in the PyPI description.
PACKAGE_NAME = "azure-monitor-query"
PACKAGE_PPRINT_NAME = "Azure Monitor Query"
# "azure-monitor-query" -> "azure/monitor/query" (folder) and
# "azure.monitor.query" (dotted namespace).
package_folder_path = PACKAGE_NAME.replace('-', '/')
namespace_name = PACKAGE_NAME.replace('-', '.')
# Fail fast if the deprecated monolithic 'azure' package (v0.x, which had a
# __version__ attribute) is installed; it conflicts with namespace packages.
try:
    import azure
    try:
        ver = azure.__version__
        raise Exception(
            'This package is incompatible with azure=={}. '.format(ver) +
            'Uninstall it with "pip uninstall azure".'
        )
    except AttributeError:
        pass
except ImportError:
    pass
# Version extraction inspired from 'requests'
with open(os.path.join(package_folder_path, 'version.py')
          if os.path.exists(os.path.join(package_folder_path, 'version.py'))
          else os.path.join(package_folder_path, '_version.py'), 'r') as fd:
    version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
                        fd.read(), re.MULTILINE).group(1)
if not version:
    raise RuntimeError('Cannot find version information')
# README + CHANGELOG become the PyPI long description.
with open('README.md', encoding='utf-8') as f:
    readme = f.read()
with open('CHANGELOG.md', encoding='utf-8') as f:
    changelog = f.read()
setup(
    name=PACKAGE_NAME,
    version=version,
    description='Microsoft {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
    long_description=readme + '\n\n' + changelog,
    long_description_content_type='text/markdown',
    license='MIT License',
    author='Microsoft Corporation',
    author_email='azpysdkhelp@microsoft.com',
    url='https://github.com/Azure/azure-sdk-for-python',
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        'Programming Language :: Python',
        'Programming Language :: Python :: 3 :: Only',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: 3.10',
        'License :: OSI Approved :: MIT License',
    ],
    python_requires=">=3.6",
    zip_safe=False,
    packages=find_packages(exclude=[
        'tests',
        'samples',
        # Exclude packages that will be covered by PEP420 or nspkg
        'azure',
        'azure.monitor',
    ]),
    install_requires=[
        'msrest>=0.6.19',
        'azure-core<2.0.0,>=1.12.0',
    ]
)
| true | true |
f72bd93f9a0200f463455bbe0367f69059d57081 | 1,045 | py | Python | src/serialize_tree.py | kemingy/daily-coding-problem | 0839311ec0848f8f0b4a9edba817ecceb8f944a0 | [
"Unlicense"
] | 3 | 2019-03-06T03:14:56.000Z | 2020-01-07T16:00:48.000Z | src/serialize_tree.py | kemingy/daily-coding-problem | 0839311ec0848f8f0b4a9edba817ecceb8f944a0 | [
"Unlicense"
] | null | null | null | src/serialize_tree.py | kemingy/daily-coding-problem | 0839311ec0848f8f0b4a9edba817ecceb8f944a0 | [
"Unlicense"
] | null | null | null | # Given the root to a binary tree, implement serialize(root), which serializes
# the tree into a string, and deserialize(s), which deserializes the string back
# into the tree.
class Node:
    """A binary-tree node: a value plus optional left/right children."""
    def __init__(self, val, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right
def serialize(node):
    """Flatten a binary tree into a level-order (BFS) list.

    Each level is appended left to right; an absent child is recorded as
    None.  NOTE(review): None entries emit no children of their own, so for
    ragged trees the output is not strictly heap-indexed the way
    deserialize() assumes — confirm the intended round-trip domain.
    """
    flat = []
    frontier = [node]
    while frontier:
        next_frontier = []
        for current in frontier:
            if not current:
                flat.append(None)
                continue
            flat.append(current.val)
            next_frontier.append(current.left)
            next_frontier.append(current.right)
        frontier = next_frontier
    return flat
def deserialize(node, index=0):
    """Rebuild a binary tree from the level-order list made by serialize().

    Treats the list as heap-ordered: the children of the entry at `index`
    live at 2*index+1 and 2*index+2.

    Bug fix: a None entry is a placeholder for an absent child, so it now
    deserializes back to a missing node (None) instead of a Node carrying a
    None value — making serialize/deserialize an actual round trip.

    Arguments:
        node  -- flat list of values in heap order
        index -- position of the current subtree root within `node`

    Returns:
        the reconstructed Node, or None for an empty/absent subtree.
    """
    # Out of range, or an explicit placeholder: no node here.
    if not node[index:] or node[index] is None:
        return None
    return Node(node[index],
                deserialize(node, index * 2 + 1),
                deserialize(node, index * 2 + 2))
if __name__ == '__main__':
    # Smoke test: serialize a small tree, then check the round trip
    # restores the left-left grandchild's value.
    node = Node('root', Node('left', Node('left.left')), Node('right'))
    print(serialize(node))
    assert deserialize(serialize(node)).left.left.val == 'left.left'
| 25.487805 | 89 | 0.579904 |
class Node:
def __init__(self, val, left=None, right=None):
self.val = val
self.left = left
self.right = right
def serialize(node):
ans = []
level = [node]
while level:
children = []
for n in level:
if n:
children.append(n.left)
children.append(n.right)
ans.append(n.val if n else None)
level = children
return ans
def deserialize(node, index=0):
if not node[index:]:
return None
root = Node(node[index], deserialize(node, index*2+1), deserialize(node, index*2+2))
return root
if __name__ == '__main__':
node = Node('root', Node('left', Node('left.left')), Node('right'))
print(serialize(node))
assert deserialize(serialize(node)).left.left.val == 'left.left'
| true | true |
f72bd97572c539099427f5e9007176593b7fce2c | 470 | py | Python | test_project/views/response_types/http_redirect.py | korantu/lona | 5039fa59f37cc32b9c789753af2ed8a8670ab611 | [
"MIT"
] | 230 | 2021-08-15T20:46:24.000Z | 2022-03-30T10:17:43.000Z | test_project/views/response_types/http_redirect.py | korantu/lona | 5039fa59f37cc32b9c789753af2ed8a8670ab611 | [
"MIT"
] | 176 | 2021-08-18T08:19:37.000Z | 2022-03-29T16:45:06.000Z | test_project/views/response_types/http_redirect.py | korantu/lona | 5039fa59f37cc32b9c789753af2ed8a8670ab611 | [
"MIT"
] | 13 | 2021-08-20T10:35:04.000Z | 2022-01-17T15:49:40.000Z | from lona.html import Strong, Div, H2, P
from lona.view import LonaView
class HTTPRedirectView(LonaView):
    """Show a 3-2-1 countdown, then HTTP-redirect the client to '/'."""

    def handle_request(self, request):
        countdown = Strong()
        page = Div(
            H2('Redirect'),
            P('You will be HTTP redirected in ', countdown, ' seconds'),
        )
        remaining = 3
        while remaining > 0:
            # Update the countdown widget and re-render once per second.
            countdown.set_text(str(remaining))
            self.show(page)
            self.sleep(1)
            remaining -= 1
        # Returning this dict makes Lona answer with an HTTP redirect.
        return {
            'http_redirect': '/',
        }
| 19.583333 | 64 | 0.497872 | from lona.html import Strong, Div, H2, P
from lona.view import LonaView
class HTTPRedirectView(LonaView):
def handle_request(self, request):
s = Strong()
html = Div(
H2('Redirect'),
P('You will be HTTP redirected in ', s, ' seconds'),
)
for i in [3, 2, 1]:
s.set_text(str(i))
self.show(html)
self.sleep(1)
return {
'http_redirect': '/',
}
| true | true |
f72bd9dea41c09da28f2a59df1bc45565df5f22d | 1,064 | py | Python | products/migrations/0003_reviews.py | ankit-ak/django-ecommerce-1 | 248127526c03c7c0f25a2df84365a0d0199b9693 | [
"MIT"
] | 4 | 2021-04-06T16:50:57.000Z | 2022-03-02T00:50:44.000Z | products/migrations/0003_reviews.py | ankit-ak/django-ecommerce-1 | 248127526c03c7c0f25a2df84365a0d0199b9693 | [
"MIT"
] | null | null | null | products/migrations/0003_reviews.py | ankit-ak/django-ecommerce-1 | 248127526c03c7c0f25a2df84365a0d0199b9693 | [
"MIT"
] | 7 | 2021-02-22T08:07:20.000Z | 2022-03-06T10:17:28.000Z | # Generated by Django 2.2.11 on 2020-04-01 18:44
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: adds the Reviews model (a 1-5 star rating
    # plus free-text review, linked to a product and to the reviewing user).
    dependencies = [
        # Reviews.user targets whatever model AUTH_USER_MODEL points at.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('products', '0002_auto_20200314_0741'),
    ]
    operations = [
        migrations.CreateModel(
            name='Reviews',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Rating defaults to the middle of the 1-5 scale.
                ('rating', models.IntegerField(choices=[(1, 'Poor'), (2, 'Below Average'), (3, 'Average'), (4, 'Better than Average'), (5, 'Excellent')], default=3)),
                ('review', models.TextField()),
                # Deleting a product or a user cascades to its reviews.
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reviews', to='products.Product')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| 39.407407 | 166 | 0.631579 |
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('products', '0002_auto_20200314_0741'),
]
operations = [
migrations.CreateModel(
name='Reviews',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('rating', models.IntegerField(choices=[(1, 'Poor'), (2, 'Below Average'), (3, 'Average'), (4, 'Better than Average'), (5, 'Excellent')], default=3)),
('review', models.TextField()),
('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reviews', to='products.Product')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
| true | true |
f72bda1f9861b327f93273a74d2beaf0c1884dd9 | 1,026 | py | Python | INBa/2015/Sarocvashin_M/task_4_23.py | YukkaSarasti/pythonintask | eadf4245abb65f4400a3bae30a4256b4658e009c | [
"Apache-2.0"
] | null | null | null | INBa/2015/Sarocvashin_M/task_4_23.py | YukkaSarasti/pythonintask | eadf4245abb65f4400a3bae30a4256b4658e009c | [
"Apache-2.0"
] | null | null | null | INBa/2015/Sarocvashin_M/task_4_23.py | YukkaSarasti/pythonintask | eadf4245abb65f4400a3bae30a4256b4658e009c | [
"Apache-2.0"
] | null | null | null | # Задача 4. Вариант 23
# Напишите программу, которая выводит имя, под которым скрывается Илья Арнольдович Файзильберг. Дополнительно необходимо вывести область интересов указанной личности, место рождения, годы рождения и смерти (если человек умер), вычислить возраст на данный момент (или момент смерти). Для хранения всех необходимых данных требуется использовать переменные. После вывода информации программа должна дожидаться пока пользователь нажмет Enter для выхода.
# Сароквашин Максим
# 25.02.2016
# Biographical data for Ilya Faynzilberg (pen name: Ilya Ilf).
name = "Илья Арнольдович Файзильберг"
birthplace = "Одесса,СССР"
birthyear = 1897
deathyear = 1937
age = deathyear - birthyear  # age at the moment of death
print(f"{name}" + " наиболее известен как Илья Ильф - русский советский писатель, журналист и сценарист. Соавтор Евгения Петрова")
interest = "Писатель"
print(f"Место рождения: {birthplace}")
print(f"Год рождения: {birthyear}")
print(f"Год смерти: {deathyear}")
print(f"Возраст смерти: {age}")
print(f"Область интересов: {interest}")
# Wait for Enter before the console window closes.
input("\nДля выхода нажмите Enter")
| 46.636364 | 449 | 0.783626 |
name = "Илья Арнольдович Файзильберг"
birthplace = "Одесса,СССР"
birthyear = int (1897)
deathyear = int (1937)
age = int (deathyear - birthyear)
interest = "Писатель"
print(name+" наиболее известен как Илья Ильф - русский советский писатель, журналист и сценарист. Соавтор Евгения Петрова")
print("Место рождения: "+birthplace)
print("Год рождения: "+str(birthyear))
print("Год смерти: "+str(deathyear))
print("Возраст смерти: "+str(age))
print("Область интересов: "+interest)
input("\nДля выхода нажмите Enter")
| true | true |
f72bdb03716d1c420cd5c2e0dab725ff70c1358a | 8,629 | py | Python | src/python3/sdp/math/interpolation.py | LeiShi/Synthetic-Diagnostics-Platform | 870120d3fd14b2a3c89c6e6e85625d1e9109a2de | [
"BSD-3-Clause"
] | 5 | 2019-08-16T22:08:19.000Z | 2021-02-24T02:47:05.000Z | src/python3/sdp/math/interpolation.py | justthepython/Synthetic-Diagnostics-Platform | 5f1cb5c29d182490acbd4f3c167f0e09ec211236 | [
"BSD-3-Clause"
] | 1 | 2016-05-11T12:58:00.000Z | 2016-05-11T17:18:36.000Z | src/python3/sdp/math/interpolation.py | justthepython/Synthetic-Diagnostics-Platform | 5f1cb5c29d182490acbd4f3c167f0e09ec211236 | [
"BSD-3-Clause"
] | 5 | 2018-04-29T12:35:59.000Z | 2020-01-10T03:38:30.000Z | """This module contains some useful interpolation methods
"""
import numpy as np
from scipy.interpolate import BarycentricInterpolator
class InterpolationError(Exception):
    """Base error type for interpolation failures in this module."""
    def __init__(self,value):
        # value: message or payload describing what went wrong
        self.value = value
    def __str__(self):
        return repr(self.value)
class OutofBoundError(InterpolationError, ValueError):
    """Raised when an interpolation is requested outside the known x-range.

    Also derives from ValueError so callers can catch it via either
    hierarchy.
    """
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return repr(self.value)
def linear_3d_3point(X,Y,Z,x,y,tol = 1e-8):
    """Interpolate z at (x, y) from the plane through three 3D points.

    Builds the plane through P1=(X[0],Y[0],Z[0]), P2 and P3 via the cross
    product N = P1P2 x P1P3, then solves the plane equation
    N . (x, y, z) = d for z.

    Fix: removed leftover debug print() statements that polluted stdout.

    Arguments:
        X, Y, Z -- length-3 sequences with the coordinates of the 3 points
        x, y    -- location where z is wanted
        tol     -- relative tolerance below which the plane is treated as
                   vertical to the XY plane (no unique z)

    Returns:
        interpolated z value at (x, y)

    Raises:
        InterpolationError -- if the plane through the points is
        (numerically) vertical to the XY plane.
    """
    x1, x2, x3 = X[0], X[1], X[2]
    y1, y2, y3 = Y[0], Y[1], Y[2]
    # Normalize z by its maximum to keep the cross product well scaled.
    z0 = np.max(Z)
    z1, z2, z3 = Z[0] / z0, Z[1] / z0, Z[2] / z0
    # Normal vector N = P1P2 x P1P3 of the plane through the points.
    Nx = (y2 - y1) * (z3 - z1) - (y3 - y1) * (z2 - z1)
    Ny = (x3 - x1) * (z2 - z1) - (x2 - x1) * (z3 - z1)
    Nz = (x2 - x1) * (y3 - y1) - (x3 - x1) * (y2 - y1)
    # Reference scale for deciding whether Nz is effectively zero.
    z_base = (x2 - x1) * (y3 - y1)
    if np.absolute(Nz / z_base) <= tol:
        raise InterpolationError('3 points interpolation failed: given points are on a plane vertical to XY plane, no z value being able to interpolated.')
    # Plane equation: Nx*x + Ny*y + Nz*z = d, with d fixed by P1.
    d = Nx * x1 + Ny * y1 + Nz * z1
    # Solve for z and undo the z0 normalization.
    return (d - Nx * x - Ny * y) / float(Nz) * z0
def trilinear_interp(X,Y,Z,F,x, fill_value=0.0):
    """ Trilinear interpolation (3D) for 1 point on a cubic mesh
    See Wikipedia for a better description than the following:
    First choose a direction and interpolate all the corners along this
    direction (so 8pts -> 4pts) at the value of the wanted point.
    Choose a second direction and interpolate the 4pts at the wanted point
    (4pts -> 2pts).
    Finish with the interpolation along the last line
    Arguments:
    X -- 1D array containing the X coordinate of F
    Y -- 1D array containing the Y coordinate of F
    Z -- 1D array containing the Z coordinate of F
    F -- 3D array containing the data
    x -- position (3D) where the interpolation is wanted
    return value:
    interpolated z value on given (x,y)
    """
    # NOTE: this function is deliberately disabled; the author recommends
    # scipy.interpolate.RegularGridInterpolator instead.  Everything below
    # the raise is dead code kept for reference.
    raise NameError('Does not work, should use RegularGridInterpolator')
    if len(x.shape) == 1:
        # if outside the box, put the value to fill_value
        if x[0] < X[0] or x[1] < Y[0] or x[2] < Z[0]\
           or x[0] > X[-1] or x[1] > Y[-1] or x[2] > Z[-1]:
            return fill_value
        else:
            # First find the x,y,z coordinate of the corner of the cube
            indx = np.where(X < x[0])[0].max()
            indy = np.where(Y < x[1])[0].max()
            indz = np.where(Z < x[2])[0].max()
            # relative coordinates
            rx = (x[0]-X[indx])/(X[indx+1]-X[indx])
            ry = (x[1]-Y[indy])/(Y[indy+1]-Y[indy])
            rz = (x[2]-Z[indz])/(Z[indz+1]-Z[indz])
            # compute the first linear interpolation
            temp = 1-rx
            c00 = F[indx,indy,indz]*temp + F[indx+1,indy,indz]*rx
            c10 = F[indx,indy+1,indz]*temp + F[indx+1,indy+1,indz]*rx
            c01 = F[indx,indy,indz+1]*temp + F[indx+1,indy,indz+1]*rx
            c11 = F[indx,indy+1,indz+1]*temp + F[indx+1,indy+1,indz+1]*rx
            # compute the second linear interpolation
            temp = 1-ry
            c0 = c00*temp + c10*ry
            c1 = c01*temp + c11*ry
            # compute the last linear interpolation
            return c0*(1-rz) + c1*rz
    elif len(x.shape) == 2:
        """this part is the same that before but with a mesh (not only one point).
        the comments will be only for trick due to the shape of the positions
        abd not on the method (look the first part for them)
        """
        G = np.zeros(len(x[:,0]))
        # First find the x,y,z coordinate of the corner of the cube
        ind = ~((x[:,0] < X[0]) | (x[:,1] < Y[0]) | (x[:,2] < Z[0]) |
                (x[:,0] > X[-1]) | (x[:,1] > Y[-1]) | (x[:,2] > Z[-1]))
        G[~ind] = fill_value
        indx = np.where(X <= x[ind,0])[0].max()
        indy = np.where(Y <= x[ind,1])[0].max()
        indz = np.where(Z <= x[ind,2])[0].max()
        # relative coordinates
        rx = (x[ind,0]-X[indx])/(X[indx+1]-X[indx])
        ry = (x[ind,1]-Y[indy])/(Y[indy+1]-Y[indy])
        rz = (x[ind,2]-Z[indz])/(Z[indz+1]-Z[indz])
        # compute the first linear interpolation
        temp = 1-rx
        c00 = F[indx,indy,indz]*temp + F[indx+1,indy,indz]*rx
        c10 = F[indx,indy+1,indz]*temp + F[indx+1,indy+1,indz]*rx
        c01 = F[indx,indy,indz+1]*temp + F[indx+1,indy,indz+1]*rx
        c11 = F[indx,indy+1,indz+1]*temp + F[indx+1,indy+1,indz+1]*rx
        # compute the second linear interpolation
        temp = 1-ry
        c0 = c00*temp + c10*ry
        c1 = c01*temp + c11*ry
        # compute the last linear interpolation
        G[ind] = c0*(1-rz) + c1*rz
        return G
    else:
        raise NameError('Error: wrong shape of the position to interpolate')
# BarycentricInterpolator with boundary check
class BoundaryWarnBarycentricInterpolator(BarycentricInterpolator):
    """Barycentric interpolator that knows its own x-range.

    Thin wrapper around :py:class:`scipy.interpolate.BarycentricInterpolator`
    that remembers the smallest and largest abscissa it was built from.
    Evaluating outside ``[xmin, xmax]`` either raises
    :py:class:`OutofBoundError` (``bound_error=True``, the default) or is
    substituted with ``fill_value``.

    :param xi: x coordinates for interpolation
    :type xi: array of float
    :param yi: Optional y values at each xi location; may also be supplied
        later through :py:meth:`set_yi`.
    :type yi: array of float
    :param int axis: axis of yi along which the interpolant is built.
    :param bool bound_error: raise on out-of-bound evaluation when True,
        otherwise fill with ``fill_value``.  Default True.
    :param float fill_value: replacement for out-of-bound results when
        ``bound_error`` is False.
    """

    def __init__(self, xi, yi=None, axis=0, bound_error=True, fill_value=0):
        # Record bounds and policy before the base __init__ runs, because
        # it calls our overridden set_yi(), which reads _bound_error.
        self._xmin = np.min(xi)
        self._xmax = np.max(xi)
        self._bound_error = bound_error
        self._fill_value = fill_value
        super().__init__(xi, yi, axis)

    def __call__(self, x):
        evaluate = super().__call__
        if not self._bound_error:
            # Evaluate only the in-bound points; fill the rest.
            outside = np.logical_or(x < self._xmin, x > self._xmax)
            result = np.empty_like(x)
            result[outside] = self._fill_value
            result[~outside] = evaluate(x[~outside])
            return result
        if np.any(x < self._xmin) or np.any(x > self._xmax):
            raise OutofBoundError('x out of bound! xmin: {}, xmax: {}'.format(self._xmin, self._xmax))
        return evaluate(x)

    def add_xi(self, xi, yi=None):
        # Let the base class absorb the new points, then widen the bounds.
        super().add_xi(xi, yi)
        self._xmin = np.minimum(np.min(xi), self._xmin)
        self._xmax = np.maximum(np.max(xi), self._xmax)

    def set_yi(self, yi, axis=None):
        yi = np.array(yi)
        if not self._bound_error:
            # Fill-mode evaluation writes into a flat result buffer, so it
            # only supports one-dimensional y data.
            assert yi.ndim == 1
        super().set_yi(yi, axis)
| 40.70283 | 565 | 0.585004 |
import numpy as np
from scipy.interpolate import BarycentricInterpolator
class InterpolationError(Exception):
def __init__(self,value):
self.value = value
def __str__(self):
return repr(self.value)
class OutofBoundError(InterpolationError, ValueError):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
def linear_3d_3point(X,Y,Z,x,y,tol = 1e-8):
x1,x2,x3 = X[0],X[1],X[2]
y1,y2,y3 = Y[0],Y[1],Y[2]
z0 = np.max(Z)
z1,z2,z3 = Z[0]/z0,Z[1]/z0,Z[2]/z0
Nx = (y2-y1)*(z3-z1)-(y3-y1)*(z2-z1)
Ny = (x3-x1)*(z2-z1)-(x2-x1)*(z3-z1)
Nz = (x2-x1)*(y3-y1)-(x3-x1)*(y2-y1)
z_base = (x2-x1)*(y3-y1)
print(Nx,Ny,Nz,z_base)
if(np.absolute(Nz/z_base) <= tol ):
raise InterpolationError('3 points interpolation failed: given points are on a plane vertical to XY plane, no z value being able to interpolated.')
d = Nx*x1 + Ny*y1 + Nz*z1
print(d, d-Nx*x-Ny*y)
return (d - Nx*x - Ny*y)/float(Nz)*z0
def trilinear_interp(X,Y,Z,F,x, fill_value=0.0):
raise NameError('Does not work, should use RegularGridInterpolator')
if len(x.shape) == 1:
if x[0] < X[0] or x[1] < Y[0] or x[2] < Z[0]\
or x[0] > X[-1] or x[1] > Y[-1] or x[2] > Z[-1]:
return fill_value
else:
indx = np.where(X < x[0])[0].max()
indy = np.where(Y < x[1])[0].max()
indz = np.where(Z < x[2])[0].max()
rx = (x[0]-X[indx])/(X[indx+1]-X[indx])
ry = (x[1]-Y[indy])/(Y[indy+1]-Y[indy])
rz = (x[2]-Z[indz])/(Z[indz+1]-Z[indz])
temp = 1-rx
c00 = F[indx,indy,indz]*temp + F[indx+1,indy,indz]*rx
c10 = F[indx,indy+1,indz]*temp + F[indx+1,indy+1,indz]*rx
c01 = F[indx,indy,indz+1]*temp + F[indx+1,indy,indz+1]*rx
c11 = F[indx,indy+1,indz+1]*temp + F[indx+1,indy+1,indz+1]*rx
temp = 1-ry
c0 = c00*temp + c10*ry
c1 = c01*temp + c11*ry
return c0*(1-rz) + c1*rz
elif len(x.shape) == 2:
"""this part is the same that before but with a mesh (not only one point).
the comments will be only for trick due to the shape of the positions
abd not on the method (look the first part for them)
"""
G = np.zeros(len(x[:,0]))
ind = ~((x[:,0] < X[0]) | (x[:,1] < Y[0]) | (x[:,2] < Z[0]) |
(x[:,0] > X[-1]) | (x[:,1] > Y[-1]) | (x[:,2] > Z[-1]))
G[~ind] = fill_value
indx = np.where(X <= x[ind,0])[0].max()
indy = np.where(Y <= x[ind,1])[0].max()
indz = np.where(Z <= x[ind,2])[0].max()
rx = (x[ind,0]-X[indx])/(X[indx+1]-X[indx])
ry = (x[ind,1]-Y[indy])/(Y[indy+1]-Y[indy])
rz = (x[ind,2]-Z[indz])/(Z[indz+1]-Z[indz])
temp = 1-rx
c00 = F[indx,indy,indz]*temp + F[indx+1,indy,indz]*rx
c10 = F[indx,indy+1,indz]*temp + F[indx+1,indy+1,indz]*rx
c01 = F[indx,indy,indz+1]*temp + F[indx+1,indy,indz+1]*rx
c11 = F[indx,indy+1,indz+1]*temp + F[indx+1,indy+1,indz+1]*rx
temp = 1-ry
c0 = c00*temp + c10*ry
c1 = c01*temp + c11*ry
G[ind] = c0*(1-rz) + c1*rz
return G
else:
raise NameError('Error: wrong shape of the position to interpolate')
class BoundaryWarnBarycentricInterpolator(BarycentricInterpolator):
def __init__(self, xi, yi=None, axis=0, bound_error=True, fill_value=0):
self._xmin = np.min(xi)
self._xmax = np.max(xi)
self._bound_error = bound_error
self._fill_value = fill_value
super(BoundaryWarnBarycentricInterpolator, self).__init__(xi, yi, axis)
def __call__(self, x):
if (self._bound_error):
if np.any(x < self._xmin) or np.any(x > self._xmax):
raise OutofBoundError('x out of bound! xmin: {}, xmax: {}'.\
format(self._xmin, self._xmax))
return super(BoundaryWarnBarycentricInterpolator, self).__call__(x)
else:
outbound_idx = np.logical_or(x < self._xmin, x > self._xmax)
result = np.empty_like(x)
result[~outbound_idx] = super(BoundaryWarnBarycentricInterpolator,
self).__call__(x[~outbound_idx])
result[outbound_idx] = self._fill_value
return result
def add_xi(self, xi, yi=None):
super(BoundaryWarnBarycentricInterpolator, self).add_xi(xi, yi)
self._xmin = np.min( [np.min(xi), self._xmin] )
self._xmax = np.max( [np.max(xi), self._xmax] )
def set_yi(self, yi, axis=None):
yi = np.array(yi)
if not self._bound_error:
assert yi.ndim == 1
super(BoundaryWarnBarycentricInterpolator, self).set_yi(yi, axis)
| true | true |
f72bdb07358bd565efec3e28ef5e0ed2041de357 | 7,081 | py | Python | cla-backend/cla/tests/unit/test_user_event.py | tejasadg/easycla | 12284c957ab543d0d09c63aa8e82d70ecf09ccaf | [
"Apache-2.0",
"CC-BY-4.0",
"MIT"
] | null | null | null | cla-backend/cla/tests/unit/test_user_event.py | tejasadg/easycla | 12284c957ab543d0d09c63aa8e82d70ecf09ccaf | [
"Apache-2.0",
"CC-BY-4.0",
"MIT"
] | null | null | null | cla-backend/cla/tests/unit/test_user_event.py | tejasadg/easycla | 12284c957ab543d0d09c63aa8e82d70ecf09ccaf | [
"Apache-2.0",
"CC-BY-4.0",
"MIT"
] | null | null | null | # Copyright The Linux Foundation and each contributor to CommunityBridge.
# SPDX-License-Identifier: MIT
from unittest.mock import patch, Mock
import unittest
import pytest
from cla.models.dynamo_models import User, Project, Company, CCLAWhitelistRequest
from cla.models.event_types import EventType
from cla.controllers import user as user_controller
from cla.auth import AuthUser
@pytest.fixture
def create_event_user():
    # Replace the real create_event with a Mock for the duration of a test.
    user_controller.create_event = Mock()
class TestRequestCompanyWhitelist:
    """Tests for the 'contributor requests company approval list' event."""
    def setup(self) -> None:
        # Save the original class attributes so teardown can undo the
        # monkeypatching done inside the test.
        self.old_load = User.load
        self.old_get_user_name = User.get_user_name
        self.get_user_emails = User.get_user_emails
        self.get_user_email = User.get_user_email
        self.company_load = Company.load
        self.get_company_name = Company.get_company_name
        self.project_load = Project.load
        self.get_project_name = Project.get_project_name
    def teardown(self) -> None:
        # Restore everything patched in setup()/the test body.
        User.load = self.old_load
        User.get_user_name = self.old_get_user_name
        User.get_user_emails = self.get_user_emails
        User.get_user_email = self.get_user_email
        Company.load = self.company_load
        Company.get_company_name = self.get_company_name
        Project.load = self.project_load
        Project.get_project_name = self.get_project_name
    def test_request_company_whitelist(self, create_event_user, project, company, user):
        """ Test user requesting to be added to the Approved List event """
        with patch('cla.controllers.user.Event.create_event') as mock_event:
            event_type = EventType.RequestCompanyWL
            # Stub model loads/lookups so no DynamoDB access happens.
            User.load = Mock()
            User.get_user_name = Mock(return_value=user.get_user_name())
            User.get_user_emails = Mock(return_value=[user.get_user_email()])
            User.get_user_email = Mock(return_value=user.get_user_email())
            Company.load = Mock()
            Company.get_company_name = Mock(return_value=company.get_company_name())
            Project.load = Mock()
            Project.get_project_name = Mock(return_value=project.get_project_name())
            # Stub the outbound email path as well.
            user_controller.get_email_service = Mock()
            user_controller.send = Mock()
            user_controller.request_company_whitelist(
                user.get_user_id(),
                company.get_company_id(),
                user.get_user_name(),
                user.get_user_email(),
                project.get_project_id(),
                message="Please add",
                recipient_name="Recipient Name",
                recipient_email="Recipient Email",
            )
            # The audit event must carry this exact human-readable summary.
            event_data = (f'CLA: contributor {user.get_user_name()} requests to be Approved for the '
                          f'project: {project.get_project_name()} '
                          f'organization: {company.get_company_name()} '
                          f'as {user.get_user_name()} <{user.get_user_email()}>')
            mock_event.assert_called_once_with(
                event_user_id=user.get_user_id(),
                event_project_id=project.get_project_id(),
                event_company_id=company.get_company_id(),
                event_type=event_type,
                event_data=event_data,
                event_summary=event_data,
                contains_pii=True,
            )
class TestInviteClaManager:
    """Tests for the 'invite CLA manager' email/audit event."""
    def setup(self):
        # Save the original class attributes so teardown can restore them.
        self.user_load = User.load
        self.load_project_by_name = Project.load_project_by_name
        self.save = CCLAWhitelistRequest.save
    def teardown(self):
        User.load = self.user_load
        Project.load_project_by_name = self.load_project_by_name
        CCLAWhitelistRequest.save = self.save
    @patch('cla.controllers.user.Event.create_event')
    def test_invite_cla_manager(self, mock_event, create_event_user, user):
        """ Test send email to CLA manager event """
        # Stub persistence and the actual email send.
        User.load = Mock()
        Project.load_project_by_name = Mock()
        CCLAWhitelistRequest.save = Mock()
        user_controller.send_email_to_cla_manager = Mock()
        contributor_id = user.get_user_id()
        contributor_name = user.get_user_name()
        contributor_email = user.get_user_email()
        cla_manager_name = "admin"
        cla_manager_email = "foo@admin.com"
        project_name = "foo_project"
        company_name = "Test Company"
        # Expected audit-event payload for the invitation.
        event_data = (f'sent email to CLA Manager: {cla_manager_name} with email {cla_manager_email} '
                      f'for project {project_name} and company {company_name} '
                      f'to user {contributor_name} with email {contributor_email}')
        # TODO FIX Unit test - need to mock Project load_project_by_name() function
        user_controller.invite_cla_manager(contributor_id, contributor_name, contributor_email,
                                           cla_manager_name, cla_manager_email,
                                           project_name, company_name)
        mock_event.assert_called_once_with(
            event_user_id=contributor_id,
            event_project_name=project_name,
            event_data=event_data,
            event_type=EventType.InviteAdmin,
            event_summary=event_data,
            contains_pii=True,
        )
class TestRequestCompanyCCLA:
    """Tests for the 'request company CCLA signature' event."""
    def setup(self):
        # Save the original class attributes so teardown can restore them.
        self.user_load = User.load
        self.get_user_name = User.get_user_name
        self.company_load = Company.load
        self.project_load = Project.load
        self.get_project_name = Project.get_project_name
        self.get_managers = Company.get_managers
    def teardown(self):
        User.load = self.user_load
        User.get_user_name = self.get_user_name
        Company.load = self.company_load
        Project.load = self.project_load
        Project.get_project_name = self.get_project_name
        Company.get_managers = self.get_managers
    @patch('cla.controllers.user.Event.create_event')
    def test_request_company_ccla(self, mock_event, create_event_user, user, project, company):
        """ Test request company ccla event """
        # Stub model lookups so no datastore access happens.
        User.load = Mock()
        User.get_user_name = Mock(return_value=user.get_user_name())
        email = user.get_user_email()
        Company.load = Mock()
        Project.load = Mock()
        Project.get_project_name = Mock(return_value=project.get_project_name())
        # The controller emails the company's managers; fake one manager.
        manager = User(lf_username="harold", user_email="foo@gmail.com")
        Company.get_managers = Mock(return_value=[manager, ])
        event_data = f"Sent email to sign ccla for {project.get_project_name()}"
        CCLAWhitelistRequest.save = Mock(return_value=None)
        user_controller.request_company_ccla(
            user.get_user_id(), email, company.get_company_id(), project.get_project_id()
        )
        mock_event.assert_called_once_with(
            event_data=event_data,
            event_summary=event_data,
            event_type=EventType.RequestCCLA,
            event_user_id=user.get_user_id(),
            event_company_id=company.get_company_id(),
            contains_pii=False,
        )
| 41.168605 | 102 | 0.656263 |
from unittest.mock import patch, Mock
import unittest
import pytest
from cla.models.dynamo_models import User, Project, Company, CCLAWhitelistRequest
from cla.models.event_types import EventType
from cla.controllers import user as user_controller
from cla.auth import AuthUser
@pytest.fixture
def create_event_user():
user_controller.create_event = Mock()
class TestRequestCompanyWhitelist:
def setup(self) -> None:
self.old_load = User.load
self.old_get_user_name = User.get_user_name
self.get_user_emails = User.get_user_emails
self.get_user_email = User.get_user_email
self.company_load = Company.load
self.get_company_name = Company.get_company_name
self.project_load = Project.load
self.get_project_name = Project.get_project_name
def teardown(self) -> None:
User.load = self.old_load
User.get_user_name = self.old_get_user_name
User.get_user_emails = self.get_user_emails
User.get_user_email = self.get_user_email
Company.load = self.company_load
Company.get_company_name = self.get_company_name
Project.load = self.project_load
Project.get_project_name = self.get_project_name
def test_request_company_whitelist(self, create_event_user, project, company, user):
with patch('cla.controllers.user.Event.create_event') as mock_event:
event_type = EventType.RequestCompanyWL
User.load = Mock()
User.get_user_name = Mock(return_value=user.get_user_name())
User.get_user_emails = Mock(return_value=[user.get_user_email()])
User.get_user_email = Mock(return_value=user.get_user_email())
Company.load = Mock()
Company.get_company_name = Mock(return_value=company.get_company_name())
Project.load = Mock()
Project.get_project_name = Mock(return_value=project.get_project_name())
user_controller.get_email_service = Mock()
user_controller.send = Mock()
user_controller.request_company_whitelist(
user.get_user_id(),
company.get_company_id(),
user.get_user_name(),
user.get_user_email(),
project.get_project_id(),
message="Please add",
recipient_name="Recipient Name",
recipient_email="Recipient Email",
)
event_data = (f'CLA: contributor {user.get_user_name()} requests to be Approved for the '
f'project: {project.get_project_name()} '
f'organization: {company.get_company_name()} '
f'as {user.get_user_name()} <{user.get_user_email()}>')
mock_event.assert_called_once_with(
event_user_id=user.get_user_id(),
event_project_id=project.get_project_id(),
event_company_id=company.get_company_id(),
event_type=event_type,
event_data=event_data,
event_summary=event_data,
contains_pii=True,
)
class TestInviteClaManager:
def setup(self):
self.user_load = User.load
self.load_project_by_name = Project.load_project_by_name
self.save = CCLAWhitelistRequest.save
def teardown(self):
User.load = self.user_load
Project.load_project_by_name = self.load_project_by_name
CCLAWhitelistRequest.save = self.save
@patch('cla.controllers.user.Event.create_event')
def test_invite_cla_manager(self, mock_event, create_event_user, user):
User.load = Mock()
Project.load_project_by_name = Mock()
CCLAWhitelistRequest.save = Mock()
user_controller.send_email_to_cla_manager = Mock()
contributor_id = user.get_user_id()
contributor_name = user.get_user_name()
contributor_email = user.get_user_email()
cla_manager_name = "admin"
cla_manager_email = "foo@admin.com"
project_name = "foo_project"
company_name = "Test Company"
event_data = (f'sent email to CLA Manager: {cla_manager_name} with email {cla_manager_email} '
f'for project {project_name} and company {company_name} '
f'to user {contributor_name} with email {contributor_email}')
user_controller.invite_cla_manager(contributor_id, contributor_name, contributor_email,
cla_manager_name, cla_manager_email,
project_name, company_name)
mock_event.assert_called_once_with(
event_user_id=contributor_id,
event_project_name=project_name,
event_data=event_data,
event_type=EventType.InviteAdmin,
event_summary=event_data,
contains_pii=True,
)
class TestRequestCompanyCCLA:
    """Tests for user_controller.request_company_ccla.

    Monkey-patches model classmethods used by the controller and restores
    them in teardown().  `create_event_user`, `user`, `project` and
    `company` are pytest fixtures.
    """

    def setup(self):
        # Save the original (unpatched) callables so teardown can restore them.
        self.user_load = User.load
        self.get_user_name = User.get_user_name
        self.company_load = Company.load
        self.project_load = Project.load
        self.get_project_name = Project.get_project_name
        self.get_managers = Company.get_managers

    def teardown(self):
        # NOTE(review): the test below also patches CCLAWhitelistRequest.save,
        # which is not saved/restored here -- the patch leaks to later tests;
        # confirm and add it to setup/teardown.
        User.load = self.user_load
        User.get_user_name = self.get_user_name
        Company.load = self.company_load
        Project.load = self.project_load
        Project.get_project_name = self.get_project_name
        Company.get_managers = self.get_managers

    @patch('cla.controllers.user.Event.create_event')
    def test_request_company_ccla(self, mock_event, create_event_user, user, project, company):
        # Stub out persistence; keep real fixture values for names/ids.
        User.load = Mock()
        User.get_user_name = Mock(return_value=user.get_user_name())
        email = user.get_user_email()
        Company.load = Mock()
        Project.load = Mock()
        Project.get_project_name = Mock(return_value=project.get_project_name())
        # One CLA manager so the notification path is exercised.
        manager = User(lf_username="harold", user_email="foo@gmail.com")
        Company.get_managers = Mock(return_value=[manager, ])
        event_data = f"Sent email to sign ccla for {project.get_project_name()}"
        CCLAWhitelistRequest.save = Mock(return_value=None)
        user_controller.request_company_ccla(
            user.get_user_id(), email, company.get_company_id(), project.get_project_id()
        )
        # The controller must record exactly one RequestCCLA event.
        mock_event.assert_called_once_with(
            event_data=event_data,
            event_summary=event_data,
            event_type=EventType.RequestCCLA,
            event_user_id=user.get_user_id(),
            event_company_id=company.get_company_id(),
            contains_pii=False,
        )
| true | true |
f72bdc7b9355d16ea5ca646e0db1ca2a4c402827 | 5,135 | py | Python | win_unc/unc_directory.py | zo-edv/py_win_unc | 610b7c9ce4ea17554d04342126169b488c8ccfae | [
"MIT"
] | 10 | 2015-08-14T06:34:28.000Z | 2020-10-03T17:48:09.000Z | win_unc/unc_directory.py | zo-edv/py_win_unc | 610b7c9ce4ea17554d04342126169b488c8ccfae | [
"MIT"
] | 11 | 2017-01-12T23:43:56.000Z | 2020-06-19T18:32:56.000Z | win_unc/unc_directory.py | zo-edv/py_win_unc | 610b7c9ce4ea17554d04342126169b488c8ccfae | [
"MIT"
] | 8 | 2015-09-25T20:44:33.000Z | 2018-10-04T03:19:42.000Z | from win_unc.errors import InvalidUncPathError
from win_unc.cleaners import clean_unc_path
from win_unc.unc_credentials import get_creds_from_string
from win_unc.validators import is_valid_unc_path
class UncDirectory(object):
    """
    Represents a UNC directory on Windows.  A UNC directory is a path and
    optionally a set of credentials that are required to connect to the UNC
    path.
    """

    def __init__(self, path, creds=None):
        """
        Return a new `UncDirectory`.

        path: a UNC directory path.  If it cannot be construed as a valid
            UNC path, raises `InvalidUncPathError`.
        creds: `None` or a `UncCredentials` object.  If `None`, the UNC
            directory must not require authentication to be connected.

        If only the first positional argument is provided and it is already
        an instance of `UncDirectory` (directly or by inheritance), a
        shallow clone is created: the clone shares the original's
        `UncCredentials` object.
        """
        if creds is None and isinstance(path, UncDirectory):
            # Clone constructor.
            new_path = path._path
            new_creds = path._creds
        else:
            new_path = path
            new_creds = creds

        cleaned_path = clean_unc_path(new_path)
        if is_valid_unc_path(cleaned_path):
            self._path = cleaned_path
            # Normalize "empty" credentials to None so equality and auth
            # formatting treat them the same as no credentials at all.
            self._creds = new_creds if new_creds and not new_creds.is_empty() else None
        else:
            raise InvalidUncPathError(new_path)

    def get_normalized_path(self):
        """
        Return the normalized path for this `UncDirectory`.  Differing UNC
        paths that all point to the same network location have the same
        normalized path (lower-cased, trailing backslashes removed, and a
        trailing `\\ipc$` stripped).
        """
        path = self._path.lower()
        return path[:-5] if path.endswith(r'\ipc$') else path.rstrip('\\')

    def get_path(self):
        """
        Return the UNC path for this `UncDirectory` (as cleaned at
        construction, not normalized).
        """
        return self._path

    def get_username(self):
        """
        Return the username associated with this `UncDirectory`'s
        credentials, or `None` if no credentials/username were provided.
        """
        return self._creds.get_username() if self._creds else None

    def get_password(self):
        """
        Return the password associated with this `UncDirectory`'s
        credentials, or `None` if no credentials/password were provided.
        """
        return self._creds.get_password() if self._creds else None

    def get_auth_string(self):
        """
        Return the authorization string associated with this
        `UncDirectory`'s credentials ('' when there are no credentials).
        """
        return self._creds.get_auth_string() if self._creds else ''

    def get_auth_path(self):
        """
        Return the path of this `UncDirectory` with the authorization string
        prepended (resembling HTTP Basic Authentication format).  With no
        credentials the path is returned unmodified.
        """
        creds = self.get_auth_string()
        return '{creds}{at}{path}'.format(
            creds=creds,
            at='@' if creds else '',
            path=self._path)

    def __eq__(self, other):
        # Equality compares *normalized* paths plus credentials, so paths
        # differing only in case or a trailing \ipc$ compare equal.
        if isinstance(other, UncDirectory):
            return (self.get_normalized_path() == other.get_normalized_path()
                    and self._creds == other._creds)
        else:
            return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        # BUG FIX: the hash must agree with __eq__, which compares
        # normalized paths.  Hashing str(self) (the raw auth path) gave
        # different hashes to objects that compare equal (e.g. paths
        # differing only in case), breaking use in sets and dict keys.
        return hash(self.get_normalized_path())

    def __str__(self):
        return self.get_auth_path()

    def __repr__(self):
        return '<{cls}: "{str}">'.format(cls=self.__class__.__name__, str=self.get_auth_path())
def is_unc_directory_string(string):
    """
    Return `True` when `string` represents a `UncDirectory` as defined by
    `UncDirectory.get_auth_path()`, `False` otherwise.
    """
    cleaned = clean_unc_path(string)
    # A bare valid UNC path qualifies on its own.
    if is_valid_unc_path(cleaned):
        return True
    # Otherwise accept the "<credentials>@\\<path>" form, requiring a
    # non-empty path part after the last-found separator.
    _, sep, tail = cleaned.partition('@\\\\')
    return bool(sep and tail)
def get_unc_directory_from_string(string):
    """
    Parse a string in the form produced by `UncDirectory.get_auth_path()`
    and return a new `UncDirectory`.  May raise any error that the
    `UncDirectory` constructor raises.
    """
    if '@\\\\' not in string:
        # No credentials section: the whole string is the path.
        return UncDirectory(string, None)
    # Split on the LAST '@\\' so a password containing '@\\' still parses.
    creds_part, _, path_part = string.rpartition(r'@\\')
    return UncDirectory(r'\\' + path_part, get_creds_from_string(creds_part))
| 37.481752 | 98 | 0.637196 | from win_unc.errors import InvalidUncPathError
from win_unc.cleaners import clean_unc_path
from win_unc.unc_credentials import get_creds_from_string
from win_unc.validators import is_valid_unc_path
class UncDirectory(object):
def __init__(self, path, creds=None):
if creds is None and isinstance(path, UncDirectory):
new_path = path._path
new_creds = path._creds
else:
new_path = path
new_creds = creds
cleaned_path = clean_unc_path(new_path)
if is_valid_unc_path(cleaned_path):
self._path = cleaned_path
self._creds = new_creds if new_creds and not new_creds.is_empty() else None
else:
raise InvalidUncPathError(new_path)
def get_normalized_path(self):
path = self._path.lower()
return path[:-5] if path.endswith(r'\ipc$') else path.rstrip('\\')
def get_path(self):
return self._path
def get_username(self):
return self._creds.get_username() if self._creds else None
def get_password(self):
return self._creds.get_password() if self._creds else None
def get_auth_string(self):
return self._creds.get_auth_string() if self._creds else ''
def get_auth_path(self):
creds = self.get_auth_string()
return '{creds}{at}{path}'.format(
creds=creds,
at='@' if creds else '',
path=self._path)
def __eq__(self, other):
if isinstance(other, UncDirectory):
return (self.get_normalized_path() == other.get_normalized_path()
and self._creds == other._creds)
else:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(str(self))
def __str__(self):
return self.get_auth_path()
def __repr__(self):
return '<{cls}: "{str}">'.format(cls=self.__class__.__name__, str=self.get_auth_path())
def is_unc_directory_string(string):
cleaned_string = clean_unc_path(string)
return (is_valid_unc_path(cleaned_string)
or ('@\\\\' in cleaned_string
and len(cleaned_string.partition('@\\\\')[2]) > 0))
def get_unc_directory_from_string(string):
creds = None
path = string
if '@\\\\' in string:
creds_part, path_part = string.rsplit(r'@\\', 1)
path = r'\\' + path_part
creds = get_creds_from_string(creds_part)
return UncDirectory(path, creds)
| true | true |
f72bdee1d28143d175cb1971a290684cad30cafc | 479 | py | Python | Code/sentiment_analysis.py | mayureeb/fakenews | c47a72c8bbe4d413b309da0c662da784c002fe3f | [
"Unlicense"
] | 12 | 2017-06-25T23:06:31.000Z | 2021-02-27T23:06:57.000Z | Code/sentiment_analysis.py | mayureeb/fakenews | c47a72c8bbe4d413b309da0c662da784c002fe3f | [
"Unlicense"
] | 6 | 2021-03-31T20:03:11.000Z | 2022-03-12T00:49:42.000Z | Code/sentiment_analysis.py | mayureeb/fakenews | c47a72c8bbe4d413b309da0c662da784c002fe3f | [
"Unlicense"
] | 5 | 2017-06-25T23:05:41.000Z | 2018-11-22T16:10:58.000Z | import pandas as pd
from textblob import TextBlob

# Read the input rows; the 'text' column is scored below.
df = pd.read_csv('INPUT.csv', encoding='utf8')

# Pre-create the output columns with float dtype.
df['polarity'] = 0.0
df['subjectivity'] = 0.0

for i in range(len(df.index)):
    print(i)  # progress indicator, one line per row
    # str() guards against NaN / non-string cells.
    blob = TextBlob(str(df.at[i, 'text']))
    # BUG FIX: use .at (label-based scalar access) instead of chained
    # assignment (df['col'][i] = ...).  Chained assignment only worked with
    # the SettingWithCopy warning suppressed, writes to a temporary under
    # pandas copy-on-write, and is removed in pandas 3.0.
    # Assumes the default RangeIndex, as the original row loop did.
    df.at[i, 'subjectivity'] = blob.sentiment.subjectivity
    df.at[i, 'polarity'] = blob.sentiment.polarity

print(df.head())
df.to_csv('OUTPUT.csv', encoding='utf8')
| 29.9375 | 80 | 0.691023 | import pandas as pd
from textblob import TextBlob
pd.options.mode.chained_assignment = None
df = pd.read_csv('INPUT.csv', encoding = 'utf8')
df['polarity'] = 0.0
df['subjectivity'] = 0.0
for i in range(0, len(df.index)):
print(i)
blob = TextBlob(str(df['text'][i]))
df['subjectivity'][i] = blob.sentiment.subjectivity
df['polarity'][i] = blob.sentiment.polarity
print(df.head())
df.to_csv('OUTPUT.csv', encoding = 'utf8')
| true | true |
f72bdf31e9017fd309d269003029962f0606f6e1 | 22,621 | py | Python | client.py | iegorman/netspeed | 6eec201791b89be69ed9d6a6563d90cf324f2f14 | [
"BSD-2-Clause"
] | null | null | null | client.py | iegorman/netspeed | 6eec201791b89be69ed9d6a6563d90cf324f2f14 | [
"BSD-2-Clause"
] | null | null | null | client.py | iegorman/netspeed | 6eec201791b89be69ed9d6a6563d90cf324f2f14 | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/python3
# Command line client for repeated internet speed tests.
import os
import sys
import collections
import gc
import getopt
import json
import math
import time
import traceback
import urllib.error
import urllib.request
import re
class Client(object):
    """
    Python class and command line client for repeated internet speed tests.

    Reports results to the local file system and to a remote server.

    The client and server exchange JSON strings that represent Python
    dictionaries.  The entries of each object will be a subset of the
    entries shown as javascript object attributes in comments at the top of
    file './server.js'.
    """

    # Defaults for adjustment of transmit length -- treat as class constants.
    defaultInterval = 3_600                 # seconds between test cycles
    initialDownloadLength = 20_000_000      # bytes
    initialUploadLength = 2_000_000         # bytes
    minLength = 200_000                     # at least this length after adjustment
    maxLength = 100_000_000                 # at most this length after adjustment
    desiredRuntime = 10                     # ideal test run time, seconds
    maxRatio = 1.5                          # minimum time deviation to cause change in length
    maxUploadLength = 125_000_000           # upload will fail if upload is too large
    # Download limit is unknown, seems to be more than 1_000_000_000.

    # Default output destinations.
    defaultLog = sys.stdout                 # message log (JSON lines)
    defaultReport = sys.stderr              # summary reports and errors

    # Count only the bits in actual data, ignore protocol bits.
    # Protocol bits are a small proportion of a message except in small packets.
    bitsPerDataByte = 8
@classmethod
def js_time(cls):
'''
JavaScript time -- milliseconds from Unix epoch.
This is the millisecond offset from 1970-01-01 00:00:00 UTC.
'''
return math.floor(1000 * time.time()) # integer from float
@classmethod
def js_clock(cls, milliseconds=None):
"""
Format javascript time from Unix epoch to local 'YYYY-MM-DD hh:mm:ss'.
This is the time in the local time zone. If no Javascript time is
given, it will the the current time.
"""
seconds = milliseconds / 1000 if milliseconds else None
return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(seconds))
    def __init__(self, serverURL,
                 log=defaultLog,            # JSON log of transactions
                 report=defaultReport,      # human-readable report
                 interval=defaultInterval,
                 downloadLength=initialDownloadLength,
                 uploadLength=initialUploadLength,
                 testID = None              # default: will be set by the server
                 ):
        """
        Create an instance for download and upload tests.

        serverURL may be specified with or without a trailing slash; any
        trailing slash is stripped before use.

        report and log are writable destinations in the local filesystem
        (human-readable report and JSON transaction log).
        """
        super()  # NOTE(review): bare super() is a no-op expression here;
                 # object.__init__ is never actually invoked.
        # Accept server URL with or without trailing slash, strip the slash.
        self._serverURL = (serverURL.rstrip('/')
                           if serverURL.endswith('/')
                           else serverURL)
        # Paths relative to server.
        self._rootPath = '/'
        self._setupPath = '/begin'
        self._downloadPath = '/download'
        self._downreportPath = '/downreport'
        self._uploadPath = '/upload'
        self._upreportPath = '/upreport'
        self._pingPath = '/echo'
        # Output to file system.
        self._report = report
        self._log = log
        # Initial settings; falsy arguments fall back to class defaults.
        self._interval = ( interval if interval
                           else self.__class__.defaultInterval)
        self._downloadLength = ( downloadLength if downloadLength
                                 else self.__class__.initialDownloadLength)
        self._uploadLength = ( uploadLength if uploadLength
                               else self.__class__.initialUploadLength)
        self._testID = testID
        self._testNumber = 0        # incremented on each test cycle
        self._externalIP = None     # client IP seen by server at each contact
        self._testBegin = None      # date-time of first contact with server
        # Prevent upload failure caused by large uploads.
        self._uploadLength = min(self.maxUploadLength, self._uploadLength)
def recalculateLength(self, previousLength, previousRuntime):
"""
Choose a transmit length that gives a test time close to desired time.
previousLength: amount of data in last transmission, bytes
previousRuntime: time to complete last tranmission, seconds
When the previous time falls too far below or too far above the
desired time the previous transmit length will be changed in an
attempt to come closer to the desired time on the next test.
There is an upper and a lower limt to the changed length.
"""
targetRuntime = self.__class__.desiredRuntime
minLength = self.__class__.minLength
maxLength = self.__class__.maxLength
# don't crash on a zero time, replace by a very short time
lastRuntime = max(previousRuntime, targetRuntime/100)
if ( lastRuntime > targetRuntime / self.maxRatio
and lastRunime < targetRuntime * self.maxRatio ):
return previousLength
# round to nearest thousand and not too small or large
transmitLength = previousLength * targetRuntime / lastRuntime
return max(minLength, min(maxLength, round(transmitLength, -3)))
def bytesource(self, count):
"""
Iterate a sequence of blocks of bytes.
count is the total number of bytes.
Last block may be shorter than the others.
"""
byt = ((b'0123456789' * 7) + b'012345678\n') * 50
n = count
blen = len(byt)
while n > blen:
yield byt
n -= blen
yield byt[0:n] # may have zero length
    def begin(self):
        '''
        Make initial contact with the server.

        The server will check configuration and provide some test
        information; it may replace out-of-range values by defaults.
        Updates test ID, interval, transmit lengths and start time from
        the server's reply.  Raises RuntimeError (chained to the original
        exception) on any communication failure.
        '''
        timestamp = self.js_time()
        params = collections.OrderedDict((
            ('externalIP', self._externalIP),
            ('testID', self._testID),
            ('testBegin', self.js_time()),      # server may revise this time
            ('pathname', self._setupPath),
            # NOTE(review): this key is spelled 'clientTimeStamp' here but
            # 'clientTimestamp' in every other request -- confirm which
            # spelling the server expects before changing it.
            ('clientTimeStamp', timestamp),
            ('interval', self._interval),
            ('downloadLength', self._downloadLength),
            ('uploadLength', self._uploadLength),
        ))
        content = bytes(json.dumps(params), 'utf-8')
        try:
            url = self._serverURL + self._setupPath
            request = urllib.request.Request(
                url,
                headers = {
                    'Content-Type': 'application/json',
                    # Content-Length is automatically calculated
                    'Accept': 'application/json',
                },
                data=content,
                method='POST'
            )
            with urllib.request.urlopen(request) as f:
                # Failure of the next assignments would be a system failure.
                info = json.loads(f.read())
                self._testID = info["testID"]
                self._interval = info["interval"]
                self._downloadLength = info["downloadLength"]
                self._uploadLength = info["uploadLength"]
                self._testBegin = info['testBegin']
                # Machine-readable JSON record of the server's reply.
                print(json.dumps(info), file=self._log)
                self._log.flush()
                # Human-readable summary.
                print( 'Begin:\n Test ID = ' + info['testID']
                        + '\n External IP = ' + info['externalIP']
                        + '\n Test Begin Time = '
                            + self.js_clock(info['testBegin'])
                        + '\n', file=self._report)
                self._report.flush()
        except Exception as e:
            raise RuntimeError('timestamp=' + ': '.join([str(timestamp),
                    'Failed to begin communication with server at',
                    self._serverURL])) from e
        return
def reportToServer(self, params, reportPath):
"""
Report the result of a download or upload test to the server.
This is the second stage of a download or upload test and is
invoked by downloadTest()
Takes a dictionary of informations and returns a similar
dictionary from the server.
"""
timestamp = self.js_time()
try:
params['clientTimestamp'] = timestamp
params['pathname'] = reportPath
# prepare the request
content = bytes(json.dumps(params), 'utf-8')
url = self._serverURL + reportPath
request = urllib.request.Request(
url,
headers = {
'Content-Type': 'application/json',
# Content-Length is automatically calculated
'Accept': 'application/json',
},
data=content,
method='POST'
)
with urllib.request.urlopen(request) as f:
data = f.read(4096).decode(encoding='iso-8859-1',
errors='replace')
except Exception as e:
raise RuntimeError('timestamp=' + ': '.join([str(timestamp),
'Failed to report result to', url])) from e
# data should be JSON text in canonical form
return json.loads(data)
    def download(self, params):
        """
        Run a download test with data received from the server.

        This is the first stage of a download test and is invoked by
        downloadTest().

        params: dictionary of test information; sent to the server as
        JSON, then updated in place (via setdefault, so existing keys are
        kept) with the measured times and byte count, and returned.
        Raises RuntimeError (chained) on any failure.
        """
        timestamp = self.js_time()
        clientRequestBegin = 0
        clientRequestEnd = 0
        clientResponseBegin = 0
        clientResponseEnd = 0
        clientReceiveLength = 0
        try:
            # prepare the request
            content = bytes(json.dumps(params), 'utf-8')
            url = self._serverURL + self._downloadPath
            request = urllib.request.Request(
                url,
                headers = {
                    'Content-Type': 'application/json',
                    # Content-Length is automatically calculated
                    'Accept': 'text/plain, application/octet',
                },
                data=content,
                method='POST'
            )
            # send the request, mark the times
            clientRequestBegin = self.js_time()
            with urllib.request.urlopen(request) as f:
                clientRequestEnd = self.js_time()
                # get the response, mark the times
                # we only need the total length of downloaded data
                clientResponseBegin = self.js_time()
                size = len(f.read(1024))
                while size > 0:
                    clientReceiveLength += size
                    size = len(f.read(16_384))
                clientResponseEnd = self.js_time()
            # update the information and return it
            params.setdefault('clientReceiveLength', clientReceiveLength)
            params.setdefault('downloadReceiveLength', clientReceiveLength)
            params.setdefault('clientRequestBegin', clientRequestBegin)
            params.setdefault('clientRequestEnd', clientRequestEnd)
            params.setdefault('clientResponseBegin', clientResponseBegin)
            params.setdefault('clientResponseEnd', clientResponseEnd)
        except Exception as e:
            raise RuntimeError('timestamp=' + ': '.join([str(timestamp),
                    'Failed to download data from server at',
                    self._serverURL])) from e
        return params
    def downloadTest(self):
        """
        Run a download test and report the result to the server.

        There are two exchanges.  The first exchange does the download and
        reports partial information to the server.  The second exchange
        includes information that becomes available after completion of
        the first exchange, and reports full information to the server.
        """
        gc.collect()        # try to avoid garbage collection during test
        timestamp = self.js_time()
        # allocation of data to make the request
        params = collections.OrderedDict((
            ('externalIP', self._externalIP),
            ('testID', self._testID),
            ('testBegin', self._testBegin),
            ('testNumber', self._testNumber),
            ('pathname', self._downloadPath),
            ('clientTimestamp', timestamp),
            ('interval', self._interval),
            ('downloadLength', self._downloadLength),
        ))
        params = self.download(params)
        # computer-readable JSON report
        print(json.dumps(params), file=self._log)
        self._log.flush()
        # human-readable report (times are in ms, hence the /1_000)
        megabytes = math.floor(params['clientReceiveLength'] / 1_000) / 1_000
        seconds = (params['clientResponseEnd']
                   - params['clientResponseBegin']) / 1_000
        print( 'Download\n Time: '
                + self.js_clock(params['clientTimestamp'])
                + '\n Megabytes: ' + str(megabytes)
                + '\n Seconds: ' + str(seconds)
                + '\n Megabits / Second: ' + str(round(
                    (self.bitsPerDataByte * megabytes / seconds), 3))
                + '\n', file=self._report)
        self._report.flush()
        # revise the download size for the next run, to get approximately the
        # desired length of time on each test run.
        self._downloadLength = self.recalculateLength(
            params['downloadReceiveLength'], seconds)
        params = self.reportToServer(params, self._downreportPath)
        # computer-readable JSON report
        print(json.dumps(params), file=self._log)
        self._log.flush()
        return
    def upload(self, params):
        """
        Run an upload test with data sent to the server.

        This is the first stage of an upload test and is invoked by
        uploadTest().

        params: dictionary of test information; updated in place (via
        setdefault) with the measured times and lengths, including values
        echoed back by the server, and returned.  Raises RuntimeError
        (chained) on any failure.
        """
        timestamp = self.js_time()
        clientRequestBegin = 0
        clientRequestEnd = 0
        clientResponseBegin = 0
        clientResponseEnd = 0
        clientReceiveLength = 0
        try:
            # prepare the request; body bytes are streamed from bytesource()
            url = self._serverURL + self._uploadPath
            request = urllib.request.Request(
                url,
                headers = {
                    'Content-Type': 'application/octet',
                    'Content-Length': self._uploadLength,
                    'Accept': 'application/json',
                },
                data=self.bytesource(self._uploadLength),
                method='POST'
            )
            # send the request, mark the times
            clientRequestBegin = self.js_time()
            with urllib.request.urlopen(request) as f:
                clientRequestEnd = self.js_time()
                # get the response, mark the times, save the info
                clientResponseBegin = self.js_time()
                text = f.read(4096)     # should be JSON text, < 1K
                clientResponseEnd = self.js_time()
                size = len(text)
                while size > 0:
                    # should be no remaining text
                    clientReceiveLength += size
                    size = len(f.read(4096))
                clientResponseEnd = self.js_time()
            info = json.loads(text)
            # update data report for printing as JSON to the log
            params.setdefault('clientReceiveLength', clientReceiveLength)
            params.setdefault('clientRequestBegin', clientRequestBegin)
            params.setdefault('clientRequestEnd', clientRequestEnd)
            params.setdefault('clientResponseBegin', clientResponseBegin)
            params.setdefault('clientResponseEnd', clientResponseEnd)
            params.setdefault('serverReceiveLength',
                              info['serverReceiveLength'])
            params.setdefault('serverRequestBegin', info['serverRequestBegin'])
            params.setdefault('serverRequestEnd', info['serverRequestEnd'])
            params.setdefault('serverResponseBegin',
                              info['serverResponseBegin'])
            params.setdefault('uploadReceiveLength',
                              info['uploadReceiveLength'])
        except Exception as e:
            raise RuntimeError('timestamp=' + ': '.join([str(timestamp),
                    'Failed to upload data from server at',
                    self._serverURL])) from e
        return params
    def uploadTest(self):
        """
        Run an upload test and report the result to the server.

        There are two exchanges.  The first exchange does the upload and
        reports partial information to the server.  The second exchange
        includes information that becomes available after completion of
        the first exchange, and reports full information to the server.
        """
        gc.collect()        # try to avoid garbage collection during test
        timestamp = self.js_time()
        # allocation of data to make the request
        params = collections.OrderedDict((
            ('externalIP', self._externalIP),
            ('testID', self._testID),
            ('testBegin', self._testBegin),
            ('testNumber', self._testNumber),
            ('pathname', self._uploadPath),
            ('clientTimestamp', timestamp),
            ('interval', self._interval),
            ('uploadLength', self._uploadLength),
        ))
        params = self.upload(params)
        # computer-readable JSON report
        print(json.dumps(params), file=self._log)
        self._log.flush()
        # human-readable report (times are in ms, hence the /1_000)
        megabytes = math.floor(params['uploadLength'] / 1_000) / 1_000
        seconds = (params['clientResponseEnd']
                   - params['clientRequestBegin']) / 1_000
        print( 'Upload\n Time: '
                + self.js_clock(params['clientTimestamp'])
                + '\n Megabytes: ' + str(megabytes)
                + '\n Seconds: ' + str(seconds)
                + '\n Megabits / Second: ' + str(round(
                    (self.bitsPerDataByte * megabytes / seconds), 3))
                + '\n', file=self._report)
        self._report.flush()
        # revise the upload size for the next run, to get approximately the
        # desired length of time on each test run (capped at maxUploadLength).
        self._uploadLength = min(self.maxUploadLength,
            self.recalculateLength(params['uploadReceiveLength'], seconds))
        params = self.reportToServer(params, self._upreportPath)
        # computer-readable JSON report
        print(json.dumps(params), file=self._log)
        self._log.flush()
        return
    def run_test_cycle(self):
        """
        Run a single test cycle: one download test, then one upload test.
        """
        self.downloadTest()
        self.uploadTest()
    def run(self):
        """
        Contact the server, then run test cycles forever.

        Sleeps self._interval seconds between cycles.  Never returns
        normally; terminates only via an exception (e.g. KeyboardInterrupt).
        """
        self.begin()
        while True:
            self.run_test_cycle()
            time.sleep(self._interval)
if __name__ == "__main__":
    # Command line entry point: parse options and run the client forever.
    shortopts = "h"
    longopts = ["help", "testid=", "interval=", "download=", "upload="]
    cmdline = getopt.getopt(sys.argv[1:], shortopts, longopts=longopts)
    argv = cmdline[1]               # positional arguments
    opt = dict(cmdline[0])          # option name -> value

    def printerr(s):
        # Print to stderr and flush so usage/errors appear immediately.
        print(s, file=sys.stderr)
        sys.stderr.flush()

    if len(argv) < 1 or '-h' in opt or '--help' in opt:
        printerr("Usage: " + sys.argv[0] + " [options] host[:port]")
        printerr("    Client to estimate download and upload times")
        printerr("    host (required): domain name or IP address of server")
        printerr("    port (optional, default = 80): "
                            + "destination port on server")
        printerr("    options:")
        printerr("        -h|--help        print this message")
        printerr("        --interval=n     time (seconds) between runs"
                    + " (default = " + str(Client.defaultInterval) +")")
        printerr("        --download=n     number of bytes to download"
                    + " (default = " + str(Client.initialDownloadLength) + ")")
        printerr("        --upload=n       number of bytes to upload"
                    + " (default = " + str(Client.initialUploadLength) + ")")
        printerr("        --testid=ID      test ID"
                    + " (default = test ID will be set by server)")
        printerr("    JSON log goes to stdout")
        printerr("    Human-readable report goes to stderr")
        printerr("    See script for details")
        exit(2)
    testID = opt["--testid"] if "--testid" in opt else None
    interval = (int(opt["--interval"]) if "--interval" in opt
                else Client.defaultInterval)
    download = (int(opt["--download"]) if "--download" in opt
                else Client.initialDownloadLength)
    upload = (int(opt["--upload"]) if "--upload" in opt
              else Client.initialUploadLength)
    try:
        Client(argv[0], interval=interval,
               downloadLength=download,
               uploadLength=upload,
               testID=testID).run()
    except KeyboardInterrupt as e:
        # BUG FIX: corrected misspelled user-facing message ("Teiminated").
        printerr("Terminated by Keyboard Interrupt\n")
        exit(1)
| 41.582721 | 79 | 0.55895 |
import os
import sys
import collections
import gc
import getopt
import json
import math
import time
import traceback
import urllib.error
import urllib.request
import re
class Client(object):
defaultInterval = 3_600
initialDownloadLength = 20_000_000
initialUploadLength = 2_000_000
minLength = 200_000
maxLength = 100_000_000
desiredRuntime = 10
maxRatio = 1.5
maxUploadLength = 125_000_000
defaultLog = sys.stdout
defaultReport = sys.stderr
bitsPerDataByte = 8
@classmethod
def js_time(cls):
return math.floor(1000 * time.time())
@classmethod
def js_clock(cls, milliseconds=None):
seconds = milliseconds / 1000 if milliseconds else None
return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(seconds))
def __init__(self, serverURL,
log=defaultLog,
report=defaultReport,
interval=defaultInterval,
downloadLength=initialDownloadLength,
uploadLength=initialUploadLength,
testID = None
):
super()
self._serverURL = (serverURL.rstrip('/')
if serverURL.endswith('/')
else serverURL)
self._rootPath = '/'
self._setupPath = '/begin'
self._downloadPath = '/download'
self._downreportPath = '/downreport'
self._uploadPath = '/upload'
self._upreportPath = '/upreport'
self._pingPath = '/echo'
self._report = report
self._log = log
self._interval = ( interval if interval
else self.__class__.defaultInterval)
self._downloadLength = ( downloadLength if downloadLength
else self.__class__.initialDownloadLength)
self._uploadLength = ( uploadLength if uploadLength
else self.__class__.initialUploadLength)
self._testID = testID
self._testNumber = 0
self._externalIP = None
self._testBegin = None
self._uploadLength = min(self.maxUploadLength, self._uploadLength)
def recalculateLength(self, previousLength, previousRuntime):
targetRuntime = self.__class__.desiredRuntime
minLength = self.__class__.minLength
maxLength = self.__class__.maxLength
lastRuntime = max(previousRuntime, targetRuntime/100)
if ( lastRuntime > targetRuntime / self.maxRatio
and lastRunime < targetRuntime * self.maxRatio ):
return previousLength
# round to nearest thousand and not too small or large
transmitLength = previousLength * targetRuntime / lastRuntime
return max(minLength, min(maxLength, round(transmitLength, -3)))
def bytesource(self, count):
byt = ((b'0123456789' * 7) + b'012345678\n') * 50
n = count
blen = len(byt)
while n > blen:
yield byt
n -= blen
yield byt[0:n] # may have zero length
def begin(self):
timestamp = self.js_time()
params = collections.OrderedDict((
('externalIP', self._externalIP),
('testID', self._testID),
('testBegin', self.js_time()), # server may revise this time
('pathname', self._setupPath),
('clientTimeStamp', timestamp),
('interval', self._interval),
('downloadLength', self._downloadLength),
('uploadLength', self._uploadLength),
))
content = bytes(json.dumps(params), 'utf-8')
try:
url = self._serverURL + self._setupPath
request = urllib.request.Request(
url,
headers = {
'Content-Type': 'application/json',
# Content-Length is automatically calculated
'Accept': 'application/json',
},
data=content,
method='POST'
)
with urllib.request.urlopen(request) as f:
# failure of the next assignments would be a system failure
info = json.loads(f.read())
self._testID = info["testID"]
self._interval = info["interval"]
self._downloadLength = info["downloadLength"]
self._uploadLength = info["uploadLength"]
self._testBegin = info['testBegin']
print(json.dumps(info), file=self._log)
self._log.flush()
print( 'Begin:\n Test ID = ' + info['testID']
+ '\n External IP = ' + info['externalIP']
+ '\n Test Begin Time = '
+ self.js_clock(info['testBegin'])
+ '\n', file=self._report)
self._report.flush()
except Exception as e:
raise RuntimeError('timestamp=' + ': '.join([str(timestamp),
'Failed to begin communication with server at',
self._serverURL])) from e
return
def reportToServer(self, params, reportPath):
timestamp = self.js_time()
try:
params['clientTimestamp'] = timestamp
params['pathname'] = reportPath
# prepare the request
content = bytes(json.dumps(params), 'utf-8')
url = self._serverURL + reportPath
request = urllib.request.Request(
url,
headers = {
'Content-Type': 'application/json',
# Content-Length is automatically calculated
'Accept': 'application/json',
},
data=content,
method='POST'
)
with urllib.request.urlopen(request) as f:
data = f.read(4096).decode(encoding='iso-8859-1',
errors='replace')
except Exception as e:
raise RuntimeError('timestamp=' + ': '.join([str(timestamp),
'Failed to report result to', url])) from e
# data should be JSON text in canonical form
return json.loads(data)
def download(self, params):
timestamp = self.js_time()
clientRequestBegin = 0
clientRequestEnd = 0
clientResponseBegin = 0
clientResponseEnd = 0
clientReceiveLength = 0
try:
# prepare the request
content = bytes(json.dumps(params), 'utf-8')
url = self._serverURL + self._downloadPath
request = urllib.request.Request(
url,
headers = {
'Content-Type': 'application/json',
# Content-Length is automatically calculated
'Accept': 'text/plain, application/octet',
},
data=content,
method='POST'
)
# send the request, mark the times
clientRequestBegin = self.js_time()
with urllib.request.urlopen(request) as f:
clientRequestEnd = self.js_time()
# get the response, mark the times
# we only need the total length of downloaded data
clientResponseBegin = self.js_time()
size = len(f.read(1024))
while size > 0:
clientReceiveLength += size
size = len(f.read(16_384))
clientResponseEnd = self.js_time()
# update the information and return it
params.setdefault('clientReceiveLength', clientReceiveLength)
params.setdefault('downloadReceiveLength', clientReceiveLength)
params.setdefault('clientRequestBegin', clientRequestBegin)
params.setdefault('clientRequestEnd', clientRequestEnd)
params.setdefault('clientResponseBegin', clientResponseBegin)
params.setdefault('clientResponseEnd', clientResponseEnd)
except Exception as e:
raise RuntimeError('timestamp=' + ': '.join([str(timestamp),
'Failed to download data from server at',
self._serverURL])) from e
return params
    def downloadTest(self):
        """Run one complete download measurement: fetch data, log the raw
        JSON record, print a human-readable summary, adapt the next
        download size, and report the result back to the server.
        """
        gc.collect() # try to avoid garbage collection during test
        timestamp = self.js_time()
        # allocation of data to make the request
        params = collections.OrderedDict((
            ('externalIP', self._externalIP),
            ('testID', self._testID),
            ('testBegin', self._testBegin),
            ('testNumber', self._testNumber),
            ('pathname', self._downloadPath),
            ('clientTimestamp', timestamp),
            ('interval', self._interval),
            ('downloadLength', self._downloadLength),
        ))
        params = self.download(params)
        # computer-readable JSON report
        print(json.dumps(params), file=self._log)
        self._log.flush()
        # human-readable report
        megabytes = math.floor(params['clientReceiveLength'] / 1_000) / 1_000
        seconds = (params['clientResponseEnd']
            - params['clientResponseBegin']) / 1_000
        print( 'Download\n    Time: '
            + self.js_clock(params['clientTimestamp'])
            + '\n    Megabytes: ' + str(megabytes)
            + '\n    Seconds: ' + str(seconds)
            + '\n    Megabits / Second: ' + str(round(
                (self.bitsPerDataByte * megabytes / seconds), 3))
            + '\n', file=self._report)
        self._report.flush()
        # revise the download size for the next run, to get approximately the
        # desired length of time on each test run.
        self._downloadLength = self.recalculateLength(
            params['downloadReceiveLength'], seconds)
        params = self.reportToServer(params, self._downreportPath)
        # computer-readable JSON report
        print(json.dumps(params), file=self._log)
        self._log.flush()
        return
def upload(self, params):
timestamp = self.js_time()
clientRequestBegin = 0
clientRequestEnd = 0
clientResponseBegin = 0
clientResponseEnd = 0
clientReceiveLength = 0
try:
# prepare the request
url = self._serverURL + self._uploadPath
request = urllib.request.Request(
url,
headers = {
'Content-Type': 'application/octet',
'Content-Length': self._uploadLength,
'Accept': 'application/json',
},
data=self.bytesource(self._uploadLength),
method='POST'
)
# send the request, mark the times
clientRequestBegin = self.js_time()
with urllib.request.urlopen(request) as f:
clientRequestEnd = self.js_time()
# get the response, mark the times, save the info
clientResponseBegin = self.js_time()
text = f.read(4096) # should be JSON text, < 1K
clientResponseEnd = self.js_time()
size = len(text)
while size > 0:
# should be no remaining text
clientReceiveLength += size
size = len(f.read(4096))
clientResponseEnd = self.js_time()
info = json.loads(text)
# update data report for printing as JSON to the log
params.setdefault('clientReceiveLength', clientReceiveLength)
params.setdefault('clientRequestBegin', clientRequestBegin)
params.setdefault('clientRequestEnd', clientRequestEnd)
params.setdefault('clientResponseBegin', clientResponseBegin)
params.setdefault('clientResponseEnd', clientResponseEnd)
params.setdefault('serverReceiveLength',
info['serverReceiveLength'])
params.setdefault('serverRequestBegin', info['serverRequestBegin'])
params.setdefault('serverRequestEnd', info['serverRequestEnd'])
params.setdefault('serverResponseBegin',
info['serverResponseBegin'])
params.setdefault('uploadReceiveLength',
info['uploadReceiveLength'])
except Exception as e:
raise RuntimeError('timestamp=' + ': '.join([str(timestamp),
'Failed to upload data from server at',
self._serverURL])) from e
return params
    def uploadTest(self):
        """Run one complete upload measurement: push data, log the raw JSON
        record, print a human-readable summary, adapt the next upload size
        (capped at maxUploadLength), and report the result to the server.
        """
        gc.collect() # try to avoid garbage collection during test
        timestamp = self.js_time()
        # allocation of data to make the request
        params = collections.OrderedDict((
            ('externalIP', self._externalIP),
            ('testID', self._testID),
            ('testBegin', self._testBegin),
            ('testNumber', self._testNumber),
            ('pathname', self._uploadPath),
            ('clientTimestamp', timestamp),
            ('interval', self._interval),
            ('uploadLength', self._uploadLength),
        ))
        params = self.upload(params)
        # computer-readable JSON report
        print(json.dumps(params), file=self._log)
        self._log.flush()
        # human-readable report
        megabytes = math.floor(params['uploadLength'] / 1_000) / 1_000
        # note: measured from clientRequestBegin, since the upload happens
        # while the request is being sent
        seconds = (params['clientResponseEnd']
            - params['clientRequestBegin']) / 1_000
        print( 'Upload\n    Time: '
            + self.js_clock(params['clientTimestamp'])
            + '\n    Megabytes: ' + str(megabytes)
            + '\n    Seconds: ' + str(seconds)
            + '\n    Megabits / Second: ' + str(round(
                (self.bitsPerDataByte * megabytes / seconds), 3))
            + '\n', file=self._report)
        self._report.flush()
        # revise the upload size for the next run, to get approximately the
        # desired length of time on each test run.
        self._uploadLength = min(self.maxUploadLength,
            self.recalculateLength(params['uploadReceiveLength'], seconds))
        params = self.reportToServer(params, self._upreportPath)
        # computer-readable JSON report
        print(json.dumps(params), file=self._log)
        self._log.flush()
        return
def run_test_cycle(self):
self.downloadTest()
self.uploadTest()
    def run(self):
        """Run the client forever: call begin() once, then perform one test
        cycle every ``self._interval`` seconds.

        This call blocks and never returns; interrupt it with Ctrl-C
        (KeyboardInterrupt).
        """
        self.begin()
        while True:
            self.run_test_cycle()
            time.sleep(self._interval)
if __name__ == "__main__":
    # Command-line entry point: parse options, then run the client forever.
    shortopts = "h"
    longopts = ["help", "testid=", "interval=", "download=", "upload="]
    cmdline = getopt.getopt(sys.argv[1:], shortopts, longopts=longopts)
    argv = cmdline[1]       # positional arguments: host[:port]
    opt = dict(cmdline[0])  # parsed option flags -> values
    def printerr(s):
        """Write one line to stderr and flush immediately."""
        print(s, file=sys.stderr)
        sys.stderr.flush()
    if len(argv) < 1 or '-h' in opt or '--help' in opt:
        printerr("Usage: " + sys.argv[0] + " [options] host[:port]")
        printerr("  Client to estimate download and upload times")
        printerr("  host (required): domain name or IP address of server")
        printerr("  port (optional, default = 80): "
            + "destination port on server")
        printerr("  options:")
        printerr("  -h|--help     print this message")
        printerr("  --interval=n  time (seconds) between runs"
            + " (default = " + str(Client.defaultInterval) +")")
        printerr("  --download=n  number of bytes to download"
            + " (default = " + str(Client.initialDownloadLength) + ")")
        printerr("  --upload=n    number of bytes to upload"
            + " (default = " + str(Client.initialUploadLength) + ")")
        printerr("  --testid=ID   test ID"
            + " (default = test ID will be set by server)")
        printerr("  JSON log goes to stdout")
        printerr("  Human-readable report goes to stderr")
        printerr("  See script for details")
        # sys.exit is always available, unlike the site-provided exit()
        sys.exit(2)
    # fall back to the Client class defaults for unspecified options
    testID = opt.get("--testid")
    interval = (int(opt["--interval"]) if "--interval" in opt
        else Client.defaultInterval)
    download = (int(opt["--download"]) if "--download" in opt
        else Client.initialDownloadLength)
    upload = (int(opt["--upload"]) if "--upload" in opt
        else Client.initialUploadLength)
    try:
        Client(argv[0], interval=interval,
            downloadLength=download,
            uploadLength=upload,
            testID=testID).run()
    except KeyboardInterrupt as e:
        # fixed typo: "Teiminated" -> "Terminated"
        printerr("Terminated by Keyboard Interrupt\n")
        sys.exit(1)
| true | true |
f72be1dc6f4fb1ba0ced9f2acbb93db31ef858b3 | 3,694 | py | Python | bin/specsim3d/spectralsim.py | LutzGross/fingal | 4b6fcc02871e7ba1a98f37ffd18f1a16a5fe6a48 | [
"Apache-2.0"
] | null | null | null | bin/specsim3d/spectralsim.py | LutzGross/fingal | 4b6fcc02871e7ba1a98f37ffd18f1a16a5fe6a48 | [
"Apache-2.0"
] | null | null | null | bin/specsim3d/spectralsim.py | LutzGross/fingal | 4b6fcc02871e7ba1a98f37ffd18f1a16a5fe6a48 | [
"Apache-2.0"
] | null | null | null | #-------------------------------------------------------------------------------
# Name: Spectralsim
# Purpose: Simulation of standard normal random fields
#
# Author: Dr.-Ing. S. Hoerning
#
# Created: 02.05.2018, Centre for Natural Gas, EAIT,
# The University of Queensland, Brisbane, QLD, Australia
#-------------------------------------------------------------------------------
import numpy as np
from . import covariancefunction as covfun
class spectral_random_field(object):
    """Simulation of standard normal random fields on a regular grid via
    the spectral (FFT) method.

    The covariance model ``covmod`` is evaluated on a periodic distance
    grid, its FFT gives the spectral density, and each call to simnew()
    draws an independent realization by colouring complex white noise with
    the square root of that density.  For non-periodic output the domain is
    padded by the covariance range and trimmed after simulation.
    """

    def __init__(self,
                 domainsize = (100,100),
                 covmod     = '1.0 Exp(2.)',
                 periodic   = False,
                 ):
        self.counter = 0
        self.periodic = periodic
        # create self.xyz for plotting 3d
        if len(domainsize) == 3:
            self.xyz = np.mgrid[[slice(0,n,1) for n in domainsize]].reshape(3,-1).T
        # adjust domainsize by cutoff for non-periodic output
        self.cutoff = 0
        if not self.periodic:
            cutoff = covfun.find_maximum_range(covmod)
            cutoffs = []
            for dim in domainsize:
                tsize = dim + cutoff
                # find closest multiple of 8 that is larger than tsize;
                # builtin int replaces the deprecated np.int alias, which
                # was removed in NumPy 1.24 (identical behavior)
                m8 = int(np.ceil(tsize/8.)*8.)
                cutoffs.append(m8 - dim)
            self.cutoff = np.array(cutoffs)
        self.domainsize = np.array(domainsize)+self.cutoff
        self.covmod = covmod
        self.ndim = len(self.domainsize)
        self.npoints = np.prod(self.domainsize)
        self.grid = np.mgrid[[slice(0,n,1) for n in self.domainsize]]
        # ensure periodicity of domain
        for i in range(self.ndim):
            self.domainsize = self.domainsize[:,np.newaxis]
        self.grid = np.min((self.grid,np.array(self.domainsize)-self.grid),axis=0)
        # compute distances from origin (--> wavenumbers in fourier space)
        self.h = ((self.grid**2).sum(axis=0))**0.5
        # covariances (in fourier space!!!)
        self.Q = covfun.Covariogram(self.h, self.covmod)
        # FFT of covariances
        self.FFTQ = np.abs(np.fft.fftn(self.Q))
        # eigenvalues of decomposition
        self.sqrtFFTQ = np.sqrt(self.FFTQ / self.npoints)
        self.Y = self.simnew()

    def simnew(self):
        """Draw and return a new, independent standard-normal field."""
        self.counter += 1
        # compute random field via inverse fourier transform
        real = np.random.standard_normal(size=self.sqrtFFTQ.shape)
        imag = np.random.standard_normal(size=self.sqrtFFTQ.shape)
        epsilon = real + 1j*imag
        rand = epsilon * self.sqrtFFTQ
        self.Y = np.real(np.fft.ifftn(rand))*self.npoints
        if not self.periodic:
            # readjust domainsize to correct size (--> no boundary effects...)
            gridslice = [slice(0,(self.domainsize.squeeze()-self.cutoff)[i],1)
                         for i in range(self.ndim)]
            self.Y = self.Y[tuple(gridslice)]
            self.Y = self.Y.reshape(self.domainsize.squeeze()-self.cutoff)
        return self.Y
# TEST CASE
if __name__ == "__main__":
    # Demo: simulate one 3-D field and render it as a coloured 3-D scatter.
    from mpl_toolkits.mplot3d import Axes3D  # noqa: F401 (registers '3d' projection)
    import matplotlib.pyplot as plt
    grid_shape = (30, 30, 30)
    model = '1.0 Exp(4.)'
    simulator = spectral_random_field(domainsize=grid_shape, covmod=model)
    field = simulator.simnew()
    # one (x, y, z) row per grid node, in the same order as field.flatten()
    coords = np.mgrid[[slice(0, n, 1) for n in grid_shape]].reshape(3, -1).T
    figure = plt.figure()
    axes = figure.add_subplot(111, projection='3d')
    axes.scatter(coords[:, 0], coords[:, 1], coords[:, 2], c=field.flatten())
    plt.show()
| 35.864078 | 84 | 0.538711 |
import numpy as np
from . import covariancefunction as covfun
class spectral_random_field(object):
def __init__(self,
domainsize = (100,100),
covmod = '1.0 Exp(2.)',
periodic = False,
):
self.counter = 0
self.periodic = periodic
if len(domainsize) == 3:
self.xyz = np.mgrid[[slice(0,n,1) for n in domainsize]].reshape(3,-1).T
self.cutoff = 0
if not self.periodic:
cutoff = covfun.find_maximum_range(covmod)
cutoffs = []
for dim in domainsize:
tsize = dim + cutoff
m8 = np.int(np.ceil(tsize/8.)*8.)
cutoffs.append(m8 - dim)
self.cutoff = np.array(cutoffs)
self.domainsize = np.array(domainsize)+self.cutoff
self.covmod = covmod
self.ndim = len(self.domainsize)
self.npoints = np.prod(self.domainsize)
self.grid = np.mgrid[[slice(0,n,1) for n in self.domainsize]]
for i in range(self.ndim):
self.domainsize = self.domainsize[:,np.newaxis]
self.grid = np.min((self.grid,np.array(self.domainsize)-self.grid),axis=0)
self.h = ((self.grid**2).sum(axis=0))**0.5
self.Q = covfun.Covariogram(self.h, self.covmod)
self.FFTQ = np.abs(np.fft.fftn(self.Q))
self.sqrtFFTQ = np.sqrt(self.FFTQ / self.npoints)
self.Y = self.simnew()
def simnew(self):
self.counter += 1
real = np.random.standard_normal(size=self.sqrtFFTQ.shape)
imag = np.random.standard_normal(size=self.sqrtFFTQ.shape)
epsilon = real + 1j*imag
rand = epsilon * self.sqrtFFTQ
self.Y = np.real(np.fft.ifftn(rand))*self.npoints
if not self.periodic:
gridslice = [slice(0,(self.domainsize.squeeze()-self.cutoff)[i],1)
for i in range(self.ndim)]
self.Y = self.Y[tuple(gridslice)]
self.Y = self.Y.reshape(self.domainsize.squeeze()-self.cutoff)
return self.Y
if __name__ == "__main__":
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
domain = (30, 30, 30)
covmod = '1.0 Exp(4.)'
spec = spectral_random_field(domainsize = domain, covmod = covmod)
field3d = spec.simnew()
xyz = np.mgrid[[slice(0 , n, 1) for n in domain]].reshape(3,-1).T
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(xyz[:,0], xyz[:,1], xyz[:,2], c=field3d.flatten())
plt.show()
| true | true |
f72be3f57c7c5d2eb4ce80ae2f2640c917c82222 | 1,598 | py | Python | salt/states/aptpkg.py | preoctopus/salt | aceaaa0e2f2f2ff29a694393bd82bba0d88fa44d | [
"Apache-2.0"
] | 3 | 2015-04-16T18:42:35.000Z | 2017-10-30T16:57:49.000Z | salt/states/aptpkg.py | preoctopus/salt | aceaaa0e2f2f2ff29a694393bd82bba0d88fa44d | [
"Apache-2.0"
] | 16 | 2015-11-18T00:44:03.000Z | 2018-10-29T20:48:27.000Z | salt/states/aptpkg.py | preoctopus/salt | aceaaa0e2f2f2ff29a694393bd82bba0d88fa44d | [
"Apache-2.0"
] | 4 | 2020-11-04T06:28:05.000Z | 2022-02-09T10:54:49.000Z | # -*- coding: utf-8 -*-
'''
Package management operations specific to APT- and DEB-based systems
====================================================================
'''
from __future__ import absolute_import
# Import python libs
import logging
# Import salt libs
import salt.utils
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = 'apt'
def __virtual__():
    '''
    Only load this state module on apt-based platforms where the
    pkg.get_selections execution function is available
    '''
    if __salt__.get('pkg.get_selections', False):
        return __virtualname__
    return False
def held(name):
    '''
    Set package in 'hold' state, meaning it will not be upgraded.

    name
        The name of the package, e.g., 'tmux'
    '''
    ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}
    state = __salt__['pkg.get_selections'](
        pattern=name,
    )
    # Guard-clause style: each terminal outcome returns immediately.
    if not state:
        ret.update(comment='Package {0} does not have a state'.format(name))
        return ret
    if salt.utils.is_true(state.get('hold', False)):
        ret.update(result=True,
                   comment='Package {0} is already held'.format(name))
        return ret
    if __opts__['test']:
        # dry-run mode: report the pending change without applying it
        ret.update(result=None,
                   comment='Package {0} is set to be held'.format(name))
        return ret
    result = __salt__['pkg.set_selections'](
        selection={'hold': [name]}
    )
    ret.update(changes=result[name],
               result=True,
               comment='Package {0} is now being held'.format(name))
    return ret
| 27.084746 | 76 | 0.560701 |
from __future__ import absolute_import
import logging
import salt.utils
log = logging.getLogger(__name__)
__virtualname__ = 'apt'
def __virtual__():
return (__virtualname__
if __salt__.get('pkg.get_selections', False)
else False)
def held(name):
ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}
state = __salt__['pkg.get_selections'](
pattern=name,
)
if not state:
ret.update(comment='Package {0} does not have a state'.format(name))
elif not salt.utils.is_true(state.get('hold', False)):
if not __opts__['test']:
result = __salt__['pkg.set_selections'](
selection={'hold': [name]}
)
ret.update(changes=result[name],
result=True,
comment='Package {0} is now being held'.format(name))
else:
ret.update(result=None,
comment='Package {0} is set to be held'.format(name))
else:
ret.update(result=True,
comment='Package {0} is already held'.format(name))
return ret
| true | true |
f72be4c5cc1eeab06b7191f01eab65ac00eda171 | 605 | py | Python | lms_aaditya/ContactsModules/update_contacts.py | hcmuleva/personal-profile | 051b5a2f36b927951691f48abe584beb8bc25440 | [
"MIT"
] | null | null | null | lms_aaditya/ContactsModules/update_contacts.py | hcmuleva/personal-profile | 051b5a2f36b927951691f48abe584beb8bc25440 | [
"MIT"
] | 3 | 2020-07-13T17:46:32.000Z | 2020-07-26T10:30:59.000Z | lms_aaditya/ContactsModules/update_contacts.py | hcmuleva/personal-profile | 051b5a2f36b927951691f48abe584beb8bc25440 | [
"MIT"
] | null | null | null | from MongoConnect import ConnectModule
my_con = ConnectModule.connect()
collection = my_con.db["Contacts"]
class UpdateContact:
    """Update a single contact document in the MongoDB ``Contacts``
    collection, keyed by its registration id."""
    def __init__(self, reg_id, uname, uemail, uphone):
        # replacement values; reg_id doubles as the lookup key in update()
        self.uname = uname
        self.uemail = uemail
        self.uphone = uphone
        self.reg_id = reg_id
    def update(self):
        """Apply a $set update to the document whose "Registration Id"
        matches self.reg_id and return the driver's result.

        NOTE(review): several field names ("Name ", "Email ID ", "Phone
        number ") carry trailing spaces -- presumably to match keys already
        stored in the collection; confirm before normalizing them.
        NOTE(review): Collection.update() is deprecated and removed in
        PyMongo 4 -- migrate to update_one()/update_many().
        """
        newdata = {"$set": {
            "Registration Id": self.reg_id,
            "Name ": self.uname,
            "Email ID ": self.uemail,
            "Phone number ": self.uphone
        }
        }
        return collection.update({"Registration Id": self.reg_id}, newdata)
| 27.5 | 75 | 0.58843 | from MongoConnect import ConnectModule
my_con = ConnectModule.connect()
collection = my_con.db["Contacts"]
class UpdateContact:
def __init__(self, reg_id, uname, uemail, uphone):
self.uname = uname
self.uemail = uemail
self.uphone = uphone
self.reg_id = reg_id
def update(self):
newdata = {"$set": {
"Registration Id": self.reg_id,
"Name ": self.uname,
"Email ID ": self.uemail,
"Phone number ": self.uphone
}
}
return collection.update({"Registration Id": self.reg_id}, newdata)
| true | true |
f72be53c78ed7ddbde3083e7ebead947d18b09a5 | 2,494 | py | Python | dayong/components/event_component.py | ooliver1/Dayong | 0923e0ff2a03157a51f7fae9c6056afd812c314c | [
"MIT"
] | 1 | 2021-11-04T13:20:31.000Z | 2021-11-04T13:20:31.000Z | dayong/components/event_component.py | ooliver1/Dayong | 0923e0ff2a03157a51f7fae9c6056afd812c314c | [
"MIT"
] | null | null | null | dayong/components/event_component.py | ooliver1/Dayong | 0923e0ff2a03157a51f7fae9c6056afd812c314c | [
"MIT"
] | null | null | null | """
dayong.components.event_component
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Organization of events and event listeners.
"""
from typing import Optional
import hikari
import tanjun
from dayong.configs import DayongConfig
component = tanjun.Component()
@component.with_listener(hikari.MemberCreateEvent)
async def greet_new_member(
    event: hikari.MemberCreateEvent,
    config: DayongConfig = tanjun.injected(type=DayongConfig),
) -> None:
    """Welcome new guild members.

    This will dynamically search for welcome channels, sort the channels by
    name length and send a greeting to the channel with the shortest name.

    Args:
        event (hikari.MemberCreateEvent): Instance of `hikari.MemberCreateEvent`.
            This is a registered type dependency and is injected by the client.
        config (DayongConfig): Bot configuration, injected by the client;
            supplies the greeting embed data.
    """
    embeddings = config.embeddings["new_member_greetings"]
    wc_channels: list[str] = []
    wc_channel: Optional[hikari.TextableChannel] = None
    channels = await event.app.rest.fetch_guild_channels(event.guild_id)
    # Collect the names of candidate welcome channels.
    for channel in channels:
        if channel.name is not None and "welcome" in channel.name:
            wc_channels.append(channel.name)
    if wc_channels:
        # Shortest name wins; fetch that channel and keep it only if it is
        # textable (i.e. we can actually send the greeting there).
        wc_channels.sort(key=len)
        for channel in channels:
            if wc_channels[0] == channel.name:
                wc_channel = (
                    wch
                    if isinstance(
                        (wch := await event.app.rest.fetch_channel(channel.id)),
                        hikari.TextableChannel,
                    )
                    else None
                )
    if wc_channel is not None and isinstance(embeddings, dict):
        # NOTE(review): hikari.OwnGuild.name here is a class attribute, not
        # an instance value -- confirm this is the intended format argument.
        embed = hikari.Embed(
            description=embeddings["description"].format(
                hikari.OwnGuild.name,
                event.member.id,
                embeddings["readme_channel_id"],
            ),
            color=embeddings["color"],
        )
        # Iterate the greeting fields directly instead of indexing with
        # range(len(...)) -- same order, same behavior, clearer code.
        for greeting in embeddings["greetings_field"]:
            embed.add_field(
                name=greeting["name"],
                value=greeting["value"],
                inline=True,
            )
        await wc_channel.send(embed)
@tanjun.as_loader
def load_examples(client: tanjun.Client) -> None:
    """The loader for this component.

    Registered via ``tanjun.as_loader`` so the client can discover and load
    this module.

    Args:
        client (tanjun.Client): The client instance that will load this module.
    """
    # A copy is added so repeated loads never mutate the module-level component.
    client.add_component(component.copy())
| 30.048193 | 87 | 0.623897 | from typing import Optional
import hikari
import tanjun
from dayong.configs import DayongConfig
component = tanjun.Component()
@component.with_listener(hikari.MemberCreateEvent)
async def greet_new_member(
event: hikari.MemberCreateEvent,
config: DayongConfig = tanjun.injected(type=DayongConfig),
) -> None:
embeddings = config.embeddings["new_member_greetings"]
wc_channels: list[str] = []
wc_channel: Optional[hikari.TextableChannel] = None
channels = await event.app.rest.fetch_guild_channels(event.guild_id)
for channel in channels:
if channel.name is not None and "welcome" in channel.name:
wc_channels.append(channel.name)
if wc_channels:
wc_channels.sort(key=len)
for channel in channels:
if wc_channels[0] == channel.name:
wc_channel = (
wch
if isinstance(
(wch := await event.app.rest.fetch_channel(channel.id)),
hikari.TextableChannel,
)
else None
)
if wc_channel is not None and isinstance(embeddings, dict):
embed = hikari.Embed(
description=embeddings["description"].format(
hikari.OwnGuild.name,
event.member.id,
embeddings["readme_channel_id"],
),
color=embeddings["color"],
)
for info in range(len(embeddings["greetings_field"])):
inner_dict = embeddings["greetings_field"][info]
embed.add_field(
name=inner_dict["name"],
value=inner_dict["value"],
inline=True,
)
await wc_channel.send(embed)
@tanjun.as_loader
def load_examples(client: tanjun.Client) -> None:
client.add_component(component.copy())
| true | true |
f72be60e09156efc269b07d9431ebb692d7f5e1c | 683 | py | Python | src/test_SimulationState.py | pawel00100/Disease-Spread-Simulation | eff8b824a9205f61a4d70cd6c7613a9a786bd1eb | [
"MIT"
] | null | null | null | src/test_SimulationState.py | pawel00100/Disease-Spread-Simulation | eff8b824a9205f61a4d70cd6c7613a9a786bd1eb | [
"MIT"
] | null | null | null | src/test_SimulationState.py | pawel00100/Disease-Spread-Simulation | eff8b824a9205f61a4d70cd6c7613a9a786bd1eb | [
"MIT"
] | null | null | null | from unittest import TestCase
from Person import Person
from SimulationState import SimulationState, MapPosition
from src.Map import Map
class TestSimulationState(TestCase):
    """Tests for SimulationState.find_neighbors."""

    def test_find_neighbors(self):
        """An adjacent person is a neighbour; a distant person is not."""
        # 'grid' rather than 'map' so the builtin map() is not shadowed
        grid = Map(200, 200)
        p0 = Person(MapPosition(0, 0, grid), grid)
        p1 = Person(MapPosition(1, 0, grid), grid)
        p2 = Person(MapPosition(0, 100, grid), grid)
        people = {p0: MapPosition(0, 0, grid), p1: MapPosition(1, 0, grid), p2: MapPosition(0, 100, grid)}
        simulation_state = SimulationState(people)
        neighbors = simulation_state.find_neighbors(p0)
        # assertIn/assertNotIn replace the deprecated assert_ alias, which
        # was removed in Python 3.12, and give clearer failure messages.
        self.assertIn(p1, neighbors)
        self.assertNotIn(p2, neighbors)
| 31.045455 | 103 | 0.676428 | from unittest import TestCase
from Person import Person
from SimulationState import SimulationState, MapPosition
from src.Map import Map
class TestSimulationState(TestCase):
def test_find_neighbors(self):
map = Map(200, 200)
p0 = Person(MapPosition(0, 0, map), map)
p1 = Person(MapPosition(1, 0, map), map)
p2 = Person(MapPosition(0, 100, map), map)
people = {p0: MapPosition(0, 0, map), p1: MapPosition(1, 0, map), p2: MapPosition(0, 100, map)}
simulation_state = SimulationState(people)
neighbors = simulation_state.find_neighbors(p0)
self.assert_(p1 in neighbors)
self.assert_(p2 not in neighbors)
| true | true |
f72be711d81d043cbb81c0c2790bd4ca458ffcbf | 3,130 | py | Python | tests/test_types.py | acarrasco/dacite | ece070cc3c25e86634086db8ee4f2e45bdfe6fe5 | [
"MIT"
] | null | null | null | tests/test_types.py | acarrasco/dacite | ece070cc3c25e86634086db8ee4f2e45bdfe6fe5 | [
"MIT"
] | 1 | 2019-03-20T17:30:34.000Z | 2019-03-20T17:30:34.000Z | tests/test_types.py | acarrasco/dacite | ece070cc3c25e86634086db8ee4f2e45bdfe6fe5 | [
"MIT"
] | null | null | null | from typing import Optional, Union, List, Any, Dict, NewType, TypeVar, Generic
import pytest
from dacite.types import (
is_optional,
extract_optional,
is_generic,
is_union,
is_generic_collection,
extract_origin_collection,
is_instance,
cast_value,
extract_generic,
is_new_type,
)
# Unit tests for the predicate/extractor helpers in dacite.types, grouped
# by the helper under test.  Naming convention: test_<helper>_with_<case>.
def test_is_union_with_union():
    assert is_union(Union[int, float])
def test_is_union_with_non_union():
    assert not is_union(int)
def test_is_optional_with_optional():
    assert is_optional(Optional[int])
def test_is_optional_with_non_optional():
    assert not is_optional(int)
def test_is_optional_with_optional_of_union():
    assert is_optional(Optional[Union[int, float]])
def test_extract_optional():
    assert extract_optional(Optional[int]) == int
def test_extract_optional_with_wrong_type():
    # a non-Optional generic must be rejected
    with pytest.raises(ValueError):
        extract_optional(List[None])
def test_is_generic_with_generic():
    assert is_generic(Optional[int])
def test_is_generic_with_non_generic():
    assert not is_generic(int)
def test_is_generic_collection_with_generic_collection():
    assert is_generic_collection(List[int])
def test_is_generic_collection_with_non_generic_collection():
    # the bare builtin is not a *generic* collection
    assert not is_generic_collection(list)
def test_extract_generic_collection():
    assert extract_origin_collection(List[int]) == list
def test_is_new_type_with_new_type():
    assert is_new_type(NewType("NewType", int))
def test_is_new_type_with_non_new_type():
    assert not is_new_type(int)
def test_is_instance_with_built_in_type_and_matching_value_type():
    assert is_instance(1, int)
def test_is_instance_with_built_in_type_and_not_matching_value_type():
    assert not is_instance("test", int)
def test_is_instance_with_union_and_matching_value_type():
    assert is_instance(1, Union[int, float])
def test_is_instance_with_union_and_not_matching_value_type():
    assert not is_instance("test", Union[int, float])
def test_is_instance_with_generic_collection_and_matching_value_type():
    assert is_instance([1], List[int])
def test_is_instance_with_generic_collection_and_not_matching_value_type():
    # a set is not an instance of List[int]
    assert not is_instance({1}, List[int])
def test_is_instance_with_any_type():
    # Any matches every value
    assert is_instance(1, Any)
def test_is_instance_with_new_type_and_matching_value_type():
    assert is_instance("test", NewType("MyStr", str))
def test_is_instance_with_new_type_and_not_matching_value_type():
    assert not is_instance(1, NewType("MyStr", str))
def test_is_instance_with_not_supported_generic_types():
    # user-defined generics are not supported, even for exact matches
    T = TypeVar("T")
    class X(Generic[T]):
        pass
    assert not is_instance(X[str](), X[str])
def test_cast_value_with_built_in_type():
    assert cast_value(int, "1") == 1
def test_cast_value_with_optional():
    assert cast_value(Optional[int], "1") == 1
def test_cast_value_with_generic_sequence():
    assert cast_value(List[int], ["1"]) == [1]
def test_cast_value_with_generic_mapping():
    # both keys and values are cast to the declared types
    assert cast_value(Dict[str, int], {1: "1"}) == {"1": 1}
def test_extract_generic():
    assert extract_generic(List[int]) == (int,)
| 22.517986 | 78 | 0.763259 | from typing import Optional, Union, List, Any, Dict, NewType, TypeVar, Generic
import pytest
from dacite.types import (
is_optional,
extract_optional,
is_generic,
is_union,
is_generic_collection,
extract_origin_collection,
is_instance,
cast_value,
extract_generic,
is_new_type,
)
def test_is_union_with_union():
assert is_union(Union[int, float])
def test_is_union_with_non_union():
assert not is_union(int)
def test_is_optional_with_optional():
assert is_optional(Optional[int])
def test_is_optional_with_non_optional():
assert not is_optional(int)
def test_is_optional_with_optional_of_union():
assert is_optional(Optional[Union[int, float]])
def test_extract_optional():
assert extract_optional(Optional[int]) == int
def test_extract_optional_with_wrong_type():
with pytest.raises(ValueError):
extract_optional(List[None])
def test_is_generic_with_generic():
assert is_generic(Optional[int])
def test_is_generic_with_non_generic():
assert not is_generic(int)
def test_is_generic_collection_with_generic_collection():
assert is_generic_collection(List[int])
def test_is_generic_collection_with_non_generic_collection():
assert not is_generic_collection(list)
def test_extract_generic_collection():
assert extract_origin_collection(List[int]) == list
def test_is_new_type_with_new_type():
assert is_new_type(NewType("NewType", int))
def test_is_new_type_with_non_new_type():
assert not is_new_type(int)
def test_is_instance_with_built_in_type_and_matching_value_type():
assert is_instance(1, int)
def test_is_instance_with_built_in_type_and_not_matching_value_type():
assert not is_instance("test", int)
def test_is_instance_with_union_and_matching_value_type():
assert is_instance(1, Union[int, float])
def test_is_instance_with_union_and_not_matching_value_type():
assert not is_instance("test", Union[int, float])
def test_is_instance_with_generic_collection_and_matching_value_type():
assert is_instance([1], List[int])
def test_is_instance_with_generic_collection_and_not_matching_value_type():
assert not is_instance({1}, List[int])
def test_is_instance_with_any_type():
assert is_instance(1, Any)
def test_is_instance_with_new_type_and_matching_value_type():
assert is_instance("test", NewType("MyStr", str))
def test_is_instance_with_new_type_and_not_matching_value_type():
assert not is_instance(1, NewType("MyStr", str))
def test_is_instance_with_not_supported_generic_types():
T = TypeVar("T")
class X(Generic[T]):
pass
assert not is_instance(X[str](), X[str])
def test_cast_value_with_built_in_type():
assert cast_value(int, "1") == 1
def test_cast_value_with_optional():
assert cast_value(Optional[int], "1") == 1
def test_cast_value_with_generic_sequence():
assert cast_value(List[int], ["1"]) == [1]
def test_cast_value_with_generic_mapping():
assert cast_value(Dict[str, int], {1: "1"}) == {"1": 1}
def test_extract_generic():
assert extract_generic(List[int]) == (int,)
| true | true |
f72be79b665283eb1a3552b5a050e1fc7b15727e | 2,877 | py | Python | mms/context.py | andrewfayres/mxnet-model-server | ef4edfef4cfe5234887bf834ec7b82676a36ba02 | [
"Apache-2.0"
] | 1 | 2019-01-30T02:57:31.000Z | 2019-01-30T02:57:31.000Z | mms/context.py | DrSnowbird/mxnet-model-server | a0bfd712350545dceb21c8e0b0b21dfa0c9918a7 | [
"Apache-2.0"
] | null | null | null | mms/context.py | DrSnowbird/mxnet-model-server | a0bfd712350545dceb21c8e0b0b21dfa0c9918a7 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
# http://www.apache.org/licenses/LICENSE-2.0
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
"""
Context object of incoming request
"""
class Context(object):
    """
    Per-model worker context handed to service code.

    Holds fixed load-time system properties (model directory, GPU id,
    batch size, server identity) alongside mutable per-request state
    (request ids, response headers, metrics).
    """

    def __init__(self, model_name, model_dir, manifest, batch_size, gpu, mms_version):
        self.model_name = model_name
        self.manifest = manifest
        # Load-time properties; exposed read-only via system_properties.
        self._system_properties = {
            "model_dir": model_dir,
            "gpu_id": gpu,
            "batch_size": batch_size,
            "server_name": "MMS",
            "server_version": mms_version,
        }
        self.request_ids = None
        # This assignment goes through the request_processor property setter.
        self.request_processor = RequestProcessor(dict())
        self._metrics = None

    @property
    def system_properties(self):
        """Fixed worker/system properties set at model load time."""
        return self._system_properties

    @property
    def request_processor(self):
        """The per-request header/status processor."""
        return self._request_processor

    @request_processor.setter
    def request_processor(self, request_processor):
        self._request_processor = request_processor

    @property
    def metrics(self):
        """Metrics collector attached to this context, if any."""
        return self._metrics

    @metrics.setter
    def metrics(self, metrics):
        self._metrics = metrics

    def set_response_content_type(self, request_id, value):
        """Record the content-type response header for the given request id."""
        self._request_processor.add_response_property(
            request_id, {'content-type': value})

    def get_response_content_type(self, request_id):
        """Return the content-type recorded for request_id, or None."""
        headers = self._request_processor.get_response_header().get(request_id)
        return None if headers is None else headers.get('content-type')

    def __eq__(self, other):
        if not isinstance(other, Context):
            return False
        return self.__dict__ == other.__dict__


class RequestProcessor(object):
    """
    Collects response status and headers for a request, and exposes the
    headers the request arrived with.
    """

    def __init__(self, request_header):
        self._status_code = 200  # HTTP-style status; 200 until report_status
        self._reason_phrase = None
        self._response_header = {}
        self._request_header = request_header

    def get_request_property(self, key):
        """Look up a header from the incoming request (None if absent)."""
        return self._request_header.get(key)

    def report_status(self, code, reason_phrase=None):
        """Set the status code (and optional reason phrase) for the response."""
        self._status_code = code
        self._reason_phrase = reason_phrase

    def add_response_property(self, key, value):
        """Attach one response header entry."""
        self._response_header[key] = value

    def get_response_header(self):
        """Return the dict of accumulated response headers."""
        return self._response_header
| 31.271739 | 90 | 0.686827 |
class Context(object):
    """Per-model execution context handed to service handlers.

    Identifies the model, exposes read-only server facts, and funnels
    per-request response headers through a RequestProcessor.
    """
    def __init__(self, model_name, model_dir, manifest, batch_size, gpu, mms_version):
        self.model_name = model_name
        self.manifest = manifest
        # Facts describing the serving environment; exposed read-only
        # through the system_properties property.
        self._system_properties = {
            "model_dir": model_dir,
            "gpu_id": gpu,
            "batch_size": batch_size,
            "server_name": "MMS",
            "server_version": mms_version,
        }
        self.request_ids = None
        self._metrics = None
        self.request_processor = RequestProcessor(dict())
    @property
    def system_properties(self):
        """Read-only environment facts (model dir, gpu id, batch size, server info)."""
        return self._system_properties
    @property
    def request_processor(self):
        """The RequestProcessor tracking this context's response state."""
        return self._request_processor
    @request_processor.setter
    def request_processor(self, processor):
        self._request_processor = processor
    @property
    def metrics(self):
        """Metrics collector attached to this context; None until assigned."""
        return self._metrics
    @metrics.setter
    def metrics(self, collector):
        self._metrics = collector
    def set_response_content_type(self, request_id, value):
        """Record *value* as the 'content-type' response header for *request_id*."""
        self._request_processor.add_response_property(request_id, {'content-type': value})
    def get_response_content_type(self, request_id):
        """Return the content-type recorded for *request_id*, or None when unset."""
        recorded = self._request_processor.get_response_header().get(request_id)
        if recorded is None:
            return None
        return recorded.get('content-type')
    def __eq__(self, other):
        """Equal when both operands are Context instances with identical state."""
        if not isinstance(other, Context):
            return False
        return self.__dict__ == other.__dict__
class RequestProcessor(object):
    """Tracks the status code, reason phrase and response headers for one request."""
    def __init__(self, request_header):
        # Start as a successful, empty reply; request headers are read-only.
        self._request_header = request_header
        self._response_header = {}
        self._status_code = 200
        self._reason_phrase = None
    def get_request_property(self, key):
        """Look up a request header; None when the key is absent."""
        return self._request_header.get(key)
    def report_status(self, code, reason_phrase=None):
        """Record the status code (and optional reason phrase) to report."""
        self._status_code = code
        self._reason_phrase = reason_phrase
    def add_response_property(self, key, value):
        """Set response header *key* to *value*."""
        self._response_header[key] = value
    def get_response_header(self):
        """Return the dict of accumulated response headers."""
        return self._response_header
| true | true |
f72be81e9dc0fedf810a47adb40ea90cbd01bd08 | 7,766 | py | Python | xos/core/dashboard/views/home.py | pan2za/xos | c2a4da2ccaa12360b2718be303b247866aefdfe6 | [
"Apache-2.0"
] | null | null | null | xos/core/dashboard/views/home.py | pan2za/xos | c2a4da2ccaa12360b2718be303b247866aefdfe6 | [
"Apache-2.0"
] | 5 | 2020-06-05T17:47:15.000Z | 2021-09-23T23:21:27.000Z | xos/core/dashboard/views/home.py | pan2za/xos | c2a4da2ccaa12360b2718be303b247866aefdfe6 | [
"Apache-2.0"
] | null | null | null | from view_common import *
from django.http import HttpResponseRedirect
import sys
def isInt(s):
    """Return True when *s* can be parsed as a base-10 integer.

    Only ValueError is caught, so non-string/non-numeric types that make
    int() raise TypeError still propagate, as in the original.
    """
    try:
        int(s)
    except ValueError:
        return False
    return True
class LoggedInView(TemplateView):
    """Redirect a logged-in user to their configured landing page."""
    def get(self, request, name="root", *args, **kwargs):
        # Fall back to the admin UI when the user has no login page set
        # (same truthiness test as the original if/else).
        destination = request.user.login_page or "/admin/"
        return HttpResponseRedirect(destination)
class DashboardDynamicView(TemplateView):
    """Render XOS dashboards for the logged-in user.

    Dashboard content comes either from an on-disk template
    (``template:<name>`` URLs) or from an external page embedded in an
    iframe (``http...`` URLs); the rendered fragment is wrapped in the
    head/tail Django template snippets below.
    """
    # Django template fragments wrapped around the generated dashboard HTML.
    head_template = r"""{% extends "admin/dashboard/dashboard_base.html" %}
{% load admin_static %}
{% block content %}
"""
    head_wholePage_template = r"""{% extends "admin/wholePage.html" %}
{% load admin_static %}
{% block content %}
"""
    tail_template = r"{% endblock %}"
    def get(self, request, name="root", *args, **kwargs):
        """Dispatch to the single-dashboard or whole-page renderer for *name*."""
        context = self.get_context_data(**kwargs)
        context = getDashboardContext(request.user, context)
        if name == "root":
            # maybe it is a bit hacky, didn't want to mess up everything @teone
            user_dashboards = request.user.get_dashboards()
            first_dasboard_name = user_dashboards[0].id
            return self.singleDashboardView(request, first_dasboard_name, context)
            # return self.multiDashboardView(request, context)
        elif kwargs.get("wholePage", None):
            return self.singleFullView(request, name, context)
        else:
            return self.singleDashboardView(request, name, context)
    def readTemplate(self, fn):
        """Return the contents of dashboard template *fn* (name without extension).

        Searches the known template directories in order; note that a missing
        template yields an error *string*, not an exception.
        """
        TEMPLATE_DIRS = [XOS_DIR + "/templates/admin/dashboard/",
                         XOS_DIR + "/core/xoslib/dashboards/"]
        for template_dir in TEMPLATE_DIRS:
            pathname = os.path.join(template_dir, fn) + ".html"
            if os.path.exists(pathname):
                break
        else:
            return "failed to find %s in %s" % (fn, TEMPLATE_DIRS)
        template = open(pathname, "r").read()
        if (fn == "tenant"):
            # fix for tenant view - it writes html to a div called tabs-5
            template = '<div id="tabs-5"></div>' + template
        return template
    def embedDashboardUrl(self, url):
        """Turn a dashboard *url* into embeddable HTML (template body or iframe)."""
        if url.startswith("template:"):
            fn = url[9:]
            return self.readTemplate(fn)
        elif url.startswith("http"):
            return '<iframe src="%s" width="100%%" height="100%%" style="min-height: 1024px;" frameBorder="0"></iframe>' % url
        else:
            return "don't know how to load dashboard %s" % url
    def embedDashboardView(self, view, i=0):
        """Build the HTML for one DashboardView.

        When the view is bound to controllers, emits a controller <select>
        plus one hidden <script type="text/template"> per controller and
        inline jQuery that swaps the chosen template into the content div.
        """
        body = ""
        url = view.url
        if (view.controllers.all().count() > 0):
            body = body + 'Controller: <select id="dashselect-%d">' % i
            body = body + '<option value="None">(select a controller)</option>'
            for j, controllerdashboard in enumerate(view.controllerdashboardviews.all()):
                body = body + '<option value="%d">%s</option>' % (j, controllerdashboard.controller.name)
            body = body + '</select><hr>'
            for j, controllerdashboard in enumerate(view.controllerdashboardviews.all()):
                body = body + '<script type="text/template" id="dashtemplate-%d-%d">\n%s\n</script>\n' % (i,j, self.embedDashboardUrl(controllerdashboard.url));
            body = body + '<div id="dashcontent-%d" class="dashcontent"></div>\n' % i
            body = body + """<script>
$("#dashselect-%d").change(function() {
    v=$("#dashselect-%d").val();
    if (v=="None") {
        $("#dashcontent-%d").html("");
        return;
    }
    $("#dashcontent-%d").html( $("#dashtemplate-%d-" + v).html() );
});
//$("#dashcontent-%d").html( $("#dashtemplate-%d-0").html() );
</script>
""" % (i, i, i, i, i, i, i)
        else:
            body = body + self.embedDashboardUrl(url)
        return body
    def multiDashboardView(self, request, context):
        """Render every dashboard the user may see as a jQuery-UI tab set."""
        head_template = self.head_template
        # NOTE(review): this local is unused; the code below references
        # self.tail_template directly.
        tail_template = self.tail_template
        dashboards = request.user.get_dashboards()
        if not request.user.is_appuser:
            # customize is a special dashboard they always get
            customize = DashboardView.objects.filter(name="Customize")
            if customize:
                dashboards.append(customize[0])
        tabs = []
        bodies = []
        i = 0
        for view in dashboards:
            # don't display disabled dashboards
            if (not view.enabled):
                continue
            tabs.append('<li><a href="#dashtab-%d">%s</a></li>\n' % (i, view.name))
            body = '<div id="dashtab-%d">%s</div>\n' % (i, self.embedDashboardView(view, i))
            bodies.append(body)
            i = i + 1
        # embed content provider dashboards
        for cp in ContentProvider.objects.all():
            if request.user in cp.users.all():
                tabs.append('<li><a href="#dashtab-%d">%s</a></li>\n' % (i, cp.name))
                body = ""
                body = body + '<div id="dashtab-%d">\n' % i
                body = body + self.embedDashboardUrl("http:/admin/hpc/contentprovider/%s/%s/embeddedfilteredchange" % (cp.serviceProvider.hpcService.id, cp.id))
                body = body + '</div>\n'
                bodies.append(body)
                i = i + 1
        if (len(tabs) == 1) and (len(bodies) == 1):
            # there is only one dashboard, so optimize out the tabbing
            contents = bodies[0]
        else:
            contents = """
<div id="hometabs" >
<ul id="suit_form_tabs" class="nav nav-tabs nav-tabs-suit" data-tab-prefix="suit-tab">
%s
</ul>
%s
</div>
""" % ("\n".join(tabs), "\n".join(bodies))
        t = template.Template(head_template + contents + self.tail_template)
        response_kwargs = {}
        response_kwargs.setdefault('content_type', self.content_type)
        return self.response_class(
            request=request,
            template=t,
            context=context,
            **response_kwargs)
    def singleDashboardView(self, request, id, context):
        """Render a single dashboard identified by model id or template name."""
        head_template = self.head_template
        # NOTE(review): unused local, kept as-is; self.tail_template is used below.
        tail_template = self.tail_template
        # if id is a number, load by datamodel,
        # else look directly for the template
        if(isInt(id)):
            view = DashboardView.objects.get(id=id)
            t = template.Template(head_template + self.embedDashboardView(view) + self.tail_template)
        else:
            t = template.Template(head_template + self.readTemplate("xos" + id) + self.tail_template)
        response_kwargs = {}
        response_kwargs.setdefault('content_type', self.content_type)
        return self.response_class(
            request=request,
            template=t,
            context=context,
            **response_kwargs)
    def singleFullView(self, request, id, context):
        """Render one dashboard in the chromeless whole-page wrapper."""
        head_template = self.head_wholePage_template
        # NOTE(review): unused local, kept as-is; self.tail_template is used below.
        tail_template = self.tail_template
        view = DashboardView.objects.get(id=id)
        t = template.Template(head_template + self.embedDashboardView(view) + self.tail_template)
        response_kwargs = {}
        response_kwargs.setdefault('content_type', self.content_type)
        return self.response_class(
            request=request,
            template=t,
            context=context,
            **response_kwargs)
| 37.882927 | 160 | 0.550992 | from view_common import *
from django.http import HttpResponseRedirect
import sys
def isInt(s):
    """True when *s* parses as a base-10 integer (ValueError only is caught)."""
    try:
        int(s)
    except ValueError:
        return False
    return True
class LoggedInView(TemplateView):
    """Send the authenticated user to their login page, or the admin UI."""
    def get(self, request, name="root", *args, **kwargs):
        # `or` mirrors the original truthiness check on login_page.
        return HttpResponseRedirect(request.user.login_page or "/admin/")
class DashboardDynamicView(TemplateView):
    """Render XOS dashboards for the logged-in user.

    Dashboard content comes either from an on-disk template
    (``template:<name>`` URLs) or from an external page embedded in an
    iframe (``http...`` URLs); the rendered fragment is wrapped in the
    head/tail Django template snippets below.
    """
    # Django template fragments wrapped around the generated dashboard HTML.
    head_template = r"""{% extends "admin/dashboard/dashboard_base.html" %}
{% load admin_static %}
{% block content %}
"""
    head_wholePage_template = r"""{% extends "admin/wholePage.html" %}
{% load admin_static %}
{% block content %}
"""
    tail_template = r"{% endblock %}"
    def get(self, request, name="root", *args, **kwargs):
        """Dispatch to the single-dashboard or whole-page renderer for *name*."""
        context = self.get_context_data(**kwargs)
        context = getDashboardContext(request.user, context)
        if name == "root":
            # "root" means: show the user's first configured dashboard.
            user_dashboards = request.user.get_dashboards()
            first_dasboard_name = user_dashboards[0].id
            return self.singleDashboardView(request, first_dasboard_name, context)
            # return self.multiDashboardView(request, context)
        elif kwargs.get("wholePage", None):
            return self.singleFullView(request, name, context)
        else:
            return self.singleDashboardView(request, name, context)
    def readTemplate(self, fn):
        """Return the contents of dashboard template *fn* (name without extension).

        Searches the known template directories in order; a missing template
        yields an error *string*, not an exception.
        """
        TEMPLATE_DIRS = [XOS_DIR + "/templates/admin/dashboard/",
                         XOS_DIR + "/core/xoslib/dashboards/"]
        for template_dir in TEMPLATE_DIRS:
            pathname = os.path.join(template_dir, fn) + ".html"
            if os.path.exists(pathname):
                break
        else:
            return "failed to find %s in %s" % (fn, TEMPLATE_DIRS)
        template = open(pathname, "r").read()
        if (fn == "tenant"):
            # fix for tenant view - it writes html to a div called tabs-5
            template = '<div id="tabs-5"></div>' + template
        return template
    def embedDashboardUrl(self, url):
        """Turn a dashboard *url* into embeddable HTML (template body or iframe)."""
        if url.startswith("template:"):
            fn = url[9:]
            return self.readTemplate(fn)
        elif url.startswith("http"):
            return '<iframe src="%s" width="100%%" height="100%%" style="min-height: 1024px;" frameBorder="0"></iframe>' % url
        else:
            return "don't know how to load dashboard %s" % url
    def embedDashboardView(self, view, i=0):
        """Build the HTML for one DashboardView.

        When the view is bound to controllers, emits a controller <select>
        plus one hidden <script type="text/template"> per controller and
        inline jQuery that swaps the chosen template into the content div.
        """
        body = ""
        url = view.url
        if (view.controllers.all().count() > 0):
            body = body + 'Controller: <select id="dashselect-%d">' % i
            body = body + '<option value="None">(select a controller)</option>'
            for j, controllerdashboard in enumerate(view.controllerdashboardviews.all()):
                body = body + '<option value="%d">%s</option>' % (j, controllerdashboard.controller.name)
            body = body + '</select><hr>'
            for j, controllerdashboard in enumerate(view.controllerdashboardviews.all()):
                body = body + '<script type="text/template" id="dashtemplate-%d-%d">\n%s\n</script>\n' % (i,j, self.embedDashboardUrl(controllerdashboard.url));
            body = body + '<div id="dashcontent-%d" class="dashcontent"></div>\n' % i
            body = body + """<script>
$("#dashselect-%d").change(function() {
    v=$("#dashselect-%d").val();
    if (v=="None") {
        $("#dashcontent-%d").html("");
        return;
    }
    $("#dashcontent-%d").html( $("#dashtemplate-%d-" + v).html() );
});
//$("#dashcontent-%d").html( $("#dashtemplate-%d-0").html() );
</script>
""" % (i, i, i, i, i, i, i)
        else:
            body = body + self.embedDashboardUrl(url)
        return body
    def multiDashboardView(self, request, context):
        """Render every dashboard the user may see as a jQuery-UI tab set."""
        head_template = self.head_template
        # NOTE(review): this local is unused; the code below references
        # self.tail_template directly.
        tail_template = self.tail_template
        dashboards = request.user.get_dashboards()
        if not request.user.is_appuser:
            # "Customize" is a special dashboard non-app users always get.
            customize = DashboardView.objects.filter(name="Customize")
            if customize:
                dashboards.append(customize[0])
        tabs = []
        bodies = []
        i = 0
        for view in dashboards:
            # skip disabled dashboards
            if (not view.enabled):
                continue
            tabs.append('<li><a href="#dashtab-%d">%s</a></li>\n' % (i, view.name))
            body = '<div id="dashtab-%d">%s</div>\n' % (i, self.embedDashboardView(view, i))
            bodies.append(body)
            i = i + 1
        # embed content provider dashboards
        for cp in ContentProvider.objects.all():
            if request.user in cp.users.all():
                tabs.append('<li><a href="#dashtab-%d">%s</a></li>\n' % (i, cp.name))
                body = ""
                body = body + '<div id="dashtab-%d">\n' % i
                body = body + self.embedDashboardUrl("http:/admin/hpc/contentprovider/%s/%s/embeddedfilteredchange" % (cp.serviceProvider.hpcService.id, cp.id))
                body = body + '</div>\n'
                bodies.append(body)
                i = i + 1
        if (len(tabs) == 1) and (len(bodies) == 1):
            # there is only one dashboard, so optimize out the tabbing
            contents = bodies[0]
        else:
            contents = """
<div id="hometabs" >
<ul id="suit_form_tabs" class="nav nav-tabs nav-tabs-suit" data-tab-prefix="suit-tab">
%s
</ul>
%s
</div>
""" % ("\n".join(tabs), "\n".join(bodies))
        t = template.Template(head_template + contents + self.tail_template)
        response_kwargs = {}
        response_kwargs.setdefault('content_type', self.content_type)
        return self.response_class(
            request=request,
            template=t,
            context=context,
            **response_kwargs)
    def singleDashboardView(self, request, id, context):
        """Render a single dashboard identified by model id or template name."""
        head_template = self.head_template
        # NOTE(review): unused local, kept as-is; self.tail_template is used below.
        tail_template = self.tail_template
        # if id is a number, load by datamodel,
        # else look directly for the template
        if(isInt(id)):
            view = DashboardView.objects.get(id=id)
            t = template.Template(head_template + self.embedDashboardView(view) + self.tail_template)
        else:
            t = template.Template(head_template + self.readTemplate("xos" + id) + self.tail_template)
        response_kwargs = {}
        response_kwargs.setdefault('content_type', self.content_type)
        return self.response_class(
            request=request,
            template=t,
            context=context,
            **response_kwargs)
    def singleFullView(self, request, id, context):
        """Render one dashboard in the chromeless whole-page wrapper."""
        head_template = self.head_wholePage_template
        # NOTE(review): unused local, kept as-is; self.tail_template is used below.
        tail_template = self.tail_template
        view = DashboardView.objects.get(id=id)
        t = template.Template(head_template + self.embedDashboardView(view) + self.tail_template)
        response_kwargs = {}
        response_kwargs.setdefault('content_type', self.content_type)
        return self.response_class(
            request=request,
            template=t,
            context=context,
            **response_kwargs)
| true | true |
f72be85a54f072db1fb3df347bed8b5574b5043c | 3,782 | py | Python | fn_task_utils/fn_task_utils/components/task_utils_close_task.py | nickpartner-goahead/resilient-community-apps | 097c0dbefddbd221b31149d82af9809420498134 | [
"MIT"
] | 65 | 2017-12-04T13:58:32.000Z | 2022-03-24T18:33:17.000Z | fn_task_utils/fn_task_utils/components/task_utils_close_task.py | nickpartner-goahead/resilient-community-apps | 097c0dbefddbd221b31149d82af9809420498134 | [
"MIT"
] | 48 | 2018-03-02T19:17:14.000Z | 2022-03-09T22:00:38.000Z | fn_task_utils/fn_task_utils/components/task_utils_close_task.py | nickpartner-goahead/resilient-community-apps | 097c0dbefddbd221b31149d82af9809420498134 | [
"MIT"
] | 95 | 2018-01-11T16:23:39.000Z | 2022-03-21T11:34:29.000Z | # -*- coding: utf-8 -*-
# Copyright © IBM Corporation 2010, 2019
# pragma pylint: disable=unused-argument, no-self-use
"""Function implementation"""
import logging
from resilient_circuits import ResilientComponent, function, handler, StatusMessage, FunctionResult, FunctionError
from resilient_lib import ResultPayload
from fn_task_utils.lib.task_common import find_task_by_name, get_function_input
class FunctionComponent(ResilientComponent):
    """Component that implements Resilient function 'task_utils_close_task'"""
    def __init__(self, opts):
        """constructor provides access to the configuration options"""
        super(FunctionComponent, self).__init__(opts)
        self.options = opts.get("fn_task_utils", {})
    @handler("reload")
    def _reload(self, event, opts):
        """Configuration options have changed, save new values"""
        self.options = opts.get("fn_task_utils", {})
    @function("task_utils_close_task")
    def _task_utils_close_task_function(self, event, *args, **kwargs):
        """Function: A function which will attempt to close either a System or Custom task using the REST API."""
        def close_task_status(task):
            """
            An inner function used as the get_put callback.
            get_put from the res_client fetches the task and this callback
            mutates it; the return value is then sent back to Resilient as a PUT.
            :param task: task dict fetched from the REST API
            :return: the same task dict, with its status set to closed
            """
            # Note: `log` resolves at call time, after it is assigned below.
            task["status"] = "C"
            log.debug("Changed status to closed for task with name %s" % task["name"])
            return task
        try:
            payload = ResultPayload("task_utils_close_task", **kwargs)
            # Get the function parameters:
            incident_id = get_function_input(kwargs, "incident_id")  # number
            task_id = get_function_input(kwargs, "task_id", optional=True)  # number
            task_name = get_function_input(kwargs, "task_name", optional=True)  # text
            log = logging.getLogger(__name__)
            log.info("incident_id: %s", incident_id)
            log.info("task_id: %s", task_id)
            log.info("task_name: %s", task_name)
            res_client = self.rest_client()
            if not task_name and not task_id:
                raise ValueError("Either a Task ID or a Task Name to search for must be provided.")
            if task_id:
                log.debug("Task ID was provided, using this to contact REST API")
            else:
                if task_name:
                    yield StatusMessage(
                        u"task_name was provided; Searching incident {} for first matching task with name '{}'".format(
                            incident_id, task_name))
                    task_id = find_task_by_name(res_client, incident_id, task_name)
                if not task_id:
                    raise ValueError(u"Could not find task with name {}".format(task_name))
            task_url = "/tasks/{}".format(task_id)
            try:
                res_client.get_put(task_url, lambda task: close_task_status(task))
            except Exception as close_exception:
                # BUGFIX: this previously built a (str, exception) tuple via the
                # trailing comma instead of formatting the exception into the message.
                err_msg = "Encountered exception while trying to close task. Error: {}".format(close_exception)
                raise ValueError(err_msg)
            yield StatusMessage("Task {} has been closed".format(task_id))
            results = payload.done(
                success=True,
                content={
                    "task_id": task_id,
                    "task_name": task_name
                }
            )
            # Produce a FunctionResult with the results
            yield FunctionResult(results)
        except Exception:
            yield FunctionError()
import logging
from resilient_circuits import ResilientComponent, function, handler, StatusMessage, FunctionResult, FunctionError
from resilient_lib import ResultPayload
from fn_task_utils.lib.task_common import find_task_by_name, get_function_input
class FunctionComponent(ResilientComponent):
    """Implements Resilient function 'task_utils_close_task': closes a task by id or name."""
    def __init__(self, opts):
        # Constructor provides access to the configuration options.
        super(FunctionComponent, self).__init__(opts)
        self.options = opts.get("fn_task_utils", {})
    @handler("reload")
    def _reload(self, event, opts):
        # Configuration options have changed; save the new values.
        self.options = opts.get("fn_task_utils", {})
    @function("task_utils_close_task")
    def _task_utils_close_task_function(self, event, *args, **kwargs):
        def close_task_status(task):
            # get_put callback: mark the fetched task closed; the returned
            # dict is PUT back to Resilient. `log` resolves at call time.
            task["status"] = "C"
            log.debug("Changed status to closed for task with name %s" % task["name"])
            return task
        try:
            payload = ResultPayload("task_utils_close_task", **kwargs)
            incident_id = get_function_input(kwargs, "incident_id")
            task_id = get_function_input(kwargs, "task_id", optional=True)
            task_name = get_function_input(kwargs, "task_name", optional=True)
            log = logging.getLogger(__name__)
            log.info("incident_id: %s", incident_id)
            log.info("task_id: %s", task_id)
            log.info("task_name: %s", task_name)
            res_client = self.rest_client()
            if not task_name and not task_id:
                raise ValueError("Either a Task ID or a Task Name to search for must be provided.")
            if task_id:
                log.debug("Task ID was provided, using this to contact REST API")
            else:
                if task_name:
                    yield StatusMessage(
                        u"task_name was provided; Searching incident {} for first matching task with name '{}'".format(
                            incident_id, task_name))
                    task_id = find_task_by_name(res_client, incident_id, task_name)
                if not task_id:
                    raise ValueError(u"Could not find task with name {}".format(task_name))
            task_url = "/tasks/{}".format(task_id)
            try:
                res_client.get_put(task_url, lambda task: close_task_status(task))
            except Exception as close_exception:
                # BUGFIX: previously a (str, exception) tuple was created by the
                # trailing comma; format the exception into the message instead.
                err_msg = "Encountered exception while trying to close task. Error: {}".format(close_exception)
                raise ValueError(err_msg)
            yield StatusMessage("Task {} has been closed".format(task_id))
            results = payload.done(
                success=True,
                content={
                    "task_id": task_id,
                    "task_name": task_name
                }
            )
            yield FunctionResult(results)
        except Exception:
            yield FunctionError()
| true | true |
f72be963c795ad4620dda7b2f24b5d80e2f23382 | 8,139 | py | Python | tests/test_templates.py | eon01/wagtail-metadata | 583968b67157ac7944631966919b523674568141 | [
"BSD-3-Clause"
] | null | null | null | tests/test_templates.py | eon01/wagtail-metadata | 583968b67157ac7944631966919b523674568141 | [
"BSD-3-Clause"
] | null | null | null | tests/test_templates.py | eon01/wagtail-metadata | 583968b67157ac7944631966919b523674568141 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from django.forms.utils import flatatt
from django.template import TemplateSyntaxError, engines
from django.test import RequestFactory, TestCase, override_settings
from django.utils.html import format_html
from wagtail.core.models import Site
from wagtail.images.models import Image
from wagtail.images.tests.utils import get_test_image_file
from tests.app.models import TestModel, TestPage
from wagtailmetadata.tags import get_meta_image_url
class TemplateCase(object):
    """Engine-agnostic assertions for wagtail-metadata's meta tag rendering.

    Subclasses supply ``engine`` plus the ``render_meta`` /
    ``render_with_model`` / ``render_with_error`` helpers, so the same
    checks run under both the Django and Jinja2 template engines.
    """
    def setUp(self):
        """Create a site, a test page with search metadata, and a plain model."""
        self.site = Site.objects.first()
        self.site.site_name = 'Example site'
        self.site.save()
        self.factory = RequestFactory()
        self.request = self.factory.get('/test/')
        self.request.site = self.site
        self.image = Image.objects.create(
            title='Test Image',
            file=get_test_image_file(),
        )
        self.page = self.site.root_page.add_child(instance=TestPage(
            title='Test Page',
            search_image=self.image,
            search_description='Some test content description',
        ))
        self.test_model = TestModel.objects.create()
    def render(self, string, context=None, request_context=True):
        """Render *string* through the subclass's template engine."""
        if context is None:
            context = {}
        # Add a request to the template, to simulate a RequestContext
        if request_context:
            context['request'] = self.request
        template = self.engine.from_string(string)
        return template.render(context)
    def meta(self, attrs):
        """Build the expected ``<meta ...>`` tag HTML for *attrs*."""
        return format_html('<meta{0}>'.format(flatatt(attrs)))
    def test_twitter_render(self):
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'name': 'twitter:card', 'content': 'summary_large_image',
        }), out)
        self.assertInHTML(self.meta({
            'name': 'twitter:title',
            'content': self.page.get_meta_title(),
        }), out)
        self.assertInHTML(self.meta({
            'name': 'twitter:description', 'content': self.page.search_description,
        }), out)
        self.assertInHTML(self.meta({
            'name': 'twitter:image',
            'content': get_meta_image_url(self.request, self.page.search_image),
        }), out)
    def test_twitter_no_image(self):
        # Without an image the card downgrades to plain "summary".
        self.page.search_image = None
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'name': 'twitter:card', 'content': 'summary',
        }), out)
        self.assertNotIn('twitter:image', out)
    def test_og_render(self):
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'property': 'og:url', 'content': self.page.full_url
        }), out)
        self.assertInHTML(self.meta({
            'property': 'og:title',
            'content': self.page.get_meta_title(),
        }), out)
        self.assertInHTML(self.meta({
            'property': 'og:description', 'content': self.page.search_description,
        }), out)
        self.assertInHTML(self.meta({
            'property': 'og:site_name', 'content': self.site.site_name
        }), out)
        self.assertInHTML(self.meta({
            'property': 'og:image',
            'content': get_meta_image_url(self.request, self.page.search_image),
        }), out)
    def test_og_no_image(self):
        self.page.search_image = None
        out = self.render_meta()
        self.assertNotIn('og:image', out)
    def test_misc_render(self):
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'itemprop': 'url', 'content': self.page.full_url
        }), out)
        self.assertInHTML(self.meta({
            'itemprop': 'name',
            'content': self.page.get_meta_title(),
        }), out)
        self.assertInHTML(self.meta({
            'itemprop': 'description', 'content': self.page.search_description,
        }), out)
        self.assertInHTML(self.meta({
            'itemprop': 'image',
            'content': get_meta_image_url(self.request, self.page.search_image),
        }), out)
    def test_generic_render(self):
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'name': 'description', 'content': self.page.search_description,
        }), out)
    def test_custom_model(self):
        # Meta tags for a non-page model passed explicitly to the tag.
        out = self.render_with_model()
        self.assertInHTML(self.meta({
            'itemprop': 'url',
            'content': self.test_model.get_meta_url()
        }), out)
        self.assertInHTML(self.meta({
            'itemprop': 'name',
            'content': self.test_model.get_meta_title(),
        }), out)
        self.assertInHTML(self.meta({
            'itemprop': 'description',
            'content': self.test_model.get_meta_description()
        }), out)
    def fill_out_page_meta_fields(self):
        """Give the test page a fresh description and image."""
        self.page.search_description = 'Hello, world'
        self.page.search_image = Image.objects.create(
            title='Page image', file=get_test_image_file())
    def test_page_twitter_render(self):
        self.fill_out_page_meta_fields()
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'name': 'twitter:description', 'content': self.page.search_description,
        }), out)
        self.assertInHTML(self.meta({
            'name': 'twitter:image',
            'content': get_meta_image_url(self.request, self.page.search_image),
        }), out)
    def test_page_og_render(self):
        self.fill_out_page_meta_fields()
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'property': 'og:description', 'content': self.page.search_description,
        }), out)
        self.assertInHTML(self.meta({
            'property': 'og:image',
            'content': get_meta_image_url(self.request, self.page.search_image),
        }), out)
    def test_page_misc_render(self):
        self.fill_out_page_meta_fields()
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'itemprop': 'description', 'content': self.page.search_description,
        }), out)
        self.assertInHTML(self.meta({
            'itemprop': 'image',
            'content': get_meta_image_url(self.request, self.page.search_image),
        }), out)
    def test_page_generic_render(self):
        self.fill_out_page_meta_fields()
        out = self.render_meta()
        self.assertInHTML("<title>{}</title>".format(self.page.title), out)
        self.assertInHTML(self.meta({
            'name': 'description', 'content': self.page.search_description,
        }), out)
    def test_error_messages(self):
        self.assertRaises(TemplateSyntaxError, self.render_with_error)
    def test_get_meta_image_url_filter(self):
        # Default rendition filter keeps the original image.
        self.fill_out_page_meta_fields()
        result = get_meta_image_url(self.request, self.page.search_image)
        self.assertTrue(result.endswith("original.png"))
    @override_settings(WAGTAILMETADATA_IMAGE_FILTER="fill-10x20")
    def test_get_meta_image_url_filter_with_override(self):
        # The setting overrides the rendition filter spec.
        self.fill_out_page_meta_fields()
        result = get_meta_image_url(self.request, self.page.search_image)
        self.assertTrue(result.endswith("fill-10x20.png"))
class TestJinja2(TemplateCase, TestCase):
    """Run the shared TemplateCase assertions through the Jinja2 engine."""

    engine = engines['jinja2']

    def render_meta(self):
        # Page is picked up implicitly from the context.
        ctx = {'page': self.page}
        return self.render('{{ meta_tags() }}', context=ctx)

    def render_with_model(self):
        ctx = {'custom': self.test_model}
        return self.render('{{ meta_tags(custom) }}', context=ctx)

    def render_with_error(self):
        # Rendering with None should raise TemplateSyntaxError.
        ctx = {'custom': None}
        return self.render('{{ meta_tags(custom) }}', context=ctx)
class TestDjangoTemplateEngine(TemplateCase, TestCase):
    """Run the shared TemplateCase assertions through the Django engine."""

    engine = engines['django']

    def render_meta(self):
        # Page comes from the 'self' context variable.
        ctx = {'self': self.page}
        return self.render('{% load wagtailmetadata_tags %}{% meta_tags %}', context=ctx)

    def render_with_model(self):
        ctx = {'custom': self.test_model}
        return self.render('{% load wagtailmetadata_tags %}{% meta_tags custom %}', context=ctx)

    def render_with_error(self):
        # Rendering with None should raise TemplateSyntaxError.
        ctx = {'custom': None}
        return self.render('{% load wagtailmetadata_tags %}{% meta_tags custom %}', context=ctx)
| 34.341772 | 120 | 0.621207 |
from django.forms.utils import flatatt
from django.template import TemplateSyntaxError, engines
from django.test import RequestFactory, TestCase, override_settings
from django.utils.html import format_html
from wagtail.core.models import Site
from wagtail.images.models import Image
from wagtail.images.tests.utils import get_test_image_file
from tests.app.models import TestModel, TestPage
from wagtailmetadata.tags import get_meta_image_url
class TemplateCase(object):
    """Engine-agnostic assertions for wagtail-metadata's meta tag rendering.

    Subclasses supply ``engine`` plus the ``render_meta`` /
    ``render_with_model`` / ``render_with_error`` helpers, so the same
    checks run under both the Django and Jinja2 template engines.
    """
    def setUp(self):
        """Create a site, a test page with search metadata, and a plain model."""
        self.site = Site.objects.first()
        self.site.site_name = 'Example site'
        self.site.save()
        self.factory = RequestFactory()
        self.request = self.factory.get('/test/')
        self.request.site = self.site
        self.image = Image.objects.create(
            title='Test Image',
            file=get_test_image_file(),
        )
        self.page = self.site.root_page.add_child(instance=TestPage(
            title='Test Page',
            search_image=self.image,
            search_description='Some test content description',
        ))
        self.test_model = TestModel.objects.create()
    def render(self, string, context=None, request_context=True):
        """Render *string* through the subclass's template engine."""
        if context is None:
            context = {}
        # Add a request to the context to simulate a RequestContext.
        if request_context:
            context['request'] = self.request
        template = self.engine.from_string(string)
        return template.render(context)
    def meta(self, attrs):
        """Build the expected ``<meta ...>`` tag HTML for *attrs*."""
        return format_html('<meta{0}>'.format(flatatt(attrs)))
    def test_twitter_render(self):
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'name': 'twitter:card', 'content': 'summary_large_image',
        }), out)
        self.assertInHTML(self.meta({
            'name': 'twitter:title',
            'content': self.page.get_meta_title(),
        }), out)
        self.assertInHTML(self.meta({
            'name': 'twitter:description', 'content': self.page.search_description,
        }), out)
        self.assertInHTML(self.meta({
            'name': 'twitter:image',
            'content': get_meta_image_url(self.request, self.page.search_image),
        }), out)
    def test_twitter_no_image(self):
        # Without an image the card downgrades to plain "summary".
        self.page.search_image = None
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'name': 'twitter:card', 'content': 'summary',
        }), out)
        self.assertNotIn('twitter:image', out)
    def test_og_render(self):
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'property': 'og:url', 'content': self.page.full_url
        }), out)
        self.assertInHTML(self.meta({
            'property': 'og:title',
            'content': self.page.get_meta_title(),
        }), out)
        self.assertInHTML(self.meta({
            'property': 'og:description', 'content': self.page.search_description,
        }), out)
        self.assertInHTML(self.meta({
            'property': 'og:site_name', 'content': self.site.site_name
        }), out)
        self.assertInHTML(self.meta({
            'property': 'og:image',
            'content': get_meta_image_url(self.request, self.page.search_image),
        }), out)
    def test_og_no_image(self):
        self.page.search_image = None
        out = self.render_meta()
        self.assertNotIn('og:image', out)
    def test_misc_render(self):
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'itemprop': 'url', 'content': self.page.full_url
        }), out)
        self.assertInHTML(self.meta({
            'itemprop': 'name',
            'content': self.page.get_meta_title(),
        }), out)
        self.assertInHTML(self.meta({
            'itemprop': 'description', 'content': self.page.search_description,
        }), out)
        self.assertInHTML(self.meta({
            'itemprop': 'image',
            'content': get_meta_image_url(self.request, self.page.search_image),
        }), out)
    def test_generic_render(self):
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'name': 'description', 'content': self.page.search_description,
        }), out)
    def test_custom_model(self):
        # Meta tags for a non-page model passed explicitly to the tag.
        out = self.render_with_model()
        self.assertInHTML(self.meta({
            'itemprop': 'url',
            'content': self.test_model.get_meta_url()
        }), out)
        self.assertInHTML(self.meta({
            'itemprop': 'name',
            'content': self.test_model.get_meta_title(),
        }), out)
        self.assertInHTML(self.meta({
            'itemprop': 'description',
            'content': self.test_model.get_meta_description()
        }), out)
    def fill_out_page_meta_fields(self):
        """Give the test page a fresh description and image."""
        self.page.search_description = 'Hello, world'
        self.page.search_image = Image.objects.create(
            title='Page image', file=get_test_image_file())
    def test_page_twitter_render(self):
        self.fill_out_page_meta_fields()
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'name': 'twitter:description', 'content': self.page.search_description,
        }), out)
        self.assertInHTML(self.meta({
            'name': 'twitter:image',
            'content': get_meta_image_url(self.request, self.page.search_image),
        }), out)
    def test_page_og_render(self):
        self.fill_out_page_meta_fields()
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'property': 'og:description', 'content': self.page.search_description,
        }), out)
        self.assertInHTML(self.meta({
            'property': 'og:image',
            'content': get_meta_image_url(self.request, self.page.search_image),
        }), out)
    def test_page_misc_render(self):
        self.fill_out_page_meta_fields()
        out = self.render_meta()
        self.assertInHTML(self.meta({
            'itemprop': 'description', 'content': self.page.search_description,
        }), out)
        self.assertInHTML(self.meta({
            'itemprop': 'image',
            'content': get_meta_image_url(self.request, self.page.search_image),
        }), out)
    def test_page_generic_render(self):
        self.fill_out_page_meta_fields()
        out = self.render_meta()
        self.assertInHTML("<title>{}</title>".format(self.page.title), out)
        self.assertInHTML(self.meta({
            'name': 'description', 'content': self.page.search_description,
        }), out)
    def test_error_messages(self):
        self.assertRaises(TemplateSyntaxError, self.render_with_error)
    def test_get_meta_image_url_filter(self):
        # Default rendition filter keeps the original image.
        self.fill_out_page_meta_fields()
        result = get_meta_image_url(self.request, self.page.search_image)
        self.assertTrue(result.endswith("original.png"))
    @override_settings(WAGTAILMETADATA_IMAGE_FILTER="fill-10x20")
    def test_get_meta_image_url_filter_with_override(self):
        # The setting overrides the rendition filter spec.
        self.fill_out_page_meta_fields()
        result = get_meta_image_url(self.request, self.page.search_image)
        self.assertTrue(result.endswith("fill-10x20.png"))
class TestJinja2(TemplateCase, TestCase):
    """Shared TemplateCase assertions, executed under the Jinja2 engine."""

    engine = engines['jinja2']

    def render_meta(self):
        # Page is picked up implicitly from the context.
        return self.render('{{ meta_tags() }}', context=dict(page=self.page))

    def render_with_model(self):
        return self.render('{{ meta_tags(custom) }}', context=dict(custom=self.test_model))

    def render_with_error(self):
        # Rendering with None should raise TemplateSyntaxError.
        return self.render('{{ meta_tags(custom) }}', context=dict(custom=None))
class TestDjangoTemplateEngine(TemplateCase, TestCase):
    """Shared TemplateCase assertions, executed under the Django engine."""

    engine = engines['django']

    def render_meta(self):
        # Page comes from the 'self' context variable.
        return self.render('{% load wagtailmetadata_tags %}{% meta_tags %}', context=dict(self=self.page))

    def render_with_model(self):
        return self.render('{% load wagtailmetadata_tags %}{% meta_tags custom %}', context=dict(custom=self.test_model))

    def render_with_error(self):
        # Rendering with None should raise TemplateSyntaxError.
        return self.render('{% load wagtailmetadata_tags %}{% meta_tags custom %}', context=dict(custom=None))
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.