Dataset schema (⌀ = column contains nulls):

| column | dtype | values |
|---|---|---|
| hexsha | string | lengths 40 to 40 |
| size | int64 | 2 to 1.02M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | lengths 4 to 245 |
| max_stars_repo_name | string | lengths 6 to 130 |
| max_stars_repo_head_hexsha | string | lengths 40 to 40 |
| max_stars_repo_licenses | list | lengths 1 to 10 |
| max_stars_count | int64 | 1 to 191k ⌀ |
| max_stars_repo_stars_event_min_datetime | string | lengths 24 to 24 ⌀ |
| max_stars_repo_stars_event_max_datetime | string | lengths 24 to 24 ⌀ |
| max_issues_repo_path | string | lengths 4 to 245 |
| max_issues_repo_name | string | lengths 6 to 130 |
| max_issues_repo_head_hexsha | string | lengths 40 to 40 |
| max_issues_repo_licenses | list | lengths 1 to 10 |
| max_issues_count | int64 | 1 to 67k ⌀ |
| max_issues_repo_issues_event_min_datetime | string | lengths 24 to 24 ⌀ |
| max_issues_repo_issues_event_max_datetime | string | lengths 24 to 24 ⌀ |
| max_forks_repo_path | string | lengths 4 to 245 |
| max_forks_repo_name | string | lengths 6 to 130 |
| max_forks_repo_head_hexsha | string | lengths 40 to 40 |
| max_forks_repo_licenses | list | lengths 1 to 10 |
| max_forks_count | int64 | 1 to 105k ⌀ |
| max_forks_repo_forks_event_min_datetime | string | lengths 24 to 24 ⌀ |
| max_forks_repo_forks_event_max_datetime | string | lengths 24 to 24 ⌀ |
| content | string | lengths 2 to 1.02M |
| avg_line_length | float64 | 1 to 417k |
| max_line_length | int64 | 1 to 987k |
| alphanum_fraction | float64 | 0 to 1 |
| content_no_comment | string | lengths 0 to 1.01M |
| is_comment_constant_removed | bool | 1 class |
| is_sharp_comment_removed | bool | 1 class |
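As a quick orientation, the sketch below shows how rows with this schema could be loaded and quality-filtered with pandas; the parquet file name and every threshold are hypothetical choices, not values taken from this dataset.

```python
import pandas as pd

# "rows.parquet" is a hypothetical local export of rows like the ones below
df = pd.read_parquet("rows.parquet")

# keep files that look like ordinary hand-written Python
mask = (
    (df["ext"] == "py")
    & df["alphanum_fraction"].between(0.25, 0.75)   # drop binary-ish or padded files
    & (df["avg_line_length"] < 100)                 # drop minified/generated files
    & (df["max_line_length"] < 1000)
)
print(df.loc[mask, ["max_stars_repo_name", "max_stars_repo_path", "size"]].head())
```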
hexsha: 1c486f1cf2689297fa04af34f08c7bca55b19f3d | size: 15,971 | ext: py | lang: Python
max_stars: repo_path=src/c4/cmany/project.py, repo_name=biojppm/cmany, head_hexsha=b20c24169d60077122ae29a0c09526913340fd5c, licenses=["MIT"], count=20, stars_event=2017-05-17T18:43:08.000Z to 2021-02-13T16:20:53.000Z
max_issues: repo_path=src/c4/cmany/project.py, repo_name=biojppm/cmany, head_hexsha=b20c24169d60077122ae29a0c09526913340fd5c, licenses=["MIT"], count=8, issues_event=2017-06-04T17:01:06.000Z to 2022-03-17T12:43:32.000Z
max_forks: repo_path=src/c4/cmany/project.py, repo_name=biojppm/cmany, head_hexsha=b20c24169d60077122ae29a0c09526913340fd5c, licenses=["MIT"], count=1, forks_event=2017-06-04T13:09:19.000Z to 2017-06-04T13:09:19.000Z
content:
#!/usr/bin/env python3
import os
import glob
import json
import copy
import timeit
from collections import OrderedDict as odict
from ruamel import yaml as yaml
from ruamel.yaml.comments import CommentedMap as CommentedMap
from . import util
from . import conf
from .build_flags import BuildFlags
from .build_item import BuildItem
from .build_type import BuildType
from .system import System
from .architecture import Architecture
from .compiler import Compiler
from .variant import Variant
from .build import Build
from .combination_rules import CombinationRules
from .cmake import getcachevars
from . import cmake
from . import err
from .util import path_exists as _pexists
from .util import logdbg as dbg
# -----------------------------------------------------------------------------
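# resolve a directory option from kwargs: use kwargs[attr_name] when given
# (made absolute against cwd if relative), else default to <cwd>/<default>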
def _getdir(attr_name, default, kwargs, cwd):
d = kwargs.get(attr_name)
if d is None:
d = os.path.join(cwd, default)
else:
if not os.path.isabs(d):
d = os.path.join(cwd, d)
d = util.abspath(d)
return d
# -----------------------------------------------------------------------------
class Project:
def __init__(self, **kwargs):
#
self.kwargs = kwargs
self.num_jobs = kwargs.get('jobs')
self.targets = kwargs.get('target')
self.continue_on_fail = kwargs.get('continue')
#
cwd = util.abspath(os.getcwd())
pdir = kwargs.get('proj_dir')
dbg("cwd:", cwd)
dbg("proj_dir:", pdir)
if pdir is None:
raise err.ProjDirNotFound(None)
if pdir == ".":
pdir = cwd
pdir = util.abspath(pdir)
dbg("proj_dir, abs:", pdir)
#
if not _pexists(pdir):
raise err.ProjDirNotFound(pdir)
#
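        # the project dir may be either a source dir (holding CMakeLists.txt)
        # or an existing build dir (holding CMakeCache.txt)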
self.cmakelists = os.path.join(pdir, "CMakeLists.txt")
cmakecache = None
if _pexists(self.cmakelists):
dbg("found CMakeLists.txt:", self.cmakelists)
self.build_dir = _getdir('build_dir', 'build', kwargs, cwd)
self.install_dir = _getdir('install_dir', 'install', kwargs, cwd)
self.root_dir = pdir
elif _pexists(pdir, "CMakeCache.txt"):
cmakecache = os.path.join(pdir, "CMakeCache.txt")
dbg("found CMakeCache.txt:", cmakecache)
ch = cmake.CMakeCache(pdir)
self.build_dir = os.path.dirname(pdir)
self.install_dir = os.path.dirname(ch['CMAKE_INSTALL_PREFIX'].val)
self.root_dir = ch['CMAKE_HOME_DIRECTORY'].val
self.cmakelists = os.path.join(self.root_dir, "CMakeLists.txt")
#
self.root_dir = os.path.realpath(self.root_dir)
self.build_dir = os.path.realpath(self.build_dir)
self.install_dir = os.path.realpath(self.install_dir)
self.cmakelists = os.path.realpath(self.cmakelists)
#
dbg("root_dir:", self.root_dir)
dbg("build_dir:", self.build_dir)
dbg("install_dir:", self.install_dir)
dbg("CMakeLists.txt:", self.cmakelists)
#
if not _pexists(self.cmakelists):
raise err.CMakeListsNotFound(pdir)
#
if cmakecache is not None:
self._init_with_build_dir(os.path.dirname(cmakecache), **kwargs)
elif cmake.hascache(self.root_dir):
self._init_with_build_dir(self.root_dir, **kwargs)
elif kwargs.get('glob'):
self._init_with_glob(**kwargs)
else:
self.load_configs()
self._init_with_build_items(**kwargs)
def _init_with_build_dir(self, pdir, **kwargs):
build = Build.deserialize(pdir)
self.builds = [build]
def _init_with_glob(self, **kwargs):
g = kwargs.get('glob')
self.builds = []
for pattern in g:
bp = os.path.join(self.build_dir, pattern)
li = glob.glob(bp)
for b in li:
build = Build.deserialize(b)
self.builds.append(build)
def _init_with_build_items(self, **kwargs):
s, a, c, t, v = __class__.get_build_items(**kwargs)
#
cr = CombinationRules(kwargs.get('combination_rules', []))
combs = cr.valid_combinations(s, a, c, t, v)
dbg("combinations:", combs)
self.combination_rules = cr
#
self.builds = []
for comb in combs:
dbg("adding build from combination:", comb)
            self.add_build(*comb)
#
self.systems = s
self.architectures = a
self.compilers = c
self.build_types = t
self.variants = v
#
# add new build params as needed to deal with adjusted builds
def _addnew(b, name):
a = getattr(b, name)
ali = getattr(self, name + 's')
if not [elm for elm in ali if str(elm) == str(a)]:
ali.append(a)
for b in self.builds:
if not b.adjusted:
continue
_addnew(b, 'system')
_addnew(b, 'architecture')
_addnew(b, 'build_type')
_addnew(b, 'compiler')
_addnew(b, 'variant')
@staticmethod
def get_build_items(**kwargs):
d = odict()
for c, cls in (
('systems', System),
('architectures', Architecture),
('compilers', Compiler),
('build_types', BuildType),
('variants', Variant)):
d[c] = (cls, kwargs.get(c))
coll = BuildItem.create(d)
s = coll['systems']
a = coll['architectures']
c = coll['compilers']
t = coll['build_types']
v = coll['variants']
return s, a, c, t, v
def load_configs(self):
seq = [os.path.join(d, "cmany.yml") for d in (
conf.CONF_DIR, conf.USER_DIR, self.root_dir)]
if self.kwargs.get('no_default_config'):
seq = []
for f in self.kwargs.get('config_file', []):
ff = f
if not os.path.isabs(ff):
ff = os.path.join(self.root_dir, ff)
if not os.path.exists(ff):
raise err.ConfigFileNotFound(ff)
            seq.append(ff)  # append the resolved absolute path
self.configs = conf.Configs.load_seq(seq)
def save_configs(self):
# c = Configs()
pass
def create_proj(self):
yml = CommentedMap()
yml['project'] = CommentedMap()
#
def _add(name):
items = getattr(self, name)
#if BuildItem.trivial_item(items):
# yml['project'][name] = "_default_"
#elif BuildItem.no_flags_in_collection(items):
if BuildItem.no_flags_in_collection(items):
out = []
for s in items:
out.append(s.name)
yml['project'][name] = out
else:
out = []
for s in items:
cm = CommentedMap()
cm[s.name] = CommentedMap()
s.save_config(cm[s.name])
out.append(cm)
yml['project'][name] = out
#
_add('systems')
_add('architectures')
_add('compilers')
_add('build_types')
_add('variants')
txt = yaml.round_trip_dump(yml)
fn = self.kwargs['output_file']
if not os.path.isabs(fn):
fn = os.path.join(self.root_dir, fn)
with open(fn, "w") as f:
f.write(txt)
def add_build(self, system, arch, compiler, build_type, variant):
# duplicate the build items, as they may be mutated due
# to translation of their flags for the compiler
def _dup_item(item):
i = copy.deepcopy(item)
i.flags.resolve_flag_aliases(compiler, aliases=self.configs.flag_aliases)
return i
s = _dup_item(system)
a = _dup_item(arch)
t = _dup_item(build_type)
c = _dup_item(compiler)
v = _dup_item(variant)
#
f = BuildFlags('all_builds', **self.kwargs)
f.resolve_flag_aliases(compiler, aliases=self.configs.flag_aliases)
#
# create the build
dbg("adding build:", s, a, t, c, v, f)
b = Build(self.root_dir, self.build_dir, self.install_dir,
s, a, t, c, v, f,
self.num_jobs, dict(self.kwargs))
#
# When a build is created, its parameters may have been adjusted
# because of an incompatible generator specification.
# So drop this build if an equal one already exists
if b.adjusted and self.exists(b):
return False # a similar build already exists
#
# finally, this.
self.builds.append(b)
return True # build successfully added
def exists(self, build):
for b in self.builds:
if str(b.tag) == str(build.tag):
return True
return False
def select(self, **kwargs):
out = [b for b in self.builds]
def _h(kw, attr):
            nonlocal out
g = kwargs.get(kw)
if g is not None:
lo = []
for b in out:
if str(getattr(b, attr)) == str(g):
lo.append(b)
out = lo
_h("sys", "system")
_h("arch", "architecture")
_h("compiler", "compiler")
_h("build_type", "build_type")
_h("variant", "variant")
return out
def create_tree(self, **restrict_to):
builds = self.select(**restrict_to)
for b in builds:
b.create_dir()
b.create_preload_file()
# print(b, ":", d)
def configure(self, **restrict_to):
if not os.path.exists(self.build_dir):
os.makedirs(self.build_dir)
self._execute(Build.configure, "Configure", silent=False, **restrict_to)
def reconfigure(self, **restrict_to):
if not os.path.exists(self.build_dir):
os.makedirs(self.build_dir)
self._execute(Build.reconfigure, "Reconfigure", silent=False, **restrict_to)
def export_compile_commands(self, **restrict_to):
if not os.path.exists(self.build_dir):
os.makedirs(self.build_dir)
self._execute(Build.export_compile_commands, "Export compile commands", silent=False, **restrict_to)
def build(self, **restrict_to):
def do_build(build):
build.build(self.targets)
self._execute(do_build, "Build", silent=False, **restrict_to)
def rebuild(self, **restrict_to):
def do_rebuild(build):
build.rebuild(self.targets)
self._execute(do_rebuild, "Rebuild", silent=False, **restrict_to)
def clean(self, **restrict_to):
self._execute(Build.clean, "Clean", silent=False, **restrict_to)
def install(self, **restrict_to):
self._execute(Build.install, "Install", silent=False, **restrict_to)
def reinstall(self, **restrict_to):
self._execute(Build.reinstall, "Reinstall", silent=False, **restrict_to)
def run_cmd(self, cmd, **subprocess_args):
def run_it(build):
build.run_custom_cmd(cmd, **subprocess_args)
self._execute(run_it, "Run cmd", silent=False)
def export_vs(self):
confs = []
for b in self.builds:
confs.append(b.json_data())
jd = odict([('configurations', confs)])
with open(self.configfile, 'w') as f:
json.dump(jd, f, indent=2)
def show_vars(self, varlist):
varv = odict()
pat = os.path.join(self.build_dir, '*', 'CMakeCache.txt')
g = glob.glob(pat)
md = 0
mv = 0
for p in g:
d = os.path.dirname(p)
b = os.path.basename(d)
md = max(md, len(b))
vars = getcachevars(d, varlist)
for k, v in vars.items():
sk = str(k)
if not varv.get(sk):
varv[sk] = odict()
varv[sk][b] = v
mv = max(mv, len(sk))
#
fmt = "{:" + str(mv) + "}[{:" + str(md) + "}]={}"
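        # prints one aligned line per variable/build pair: "VAR[build-dir]=value",
        # padding VAR to the longest name (mv) and build-dir to the longest dir (md)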
for var, sysvalues in varv.items():
for s, v in sysvalues.items():
print(fmt.format(var, s, v))
def show_build_names(self):
for b in self.builds:
print(b)
def show_build_dirs(self):
for b in self.builds:
print(b.builddir)
def show_builds(self):
for b in self.builds:
b.show_properties()
def show_targets(self):
for t in self.builds[0].get_targets():
print(t)
def _execute(self, fn, msg, silent, **restrict_to):
builds = self.select(**restrict_to)
failed = odict()
durations = odict()
num = len(builds)
if not silent:
if num == 0:
print("no builds selected")
if num == 0:
return
def nt(*args, **kwargs): # notice
if silent: return
util.lognotice(*args, **kwargs)
def dn(*args, **kwargs): # done
if silent: return
util.logdone(*args, **kwargs)
def er(*args, **kwargs): # error
if silent: return
util.logerr(*args, **kwargs)
#
if num > 1:
nt("")
nt("===============================================")
nt(msg + ": start", num, "builds:")
for b in builds:
nt(b)
nt("===============================================")
#
for i, b in enumerate(builds):
if i > 0:
nt("\n")
nt("-----------------------------------------------")
if num > 1:
nt(msg + ": build #{} of {}:".format(i + 1, num), b)
else:
nt(msg, b)
nt("-----------------------------------------------")
#
t = timeit.default_timer()
try:
# this is where it happens
fn(b) # <-- here
word, logger = "finished", dn
# exceptions thrown from builds inherit this type
except err.BuildError as e:
word, logger = "failed", er
util.logerr(f"{b} failed! {e}")
failed[b] = e
if not self.continue_on_fail:
raise
t = timeit.default_timer() - t
hrt = util.human_readable_time(t)
durations[b] = (t, hrt)
if num > 1:
ip1 = i + 1
info = f"{word} build #{ip1} of {num} ({hrt})"
else:
info = f"{word} building ({hrt})"
logger(msg + ": " + info + ":", b)
#
nt("-----------------------------------------------")
if num > 1:
if failed:
dn(msg + ": processed", num, "builds: (with failures)")
else:
dn(msg + ": finished", num, "builds:")
tot = 0.
for _, (d, _) in durations.items():
tot += d
for b in builds:
dur, hrt = durations[b]
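            # each build's share of the total wall time, and its ratio to the mean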
times = "({}, {:.3f}%, {:.3f}x avg)".format(
hrt, dur / tot * 100., dur / (tot / float(num))
)
fail = failed.get(b)
if fail:
er(b, times, "[FAIL]!!!", fail)
else:
dn(b, times)
if failed:
msg = "{}/{} builds failed ({:.1f}%)!"
er(msg.format(len(failed), num, float(len(failed)) / num * 100.0))
else:
dn(f"all {num} builds succeeded!")
dn("total time:", util.human_readable_time(tot))
nt("===============================================")
if failed:
raise Exception(failed)
avg_line_length: 34.420259 | max_line_length: 108 | alphanum_fraction: 0.508547
content_no_comment: identical to the content above minus the shebang and # comments
is_comment_constant_removed: true | is_sharp_comment_removed: true
hexsha: 1c486f22ea70cd1af40bf4a804d81f728c367b5b | size: 10,630 | ext: py | lang: Python
max_stars: repo_path=salt/states/tomcat.py, repo_name=yuriks/salt, head_hexsha=d2a5bd8adddb98ec1718d79384aa13b4f37e8028, licenses=["Apache-2.0", "MIT"], count=1, stars_event=2020-03-31T22:51:16.000Z to 2020-03-31T22:51:16.000Z
max_issues: repo_path=salt/states/tomcat.py, repo_name=yuriks/salt, head_hexsha=d2a5bd8adddb98ec1718d79384aa13b4f37e8028, licenses=["Apache-2.0", "MIT"], count=null, issues_event=null to null
max_forks: repo_path=salt/states/tomcat.py, repo_name=yuriks/salt, head_hexsha=d2a5bd8adddb98ec1718d79384aa13b4f37e8028, licenses=["Apache-2.0", "MIT"], count=1, forks_event=2021-09-30T07:00:01.000Z to 2021-09-30T07:00:01.000Z
content:
# -*- coding: utf-8 -*-
'''
Manage Apache Tomcat web applications
=====================================
.. note::
This state requires the Tomcat Manager webapp to be installed and running.
The following grains/pillars must be set for communication with Tomcat Manager
to work:
.. code-block:: yaml
tomcat-manager:
user: 'tomcat-manager'
passwd: 'Passw0rd'
Configuring Tomcat Manager
--------------------------
To manage webapps via the Tomcat Manager, you'll need to configure
a valid user in the file ``conf/tomcat-users.xml``.
.. code-block:: xml
:caption: conf/tomcat-users.xml
<?xml version='1.0' encoding='utf-8'?>
<tomcat-users>
<role rolename="manager-script"/>
<user username="tomcat-manager" password="Passw0rd" roles="manager-script"/>
</tomcat-users>
Notes
-----
- Using multiple versions (aka. parallel deployments) on the same context
path is not supported.
- More information about the Tomcat Manager:
http://tomcat.apache.org/tomcat-7.0-doc/manager-howto.html
- If you use only this module for deployments you might want to restrict
access to the manager so it's only accessible via localhost.
For more info: http://tomcat.apache.org/tomcat-7.0-doc/manager-howto.html#Configuring_Manager_Application_Access
- Last tested on:
Tomcat Version:
Apache Tomcat/7.0.54
JVM Vendor:
Oracle Corporation
JVM Version:
1.8.0_101-b13
OS Architecture:
amd64
OS Name:
Linux
OS Version:
3.10.0-327.22.2.el7.x86_64
'''
from __future__ import absolute_import, unicode_literals, print_function
from salt.ext import six
# Private
def __virtual__():
'''
Load if the module tomcat exists
'''
return 'tomcat' if 'tomcat.status' in __salt__ else False
# Functions
def war_deployed(name,
war,
force=False,
url='http://localhost:8080/manager',
timeout=180,
temp_war_location=None,
version=True):
'''
Enforce that the WAR will be deployed and started in the context path,
while making use of WAR versions in the filename.
.. note::
For more info about Tomcats file paths and context naming, please see
http://tomcat.apache.org/tomcat-7.0-doc/config/context.html#Naming
name
        The context path (including the leading forward slash) to deploy the WAR to.
war
Absolute path to WAR file (should be accessible by the user running
Tomcat) or a path supported by the ``salt.modules.cp.get_url`` function.
force : False
Force deployment even if the version strings are the same.
Disabled by default.
url : http://localhost:8080/manager
The URL of the Tomcat Web Application Manager.
timeout : 180
Timeout for HTTP requests to the Tomcat Manager.
temp_war_location : None
Use another location to temporarily copy the WAR file to.
By default the system's temp directory is used.
version : ''
Specify the WAR version. If this argument is provided, it overrides
the version encoded in the WAR file name, if one is present.
.. versionadded:: 2015.8.6
        Use ``False`` or a blank value to prevent guessing the version and to keep it blank.
.. versionadded:: 2016.11.0
Example:
.. code-block:: yaml
jenkins:
tomcat.war_deployed:
- name: /salt-powered-jenkins
- war: salt://jenkins-1.2.4.war
- require:
- service: application-service
.. note::
Be aware that in the above example the WAR ``jenkins-1.2.4.war`` will
be deployed to the context path ``salt-powered-jenkins##1.2.4``. To avoid this
either specify a version yourself, or set version to ``False``.
'''
# Prepare
ret = {'name': name,
'result': True,
'changes': {},
'comment': ''}
    # an explicitly passed version string (or False) is kept as-is; True means
    # guess the version from the WAR filename
if version is True:
version = __salt__['tomcat.extract_war_version'](war) or ''
elif not version:
version = ''
webapps = __salt__['tomcat.ls'](url, timeout)
deploy = False
undeploy = False
status = True
# Gathered/specified new WAR version string
specified_ver = 'version {0}'.format(version) if version else 'no version'
# Determine what to do
try:
# Printed version strings, here to throw exception if no webapps[name]
current_ver = 'version ' + webapps[name]['version'] \
if webapps[name]['version'] else 'no version'
# `endswith` on the supposed string will cause Exception if empty
if (not webapps[name]['version'].endswith(version)
or (version == '' and webapps[name]['version'] != version)
or force):
deploy = True
undeploy = True
ret['changes']['undeploy'] = ('undeployed {0} with {1}'.
format(name, current_ver))
ret['changes']['deploy'] = ('will deploy {0} with {1}'.
format(name, specified_ver))
else:
deploy = False
ret['comment'] = ('{0} with {1} is already deployed'.
format(name, specified_ver))
if webapps[name]['mode'] != 'running':
ret['changes']['start'] = 'starting {0}'.format(name)
status = False
else:
return ret
except Exception: # pylint: disable=broad-except
deploy = True
ret['changes']['deploy'] = ('deployed {0} with {1}'.
format(name, specified_ver))
# Test
if __opts__['test']:
ret['result'] = None
return ret
# make sure the webapp is up if deployed
if deploy is False:
if status is False:
ret['comment'] = __salt__['tomcat.start'](name, url,
timeout=timeout)
ret['result'] = ret['comment'].startswith('OK')
return ret
# Undeploy
if undeploy:
un = __salt__['tomcat.undeploy'](name, url, timeout=timeout)
if un.startswith('FAIL'):
ret['result'] = False
ret['comment'] = un
return ret
# Deploy
deploy_res = __salt__['tomcat.deploy_war'](war,
name,
'yes',
url,
__env__,
timeout,
temp_war_location=temp_war_location,
version=version)
# Return
if deploy_res.startswith('OK'):
ret['result'] = True
ret['comment'] = six.text_type(__salt__['tomcat.ls'](url, timeout)[name])
ret['changes']['deploy'] = ('deployed {0} with {1}'.
format(name, specified_ver))
else:
ret['result'] = False
ret['comment'] = deploy_res
ret['changes'].pop('deploy')
return ret
def wait(name, url='http://localhost:8080/manager', timeout=180):
'''
Wait for the Tomcat Manager to load.
    Notice that if tomcat is not running we won't wait for it to start and the
state will fail. This state can be required in the tomcat.war_deployed
state to make sure tomcat is running and that the manager is running as
well and ready for deployment.
url : http://localhost:8080/manager
The URL of the server with the Tomcat Manager webapp.
timeout : 180
Timeout for HTTP request to the Tomcat Manager.
Example:
.. code-block:: yaml
tomcat-service:
service.running:
- name: tomcat
- enable: True
wait-for-tomcatmanager:
tomcat.wait:
- timeout: 300
- require:
- service: tomcat-service
jenkins:
tomcat.war_deployed:
- name: /ran
- war: salt://jenkins-1.2.4.war
- require:
- tomcat: wait-for-tomcatmanager
'''
result = __salt__['tomcat.status'](url, timeout)
ret = {'name': name,
'result': result,
'changes': {},
'comment': ('tomcat manager is ready' if result
else 'tomcat manager is not ready')
}
return ret
def mod_watch(name, url='http://localhost:8080/manager', timeout=180):
'''
The tomcat watcher, called to invoke the watch command.
When called, it will reload the webapp in question
.. note::
This state exists to support special handling of the ``watch``
:ref:`requisite <requisites>`. It should not be called directly.
Parameters for this function should be set by the state being triggered.
'''
msg = __salt__['tomcat.reload'](name, url, timeout)
result = msg.startswith('OK')
ret = {'name': name,
'result': result,
'changes': {name: result},
'comment': msg
}
return ret
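# A hypothetical sls sketch (state ids and file names made up) of how mod_watch
# gets triggered: a ``watch`` requisite on a tomcat state reloads the webapp
# when the watched state reports changes.
#
#   jenkins:
#     tomcat.war_deployed:
#       - name: /jenkins
#       - war: salt://jenkins-1.2.4.war
#       - watch:
#         - file: jenkins-config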
def undeployed(name,
url='http://localhost:8080/manager',
timeout=180):
'''
Enforce that the WAR will be undeployed from the server
name
The context path to undeploy.
url : http://localhost:8080/manager
The URL of the server with the Tomcat Manager webapp.
timeout : 180
Timeout for HTTP request to the Tomcat Manager.
Example:
.. code-block:: yaml
jenkins:
tomcat.undeployed:
- name: /ran
- require:
- service: application-service
'''
# Prepare
ret = {'name': name,
'result': True,
'changes': {},
'comment': ''}
if not __salt__['tomcat.status'](url, timeout):
ret['comment'] = 'Tomcat Manager does not respond'
ret['result'] = False
return ret
try:
version = __salt__['tomcat.ls'](url, timeout)[name]['version']
ret['changes'] = {'undeploy': version}
except KeyError:
return ret
# Test
if __opts__['test']:
ret['result'] = None
return ret
undeploy = __salt__['tomcat.undeploy'](name, url, timeout=timeout)
if undeploy.startswith('FAIL'):
ret['result'] = False
ret['comment'] = undeploy
return ret
return ret
avg_line_length: 30.371429 | max_line_length: 114 | alphanum_fraction: 0.564722
content_no_comment: identical to the content above minus the coding header and docstrings
is_comment_constant_removed: true | is_sharp_comment_removed: true
hexsha: 1c486f58fa570c63d63f4cb3f130a23e65f259d7 | size: 7,188 | ext: py | lang: Python
max_stars: repo_path=bc/navigation/migrations/0001_initial.py, repo_name=Buckinghamshire-Digital-Service/buckinghamshire-council, head_hexsha=bbbdb52b515bcdfc79a2bd9198dfa4828405370e, licenses=["BSD-3-Clause"], count=1, stars_event=2021-02-27T07:27:17.000Z to 2021-02-27T07:27:17.000Z
max_issues: repo_path=bc/navigation/migrations/0001_initial.py, repo_name=Buckinghamshire-Digital-Service/buckinghamshire-council, head_hexsha=bbbdb52b515bcdfc79a2bd9198dfa4828405370e, licenses=["BSD-3-Clause"], count=null, issues_event=null to null
max_forks: repo_path=bc/navigation/migrations/0001_initial.py, repo_name=Buckinghamshire-Digital-Service/buckinghamshire-council, head_hexsha=bbbdb52b515bcdfc79a2bd9198dfa4828405370e, licenses=["BSD-3-Clause"], count=1, forks_event=2021-06-09T15:56:54.000Z to 2021-06-09T15:56:54.000Z
content:
# -*- coding: utf-8 -*-
# Generated by Django 1.11.8 on 2017-12-20 09:28
from __future__ import unicode_literals
import django.db.models.deletion
from django.db import migrations, models
import wagtail.core.blocks
import wagtail.core.fields
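# Every StreamField below builds on the same "link" StructBlock (a required
# PageChooserBlock plus an optional CharBlock that overrides the page title);
# footer_navigation additionally nests it in "column" blocks with an optional
# heading.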
class Migration(migrations.Migration):
initial = True
dependencies = [("wagtailcore", "0040_page_draft_title")]
operations = [
migrations.CreateModel(
name="NavigationSettings",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"primary_navigation",
wagtail.core.fields.StreamField(
(
(
"link",
wagtail.core.blocks.StructBlock(
(
(
"page",
wagtail.core.blocks.PageChooserBlock(),
),
(
"title",
wagtail.core.blocks.CharBlock(
help_text="Leave blank to use the page's own title",
required=False,
),
),
)
),
),
),
blank=True,
help_text="Main site navigation",
),
),
(
"secondary_navigation",
wagtail.core.fields.StreamField(
(
(
"link",
wagtail.core.blocks.StructBlock(
(
(
"page",
wagtail.core.blocks.PageChooserBlock(),
),
(
"title",
wagtail.core.blocks.CharBlock(
help_text="Leave blank to use the page's own title",
required=False,
),
),
)
),
),
),
blank=True,
help_text="Alternative navigation",
),
),
(
"footer_navigation",
wagtail.core.fields.StreamField(
(
(
"column",
wagtail.core.blocks.StructBlock(
(
(
"heading",
wagtail.core.blocks.CharBlock(
required=False,
help_text="Leave blank if no header required.",
),
),
(
"links",
wagtail.core.blocks.ListBlock(
wagtail.core.blocks.StructBlock(
(
(
"page",
wagtail.core.blocks.PageChooserBlock(),
),
(
"title",
wagtail.core.blocks.CharBlock(
help_text="Leave blank to use the page's own title",
required=False,
),
),
)
)
),
),
)
),
),
),
blank=True,
help_text="Multiple columns of footer links with optional header.",
),
),
(
"footer_links",
wagtail.core.fields.StreamField(
(
(
"link",
wagtail.core.blocks.StructBlock(
(
(
"page",
wagtail.core.blocks.PageChooserBlock(),
),
(
"title",
wagtail.core.blocks.CharBlock(
help_text="Leave blank to use the page's own title",
required=False,
),
),
)
),
),
),
blank=True,
help_text="Single list of elements at the base of the page.",
),
),
(
"site",
models.OneToOneField(
editable=False,
on_delete=django.db.models.deletion.CASCADE,
to="wagtailcore.Site",
),
),
],
options={"abstract": False},
)
]
avg_line_length: 43.041916 | max_line_length: 116 | alphanum_fraction: 0.229967
content_no_comment: identical to the content above minus the two generator comment lines
is_comment_constant_removed: true | is_sharp_comment_removed: true
hexsha: 1c48703c6ae531496c99cd091196e12d31f42352 | size: 787 | ext: py | lang: Python
max_stars: repo_path=src/aoc_day03.py, repo_name=samuelrothen/advent_of_code_2020, head_hexsha=22cfc62314d0d19fe91cd1e606064a8828ef0bc7, licenses=["MIT"], count=1, stars_event=2021-01-24T21:17:22.000Z to 2021-01-24T21:17:22.000Z
max_issues: repo_path=src/aoc_day03.py, repo_name=samuelrothen/advent_of_code_2020, head_hexsha=22cfc62314d0d19fe91cd1e606064a8828ef0bc7, licenses=["MIT"], count=null, issues_event=null to null
max_forks: repo_path=src/aoc_day03.py, repo_name=samuelrothen/advent_of_code_2020, head_hexsha=22cfc62314d0d19fe91cd1e606064a8828ef0bc7, licenses=["MIT"], count=null, forks_event=null to null
content:
# Advent of Code Day 3
def check_trees(tree_map, n_steps_r, n_steps_d):
x = 0
n_trees = 0
map_border = len(tree_map[0])
for y in range(0, len(tree_map)-1, n_steps_d):
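        # the map repeats infinitely to the right, so wrap x around the row width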
x = (x + n_steps_r) % map_border
field = tree_map[n_steps_d + y][x]
if field == '#':
n_trees += 1
return n_trees
with open('../input/day03.txt', 'r') as f:
tree_map = f.read().splitlines()
# Right 1, down 1
r1_d1 = check_trees(tree_map, 1, 1)
# Right 3, down 1 (Part 1)
r3_d1 = check_trees(tree_map, 3, 1)
# Right 5, down 1
r5_d1 = check_trees(tree_map, 5, 1)
# Right 7, down 1
r7_d1 = check_trees(tree_map, 7, 1)
# Right 1, down 2
r1_d2 = check_trees(tree_map, 1, 2)
print(f'Part 1: {r3_d1}')
print(f'Part 2: {r1_d1 * r3_d1 * r5_d1 * r7_d1 * r1_d2}')
avg_line_length: 23.848485 | max_line_length: 57 | alphanum_fraction: 0.614994
content_no_comment: identical to the content above minus the comment lines
is_comment_constant_removed: true | is_sharp_comment_removed: true
hexsha: 1c4870e4e14175611c397b227767f34f313a3296 | size: 29,015 | ext: py | lang: Python
max_stars: repo_path=vnpy/trader/engine.py, repo_name=crystalphi/vnpy, head_hexsha=14eb6ff626b07230830248ee6aea8923a5a887f4, licenses=["MIT"], count=1, stars_event=2021-11-24T09:19:57.000Z to 2021-11-24T09:19:57.000Z
max_issues: repo_path=vnpy/trader/engine.py, repo_name=crystalphi/vnpy, head_hexsha=14eb6ff626b07230830248ee6aea8923a5a887f4, licenses=["MIT"], count=null, issues_event=null to null
max_forks: repo_path=vnpy/trader/engine.py, repo_name=crystalphi/vnpy, head_hexsha=14eb6ff626b07230830248ee6aea8923a5a887f4, licenses=["MIT"], count=null, forks_event=null to null
content:
"""
"""
import logging
from logging import Logger
import smtplib
import os
from abc import ABC
from datetime import datetime
from email.message import EmailMessage
from queue import Empty, Queue
from threading import Thread
from typing import Any, Sequence, Type, Dict, List, Optional
from vnpy.event import Event, EventEngine
from .app import BaseApp
from .event import (
EVENT_TICK,
EVENT_ORDER,
EVENT_TRADE,
EVENT_POSITION,
EVENT_ACCOUNT,
EVENT_CONTRACT,
EVENT_LOG
)
from .gateway import BaseGateway
from .object import (
Direction,
Exchange,
CancelRequest,
LogData,
OrderRequest,
SubscribeRequest,
HistoryRequest,
OrderData,
BarData,
TickData,
TradeData,
PositionData,
AccountData,
ContractData
)
from .setting import SETTINGS
from .utility import get_folder_path, TRADER_DIR
# dedicated per-engine logger
from .util_logger import setup_logger
class MainEngine:
"""
Acts as the core of VN Trader.
"""
def __init__(self, event_engine: EventEngine = None):
""""""
if event_engine:
self.event_engine: EventEngine = event_engine
else:
self.event_engine = EventEngine()
self.event_engine.start()
self.gateways: Dict[str, BaseGateway] = {}
self.engines: Dict[str, BaseEngine] = {}
self.apps: Dict[str, BaseApp] = {}
self.exchanges: List[Exchange] = []
self.rm_engine = None
self.algo_engine = None
self.rpc_service = None
os.chdir(TRADER_DIR) # Change working directory
self.init_engines() # Initialize function engines
def add_engine(self, engine_class: Any) -> "BaseEngine":
"""
Add function engine.
"""
engine = engine_class(self, self.event_engine)
self.engines[engine.engine_name] = engine
return engine
def add_gateway(self, gateway_class: Type[BaseGateway], gateway_name: str = None) -> BaseGateway:
"""
Add gateway.
"""
if gateway_name:
            # use the given gateway_name, so several gateways of the same interface
            # (different accounts) can connect at the same time
gateway = gateway_class(self.event_engine, gateway_name=gateway_name)
else:
            # by default, use the gateway_name defined by the interface itself
gateway = gateway_class(self.event_engine)
gateway_name = gateway.gateway_name
self.gateways[gateway_name] = gateway
# Add gateway supported exchanges into engine
for exchange in gateway.exchanges:
if exchange not in self.exchanges:
self.exchanges.append(exchange)
return gateway
def add_app(self, app_class: Type[BaseApp]) -> "BaseEngine":
"""
Add app.
"""
app = app_class()
self.apps[app.app_name] = app
engine = self.add_engine(app.engine_class)
if app.app_name == "RiskManager":
self.rm_engine = engine
elif app.app_name == "AlgoTrading":
self.algo_engine = engine
elif app.app_name == 'RpcService':
self.rpc_service = engine
return engine
def init_engines(self) -> None:
"""
Init all engines.
"""
self.add_engine(LogEngine)
self.add_engine(OmsEngine)
self.add_engine(EmailEngine)
def write_log(self, msg: str, source: str = "") -> None:
"""
Put log event with specific message.
"""
log = LogData(msg=msg, gateway_name=source)
event = Event(EVENT_LOG, log)
self.event_engine.put(event)
def get_gateway(self, gateway_name: str) -> BaseGateway:
"""
Return gateway object by name.
"""
gateway = self.gateways.get(gateway_name, None)
if not gateway:
self.write_log(f"找不到底层接口:{gateway_name}")
return gateway
def get_engine(self, engine_name: str) -> "BaseEngine":
"""
Return engine object by name.
"""
engine = self.engines.get(engine_name, None)
if not engine:
self.write_log(f"找不到引擎:{engine_name}")
return engine
def get_default_setting(self, gateway_name: str) -> Optional[Dict[str, Any]]:
"""
Get default setting dict of a specific gateway.
"""
gateway = self.get_gateway(gateway_name)
if gateway:
return gateway.get_default_setting()
return None
def get_all_gateway_names(self) -> List[str]:
"""
        Get the names of all gateways added to the main engine.
"""
return list(self.gateways.keys())
def get_all_gateway_status(self) -> List[dict]:
"""
Get all gateway status
:return:
"""
return list([{k: v.get_status()} for k, v in self.gateways.items()])
def get_all_apps(self) -> List[BaseApp]:
"""
Get all app objects.
"""
return list(self.apps.values())
def get_all_exchanges(self) -> List[Exchange]:
"""
Get all exchanges.
"""
return self.exchanges
def connect(self, setting: dict, gateway_name: str) -> None:
"""
Start connection of a specific gateway.
"""
gateway = self.get_gateway(gateway_name)
if gateway:
gateway.connect(setting)
def subscribe(self, req: SubscribeRequest, gateway_name: str) -> None:
"""
Subscribe tick data update of a specific gateway.
        If no gateway is specified, every gateway receives the subscribe request.
"""
if gateway_name:
gateway = self.get_gateway(gateway_name)
if gateway:
gateway.subscribe(req)
else:
for gateway in self.gateways.values():
if gateway:
gateway.subscribe(req)
def send_order(self, req: OrderRequest, gateway_name: str) -> str:
"""
Send new order request to a specific gateway.
        Extended to support custom spread contracts: algo orders sent by
        cta_strategy_pro are handled by the algo engine.
"""
        # custom spread contract: hand it over to the algo engine
if self.algo_engine and req.exchange == Exchange.SPD:
return self.algo_engine.send_spd_order(
req=req,
gateway_name=gateway_name)
gateway = self.get_gateway(gateway_name)
if gateway:
return gateway.send_order(req)
else:
return ""
def cancel_order(self, req: CancelRequest, gateway_name: str) -> bool:
"""
Send cancel order request to a specific gateway.
"""
        # custom spread contract: hand it over to the algo engine
if self.algo_engine and req.exchange == Exchange.SPD:
return self.algo_engine.cancel_spd_order(
req=req)
gateway = self.get_gateway(gateway_name)
if gateway:
return gateway.cancel_order(req)
return False
def send_orders(self, reqs: Sequence[OrderRequest], gateway_name: str) -> List[str]:
"""
        Send a batch of order requests to a specific gateway.
"""
gateway = self.get_gateway(gateway_name)
if gateway:
return gateway.send_orders(reqs)
else:
return ["" for req in reqs]
def cancel_orders(self, reqs: Sequence[CancelRequest], gateway_name: str) -> None:
"""
"""
gateway = self.get_gateway(gateway_name)
if gateway:
gateway.cancel_orders(reqs)
def query_history(self, req: HistoryRequest, gateway_name: str) -> Optional[List[BarData]]:
"""
        Query history data from a specific gateway.
"""
gateway = self.get_gateway(gateway_name)
if gateway:
return gateway.query_history(req)
else:
            self.write_log('gateway is empty; check that the contract gateway matches the connected gateway')
return None
def close(self) -> None:
"""
Make sure every gateway and app is closed properly before
programme exit.
"""
if hasattr(self, 'save_contracts'):
self.save_contracts()
# Stop event engine first to prevent new timer event.
self.event_engine.stop()
for engine in self.engines.values():
engine.close()
for gateway in self.gateways.values():
gateway.close()
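# A minimal usage sketch (not part of this file), assuming a gateway class
# such as vnpy's CtpGateway is importable:
#
#   event_engine = EventEngine()
#   main_engine = MainEngine(event_engine)
#   main_engine.add_gateway(CtpGateway)       # register a trading interface
#   main_engine.connect(ctp_setting, "CTP")   # ctp_setting: credentials dict
#   main_engine.close()                       # stop engines and gateways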
class BaseEngine(ABC):
"""
    Abstract class for implementing a function engine.
"""
def __init__(
self,
main_engine: MainEngine,
event_engine: EventEngine,
engine_name: str,
):
""""""
self.main_engine = main_engine
self.event_engine = event_engine
self.engine_name = engine_name
self.logger = None
self.create_logger(engine_name)
def create_logger(self, logger_name: str = 'base_engine'):
"""
        Create a logger dedicated to this engine.
        :param logger_name: logger name; defaults to the engine name
:return:
"""
log_path = get_folder_path("log")
log_filename = str(log_path.joinpath(logger_name))
print(u'create logger:{}'.format(log_filename))
self.logger = setup_logger(file_name=log_filename, name=logger_name,
log_level=SETTINGS.get('log.level', logging.DEBUG))
def write_log(self, msg: str, source: str = "", level: int = logging.DEBUG):
"""
        Write a log entry.
        :param msg: log message
        :param source: message source
        :param level: log level
:return:
"""
if self.logger:
if len(source) > 0:
msg = f'[{source}]{msg}'
self.logger.log(level, msg)
else:
log = LogData(msg=msg, level=level, gateway_name='')
event = Event(EVENT_LOG, log)
self.event_engine.put(event)
def close(self):
""""""
pass
class LogEngine(BaseEngine):
"""
Processes log event and output with logging module.
"""
def __init__(self, main_engine: MainEngine, event_engine: EventEngine):
""""""
super(LogEngine, self).__init__(main_engine, event_engine, "log")
if not SETTINGS["log.active"]:
return
self.level: int = SETTINGS["log.level"]
self.logger: Logger = logging.getLogger("VN Trader")
self.logger.setLevel(self.level)
self.formatter = logging.Formatter(
"%(asctime)s %(levelname)s: %(message)s"
)
self.add_null_handler()
if SETTINGS["log.console"]:
self.add_console_handler()
if SETTINGS["log.file"]:
self.add_file_handler()
self.register_event()
def add_null_handler(self) -> None:
"""
Add null handler for logger.
"""
null_handler = logging.NullHandler()
self.logger.addHandler(null_handler)
def add_console_handler(self) -> None:
"""
Add console output of log.
"""
console_handler = logging.StreamHandler()
console_handler.setLevel(self.level)
console_handler.setFormatter(self.formatter)
self.logger.addHandler(console_handler)
def add_file_handler(self) -> None:
"""
Add file output of log.
"""
today_date = datetime.now().strftime("%Y%m%d")
filename = f"vt_{today_date}.log"
log_path = get_folder_path("log")
file_path = log_path.joinpath(filename)
file_handler = logging.FileHandler(
file_path, mode="a", encoding="utf8"
)
file_handler.setLevel(self.level)
file_handler.setFormatter(self.formatter)
self.logger.addHandler(file_handler)
def register_event(self) -> None:
""""""
self.event_engine.register(EVENT_LOG, self.process_log_event)
def process_log_event(self, event: Event) -> None:
"""
Process log event.
"""
log = event.data
self.logger.log(log.level, log.msg)
class OmsEngine(BaseEngine):
"""
Provides order management system function for VN Trader.
"""
def __init__(self, main_engine: MainEngine, event_engine: EventEngine):
""""""
super(OmsEngine, self).__init__(main_engine, event_engine, "oms")
self.ticks: Dict[str, TickData] = {}
self.orders: Dict[str, OrderData] = {}
self.trades: Dict[str, TradeData] = {}
self.positions: Dict[str, PositionData] = {}
self.accounts: Dict[str, AccountData] = {}
self.contracts: Dict[str, ContractData] = {}
self.today_contracts: Dict[str, ContractData] = {}
        # custom (spread) contracts
self.custom_contracts = {} # vt_symbol: ContractData
self.custom_settings = {} # symbol: dict
self.symbol_spd_maping = {} # symbol: [spd_symbol]
self.prices = {}
self.active_orders: Dict[str, OrderData] = {}
self.add_function()
self.register_event()
self.load_contracts()
def __del__(self):
"""保存缓存"""
self.save_contracts()
def load_contracts(self) -> None:
"""从本地缓存加载合约字典"""
import bz2
import pickle
contract_file_name = 'vn_contract.pkb2'
if os.path.exists(contract_file_name):
try:
with bz2.BZ2File(contract_file_name, 'rb') as f:
self.contracts = pickle.load(f)
                    self.write_log(f'loaded cached contract dict: {contract_file_name}')
except Exception as ex:
                self.write_log(f'exception loading cached contracts: {str(ex)}')
        # update custom contracts
custom_contracts = self.get_all_custom_contracts()
self.get_all_custom_contracts(rtn_setting=True)
for contract in custom_contracts.values():
            # refresh the contract cache
self.contracts.update({contract.symbol: contract})
self.contracts.update({contract.vt_symbol: contract})
self.today_contracts[contract.vt_symbol] = contract
self.today_contracts[contract.symbol] = contract
            # get the active/passive legs of the custom contract
setting = self.custom_settings.get(contract.symbol, {})
leg1_symbol = setting.get('leg1_symbol')
leg2_symbol = setting.get('leg2_symbol')
            # build the leg-to-spread mapping
for symbol in [leg1_symbol, leg2_symbol]:
spd_mapping_list = self.symbol_spd_maping.get(symbol, [])
                # update mapping: symbol => spd_symbol
if (not contract.symbol.endswith('.SPD')) and contract.symbol not in spd_mapping_list:
spd_mapping_list.append(contract.symbol)
self.symbol_spd_maping.update({symbol: spd_mapping_list})
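        # symbol_spd_maping now maps each leg symbol to the custom spread
        # symbols it belongs to, e.g. {"rb2110": ["rb2110-rb2201.SPD"]}
        # (hypothetical symbols, for illustration only)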
def save_contracts(self) -> None:
"""持久化合约对象到缓存文件"""
import bz2
import pickle
contract_file_name = 'vn_contract.pkb2'
with bz2.BZ2File(contract_file_name, 'wb') as f:
if len(self.today_contracts) > 0:
                self.write_log("saving today's contract objects to the cache file")
pickle.dump(self.today_contracts, f)
else:
pickle.dump(self.contracts, f)
def add_function(self) -> None:
"""Add query function to main engine."""
self.main_engine.get_tick = self.get_tick
self.main_engine.get_order = self.get_order
self.main_engine.get_price = self.get_price
self.main_engine.get_trade = self.get_trade
self.main_engine.get_position = self.get_position
self.main_engine.get_account = self.get_account
self.main_engine.get_contract = self.get_contract
self.main_engine.get_exchange = self.get_exchange
self.main_engine.get_custom_contract = self.get_custom_contract
self.main_engine.get_all_ticks = self.get_all_ticks
self.main_engine.get_all_orders = self.get_all_orders
self.main_engine.get_all_trades = self.get_all_trades
self.main_engine.get_all_positions = self.get_all_positions
self.main_engine.get_all_accounts = self.get_all_accounts
self.main_engine.get_all_contracts = self.get_all_contracts
self.main_engine.get_all_active_orders = self.get_all_active_orders
self.main_engine.get_all_custom_contracts = self.get_all_custom_contracts
self.main_engine.get_mapping_spd = self.get_mapping_spd
self.main_engine.save_contracts = self.save_contracts
def register_event(self) -> None:
""""""
self.event_engine.register(EVENT_TICK, self.process_tick_event)
self.event_engine.register(EVENT_ORDER, self.process_order_event)
self.event_engine.register(EVENT_TRADE, self.process_trade_event)
self.event_engine.register(EVENT_POSITION, self.process_position_event)
self.event_engine.register(EVENT_ACCOUNT, self.process_account_event)
self.event_engine.register(EVENT_CONTRACT, self.process_contract_event)
def process_tick_event(self, event: Event) -> None:
""""""
tick = event.data
self.ticks[tick.vt_symbol] = tick
if '&' not in tick.symbol and tick.last_price:
self.prices[tick.vt_symbol] = tick.last_price
def process_order_event(self, event: Event) -> None:
""""""
order = event.data
self.orders[order.vt_orderid] = order
# If order is active, then update data in dict.
if order.is_active():
self.active_orders[order.vt_orderid] = order
        # Otherwise, pop the inactive order from the dict
elif order.vt_orderid in self.active_orders:
self.active_orders.pop(order.vt_orderid)
def process_trade_event(self, event: Event) -> None:
""""""
trade = event.data
self.trades[trade.vt_tradeid] = trade
def process_position_event(self, event: Event) -> None:
""""""
position = event.data
self.positions[position.vt_positionid] = position
if position.exchange != Exchange.SPD:
self.create_spd_position_event(position.symbol, position.direction)
def reverse_direction(self, direction):
"""返回反向持仓"""
if direction == Direction.LONG:
return Direction.SHORT
elif direction == Direction.SHORT:
return Direction.LONG
return direction
def create_spd_position_event(self, symbol, direction):
"""创建自定义品种对持仓信息"""
spd_symbols = self.symbol_spd_maping.get(symbol, [])
if not spd_symbols:
return
for spd_symbol in spd_symbols:
spd_setting = self.custom_settings.get(spd_symbol, None)
if not spd_setting:
continue
leg1_symbol = spd_setting.get('leg1_symbol')
leg2_symbol = spd_setting.get('leg2_symbol')
leg1_contract = self.contracts.get(leg1_symbol)
leg2_contract = self.contracts.get(leg2_symbol)
spd_contract = self.contracts.get(spd_symbol)
if leg1_contract is None or leg2_contract is None:
continue
leg1_ratio = spd_setting.get('leg1_ratio', 1)
leg2_ratio = spd_setting.get('leg2_ratio', 1)
            # Find the leg1/leg2 positions and infer the spread direction
spd_pos = None
if leg1_symbol == symbol:
k1 = f"{leg1_contract.gateway_name}.{leg1_contract.vt_symbol}.{direction.value}"
leg1_pos = self.positions.get(k1)
k2 = f"{leg2_contract.gateway_name}.{leg2_contract.vt_symbol}.{self.reverse_direction(direction).value}"
leg2_pos = self.positions.get(k2)
spd_direction = direction
k3 = f"{spd_contract.gateway_name}.{spd_symbol}.{Exchange.SPD.value}.{spd_direction.value}"
spd_pos = self.positions.get(k3)
elif leg2_symbol == symbol:
k1 = f"{leg1_contract.gateway_name}.{leg1_contract.vt_symbol}.{self.reverse_direction(direction).value}"
leg1_pos = self.positions.get(k1)
k2 = f"{leg2_contract.gateway_name}.{leg2_contract.vt_symbol}.{direction.value}"
leg2_pos = self.positions.get(k2)
spd_direction = self.reverse_direction(direction)
k3 = f"{spd_contract.gateway_name}.{spd_symbol}.{Exchange.SPD.value}.{spd_direction.value}"
spd_pos = self.positions.get(k3)
else:
continue
if leg1_pos is None or leg2_pos is None: # or leg1_pos.volume ==0 or leg2_pos.volume == 0:
continue
            # Use the leg1/leg2 volume ratios to compute the smallest consistent spd_volume
spd_volume = min(int(leg1_pos.volume / leg1_ratio), int(leg2_pos.volume / leg2_ratio))
if spd_volume <= 0 and spd_pos is None:
continue
            if spd_setting.get('is_ratio', False) and leg2_pos.price > 0:
                # ratio spread: 100 * leg1 notional / leg2 notional
                spd_price = 100 * (leg1_pos.price * leg1_ratio) / (leg2_pos.price * leg2_ratio)
elif spd_setting.get('is_spread', False):
spd_price = leg1_pos.price * leg1_ratio - leg2_pos.price * leg2_ratio
else:
spd_price = 0
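            # Worked example (hypothetical numbers): leg1 volume=10, leg1_ratio=2,
            # leg2 volume=6, leg2_ratio=1 -> spd_volume = min(10//2, 6//1) = 5; for an
            # is_spread pair with leg1 price=100 and leg2 price=90 the spread price is
            # 100*2 - 90*1 = 110.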
spd_pos = PositionData(
gateway_name=spd_contract.gateway_name,
accountid=leg1_pos.accountid,
symbol=spd_symbol,
exchange=Exchange.SPD,
direction=spd_direction,
volume=spd_volume,
price=spd_price
)
event = Event(EVENT_POSITION, data=spd_pos)
self.event_engine.put(event)
def process_account_event(self, event: Event) -> None:
""""""
account = event.data
self.accounts[account.vt_accountid] = account
def process_contract_event(self, event: Event) -> None:
""""""
contract = event.data
self.contracts[contract.vt_symbol] = contract
self.contracts[contract.symbol] = contract
self.today_contracts[contract.vt_symbol] = contract
self.today_contracts[contract.symbol] = contract
def get_exchange(self, symbol: str) -> Exchange:
"""获取合约对应的交易所"""
contract = self.contracts.get(symbol, None)
if contract is None:
return Exchange.LOCAL
return contract.exchange
def get_tick(self, vt_symbol: str) -> Optional[TickData]:
"""
Get latest market tick data by vt_symbol.
"""
return self.ticks.get(vt_symbol, None)
def get_price(self, vt_symbol):
"""
        Get the latest price by vt_symbol.
:param vt_symbol:
:return:
"""
return self.prices.get(vt_symbol, None)
def get_order(self, vt_orderid) -> Optional[OrderData]:
"""
Get latest order data by vt_orderid.
"""
return self.orders.get(vt_orderid, None)
def get_trade(self, vt_tradeid: str) -> Optional[TradeData]:
"""
Get trade data by vt_tradeid.
"""
return self.trades.get(vt_tradeid, None)
def get_position(self, vt_positionid: str) -> Optional[PositionData]:
"""
Get latest position data by vt_positionid.
"""
return self.positions.get(vt_positionid, None)
def get_account(self, vt_accountid: str) -> Optional[AccountData]:
"""
Get latest account data by vt_accountid.
"""
return self.accounts.get(vt_accountid, None)
def get_contract(self, vt_symbol: str) -> Optional[ContractData]:
"""
Get contract data by vt_symbol.
"""
return self.contracts.get(vt_symbol, None)
def get_all_ticks(self) -> List[TickData]:
"""
Get all tick data.
"""
return list(self.ticks.values())
def get_all_orders(self) -> List[OrderData]:
"""
Get all order data.
"""
return list(self.orders.values())
def get_all_trades(self) -> List[TradeData]:
"""
Get all trade data.
"""
return list(self.trades.values())
def get_all_positions(self) -> List[PositionData]:
"""
Get all position data.
"""
return list(self.positions.values())
def get_all_accounts(self) -> List[AccountData]:
"""
Get all account data.
"""
return list(self.accounts.values())
def get_all_contracts(self) -> List[ContractData]:
"""
Get all contract data.
"""
return list(self.contracts.values())
def get_all_active_orders(self, vt_symbol: str = "") -> List[OrderData]:
"""
Get all active orders by vt_symbol.
If vt_symbol is empty, return all active orders.
"""
if not vt_symbol:
return list(self.active_orders.values())
else:
active_orders = [
order
for order in self.active_orders.values()
if order.vt_symbol == vt_symbol
]
return active_orders
def get_custom_contract(self, symbol):
"""
        Get the settings of a custom contract.
:param symbol: "pb2012-1-pb2101-1-CJ"
:return: {
"name": "pb跨期价差",
"exchange": "SPD",
"leg1_symbol": "pb2012",
"leg1_exchange": "SHFE",
"leg1_ratio": 1,
"leg2_symbol": "pb2101",
"leg2_exchange": "SHFE",
"leg2_ratio": 1,
"is_spread": true,
"size": 1,
"margin_rate": 0.1,
"price_tick": 5
}
"""
return self.custom_settings.get(symbol, None)
def get_all_custom_contracts(self, rtn_setting=False):
"""
        Get all custom contracts.
:return:
"""
if rtn_setting:
if len(self.custom_settings) == 0:
c = CustomContract()
self.custom_settings = c.get_config()
return self.custom_settings
if len(self.custom_contracts) == 0:
c = CustomContract()
self.custom_settings = c.get_config()
self.custom_contracts = c.get_contracts()
return self.custom_contracts
def get_mapping_spd(self, symbol):
"""根据主动腿/被动腿symbol,获取自定义套利对的symbol list"""
return self.symbol_spd_maping.get(symbol, [])
class CustomContract(object):
"""
    Custom contracts.
    # Used when initializing the system, to supplement the local contract info file contracts.vt
    # Used by the CTP gateway to load custom spread contracts for internal quote matching
"""
    # Config file (dict) for custom contracts in the local run directory
file_name = 'custom_contracts.json'
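    # Illustrative custom_contracts.json content (hypothetical values; the keys
    # mirror the example documented in OmsEngine.get_custom_contract):
    #   {"pb2012-1-pb2101-1-CJ": {"leg1_symbol": "pb2012", "leg1_ratio": 1,
    #                             "leg2_symbol": "pb2101", "leg2_ratio": 1,
    #                             "is_spread": true, "price_tick": 5}}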
def __init__(self):
"""构造函数"""
from vnpy.trader.utility import load_json
        self.setting = load_json(self.file_name)  # all settings
def get_config(self):
"""获取配置"""
return self.setting
def get_contracts(self):
"""获取所有合约信息"""
d = {}
from vnpy.trader.object import ContractData, Exchange
for symbol, setting in self.setting.items():
gateway_name = setting.get('gateway_name', None)
if gateway_name is None:
gateway_name = SETTINGS.get('gateway_name', '')
vn_exchange = Exchange(setting.get('exchange', 'SPD'))
contract = ContractData(
gateway_name=gateway_name,
symbol=symbol,
exchange=vn_exchange,
name=setting.get('name', symbol),
size=setting.get('size', 100),
product=None,
pricetick=setting.get('price_tick', 0.01),
margin_rate=setting.get('margin_rate', 0.1)
)
d[contract.vt_symbol] = contract
return d
class EmailEngine(BaseEngine):
"""
Provides email sending function for VN Trader.
"""
def __init__(self, main_engine: MainEngine, event_engine: EventEngine):
""""""
super(EmailEngine, self).__init__(main_engine, event_engine, "email")
self.thread: Thread = Thread(target=self.run)
self.queue: Queue = Queue()
self.active: bool = False
self.main_engine.send_email = self.send_email
def send_email(self, subject: str, content: str, receiver: str = "") -> None:
""""""
# Start email engine when sending first email.
if not self.active:
self.start()
# Use default receiver if not specified.
if not receiver:
receiver = SETTINGS["email.receiver"]
msg = EmailMessage()
msg["From"] = SETTINGS["email.sender"]
msg["To"] = receiver
msg["Subject"] = subject
msg.set_content(content)
self.queue.put(msg)
def run(self) -> None:
""""""
while self.active:
try:
msg = self.queue.get(block=True, timeout=1)
with smtplib.SMTP_SSL(
SETTINGS["email.server"], SETTINGS["email.port"]
) as smtp:
smtp.login(
SETTINGS["email.username"], SETTINGS["email.password"]
)
smtp.send_message(msg)
except Empty:
pass
def start(self) -> None:
""""""
self.active = True
self.thread.start()
def close(self) -> None:
""""""
if not self.active:
return
self.active = False
self.thread.join()
| 32.131783
| 120
| 0.592418
|
import logging
from logging import Logger
import smtplib
import os
from abc import ABC
from datetime import datetime
from email.message import EmailMessage
from queue import Empty, Queue
from threading import Thread
from typing import Any, Sequence, Type, Dict, List, Optional
from vnpy.event import Event, EventEngine
from .app import BaseApp
from .event import (
EVENT_TICK,
EVENT_ORDER,
EVENT_TRADE,
EVENT_POSITION,
EVENT_ACCOUNT,
EVENT_CONTRACT,
EVENT_LOG
)
from .gateway import BaseGateway
from .object import (
Direction,
Exchange,
CancelRequest,
LogData,
OrderRequest,
SubscribeRequest,
HistoryRequest,
OrderData,
BarData,
TickData,
TradeData,
PositionData,
AccountData,
ContractData
)
from .setting import SETTINGS
from .utility import get_folder_path, TRADER_DIR
from .util_logger import setup_logger
class MainEngine:
def __init__(self, event_engine: EventEngine = None):
if event_engine:
self.event_engine: EventEngine = event_engine
else:
self.event_engine = EventEngine()
self.event_engine.start()
self.gateways: Dict[str, BaseGateway] = {}
self.engines: Dict[str, BaseEngine] = {}
self.apps: Dict[str, BaseApp] = {}
self.exchanges: List[Exchange] = []
self.rm_engine = None
self.algo_engine = None
self.rpc_service = None
os.chdir(TRADER_DIR)
self.init_engines()
def add_engine(self, engine_class: Any) -> "BaseEngine":
engine = engine_class(self, self.event_engine)
self.engines[engine.engine_name] = engine
return engine
def add_gateway(self, gateway_class: Type[BaseGateway], gateway_name: str = None) -> BaseGateway:
if gateway_name:
gateway = gateway_class(self.event_engine, gateway_name=gateway_name)
else:
gateway = gateway_class(self.event_engine)
gateway_name = gateway.gateway_name
self.gateways[gateway_name] = gateway
for exchange in gateway.exchanges:
if exchange not in self.exchanges:
self.exchanges.append(exchange)
return gateway
def add_app(self, app_class: Type[BaseApp]) -> "BaseEngine":
app = app_class()
self.apps[app.app_name] = app
engine = self.add_engine(app.engine_class)
if app.app_name == "RiskManager":
self.rm_engine = engine
elif app.app_name == "AlgoTrading":
self.algo_engine = engine
elif app.app_name == 'RpcService':
self.rpc_service = engine
return engine
def init_engines(self) -> None:
self.add_engine(LogEngine)
self.add_engine(OmsEngine)
self.add_engine(EmailEngine)
def write_log(self, msg: str, source: str = "") -> None:
log = LogData(msg=msg, gateway_name=source)
event = Event(EVENT_LOG, log)
self.event_engine.put(event)
def get_gateway(self, gateway_name: str) -> BaseGateway:
gateway = self.gateways.get(gateway_name, None)
if not gateway:
self.write_log(f"找不到底层接口:{gateway_name}")
return gateway
def get_engine(self, engine_name: str) -> "BaseEngine":
engine = self.engines.get(engine_name, None)
if not engine:
self.write_log(f"找不到引擎:{engine_name}")
return engine
def get_default_setting(self, gateway_name: str) -> Optional[Dict[str, Any]]:
gateway = self.get_gateway(gateway_name)
if gateway:
return gateway.get_default_setting()
return None
def get_all_gateway_names(self) -> List[str]:
return list(self.gateways.keys())
def get_all_gateway_status(self) -> List[dict]:
return list([{k: v.get_status()} for k, v in self.gateways.items()])
def get_all_apps(self) -> List[BaseApp]:
return list(self.apps.values())
def get_all_exchanges(self) -> List[Exchange]:
return self.exchanges
def connect(self, setting: dict, gateway_name: str) -> None:
gateway = self.get_gateway(gateway_name)
if gateway:
gateway.connect(setting)
def subscribe(self, req: SubscribeRequest, gateway_name: str) -> None:
if gateway_name:
gateway = self.get_gateway(gateway_name)
if gateway:
gateway.subscribe(req)
else:
for gateway in self.gateways.values():
if gateway:
gateway.subscribe(req)
def send_order(self, req: OrderRequest, gateway_name: str) -> str:
if self.algo_engine and req.exchange == Exchange.SPD:
return self.algo_engine.send_spd_order(
req=req,
gateway_name=gateway_name)
gateway = self.get_gateway(gateway_name)
if gateway:
return gateway.send_order(req)
else:
return ""
def cancel_order(self, req: CancelRequest, gateway_name: str) -> bool:
if self.algo_engine and req.exchange == Exchange.SPD:
return self.algo_engine.cancel_spd_order(
req=req)
gateway = self.get_gateway(gateway_name)
if gateway:
return gateway.cancel_order(req)
return False
def send_orders(self, reqs: Sequence[OrderRequest], gateway_name: str) -> List[str]:
gateway = self.get_gateway(gateway_name)
if gateway:
return gateway.send_orders(reqs)
else:
return ["" for req in reqs]
def cancel_orders(self, reqs: Sequence[CancelRequest], gateway_name: str) -> None:
gateway = self.get_gateway(gateway_name)
if gateway:
gateway.cancel_orders(reqs)
def query_history(self, req: HistoryRequest, gateway_name: str) -> Optional[List[BarData]]:
gateway = self.get_gateway(gateway_name)
if gateway:
return gateway.query_history(req)
else:
            self.write_log('Gateway is empty; check that the contract gateway matches the connected gateway')
return None
def close(self) -> None:
if hasattr(self, 'save_contracts'):
self.save_contracts()
self.event_engine.stop()
for engine in self.engines.values():
engine.close()
for gateway in self.gateways.values():
gateway.close()
class BaseEngine(ABC):
def __init__(
self,
main_engine: MainEngine,
event_engine: EventEngine,
engine_name: str,
):
self.main_engine = main_engine
self.event_engine = event_engine
self.engine_name = engine_name
self.logger = None
self.create_logger(engine_name)
def create_logger(self, logger_name: str = 'base_engine'):
log_path = get_folder_path("log")
log_filename = str(log_path.joinpath(logger_name))
print(u'create logger:{}'.format(log_filename))
self.logger = setup_logger(file_name=log_filename, name=logger_name,
log_level=SETTINGS.get('log.level', logging.DEBUG))
def write_log(self, msg: str, source: str = "", level: int = logging.DEBUG):
if self.logger:
if len(source) > 0:
msg = f'[{source}]{msg}'
self.logger.log(level, msg)
else:
log = LogData(msg=msg, level=level, gateway_name='')
event = Event(EVENT_LOG, log)
self.event_engine.put(event)
def close(self):
pass
class LogEngine(BaseEngine):
def __init__(self, main_engine: MainEngine, event_engine: EventEngine):
super(LogEngine, self).__init__(main_engine, event_engine, "log")
if not SETTINGS["log.active"]:
return
self.level: int = SETTINGS["log.level"]
self.logger: Logger = logging.getLogger("VN Trader")
self.logger.setLevel(self.level)
self.formatter = logging.Formatter(
"%(asctime)s %(levelname)s: %(message)s"
)
self.add_null_handler()
if SETTINGS["log.console"]:
self.add_console_handler()
if SETTINGS["log.file"]:
self.add_file_handler()
self.register_event()
def add_null_handler(self) -> None:
null_handler = logging.NullHandler()
self.logger.addHandler(null_handler)
def add_console_handler(self) -> None:
console_handler = logging.StreamHandler()
console_handler.setLevel(self.level)
console_handler.setFormatter(self.formatter)
self.logger.addHandler(console_handler)
def add_file_handler(self) -> None:
today_date = datetime.now().strftime("%Y%m%d")
filename = f"vt_{today_date}.log"
log_path = get_folder_path("log")
file_path = log_path.joinpath(filename)
file_handler = logging.FileHandler(
file_path, mode="a", encoding="utf8"
)
file_handler.setLevel(self.level)
file_handler.setFormatter(self.formatter)
self.logger.addHandler(file_handler)
def register_event(self) -> None:
self.event_engine.register(EVENT_LOG, self.process_log_event)
def process_log_event(self, event: Event) -> None:
log = event.data
self.logger.log(log.level, log.msg)
class OmsEngine(BaseEngine):
def __init__(self, main_engine: MainEngine, event_engine: EventEngine):
super(OmsEngine, self).__init__(main_engine, event_engine, "oms")
self.ticks: Dict[str, TickData] = {}
self.orders: Dict[str, OrderData] = {}
self.trades: Dict[str, TradeData] = {}
self.positions: Dict[str, PositionData] = {}
self.accounts: Dict[str, AccountData] = {}
self.contracts: Dict[str, ContractData] = {}
self.today_contracts: Dict[str, ContractData] = {}
self.custom_contracts = {}
self.custom_settings = {}
self.symbol_spd_maping = {}
self.prices = {}
self.active_orders: Dict[str, OrderData] = {}
self.add_function()
self.register_event()
self.load_contracts()
def __del__(self):
self.save_contracts()
def load_contracts(self) -> None:
import bz2
import pickle
contract_file_name = 'vn_contract.pkb2'
if os.path.exists(contract_file_name):
try:
with bz2.BZ2File(contract_file_name, 'rb') as f:
self.contracts = pickle.load(f)
                    self.write_log(f'Loaded cached contract dict: {contract_file_name}')
except Exception as ex:
                self.write_log(f'Failed to load cached contracts: {str(ex)}')
custom_contracts = self.get_all_custom_contracts()
self.get_all_custom_contracts(rtn_setting=True)
for contract in custom_contracts.values():
self.contracts.update({contract.symbol: contract})
self.contracts.update({contract.vt_symbol: contract})
self.today_contracts[contract.vt_symbol] = contract
self.today_contracts[contract.symbol] = contract
setting = self.custom_settings.get(contract.symbol, {})
leg1_symbol = setting.get('leg1_symbol')
leg2_symbol = setting.get('leg2_symbol')
for symbol in [leg1_symbol, leg2_symbol]:
spd_mapping_list = self.symbol_spd_maping.get(symbol, [])
if (not contract.symbol.endswith('.SPD')) and contract.symbol not in spd_mapping_list:
spd_mapping_list.append(contract.symbol)
self.symbol_spd_maping.update({symbol: spd_mapping_list})
def save_contracts(self) -> None:
import bz2
import pickle
contract_file_name = 'vn_contract.pkb2'
with bz2.BZ2File(contract_file_name, 'wb') as f:
if len(self.today_contracts) > 0:
                self.write_log("Saving today's contract objects to the cache file")
pickle.dump(self.today_contracts, f)
else:
pickle.dump(self.contracts, f)
def add_function(self) -> None:
self.main_engine.get_tick = self.get_tick
self.main_engine.get_order = self.get_order
self.main_engine.get_price = self.get_price
self.main_engine.get_trade = self.get_trade
self.main_engine.get_position = self.get_position
self.main_engine.get_account = self.get_account
self.main_engine.get_contract = self.get_contract
self.main_engine.get_exchange = self.get_exchange
self.main_engine.get_custom_contract = self.get_custom_contract
self.main_engine.get_all_ticks = self.get_all_ticks
self.main_engine.get_all_orders = self.get_all_orders
self.main_engine.get_all_trades = self.get_all_trades
self.main_engine.get_all_positions = self.get_all_positions
self.main_engine.get_all_accounts = self.get_all_accounts
self.main_engine.get_all_contracts = self.get_all_contracts
self.main_engine.get_all_active_orders = self.get_all_active_orders
self.main_engine.get_all_custom_contracts = self.get_all_custom_contracts
self.main_engine.get_mapping_spd = self.get_mapping_spd
self.main_engine.save_contracts = self.save_contracts
def register_event(self) -> None:
self.event_engine.register(EVENT_TICK, self.process_tick_event)
self.event_engine.register(EVENT_ORDER, self.process_order_event)
self.event_engine.register(EVENT_TRADE, self.process_trade_event)
self.event_engine.register(EVENT_POSITION, self.process_position_event)
self.event_engine.register(EVENT_ACCOUNT, self.process_account_event)
self.event_engine.register(EVENT_CONTRACT, self.process_contract_event)
def process_tick_event(self, event: Event) -> None:
tick = event.data
self.ticks[tick.vt_symbol] = tick
if '&' not in tick.symbol and tick.last_price:
self.prices[tick.vt_symbol] = tick.last_price
def process_order_event(self, event: Event) -> None:
order = event.data
self.orders[order.vt_orderid] = order
if order.is_active():
self.active_orders[order.vt_orderid] = order
elif order.vt_orderid in self.active_orders:
self.active_orders.pop(order.vt_orderid)
def process_trade_event(self, event: Event) -> None:
trade = event.data
self.trades[trade.vt_tradeid] = trade
def process_position_event(self, event: Event) -> None:
position = event.data
self.positions[position.vt_positionid] = position
if position.exchange != Exchange.SPD:
self.create_spd_position_event(position.symbol, position.direction)
def reverse_direction(self, direction):
if direction == Direction.LONG:
return Direction.SHORT
elif direction == Direction.SHORT:
return Direction.LONG
return direction
def create_spd_position_event(self, symbol, direction):
spd_symbols = self.symbol_spd_maping.get(symbol, [])
if not spd_symbols:
return
for spd_symbol in spd_symbols:
spd_setting = self.custom_settings.get(spd_symbol, None)
if not spd_setting:
continue
leg1_symbol = spd_setting.get('leg1_symbol')
leg2_symbol = spd_setting.get('leg2_symbol')
leg1_contract = self.contracts.get(leg1_symbol)
leg2_contract = self.contracts.get(leg2_symbol)
spd_contract = self.contracts.get(spd_symbol)
if leg1_contract is None or leg2_contract is None:
continue
leg1_ratio = spd_setting.get('leg1_ratio', 1)
leg2_ratio = spd_setting.get('leg2_ratio', 1)
spd_pos = None
if leg1_symbol == symbol:
k1 = f"{leg1_contract.gateway_name}.{leg1_contract.vt_symbol}.{direction.value}"
leg1_pos = self.positions.get(k1)
k2 = f"{leg2_contract.gateway_name}.{leg2_contract.vt_symbol}.{self.reverse_direction(direction).value}"
leg2_pos = self.positions.get(k2)
spd_direction = direction
k3 = f"{spd_contract.gateway_name}.{spd_symbol}.{Exchange.SPD.value}.{spd_direction.value}"
spd_pos = self.positions.get(k3)
elif leg2_symbol == symbol:
k1 = f"{leg1_contract.gateway_name}.{leg1_contract.vt_symbol}.{self.reverse_direction(direction).value}"
leg1_pos = self.positions.get(k1)
k2 = f"{leg2_contract.gateway_name}.{leg2_contract.vt_symbol}.{direction.value}"
leg2_pos = self.positions.get(k2)
spd_direction = self.reverse_direction(direction)
k3 = f"{spd_contract.gateway_name}.{spd_symbol}.{Exchange.SPD.value}.{spd_direction.value}"
spd_pos = self.positions.get(k3)
else:
continue
if leg1_pos is None or leg2_pos is None:
continue
spd_volume = min(int(leg1_pos.volume / leg1_ratio), int(leg2_pos.volume / leg2_ratio))
if spd_volume <= 0 and spd_pos is None:
continue
if spd_setting.get('is_ratio', False) and leg2_pos.price > 0:
                spd_price = 100 * (leg1_pos.price * leg1_ratio) / (leg2_pos.price * leg2_ratio)
elif spd_setting.get('is_spread', False):
spd_price = leg1_pos.price * leg1_ratio - leg2_pos.price * leg2_ratio
else:
spd_price = 0
spd_pos = PositionData(
gateway_name=spd_contract.gateway_name,
accountid=leg1_pos.accountid,
symbol=spd_symbol,
exchange=Exchange.SPD,
direction=spd_direction,
volume=spd_volume,
price=spd_price
)
event = Event(EVENT_POSITION, data=spd_pos)
self.event_engine.put(event)
def process_account_event(self, event: Event) -> None:
account = event.data
self.accounts[account.vt_accountid] = account
def process_contract_event(self, event: Event) -> None:
contract = event.data
self.contracts[contract.vt_symbol] = contract
self.contracts[contract.symbol] = contract
self.today_contracts[contract.vt_symbol] = contract
self.today_contracts[contract.symbol] = contract
def get_exchange(self, symbol: str) -> Exchange:
contract = self.contracts.get(symbol, None)
if contract is None:
return Exchange.LOCAL
return contract.exchange
def get_tick(self, vt_symbol: str) -> Optional[TickData]:
return self.ticks.get(vt_symbol, None)
def get_price(self, vt_symbol):
return self.prices.get(vt_symbol, None)
def get_order(self, vt_orderid) -> Optional[OrderData]:
return self.orders.get(vt_orderid, None)
def get_trade(self, vt_tradeid: str) -> Optional[TradeData]:
return self.trades.get(vt_tradeid, None)
def get_position(self, vt_positionid: str) -> Optional[PositionData]:
return self.positions.get(vt_positionid, None)
def get_account(self, vt_accountid: str) -> Optional[AccountData]:
return self.accounts.get(vt_accountid, None)
def get_contract(self, vt_symbol: str) -> Optional[ContractData]:
return self.contracts.get(vt_symbol, None)
def get_all_ticks(self) -> List[TickData]:
return list(self.ticks.values())
def get_all_orders(self) -> List[OrderData]:
return list(self.orders.values())
def get_all_trades(self) -> List[TradeData]:
return list(self.trades.values())
def get_all_positions(self) -> List[PositionData]:
return list(self.positions.values())
def get_all_accounts(self) -> List[AccountData]:
return list(self.accounts.values())
def get_all_contracts(self) -> List[ContractData]:
return list(self.contracts.values())
def get_all_active_orders(self, vt_symbol: str = "") -> List[OrderData]:
if not vt_symbol:
return list(self.active_orders.values())
else:
active_orders = [
order
for order in self.active_orders.values()
if order.vt_symbol == vt_symbol
]
return active_orders
def get_custom_contract(self, symbol):
return self.custom_settings.get(symbol, None)
def get_all_custom_contracts(self, rtn_setting=False):
if rtn_setting:
if len(self.custom_settings) == 0:
c = CustomContract()
self.custom_settings = c.get_config()
return self.custom_settings
if len(self.custom_contracts) == 0:
c = CustomContract()
self.custom_settings = c.get_config()
self.custom_contracts = c.get_contracts()
return self.custom_contracts
def get_mapping_spd(self, symbol):
return self.symbol_spd_maping.get(symbol, [])
class CustomContract(object):
file_name = 'custom_contracts.json'
def __init__(self):
from vnpy.trader.utility import load_json
self.setting = load_json(self.file_name)
def get_config(self):
return self.setting
def get_contracts(self):
d = {}
from vnpy.trader.object import ContractData, Exchange
for symbol, setting in self.setting.items():
gateway_name = setting.get('gateway_name', None)
if gateway_name is None:
gateway_name = SETTINGS.get('gateway_name', '')
vn_exchange = Exchange(setting.get('exchange', 'SPD'))
contract = ContractData(
gateway_name=gateway_name,
symbol=symbol,
exchange=vn_exchange,
name=setting.get('name', symbol),
size=setting.get('size', 100),
product=None,
pricetick=setting.get('price_tick', 0.01),
margin_rate=setting.get('margin_rate', 0.1)
)
d[contract.vt_symbol] = contract
return d
class EmailEngine(BaseEngine):
def __init__(self, main_engine: MainEngine, event_engine: EventEngine):
super(EmailEngine, self).__init__(main_engine, event_engine, "email")
self.thread: Thread = Thread(target=self.run)
self.queue: Queue = Queue()
self.active: bool = False
self.main_engine.send_email = self.send_email
def send_email(self, subject: str, content: str, receiver: str = "") -> None:
if not self.active:
self.start()
if not receiver:
receiver = SETTINGS["email.receiver"]
msg = EmailMessage()
msg["From"] = SETTINGS["email.sender"]
msg["To"] = receiver
msg["Subject"] = subject
msg.set_content(content)
self.queue.put(msg)
def run(self) -> None:
while self.active:
try:
msg = self.queue.get(block=True, timeout=1)
with smtplib.SMTP_SSL(
SETTINGS["email.server"], SETTINGS["email.port"]
) as smtp:
smtp.login(
SETTINGS["email.username"], SETTINGS["email.password"]
)
smtp.send_message(msg)
except Empty:
pass
def start(self) -> None:
self.active = True
self.thread.start()
def close(self) -> None:
if not self.active:
return
self.active = False
self.thread.join()
| true
| true
|
1c4871f0c878ab1c35feefbb79fe50299bde90c4
| 729
|
py
|
Python
|
apex/functions/notify/line_notify.py
|
Koichi-Kobayashi/aws_iot_line_notify
|
a745db492d01107b2b8675f6febe332bbb4307af
|
[
"Apache-2.0"
] | null | null | null |
apex/functions/notify/line_notify.py
|
Koichi-Kobayashi/aws_iot_line_notify
|
a745db492d01107b2b8675f6febe332bbb4307af
|
[
"Apache-2.0"
] | null | null | null |
apex/functions/notify/line_notify.py
|
Koichi-Kobayashi/aws_iot_line_notify
|
a745db492d01107b2b8675f6febe332bbb4307af
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import requests
import conf
def create_message(button_event):
if button_event['clickType'] == 'SINGLE':
msg = conf.clickType_single
elif button_event['clickType'] == 'DOUBLE':
msg = conf.clickType_double
else:
msg = conf.clickType_long
return msg
def lambda_handler(event, context):
url = "https://notify-api.line.me/api/notify"
token = conf.token
headers = {"Authorization": "Bearer " + token}
button_event = event['deviceEvent']['buttonClicked']
payload = {"message": create_message(button_event)}
r = requests.post(url, headers=headers, params=payload)
if __name__ == '__main__':
event = ''
lambda_handler(event, None)
| 22.090909
| 59
| 0.657064
|
import requests
import conf
def create_message(button_event):
if button_event['clickType'] == 'SINGLE':
msg = conf.clickType_single
elif button_event['clickType'] == 'DOUBLE':
msg = conf.clickType_double
else:
msg = conf.clickType_long
return msg
def lambda_handler(event, context):
url = "https://notify-api.line.me/api/notify"
token = conf.token
headers = {"Authorization": "Bearer " + token}
button_event = event['deviceEvent']['buttonClicked']
payload = {"message": create_message(button_event)}
r = requests.post(url, headers=headers, params=payload)
if __name__ == '__main__':
event = ''
lambda_handler(event, None)
| true
| true
|
1c48739fad3abf032b3b1f31759a4c33c5bb2c20
| 398
|
py
|
Python
|
Chapter-2/Example_2-2.py
|
vishwesh5/Learning-OpenCV-Book
|
59ad4e190274f92735075013e9276b8159d6ca2e
|
[
"MIT"
] | 2
|
2018-12-21T17:36:53.000Z
|
2018-12-22T17:13:00.000Z
|
Chapter-2/Example_2-2.py
|
vishwesh5/Learning-OpenCV-Book
|
59ad4e190274f92735075013e9276b8159d6ca2e
|
[
"MIT"
] | null | null | null |
Chapter-2/Example_2-2.py
|
vishwesh5/Learning-OpenCV-Book
|
59ad4e190274f92735075013e9276b8159d6ca2e
|
[
"MIT"
] | null | null | null |
import cv2 as cv
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-i","--image",required=True,\
help="Path to image")
args = vars(parser.parse_args())
img = cv.imread(args["image"])
if img is None:
print("Image not found")
else:
cv.namedWindow("Example2",cv.WINDOW_AUTOSIZE)
cv.imshow("Example2",img)
cv.waitKey(0)
cv.destroyWindow("Example2")
| 22.111111
| 50
| 0.68593
|
import cv2 as cv
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-i","--image",required=True,\
help="Path to image")
args = vars(parser.parse_args())
img = cv.imread(args["image"])
if img is None:
print("Image not found")
else:
cv.namedWindow("Example2",cv.WINDOW_AUTOSIZE)
cv.imshow("Example2",img)
cv.waitKey(0)
cv.destroyWindow("Example2")
| true
| true
|
1c48748ca246fef140c24024d9868d5f3ce278fb
| 2,341
|
py
|
Python
|
data_array.py
|
fogcitymarathoner/coconuts
|
b0d72c171dbc61c089ec3a352604093fbafc1f5d
|
[
"Unlicense"
] | null | null | null |
data_array.py
|
fogcitymarathoner/coconuts
|
b0d72c171dbc61c089ec3a352604093fbafc1f5d
|
[
"Unlicense"
] | 1
|
2021-06-01T22:00:23.000Z
|
2021-06-01T22:00:23.000Z
|
data_array.py
|
fogcitymarathoner/coconuts
|
b0d72c171dbc61c089ec3a352604093fbafc1f5d
|
[
"Unlicense"
] | null | null | null |
__author__ = 'marc'
import json
from lib import load_flight_file
from lib import sort_streams_on_start_point
from lib import streams_average
from settings import FLIGHT_PATHS_FILE_JSON
from Data import Stream
class StreamArrayCalculator:
def __init__(self, raw_data):
sorted_streams = sort_streams_on_start_point(raw_data['streams'])
"""
check_bad_next_streams(sorted_streams)
unproductive_streams_removed = throw_away_unproductive_next_streams(sorted_streams)
"""
self.streams = []
i = 0
for stream in sorted_streams:
self.streams.append(Stream(stream[0], stream[1], stream[2], i))
i += 1
self.consumption = raw_data['consumption']
self.average = streams_average(raw_data['streams'])
def to_dict(self):
streams = []
for s in self.streams:
stream = {
"start_point": s.start_point,
"end_point": s.end_point,
"consumption": s.consumption,
"index": s.index
}
streams.append(stream)
return {'consumption': self.consumption,
"average": self.average,
'streams': streams
}
def is_starts_within(self, current_stream_index, next_candidate_stream_index):
"""
return False if next_candidate start is out of current range
"""
# True if next starts in current
if self.streams[next_candidate_stream_index].start_point >= self.streams[current_stream_index].start_point and \
self.streams[next_candidate_stream_index].start_point <= self.streams[current_stream_index].end_point:
return True
else:
return False
def start_point_streams_list(self, start_point):
"""
start_point_streams_list - return list of streams containing start point.
"""
streams_within = []
for s in self.streams:
# return streams at first start out of range
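            # (the early return is valid because __init__ sorted the streams by start_point)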
if start_point < s.start_point:
return streams_within
if start_point <= s.end_point and start_point >= s.start_point:
streams_within.append(s)
return streams_within
| 33.927536
| 121
| 0.608714
|
__author__ = 'marc'
import json
from lib import load_flight_file
from lib import sort_streams_on_start_point
from lib import streams_average
from settings import FLIGHT_PATHS_FILE_JSON
from Data import Stream
class StreamArrayCalculator:
def __init__(self, raw_data):
sorted_streams = sort_streams_on_start_point(raw_data['streams'])
self.streams = []
i = 0
for stream in sorted_streams:
self.streams.append(Stream(stream[0], stream[1], stream[2], i))
i += 1
self.consumption = raw_data['consumption']
self.average = streams_average(raw_data['streams'])
def to_dict(self):
streams = []
for s in self.streams:
stream = {
"start_point": s.start_point,
"end_point": s.end_point,
"consumption": s.consumption,
"index": s.index
}
streams.append(stream)
return {'consumption': self.consumption,
"average": self.average,
'streams': streams
}
def is_starts_within(self, current_stream_index, next_candidate_stream_index):
if self.streams[next_candidate_stream_index].start_point >= self.streams[current_stream_index].start_point and \
self.streams[next_candidate_stream_index].start_point <= self.streams[current_stream_index].end_point:
return True
else:
return False
def start_point_streams_list(self, start_point):
streams_within = []
for s in self.streams:
if start_point < s.start_point:
return streams_within
if start_point <= s.end_point and start_point >= s.start_point:
streams_within.append(s)
return streams_within
| true
| true
|
1c4874eef0849b4e7264b8fcb01cd8ffc5d9ca50
| 2,425
|
py
|
Python
|
openbb_terminal/stocks/discovery/fidelity_view.py
|
joshuabuildsthings/GamestonkTerminal
|
385d12803ae1725a22b0a440c3b88bffa974edcd
|
[
"MIT"
] | 255
|
2022-03-29T16:43:51.000Z
|
2022-03-31T23:57:08.000Z
|
openbb_terminal/stocks/discovery/fidelity_view.py
|
joshuabuildsthings/GamestonkTerminal
|
385d12803ae1725a22b0a440c3b88bffa974edcd
|
[
"MIT"
] | 14
|
2022-03-29T14:20:33.000Z
|
2022-03-31T23:39:20.000Z
|
openbb_terminal/stocks/discovery/fidelity_view.py
|
joshuabuildsthings/GamestonkTerminal
|
385d12803ae1725a22b0a440c3b88bffa974edcd
|
[
"MIT"
] | 24
|
2022-03-29T15:28:56.000Z
|
2022-03-31T23:54:15.000Z
|
""" Fidelity View """
__docformat__ = "numpy"
import logging
import os
import re
import pandas as pd
from openbb_terminal.decorators import log_start_end
from openbb_terminal.helper_funcs import export_data, print_rich_table
from openbb_terminal.stocks.discovery import fidelity_model
from openbb_terminal import rich_config
logger = logging.getLogger(__name__)
def lambda_buy_sell_ratio_color_red_green(val: str) -> str:
"""Add color tags to the Buys/Sells ratio cell
Parameters
----------
val : str
Buys/Sells ratio cell
Returns
-------
str
Buys/Sells ratio cell with color tags
"""
buy_sell_match = re.match(r"(\d+)% Buys, (\d+)% Sells", val, re.M | re.I)
if not buy_sell_match:
return val
buys = int(buy_sell_match.group(1))
sells = int(buy_sell_match.group(2))
if buys >= sells:
return f"[green]{buys}%[/green] Buys, {sells}% Sells"
return f"{buys}% Buys, [red]{sells}%[/red] Sells"
def lambda_price_change_color_red_green(val: str) -> str:
"""Add color tags to the price change cell
Parameters
----------
val : str
Price change cell
Returns
-------
str
Price change cell with color tags
"""
val_float = float(val.split(" ")[0])
if val_float > 0:
return f"[green]{val}[/green]"
return f"[red]{val}[/red]"
@log_start_end(log=logger)
def orders_view(num: int, export: str):
"""Prints last N orders by Fidelity customers. [Source: Fidelity]
Parameters
----------
num: int
Number of stocks to display
export : str
Export dataframe data to csv,json,xlsx file
"""
order_header, df_orders = fidelity_model.get_orders()
pd.set_option("display.max_colwidth", None)
if rich_config.USE_COLOR:
df_orders["Buy / Sell Ratio"] = df_orders["Buy / Sell Ratio"].apply(
lambda_buy_sell_ratio_color_red_green
)
df_orders["Price Change"] = df_orders["Price Change"].apply(
lambda_price_change_color_red_green
)
df_orders = df_orders.head(n=num).iloc[:, :-1]
print_rich_table(
df_orders,
headers=[x.title() for x in df_orders.columns],
show_index=False,
title=f"{order_header}:",
)
export_data(
export,
os.path.dirname(os.path.abspath(__file__)),
"ford",
df_orders,
)
| 23.317308
| 77
| 0.629278
|
__docformat__ = "numpy"
import logging
import os
import re
import pandas as pd
from openbb_terminal.decorators import log_start_end
from openbb_terminal.helper_funcs import export_data, print_rich_table
from openbb_terminal.stocks.discovery import fidelity_model
from openbb_terminal import rich_config
logger = logging.getLogger(__name__)
def lambda_buy_sell_ratio_color_red_green(val: str) -> str:
buy_sell_match = re.match(r"(\d+)% Buys, (\d+)% Sells", val, re.M | re.I)
if not buy_sell_match:
return val
buys = int(buy_sell_match.group(1))
sells = int(buy_sell_match.group(2))
if buys >= sells:
return f"[green]{buys}%[/green] Buys, {sells}% Sells"
return f"{buys}% Buys, [red]{sells}%[/red] Sells"
def lambda_price_change_color_red_green(val: str) -> str:
val_float = float(val.split(" ")[0])
if val_float > 0:
return f"[green]{val}[/green]"
return f"[red]{val}[/red]"
@log_start_end(log=logger)
def orders_view(num: int, export: str):
order_header, df_orders = fidelity_model.get_orders()
pd.set_option("display.max_colwidth", None)
if rich_config.USE_COLOR:
df_orders["Buy / Sell Ratio"] = df_orders["Buy / Sell Ratio"].apply(
lambda_buy_sell_ratio_color_red_green
)
df_orders["Price Change"] = df_orders["Price Change"].apply(
lambda_price_change_color_red_green
)
df_orders = df_orders.head(n=num).iloc[:, :-1]
print_rich_table(
df_orders,
headers=[x.title() for x in df_orders.columns],
show_index=False,
title=f"{order_header}:",
)
export_data(
export,
os.path.dirname(os.path.abspath(__file__)),
"ford",
df_orders,
)
| true
| true
|
1c487618650e746bc8f6ffa044bd3bdf064b87c9
| 5,429
|
py
|
Python
|
onmt/modules/linear.py
|
tuannamnguyen93/NMTGMinor
|
acde3454343bda7060fae541c110d0ad1a8ac4f4
|
[
"MIT"
] | 4
|
2021-07-06T11:39:41.000Z
|
2022-02-23T10:48:59.000Z
|
onmt/modules/linear.py
|
tuannamnguyen93/NMTGMinor
|
acde3454343bda7060fae541c110d0ad1a8ac4f4
|
[
"MIT"
] | 2
|
2021-09-14T03:18:12.000Z
|
2022-01-25T16:19:51.000Z
|
onmt/modules/linear.py
|
tuannamnguyen93/NMTGMinor
|
acde3454343bda7060fae541c110d0ad1a8ac4f4
|
[
"MIT"
] | 1
|
2019-09-15T17:22:58.000Z
|
2019-09-15T17:22:58.000Z
|
import torch
import torch.nn as nn
import torch.nn.init as init
import torch.nn.utils.weight_norm as WeightNorm
import onmt
import torch.nn.functional as F
from onmt.modules.swish import Swish
from onmt.modules.dropout import VariationalDropout
# different linears for the same input
def group_linear(linears, input, bias=False):
weights = [linear.weight for linear in linears]
weight = torch.cat(weights, dim=0)
if bias:
biases = [linear.bias for linear in linears]
bias_ = torch.cat(biases)
else:
bias_ = None
return F.linear(input, weight, bias_)
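# Usage sketch (illustrative, assuming bias-free layers): fuse several projections
# of the same input into a single matmul, e.g.
#   q, k, v = [nn.Linear(512, 512, bias=False) for _ in range(3)]
#   qkv = group_linear([q, k, v], x)  # x: (..., 512) -> qkv: (..., 1536)
# which matches torch.cat([l(x) for l in (q, k, v)], dim=-1).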
class XavierLinear(nn.Module):
    ''' Simple Linear layer with optional weight norm (Xavier init is currently commented out below) '''
def __init__(self, d_in, d_out, bias=True, nonlinearity='linear'):
super(XavierLinear, self).__init__()
linear = nn.Linear(d_in, d_out, bias=bias)
weight_norm = onmt.constants.weight_norm
self.weight_norm = weight_norm
if weight_norm:
self.linear = WeightNorm(linear, name='weight')
else:
self.linear = linear
# init.xavier_uniform_(self.linear.weight)
#
# if bias:
# self.linear.bias.data.zero_()
def forward(self, x):
return self.linear(x)
def __repr__(self):
return self.__class__.__name__ + '(' \
+ 'in_features=' + str(self.linear.in_features) \
+ ', out_features=' + str(self.linear.out_features) \
+ ', bias=' + str(self.linear.bias is not None) \
+ ', weight_norm=' + str(self.weight_norm) + ')'
Linear = XavierLinear
class MaxOut(nn.Module):
def __init__(self, d, m, k):
super(MaxOut, self).__init__()
self.d_in, self.d_out, self.pool_size = d, m, k
self.lin = Linear(d, m * k)
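        # e.g. d=512, m=256, k=4: lin maps (..., 512) -> (..., 1024); forward reshapes
        # to (..., 256, 4) and takes the max over the last (pool) dimension.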
def forward(self, inputs):
original_size = inputs.size()
inputs = inputs.view(-1, inputs.size(-1))
shape = list(inputs.size())
shape[-1] = self.d_out
shape.append(self.pool_size)
max_dim = len(shape) - 1
out = self.lin(inputs)
m, i = out.view(*shape).max(dim=max_dim)
m = m.view(*original_size[:-1], m.size(-1))
return m
class FeedForwardSwish(nn.Module):
"""Applies position-wise feed forward to inputs
Args:
d_model: dimension of model
d_ff: dimension of feed forward
p: dropout probability
Params:
fc_1: FC layer from d_model to d_ff
fc_2: FC layer from d_ff to d_model
Input Shapes:
input: batch_size x len x d_model or len x batch_size x d_model
Output Shapes:
out: batch_size x len x d_model or len x batch_size x d_model
"""
def __init__(self, d_model, d_ff, p, variational=False):
super(FeedForwardSwish, self).__init__()
self.d_model = d_model
self.d_ff = d_ff
self.fc_1 = XavierLinear(d_model, d_ff)
self.fc_2 = XavierLinear(d_ff, d_model)
self.swish = Swish()
if variational:
self.dropout = VariationalDropout(p)
else:
self.dropout = nn.Dropout(p)
def forward(self, input):
out = self.swish(self.fc_1(input))
out = self.dropout(out)
out = self.fc_2(out)
return out
class FeedForward(nn.Module):
"""Applies position-wise feed forward to inputs
Args:
d_model: dimension of model
d_ff: dimension of feed forward
p: dropout probability
Params:
fc_1: FC layer from d_model to d_ff
fc_2: FC layer from d_ff to d_model
Input Shapes:
input: batch_size x len x d_model or len x batch_size x d_model
Output Shapes:
out: batch_size x len x d_model or len x batch_size x d_model
"""
def __init__(self, d_model, d_ff, p, variational=False):
super(FeedForward, self).__init__()
self.d_model = d_model
self.d_ff = d_ff
self.fc_1 = Linear(d_model, d_ff, nonlinearity="relu")
self.fc_2 = Linear(d_ff, d_model)
if variational:
self.dropout = VariationalDropout(p)
else:
self.dropout = nn.Dropout(p)
def forward(self, input):
out = F.relu(self.fc_1(input), inplace=True)
out = self.dropout(out)
out = self.fc_2(out)
return out
# class ChunkFeedForward(nn.Module):
# """Applies position-wise feed forward to CHUNKs of inputs
#
# Args:
# d_model: dimension of model
# d_ff: dimension of feed forward
# p: dropout probability
#
# Params:
# fc_1: FC layer from d_model to d_ff
# fc_2: FC layer from d_ff to d_model
#
# Input Shapes:
# input: batch_size x len x d_model or len x batch_size x d_model
#
# Output Shapes:
# out: batch_size x len x d_model or len x batch_size x d_model
# """
# def __init__(self, d_model, d_ff, p, **kwargs):
# super(ChunkFeedForward, self).__init__()
# self.d_model = d_model
# self.d_ff = d_ff
# self.fc_1 = Linear(d_model, d_ff, nonlinearity="relu")
# self.fc_2 = Linear(d_ff, d_model)
#
# i
#
# def forward(self, input):
#
# out = F.relu(self.fc_1(input), inplace=True)
# out = self.dropout(out)
# out = self.fc_2(out)
# return out
| 27.841026
| 77
| 0.596242
|
import torch
import torch.nn as nn
import torch.nn.init as init
import torch.nn.utils.weight_norm as WeightNorm
import onmt
import torch.nn.functional as F
from onmt.modules.swish import Swish
from onmt.modules.dropout import VariationalDropout
def group_linear(linears, input, bias=False):
weights = [linear.weight for linear in linears]
weight = torch.cat(weights, dim=0)
if bias:
biases = [linear.bias for linear in linears]
bias_ = torch.cat(biases)
else:
bias_ = None
return F.linear(input, weight, bias_)
class XavierLinear(nn.Module):
def __init__(self, d_in, d_out, bias=True, nonlinearity='linear'):
super(XavierLinear, self).__init__()
linear = nn.Linear(d_in, d_out, bias=bias)
weight_norm = onmt.constants.weight_norm
self.weight_norm = weight_norm
if weight_norm:
self.linear = WeightNorm(linear, name='weight')
else:
self.linear = linear
def forward(self, x):
return self.linear(x)
def __repr__(self):
return self.__class__.__name__ + '(' \
+ 'in_features=' + str(self.linear.in_features) \
+ ', out_features=' + str(self.linear.out_features) \
+ ', bias=' + str(self.linear.bias is not None) \
+ ', weight_norm=' + str(self.weight_norm) + ')'
Linear = XavierLinear
class MaxOut(nn.Module):
def __init__(self, d, m, k):
super(MaxOut, self).__init__()
self.d_in, self.d_out, self.pool_size = d, m, k
self.lin = Linear(d, m * k)
def forward(self, inputs):
original_size = inputs.size()
inputs = inputs.view(-1, inputs.size(-1))
shape = list(inputs.size())
shape[-1] = self.d_out
shape.append(self.pool_size)
max_dim = len(shape) - 1
out = self.lin(inputs)
m, i = out.view(*shape).max(dim=max_dim)
m = m.view(*original_size[:-1], m.size(-1))
return m
class FeedForwardSwish(nn.Module):
def __init__(self, d_model, d_ff, p, variational=False):
super(FeedForwardSwish, self).__init__()
self.d_model = d_model
self.d_ff = d_ff
self.fc_1 = XavierLinear(d_model, d_ff)
self.fc_2 = XavierLinear(d_ff, d_model)
self.swish = Swish()
if variational:
self.dropout = VariationalDropout(p)
else:
self.dropout = nn.Dropout(p)
def forward(self, input):
out = self.swish(self.fc_1(input))
out = self.dropout(out)
out = self.fc_2(out)
return out
class FeedForward(nn.Module):
def __init__(self, d_model, d_ff, p, variational=False):
super(FeedForward, self).__init__()
self.d_model = d_model
self.d_ff = d_ff
self.fc_1 = Linear(d_model, d_ff, nonlinearity="relu")
self.fc_2 = Linear(d_ff, d_model)
if variational:
self.dropout = VariationalDropout(p)
else:
self.dropout = nn.Dropout(p)
def forward(self, input):
out = F.relu(self.fc_1(input), inplace=True)
out = self.dropout(out)
out = self.fc_2(out)
return out
#
# Args:
# d_model: dimension of model
# d_ff: dimension of feed forward
# p: dropout probability
#
# Params:
# fc_1: FC layer from d_model to d_ff
# fc_2: FC layer from d_ff to d_model
#
# Input Shapes:
# input: batch_size x len x d_model or len x batch_size x d_model
#
# Output Shapes:
# out: batch_size x len x d_model or len x batch_size x d_model
# """
| true
| true
|
1c487757682dc2e74fb94446cd1a0561e99ca49e
| 3,616
|
py
|
Python
|
tests/py/test_teams.py
|
webmaven/gratipay.com
|
31f6bcf903029895a4c56290aedde755e852c82f
|
[
"CC0-1.0"
] | 1
|
2019-10-09T10:13:53.000Z
|
2019-10-09T10:13:53.000Z
|
tests/py/test_teams.py
|
webmaven/gratipay.com
|
31f6bcf903029895a4c56290aedde755e852c82f
|
[
"CC0-1.0"
] | null | null | null |
tests/py/test_teams.py
|
webmaven/gratipay.com
|
31f6bcf903029895a4c56290aedde755e852c82f
|
[
"CC0-1.0"
] | null | null | null |
from __future__ import unicode_literals
from gratipay.models._mixin_team import StubParticipantAdded
from gratipay.testing import Harness
from gratipay.security.user import User
class Tests(Harness):
def setUp(self):
Harness.setUp(self)
self.team = self.make_participant('A-Team', number='plural')
def test_is_team(self):
        expected = True
        actual = self.team.IS_PLURAL
        assert actual == expected
def test_show_as_team_to_admin(self):
self.make_participant('alice', is_admin=True)
user = User.from_username('alice')
assert self.team.show_as_team(user)
def test_show_as_team_to_team_member(self):
self.make_participant('alice')
self.team.add_member(self.make_participant('bob', claimed_time='now'))
user = User.from_username('bob')
assert self.team.show_as_team(user)
def test_show_as_team_to_non_team_member(self):
self.make_participant('alice')
self.team.add_member(self.make_participant('bob', claimed_time='now'))
user = User.from_username('alice')
assert self.team.show_as_team(user)
def test_show_as_team_to_anon(self):
self.make_participant('alice')
self.team.add_member(self.make_participant('bob', claimed_time='now'))
assert self.team.show_as_team(User())
def test_dont_show_individuals_as_team(self):
alice = self.make_participant('alice', number='singular')
assert not alice.show_as_team(User())
def test_dont_show_plural_no_members_as_team_to_anon(self):
group = self.make_participant('Group', number='plural')
assert not group.show_as_team(User())
def test_dont_show_plural_no_members_as_team_to_auth(self):
group = self.make_participant('Group', number='plural')
self.make_participant('alice')
assert not group.show_as_team(User.from_username('alice'))
def test_show_plural_no_members_as_team_to_self(self):
group = self.make_participant('Group', number='plural')
assert group.show_as_team(User.from_username('Group'))
def test_show_plural_no_members_as_team_to_admin(self):
group = self.make_participant('Group', number='plural')
self.make_participant('Admin', is_admin=True)
assert group.show_as_team(User.from_username('Admin'))
def test_can_add_members(self):
alice = self.make_participant('alice', claimed_time='now')
expected = True
self.team.add_member(alice)
actual = alice.member_of(self.team)
assert actual == expected
def test_get_teams_for_member(self):
alice = self.make_participant('alice', claimed_time='now')
bob = self.make_participant('bob', claimed_time='now')
team = self.make_participant('B-Team', number='plural')
self.team.add_member(alice)
team.add_member(bob)
expected = 1
actual = alice.get_teams().pop().nmembers
assert actual == expected
def test_preclude_adding_stub_participant(self):
stub_participant = self.make_participant('stub')
with self.assertRaises(StubParticipantAdded):
self.team.add_member(stub_participant)
def test_remove_all_members(self):
alice = self.make_participant('alice', claimed_time='now')
self.team.add_member(alice)
bob = self.make_participant('bob', claimed_time='now')
self.team.add_member(bob)
assert len(self.team.get_current_takes()) == 2 # sanity check
self.team.remove_all_members()
assert len(self.team.get_current_takes()) == 0
| 38.468085
| 78
| 0.692754
|
from __future__ import unicode_literals
from gratipay.models._mixin_team import StubParticipantAdded
from gratipay.testing import Harness
from gratipay.security.user import User
class Tests(Harness):
def setUp(self):
Harness.setUp(self)
self.team = self.make_participant('A-Team', number='plural')
def test_is_team(self):
        expected = True
        actual = self.team.IS_PLURAL
        assert actual == expected
def test_show_as_team_to_admin(self):
self.make_participant('alice', is_admin=True)
user = User.from_username('alice')
assert self.team.show_as_team(user)
def test_show_as_team_to_team_member(self):
self.make_participant('alice')
self.team.add_member(self.make_participant('bob', claimed_time='now'))
user = User.from_username('bob')
assert self.team.show_as_team(user)
def test_show_as_team_to_non_team_member(self):
self.make_participant('alice')
self.team.add_member(self.make_participant('bob', claimed_time='now'))
user = User.from_username('alice')
assert self.team.show_as_team(user)
def test_show_as_team_to_anon(self):
self.make_participant('alice')
self.team.add_member(self.make_participant('bob', claimed_time='now'))
assert self.team.show_as_team(User())
def test_dont_show_individuals_as_team(self):
alice = self.make_participant('alice', number='singular')
assert not alice.show_as_team(User())
def test_dont_show_plural_no_members_as_team_to_anon(self):
group = self.make_participant('Group', number='plural')
assert not group.show_as_team(User())
def test_dont_show_plural_no_members_as_team_to_auth(self):
group = self.make_participant('Group', number='plural')
self.make_participant('alice')
assert not group.show_as_team(User.from_username('alice'))
def test_show_plural_no_members_as_team_to_self(self):
group = self.make_participant('Group', number='plural')
assert group.show_as_team(User.from_username('Group'))
def test_show_plural_no_members_as_team_to_admin(self):
group = self.make_participant('Group', number='plural')
self.make_participant('Admin', is_admin=True)
assert group.show_as_team(User.from_username('Admin'))
def test_can_add_members(self):
alice = self.make_participant('alice', claimed_time='now')
expected = True
self.team.add_member(alice)
actual = alice.member_of(self.team)
assert actual == expected
def test_get_teams_for_member(self):
alice = self.make_participant('alice', claimed_time='now')
bob = self.make_participant('bob', claimed_time='now')
team = self.make_participant('B-Team', number='plural')
self.team.add_member(alice)
team.add_member(bob)
expected = 1
actual = alice.get_teams().pop().nmembers
assert actual == expected
def test_preclude_adding_stub_participant(self):
stub_participant = self.make_participant('stub')
with self.assertRaises(StubParticipantAdded):
self.team.add_member(stub_participant)
def test_remove_all_members(self):
alice = self.make_participant('alice', claimed_time='now')
self.team.add_member(alice)
bob = self.make_participant('bob', claimed_time='now')
self.team.add_member(bob)
assert len(self.team.get_current_takes()) == 2
self.team.remove_all_members()
assert len(self.team.get_current_takes()) == 0
| true
| true
|
1c4877c974b684dbdb7f4e4b039aebf0f1bb0990
| 2,923
|
py
|
Python
|
client_code/ContactForm/_anvil_designer.py
|
benlawraus/pyDALAnvilWorks
|
8edc67b0fbe65bdcc0ef6fd2424f55046cacba7c
|
[
"MIT"
] | 6
|
2021-11-14T22:49:40.000Z
|
2022-03-26T17:40:40.000Z
|
client_code/ContactForm/_anvil_designer.py
|
benlawraus/pyDALAnvilWorks
|
8edc67b0fbe65bdcc0ef6fd2424f55046cacba7c
|
[
"MIT"
] | null | null | null |
client_code/ContactForm/_anvil_designer.py
|
benlawraus/pyDALAnvilWorks
|
8edc67b0fbe65bdcc0ef6fd2424f55046cacba7c
|
[
"MIT"
] | 1
|
2022-01-31T01:18:32.000Z
|
2022-01-31T01:18:32.000Z
|
from anvil import *
from dataclasses import dataclass, field
from ..EmailDisplayForm import EmailDisplayForm
label_name = dict(
role=None,
align='left',
tooltip='',
border='',
foreground='',
visible=True,
text='Name',
font_size=None,
font='',
spacing_above='small',
icon_align='left',
spacing_below='small',
italic=False,
background='',
bold=False,
underline=False,
icon='',
parent=Container(),
)
text_box_name = dict(
)
label_phone = dict(
role=None,
align='left',
tooltip='',
border='',
foreground='',
visible=True,
text='Telephone Number',
font_size=None,
font='',
spacing_above='small',
icon_align='left',
spacing_below='small',
italic=False,
background='',
bold=False,
underline=False,
icon='',
parent=Container(),
)
text_box_phone = dict(
)
column_panel_1 = dict(
col_widths='{"NWGMBY":15,"VPHMVV":45,"MDNMFP":15,"LOBHDT":45}',
parent=Container(),
)
repeating_panel_email = dict(
role=None,
tooltip='',
border='',
foreground='',
items=None,
visible=True,
spacing_above='small',
spacing_below='small',
item_template='EmailItemForm',
background='',
parent=Container(),
)
button_save = dict(
role='primary-color',
align='center',
tooltip='',
border='',
enabled=True,
foreground='',
visible=True,
text='Save Contact',
font_size=None,
font='',
spacing_above='small',
icon_align='left',
spacing_below='small',
italic=False,
background='',
bold=False,
underline=False,
icon='',
parent=Container(),
)
email_display_form = dict(
)
column_panel_email_lists = dict(
col_widths='{}',
parent=Container(),
)
content_panel = dict(
col_widths='{}',
parent=Container(),
)
@dataclass
class ContactFormTemplate(HtmlTemplate):
label_name: Label = field(default_factory=lambda: Label(**label_name))
text_box_name: TextBox = field(default_factory=lambda: TextBox(**text_box_name))
label_phone: Label = field(default_factory=lambda: Label(**label_phone))
text_box_phone: TextBox = field(default_factory=lambda: TextBox(**text_box_phone))
column_panel_1: ColumnPanel = field(default_factory=lambda: ColumnPanel(**column_panel_1))
repeating_panel_email: RepeatingPanel = field(default_factory=lambda: RepeatingPanel(**repeating_panel_email))
button_save: Button = field(default_factory=lambda: Button(**button_save))
email_display_form: EmailDisplayForm = field(default_factory=lambda: EmailDisplayForm(**email_display_form))
column_panel_email_lists: ColumnPanel = field(default_factory=lambda: ColumnPanel(**column_panel_email_lists))
content_panel: ColumnPanel = field(default_factory=lambda: ColumnPanel(**content_panel))
def init_components(self, **kwargs):
ContactFormTemplate.__init__(self)
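# Hedged usage sketch: how a generated template like the one above is typically
# consumed. 'ContactForm' and the attribute writes are illustrative and assume
# the Anvil stubs imported at the top behave like the real runtime.
class ContactForm(ContactFormTemplate):
def __init__(self, **properties):
self.init_components(**properties)
self.text_box_name.text = 'Jane Doe'
self.button_save.enabled = True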
| 25.640351
| 114
| 0.677386
|
from anvil import *
from dataclasses import dataclass, field
from ..EmailDisplayForm import EmailDisplayForm
label_name = dict(
role=None,
align='left',
tooltip='',
border='',
foreground='',
visible=True,
text='Name',
font_size=None,
font='',
spacing_above='small',
icon_align='left',
spacing_below='small',
italic=False,
background='',
bold=False,
underline=False,
icon='',
parent=Container(),
)
text_box_name = dict(
)
label_phone = dict(
role=None,
align='left',
tooltip='',
border='',
foreground='',
visible=True,
text='Telephone Number',
font_size=None,
font='',
spacing_above='small',
icon_align='left',
spacing_below='small',
italic=False,
background='',
bold=False,
underline=False,
icon='',
parent=Container(),
)
text_box_phone = dict(
)
column_panel_1 = dict(
col_widths='{"NWGMBY":15,"VPHMVV":45,"MDNMFP":15,"LOBHDT":45}',
parent=Container(),
)
repeating_panel_email = dict(
role=None,
tooltip='',
border='',
foreground='',
items=None,
visible=True,
spacing_above='small',
spacing_below='small',
item_template='EmailItemForm',
background='',
parent=Container(),
)
button_save = dict(
role='primary-color',
align='center',
tooltip='',
border='',
enabled=True,
foreground='',
visible=True,
text='Save Contact',
font_size=None,
font='',
spacing_above='small',
icon_align='left',
spacing_below='small',
italic=False,
background='',
bold=False,
underline=False,
icon='',
parent=Container(),
)
email_display_form = dict(
)
column_panel_email_lists = dict(
col_widths='{}',
parent=Container(),
)
content_panel = dict(
col_widths='{}',
parent=Container(),
)
@dataclass
class ContactFormTemplate(HtmlTemplate):
label_name: Label = field(default_factory=lambda: Label(**label_name))
text_box_name: TextBox = field(default_factory=lambda: TextBox(**text_box_name))
label_phone: Label = field(default_factory=lambda: Label(**label_phone))
text_box_phone: TextBox = field(default_factory=lambda: TextBox(**text_box_phone))
column_panel_1: ColumnPanel = field(default_factory=lambda: ColumnPanel(**column_panel_1))
repeating_panel_email: RepeatingPanel = field(default_factory=lambda: RepeatingPanel(**repeating_panel_email))
button_save: Button = field(default_factory=lambda: Button(**button_save))
email_display_form: EmailDisplayForm = field(default_factory=lambda: EmailDisplayForm(**email_display_form))
column_panel_email_lists: ColumnPanel = field(default_factory=lambda: ColumnPanel(**column_panel_email_lists))
content_panel: ColumnPanel = field(default_factory=lambda: ColumnPanel(**content_panel))
def init_components(self, **kwargs):
ContactFormTemplate.__init__(self)
| true
| true
|
1c487922611e17b5fa948c3273cb3ae9b3d05521
| 4,944
|
py
|
Python
|
Young_Massive_Stars/Code_v31_multiple/galaxy_generator/run_several_gala_v31.py
|
lhquirogan/Galactic_Maser_Simulator
|
cb74afd40b6d99429219c44114c74e150d3f5189
|
[
"MIT"
] | null | null | null |
Young_Massive_Stars/Code_v31_multiple/galaxy_generator/run_several_gala_v31.py
|
lhquirogan/Galactic_Maser_Simulator
|
cb74afd40b6d99429219c44114c74e150d3f5189
|
[
"MIT"
] | null | null | null |
Young_Massive_Stars/Code_v31_multiple/galaxy_generator/run_several_gala_v31.py
|
lhquirogan/Galactic_Maser_Simulator
|
cb74afd40b6d99429219c44114c74e150d3f5189
|
[
"MIT"
] | null | null | null |
import sys
from shutil import move
import os
import itertools
import shutil
kvar = raw_input("Do you want to make multiple file parameters (yes or no)? ")
if (kvar=='yes' or kvar=='y'):
def replace(file_path,file_path_old_, pattern0, pattern1, pattern2, pattern3, subst0, subst1, subst2,subst3):
with open(file_path,'w') as new_file:
with open(file_path_old_) as old_file:
for line in old_file:
if (line==pattern0):
new_file.write(line.replace(pattern0, subst0))
elif (line==pattern1):
new_file.write(line.replace(pattern1, subst1))
elif (line==pattern2):
new_file.write(line.replace(pattern2, subst2))
elif (line==pattern3):
new_file.write(line.replace(pattern3, subst3))
else:
new_file.write(line)
return
def seek(filename,parame):
coin_line=[]
with open(filename) as f:
for line in f:
if parame in line:
#if 'r0=' in line:
coin_line.append(line)
return(coin_line[0])
def add_equal(par):
for i in range(len(par)):
par[i]=par[i]+str('=')
return(par)
generator_folder=os.getcwd()+'/'
para_folder=os.getcwd()+'/parameters/'
try:
os.makedirs('parameters/')
except OSError:
if not os.path.isdir('parameters/'):
raise
varss = raw_input("Please enter the old parameter's file (should be in this directory): ")
#varss='para_v31_BeSSeLmimic.txt'
shutil.copy2(generator_folder+varss,para_folder+varss)
file_path_old=para_folder+varss
var_para = raw_input("Parameters to change? (e.g r0, v0t, vsun, v0t_vs): ")
parameterss=[x.strip() for x in var_para.split(',')]
parametersss=add_equal(parameterss)
val=[]
for i in range(len(parametersss)):
val_para= raw_input("Parameters values for %s" %parametersss[i])
values=[x.strip() for x in val_para.split(',')]
point=parametersss[i],values
val.append(point)
'''
para_change=[]
for i in range(len(val)):
for j in range(len(val[i][1])):
para_change.append(val[i][0]+val[i][1][j])
'''
parametersss_values=[]
for i in range(len(val)):
parametersss_values.append(val[i][1])
z=list(itertools.product(parametersss_values[0],parametersss_values[1], parametersss_values[2],parametersss_values[3]))
for m in range(len(z)):
name_dummy_file='/para_v31_'+str(m)+'.txt'
file_path=para_folder+name_dummy_file
pattern0=seek(file_path_old,parametersss[0])
subst0=pattern0[0:3]+z[m][0]+pattern0[-63:]
pattern1=seek(file_path_old,parametersss[1])
subst1=pattern1[0:4]+z[m][1]+pattern1[-55:]
pattern2=seek(file_path_old,parametersss[2])
subst2=pattern2[0:5]+z[m][2]+pattern2[-40:]
pattern3=seek(file_path_old,parametersss[3])
subst3=pattern3[0:7]+z[m][3]+pattern3[-98:]
replace(file_path,file_path_old, pattern0,pattern1,pattern2,pattern3,subst0,subst1,subst2,subst3)
var2 = raw_input("How many galaxies do you want per set of parameters?: ")
try:
os.remove(para_folder+varss)
except OSError:
pass
for m in range(len(z)):
print ('#################################################################')
print('Galaxy set number %s of %s "' % (m+1, len(z)))
print ('#################################################################')
var='/para_v31_'+str(m)+'.txt'
sys.argv = ['GaMe_LHQN_v31.py', var]
num_gala=int(var2)
for i in range(num_gala):
print ('#################################################################')
print('Galaxy simulation number %s of %s "' % (i+1, num_gala))
print ('#################################################################')
execfile('GaMe_LHQN_v31.py')
else:
var = raw_input("Please enter the parameter's file: ")
sys.argv = ['GaMe_LHQN_v31.py', var]
var2 = raw_input("How many galaxies do you want?: ")
num_gala=int(var2)
for i in range(num_gala):
print ('#################################################################')
print('Galaxy simulation number %s of %s "' % (i+1, num_gala))
print ('#################################################################')
execfile('GaMe_LHQN_v31.py')
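# Hedged sketch of the parameter-grid expansion used above: itertools.product
# over the per-parameter value lists yields one tuple per generated parameter
# file. The values are illustrative, not taken from any real parameter file.
#   list(itertools.product(['8.0', '8.3'], ['236', '240']))
#   -> [('8.0', '236'), ('8.0', '240'), ('8.3', '236'), ('8.3', '240')]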
| 43.368421
| 169
| 0.499798
|
import sys
from shutil import move
import os
import itertools
import shutil
kvar = raw_input("Do you want to make multiple file parameters (yes or no)? ")
if (kvar=='yes' or kvar=='y'):
def replace(file_path,file_path_old_, pattern0, pattern1, pattern2, pattern3, subst0, subst1, subst2,subst3):
with open(file_path,'w') as new_file:
with open(file_path_old_) as old_file:
for line in old_file:
if (line==pattern0):
new_file.write(line.replace(pattern0, subst0))
elif (line==pattern1):
new_file.write(line.replace(pattern1, subst1))
elif (line==pattern2):
new_file.write(line.replace(pattern2, subst2))
elif (line==pattern3):
new_file.write(line.replace(pattern3, subst3))
else:
new_file.write(line)
return
def seek(filename,parame):
coin_line=[]
with open(filename) as f:
for line in f:
if parame in line:
coin_line.append(line)
return(coin_line[0])
def add_equal(par):
for i in range(len(par)):
par[i]=par[i]+str('=')
return(par)
generator_folder=os.getcwd()+'/'
para_folder=os.getcwd()+'/parameters/'
try:
os.makedirs('parameters/')
except OSError:
if not os.path.isdir('parameters/'):
raise
varss = raw_input("Please enter the old parameter's file (should be in this directory): ")
#varss='para_v31_BeSSeLmimic.txt'
shutil.copy2(generator_folder+varss,para_folder+varss)
file_path_old=para_folder+varss
var_para = raw_input("Parameters to change? (e.g r0, v0t, vsun, v0t_vs): ")
parameterss=[x.strip() for x in var_para.split(',')]
parametersss=add_equal(parameterss)
val=[]
for i in range(len(parametersss)):
val_para= raw_input("Parameters values for %s" %parametersss[i])
values=[x.strip() for x in val_para.split(',')]
point=parametersss[i],values
val.append(point)
parametersss_values=[]
for i in range(len(val)):
parametersss_values.append(val[i][1])
z=list(itertools.product(parametersss_values[0],parametersss_values[1], parametersss_values[2],parametersss_values[3]))
for m in range(len(z)):
name_dummy_file='/para_v31_'+str(m)+'.txt'
file_path=para_folder+name_dummy_file
pattern0=seek(file_path_old,parametersss[0])
subst0=pattern0[0:3]+z[m][0]+pattern0[-63:]
pattern1=seek(file_path_old,parametersss[1])
subst1=pattern1[0:4]+z[m][1]+pattern1[-55:]
pattern2=seek(file_path_old,parametersss[2])
subst2=pattern2[0:5]+z[m][2]+pattern2[-40:]
pattern3=seek(file_path_old,parametersss[3])
subst3=pattern3[0:7]+z[m][3]+pattern3[-98:]
replace(file_path,file_path_old, pattern0,pattern1,pattern2,pattern3,subst0,subst1,subst2,subst3)
var2 = raw_input("How many galaxies do you want per set of parameters?: ")
try:
os.remove(para_folder+varss)
except OSError:
pass
for m in range(len(z)):
print ('#################################################################')
print('Galaxy set number %s of %s "' % (m+1, len(z)))
print ('#################################################################')
var='/para_v31_'+str(m)+'.txt'
sys.argv = ['GaMe_LHQN_v31.py', var]
num_gala=int(var2)
for i in range(num_gala):
print ('#################################################################')
print('Galaxy simulation number %s of %s "' % (i+1, num_gala))
print ('#################################################################')
execfile('GaMe_LHQN_v31.py')
else:
var = raw_input("Please enter the parameter's file: ")
sys.argv = ['GaMe_LHQN_v31.py', var]
var2 = raw_input("How many galaxies do you want?: ")
num_gala=int(var2)
for i in range(num_gala):
print ('#################################################################')
print('Galaxy simulation number %s of %s "' % (i+1, num_gala))
print ('#################################################################')
execfile('GaMe_LHQN_v31.py')
| true
| true
|
1c4879ab2de863efcac4c0599614b5a3a22db222
| 2,720
|
py
|
Python
|
webapp/test_before_deploy.py
|
Kandy16/img-search-cnn
|
afc787e9284f1c8cb28beb2224bf076c04bb931f
|
[
"Apache-2.0"
] | 3
|
2017-12-04T12:31:18.000Z
|
2018-08-12T23:45:55.000Z
|
webapp/test_before_deploy.py
|
Kandy16/img-search-cnn
|
afc787e9284f1c8cb28beb2224bf076c04bb931f
|
[
"Apache-2.0"
] | 1
|
2018-02-03T13:59:55.000Z
|
2018-02-03T13:59:55.000Z
|
webapp/test_before_deploy.py
|
Kandy16/img-search-cnn
|
afc787e9284f1c8cb28beb2224bf076c04bb931f
|
[
"Apache-2.0"
] | 1
|
2019-03-12T10:43:06.000Z
|
2019-03-12T10:43:06.000Z
|
import os
import config
from application.images_youtube_extract import ImagesYoutubeExtract
# --------------------Testing KNN-----------------
#from ml.knn import knn
#obj = knn.KNN()
# print(obj.get_random_images(10))
#obj.prepare_data_for_KNN(config.KNN_IMG_VECTORS_FILEPATH , config.KNN_DATASET_PATH , "" , "fc8")
# relevant_images = ["test_1742.txt"]
# print(obj.get_feedback(relevant_images , config.KNN_DATASET_PATH))
# -------------TESTING COSINE---------------
# Let's test cosine similarity, which first needs to create the nearest neighbours for each image vector. This is a preprocessing step.
#from ml.cosine import cosine_similarity_cluster
#obj_cosine = cosine_similarity_cluster.CosineSimilarityCluster()
#obj_cosine.nearest_neighbours_for_each_imagevector(config.COSINE_IMG_VECTORS_FILEPATH , config.COSINE_NEAREST_NEIGHBOUR_SAVE_PATH , model = "" , layer = "fc8")
#print(obj_cosine.get_feedback("/var/www/clone-img-search-cnn/img-search-cnn/webapp/dataset/cosine/cosine_nearest_neighbors/" , ["014999.jpg"]))
#-----------------Testing DATABASE ------------------
#from database.database import Database
#obj = Database()
#obj.fillin_database()
# ------------------Testing Database update and delete
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database.models.models import QueryString, FeatureVectorsQueryString, ApplicationVideo, Base
engine = create_engine('sqlite:///data.sqlite3')
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()
not_processed_queries = [x.query_string for x in session.query(QueryString).filter_by(application_data_collected=False).all()]
print(not_processed_queries)
# Now we will use each of these query words and update database with youtube urls
images_save_location = "/var/www/img-search-cnn/webapp/dataset/applicationData"
obj_iye = ImagesYoutubeExtract(images_save_location)
MAX_NUMBER_OF_URLS = 15
if not_processed_queries:
for query in not_processed_queries:
embed_urls , origurls = obj_iye.get_urls_search_query(query , MAX_NUMBER_OF_URLS)
print (len(origurls))
#get object for the query in database
obj_query_string = session.query(QueryString).filter_by(query_string=query).first()
obj_query_string.application_data_collected = True
session.commit()
for idx, url in enumerate(origurls):
obj_application_data = ApplicationVideo(youtube_url=url , youtube_embed_url = embed_urls[idx] ,application_videos = obj_query_string)
session.add(obj_application_data)
session.commit()
#obj_iye.extract_images_youtube(url , query) ## ORIGINAL
else:
print("Everything up to date..")
| 36.266667
| 161
| 0.744853
|
import os
import config
from application.images_youtube_extract import ImagesYoutubeExtract
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database.models.models import QueryString, FeatureVectorsQueryString, ApplicationVideo, Base
engine = create_engine('sqlite:///data.sqlite3')
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()
not_processed_queries = [x.query_string for x in session.query(QueryString).filter_by(application_data_collected=False).all()]
print(not_processed_queries)
images_save_location = "/var/www/img-search-cnn/webapp/dataset/applicationData"
obj_iye = ImagesYoutubeExtract(images_save_location)
MAX_NUMBER_OF_URLS = 15
if not_processed_queries:
for query in not_processed_queries:
embed_urls , origurls = obj_iye.get_urls_search_query(query , MAX_NUMBER_OF_URLS)
print (len(origurls))
obj_query_string = session.query(QueryString).filter_by(query_string=query).first()
obj_query_string.application_data_collected = True
session.commit()
for idx, url in enumerate(origurls):
obj_application_data = ApplicationVideo(youtube_url=url , youtube_embed_url = embed_urls[idx] ,application_videos = obj_query_string)
session.add(obj_application_data)
session.commit()
ything up to date..")
| true
| true
|
1c487ab562135f15791357682cea196226852034
| 11,883
|
py
|
Python
|
nbconvert/exporters/exporter.py
|
nelsonuhan/nbconvert
|
a534fb901ff83e0b0c0c082ff47f3de01dc651b1
|
[
"BSD-3-Clause-Clear"
] | 2
|
2020-07-22T09:28:31.000Z
|
2020-08-17T01:19:42.000Z
|
nbconvert/exporters/exporter.py
|
nelsonuhan/nbconvert
|
a534fb901ff83e0b0c0c082ff47f3de01dc651b1
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
nbconvert/exporters/exporter.py
|
nelsonuhan/nbconvert
|
a534fb901ff83e0b0c0c082ff47f3de01dc651b1
|
[
"BSD-3-Clause-Clear"
] | 3
|
2020-08-04T02:48:32.000Z
|
2020-08-17T01:20:09.000Z
|
"""This module defines a base Exporter class. For Jinja template-based export,
see templateexporter.py.
"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import print_function, absolute_import
import io
import os
import copy
import collections
import datetime
import sys
import nbformat
from traitlets.config.configurable import LoggingConfigurable
from traitlets.config import Config
from traitlets import Bool, HasTraits, Unicode, List, TraitError
from traitlets.utils.importstring import import_item
from typing import Optional
class ResourcesDict(collections.defaultdict):
def __missing__(self, key):
return ''
class FilenameExtension(Unicode):
"""A trait for filename extensions."""
default_value = u''
info_text = 'a filename extension, beginning with a dot'
def validate(self, obj, value):
# cast to proper unicode
value = super().validate(obj, value)
# check that it starts with a dot
if value and not value.startswith('.'):
msg = "FileExtension trait '{}' does not begin with a dot: {!r}"
raise TraitError(msg.format(self.name, value))
return value
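# Hedged illustration of the trait's contract; _Demo is a throwaway owner
# (names illustrative, validation fires on assignment):
#   class _Demo(HasTraits):
#       ext = FilenameExtension()
#   _Demo(ext='.html')   # accepted
#   _Demo(ext='html')    # raises TraitError: must begin with a dot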
class Exporter(LoggingConfigurable):
"""
Class containing methods that sequentially run a list of preprocessors on a
NotebookNode object and then return the modified NotebookNode object and
accompanying resources dict.
"""
enabled = Bool(True,
help = "Disable this exporter (and any exporters inherited from it)."
).tag(config=True)
file_extension = FilenameExtension(
help="Extension of the file that should be written to disk"
).tag(config=True)
# MIME type of the result file, for HTTP response headers.
# This is *not* a traitlet, because we want to be able to access it from
# the class, not just on instances.
output_mimetype = ''
# Should this converter be accessible from the notebook front-end?
# If so, should be a friendly name to display (and possibly translated).
export_from_notebook = None
#Configurability, allows the user to easily add filters and preprocessors.
preprocessors = List(
help="""List of preprocessors, by name or namespace, to enable."""
).tag(config=True)
_preprocessors = List()
default_preprocessors = List([
'nbconvert.preprocessors.TagRemovePreprocessor',
'nbconvert.preprocessors.RegexRemovePreprocessor',
'nbconvert.preprocessors.ClearOutputPreprocessor',
'nbconvert.preprocessors.ExecutePreprocessor',
'nbconvert.preprocessors.coalesce_streams',
'nbconvert.preprocessors.SVG2PDFPreprocessor',
'nbconvert.preprocessors.LatexPreprocessor',
'nbconvert.preprocessors.HighlightMagicsPreprocessor',
'nbconvert.preprocessors.ExtractOutputPreprocessor',
'nbconvert.preprocessors.ClearMetadataPreprocessor',
],
help="""List of preprocessors available by default, by name, namespace,
instance, or type."""
).tag(config=True)
def __init__(self, config=None, **kw):
"""
Public constructor
Parameters
----------
config : ``traitlets.config.Config``
User configuration instance.
`**kw`
Additional keyword arguments passed to parent __init__
"""
with_default_config = self.default_config
if config:
with_default_config.merge(config)
super().__init__(config=with_default_config, **kw)
self._init_preprocessors()
self._nb_metadata = {}
@property
def default_config(self):
return Config()
def from_notebook_node(self, nb, resources=None, **kw):
"""
Convert a notebook from a notebook node instance.
Parameters
----------
nb : `nbformat.NotebookNode`
Notebook node (dict-like with attr-access)
resources : dict
Additional resources that can be accessed read/write by
preprocessors and filters.
`**kw`
Ignored
"""
nb_copy = copy.deepcopy(nb)
resources = self._init_resources(resources)
if 'language' in nb['metadata']:
resources['language'] = nb['metadata']['language'].lower()
# Preprocess
nb_copy, resources = self._preprocess(nb_copy, resources)
notebook_name = ''
if resources is not None:
name = resources.get('metadata', {}).get('name', '')
path = resources.get('metadata', {}).get('path', '')
notebook_name = os.path.join(path, name)
self._nb_metadata[notebook_name] = nb_copy.metadata
return nb_copy, resources
def from_filename(self, filename: str, resources: Optional[dict] = None, **kw):
"""
Convert a notebook from a notebook file.
Parameters
----------
filename : str
Full filename of the notebook file to open and convert.
resources : dict
Additional resources that can be accessed read/write by
preprocessors and filters.
`**kw`
Ignored
"""
# Pull the metadata from the filesystem.
if resources is None:
resources = ResourcesDict()
if 'metadata' not in resources or resources['metadata'] == '':
resources['metadata'] = ResourcesDict()
path, basename = os.path.split(filename)
notebook_name = os.path.splitext(basename)[0]
resources['metadata']['name'] = notebook_name
resources['metadata']['path'] = path
modified_date = datetime.datetime.fromtimestamp(os.path.getmtime(filename))
# datetime.strftime date format for ipython
if sys.platform == 'win32':
date_format = "%B %d, %Y"
else:
date_format = "%B %-d, %Y"
resources['metadata']['modified_date'] = modified_date.strftime(date_format)
with io.open(filename, encoding='utf-8') as f:
return self.from_file(f, resources=resources, **kw)
def from_file(self, file_stream, resources=None, **kw):
"""
Convert a notebook from a notebook file.
Parameters
----------
file_stream : file-like object
Notebook file-like object to convert.
resources : dict
Additional resources that can be accessed read/write by
preprocessors and filters.
`**kw`
Ignored
"""
return self.from_notebook_node(nbformat.read(file_stream, as_version=4), resources=resources, **kw)
def register_preprocessor(self, preprocessor, enabled=False):
"""
Register a preprocessor.
Preprocessors are classes that act upon the notebook before it is
passed into the Jinja templating engine. Preprocessors are also
capable of passing additional information to the Jinja
templating engine.
Parameters
----------
preprocessor : `nbconvert.preprocessors.Preprocessor`
A dotted module name, a type, or an instance
enabled : bool
Mark the preprocessor as enabled
"""
if preprocessor is None:
raise TypeError('preprocessor must not be None')
isclass = isinstance(preprocessor, type)
constructed = not isclass
# Handle preprocessor's registration based on its type
if constructed and isinstance(preprocessor, str,):
# Preprocessor is a string, import the namespace and recursively call
# this register_preprocessor method
preprocessor_cls = import_item(preprocessor)
return self.register_preprocessor(preprocessor_cls, enabled)
if constructed and hasattr(preprocessor, '__call__'):
# Preprocessor is a function, no need to construct it.
# Register and return the preprocessor.
if enabled:
preprocessor.enabled = True
self._preprocessors.append(preprocessor)
return preprocessor
elif isclass and issubclass(preprocessor, HasTraits):
# Preprocessor is configurable. Make sure to pass in new default for
# the enabled flag if one was specified.
self.register_preprocessor(preprocessor(parent=self), enabled)
elif isclass:
# Preprocessor is not configurable, construct it
self.register_preprocessor(preprocessor(), enabled)
else:
# Preprocessor is an instance of something without a __call__
# attribute.
raise TypeError('preprocessor must be callable or an importable constructor, got %r' % preprocessor)
def _init_preprocessors(self):
"""
Register all of the preprocessors needed for this exporter, disabled
unless specified explicitly.
"""
self._preprocessors = []
# Load default preprocessors (not necessarily enabled by default).
for preprocessor in self.default_preprocessors:
self.register_preprocessor(preprocessor)
# Load user-specified preprocessors. Enable by default.
for preprocessor in self.preprocessors:
self.register_preprocessor(preprocessor, enabled=True)
def _init_resources(self, resources):
#Make sure the resources dict is of ResourcesDict type.
if resources is None:
resources = ResourcesDict()
if not isinstance(resources, ResourcesDict):
new_resources = ResourcesDict()
new_resources.update(resources)
resources = new_resources
#Make sure the metadata extension exists in resources
if 'metadata' in resources:
if not isinstance(resources['metadata'], ResourcesDict):
new_metadata = ResourcesDict()
new_metadata.update(resources['metadata'])
resources['metadata'] = new_metadata
else:
resources['metadata'] = ResourcesDict()
if not resources['metadata']['name']:
resources['metadata']['name'] = 'Notebook'
#Set the output extension
resources['output_extension'] = self.file_extension
return resources
def _preprocess(self, nb, resources):
"""
Preprocess the notebook before passing it into the Jinja engine.
To preprocess the notebook is to successively apply all the
enabled preprocessors. Output from each preprocessor is passed
along to the next one.
Parameters
----------
nb : notebook node
notebook that is being exported.
resources : a dict of additional resources that
can be accessed read/write by preprocessors
"""
# Do a copy.deepcopy first,
# we are never safe enough with what the preprocessors could do.
nbc = copy.deepcopy(nb)
resc = copy.deepcopy(resources)
# Run each preprocessor on the notebook. Carry the output along
# to each preprocessor
for preprocessor in self._preprocessors:
nbc, resc = preprocessor(nbc, resc)
try:
nbformat.validate(nbc, relax_add_props=True)
except nbformat.ValidationError:
self.log.error('Notebook is invalid after preprocessor %s',
preprocessor)
raise
return nbc, resc
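# Hedged usage sketch (not part of nbconvert); the filename is illustrative.
# from_filename() fills resources['metadata'] and runs every enabled
# preprocessor before returning the notebook node and resources dict.
def _example_export(filename='example.ipynb'):
exporter = Exporter()
nb, resources = exporter.from_filename(filename)
return nb, resources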
| 35.792169
| 112
| 0.621813
|
from __future__ import print_function, absolute_import
import io
import os
import copy
import collections
import datetime
import sys
import nbformat
from traitlets.config.configurable import LoggingConfigurable
from traitlets.config import Config
from traitlets import Bool, HasTraits, Unicode, List, TraitError
from traitlets.utils.importstring import import_item
from typing import Optional
class ResourcesDict(collections.defaultdict):
def __missing__(self, key):
return ''
class FilenameExtension(Unicode):
default_value = u''
info_text = 'a filename extension, beginning with a dot'
def validate(self, obj, value):
value = super().validate(obj, value)
if value and not value.startswith('.'):
msg = "FileExtension trait '{}' does not begin with a dot: {!r}"
raise TraitError(msg.format(self.name, value))
return value
class Exporter(LoggingConfigurable):
enabled = Bool(True,
help = "Disable this exporter (and any exporters inherited from it)."
).tag(config=True)
file_extension = FilenameExtension(
help="Extension of the file that should be written to disk"
).tag(config=True)
output_mimetype = ''
export_from_notebook = None
preprocessors = List(
help="""List of preprocessors, by name or namespace, to enable."""
).tag(config=True)
_preprocessors = List()
default_preprocessors = List([
'nbconvert.preprocessors.TagRemovePreprocessor',
'nbconvert.preprocessors.RegexRemovePreprocessor',
'nbconvert.preprocessors.ClearOutputPreprocessor',
'nbconvert.preprocessors.ExecutePreprocessor',
'nbconvert.preprocessors.coalesce_streams',
'nbconvert.preprocessors.SVG2PDFPreprocessor',
'nbconvert.preprocessors.LatexPreprocessor',
'nbconvert.preprocessors.HighlightMagicsPreprocessor',
'nbconvert.preprocessors.ExtractOutputPreprocessor',
'nbconvert.preprocessors.ClearMetadataPreprocessor',
],
help="""List of preprocessors available by default, by name, namespace,
instance, or type."""
).tag(config=True)
def __init__(self, config=None, **kw):
with_default_config = self.default_config
if config:
with_default_config.merge(config)
super().__init__(config=with_default_config, **kw)
self._init_preprocessors()
self._nb_metadata = {}
@property
def default_config(self):
return Config()
def from_notebook_node(self, nb, resources=None, **kw):
nb_copy = copy.deepcopy(nb)
resources = self._init_resources(resources)
if 'language' in nb['metadata']:
resources['language'] = nb['metadata']['language'].lower()
nb_copy, resources = self._preprocess(nb_copy, resources)
notebook_name = ''
if resources is not None:
name = resources.get('metadata', {}).get('name', '')
path = resources.get('metadata', {}).get('path', '')
notebook_name = os.path.join(path, name)
self._nb_metadata[notebook_name] = nb_copy.metadata
return nb_copy, resources
def from_filename(self, filename: str, resources: Optional[dict] = None, **kw):
if resources is None:
resources = ResourcesDict()
if 'metadata' not in resources or resources['metadata'] == '':
resources['metadata'] = ResourcesDict()
path, basename = os.path.split(filename)
notebook_name = os.path.splitext(basename)[0]
resources['metadata']['name'] = notebook_name
resources['metadata']['path'] = path
modified_date = datetime.datetime.fromtimestamp(os.path.getmtime(filename))
if sys.platform == 'win32':
date_format = "%B %d, %Y"
else:
date_format = "%B %-d, %Y"
resources['metadata']['modified_date'] = modified_date.strftime(date_format)
with io.open(filename, encoding='utf-8') as f:
return self.from_file(f, resources=resources, **kw)
def from_file(self, file_stream, resources=None, **kw):
return self.from_notebook_node(nbformat.read(file_stream, as_version=4), resources=resources, **kw)
def register_preprocessor(self, preprocessor, enabled=False):
if preprocessor is None:
raise TypeError('preprocessor must not be None')
isclass = isinstance(preprocessor, type)
constructed = not isclass
if constructed and isinstance(preprocessor, str,):
preprocessor_cls = import_item(preprocessor)
return self.register_preprocessor(preprocessor_cls, enabled)
if constructed and hasattr(preprocessor, '__call__'):
if enabled:
preprocessor.enabled = True
self._preprocessors.append(preprocessor)
return preprocessor
elif isclass and issubclass(preprocessor, HasTraits):
self.register_preprocessor(preprocessor(parent=self), enabled)
elif isclass:
self.register_preprocessor(preprocessor(), enabled)
else:
raise TypeError('preprocessor must be callable or an importable constructor, got %r' % preprocessor)
def _init_preprocessors(self):
self._preprocessors = []
for preprocessor in self.default_preprocessors:
self.register_preprocessor(preprocessor)
for preprocessor in self.preprocessors:
self.register_preprocessor(preprocessor, enabled=True)
def _init_resources(self, resources):
if resources is None:
resources = ResourcesDict()
if not isinstance(resources, ResourcesDict):
new_resources = ResourcesDict()
new_resources.update(resources)
resources = new_resources
if 'metadata' in resources:
if not isinstance(resources['metadata'], ResourcesDict):
new_metadata = ResourcesDict()
new_metadata.update(resources['metadata'])
resources['metadata'] = new_metadata
else:
resources['metadata'] = ResourcesDict()
if not resources['metadata']['name']:
resources['metadata']['name'] = 'Notebook'
resources['output_extension'] = self.file_extension
return resources
def _preprocess(self, nb, resources):
nbc = copy.deepcopy(nb)
resc = copy.deepcopy(resources)
for preprocessor in self._preprocessors:
nbc, resc = preprocessor(nbc, resc)
try:
nbformat.validate(nbc, relax_add_props=True)
except nbformat.ValidationError:
self.log.error('Notebook is invalid after preprocessor %s',
preprocessor)
raise
return nbc, resc
| true
| true
|
1c487b0f893f8f2f4cdc98e8592bea273cb2890d
| 1,069
|
py
|
Python
|
hypha/apply/activity/migrations/0029_migrate_old_submission_relation.py
|
maxpearl/hypha
|
e181ebadfb744aab34617bb766e746368d6f2de0
|
[
"BSD-3-Clause"
] | 20
|
2021-04-08T16:38:49.000Z
|
2022-02-09T20:05:57.000Z
|
hypha/apply/activity/migrations/0029_migrate_old_submission_relation.py
|
maxpearl/hypha
|
e181ebadfb744aab34617bb766e746368d6f2de0
|
[
"BSD-3-Clause"
] | 1,098
|
2017-12-15T11:23:03.000Z
|
2020-01-24T07:58:07.000Z
|
hypha/apply/activity/migrations/0029_migrate_old_submission_relation.py
|
maxpearl/hypha
|
e181ebadfb744aab34617bb766e746368d6f2de0
|
[
"BSD-3-Clause"
] | 17
|
2020-02-07T14:55:54.000Z
|
2021-04-04T19:32:38.000Z
|
# Generated by Django 2.0.13 on 2019-07-10 17:33
from django.db import migrations
from django.db.models import F
def submission_to_source(apps, schema_editor):
Activity = apps.get_model('activity', 'Activity')
if Activity.objects.exists():
ContentType = apps.get_model('contenttypes', 'ContentType')
content_type = ContentType.objects.get(model='applicationsubmission', app_label='funds')
Activity.objects.update(
source_object_id=F('submission_id'),
source_content_type=content_type,
)
def source_to_submission(apps, schema_editor):
Activity = apps.get_model('activity', 'Activity')
Activity.objects.update(submission_id=F('source_object_id'))
class Migration(migrations.Migration):
dependencies = [
('activity', '0028_add_new_generic_relation'),
('funds', '0065_applicationsubmission_meta_categories'),
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.RunPython(submission_to_source, source_to_submission)
]
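# Hedged sketch of the reversible data-migration pattern used above: pairing a
# forward and a reverse callable keeps the migration safe to unapply, e.g.
#   migrations.RunPython(submission_to_source, source_to_submission)
# lets `manage.py migrate activity 0028` roll the copy back.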
| 31.441176
| 96
| 0.708138
|
from django.db import migrations
from django.db.models import F
def submission_to_source(apps, schema_editor):
Activity = apps.get_model('activity', 'Activity')
if Activity.objects.exists():
ContentType = apps.get_model('contenttypes', 'ContentType')
content_type = ContentType.objects.get(model='applicationsubmission', app_label='funds')
Activity.objects.update(
source_object_id=F('submission_id'),
source_content_type=content_type,
)
def source_to_submission(apps, schema_editor):
Activity = apps.get_model('activity', 'Activity')
Activity.objects.update(submission_id=F('source_object_id'))
class Migration(migrations.Migration):
dependencies = [
('activity', '0028_add_new_generic_relation'),
('funds', '0065_applicationsubmission_meta_categories'),
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.RunPython(submission_to_source, source_to_submission)
]
| true
| true
|
1c487b221654ef04a5d3189bcad8709a2d896649
| 16,573
|
py
|
Python
|
travis_debug_session.py
|
ascdso2020/ascdso-devops-python-tools
|
a5cfe0579f7c52ac861c92044b3d7215af0b8918
|
[
"MIT"
] | null | null | null |
travis_debug_session.py
|
ascdso2020/ascdso-devops-python-tools
|
a5cfe0579f7c52ac861c92044b3d7215af0b8918
|
[
"MIT"
] | null | null | null |
travis_debug_session.py
|
ascdso2020/ascdso-devops-python-tools
|
a5cfe0579f7c52ac861c92044b3d7215af0b8918
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# vim:ts=4:sts=4:sw=4:et
#
# Author: Hari Sekhon
# Date: 2016-08-10 18:18:03 +0100 (Wed, 10 Aug 2016)
#
# https://github.com/HariSekhon/DevOps-Python-tools
#
# License: see accompanying Hari Sekhon LICENSE file
#
# If you're using my code you're welcome to connect with me on LinkedIn
# and optionally send me feedback to help steer this or other code I publish
#
# https://www.linkedin.com/in/HariSekhon
#
"""
Tool to automate initiating a Travis CI interactive debug build session via the Travis API
Tracks the creation of the debug build and drops you into an SSH shell as soon as it's available
If you get an access denied error but are sure your Travis API token is correct then it could be because the repo hasn't
been enabled for debugging yet (you may need to contact Travis at support@travis-ci.com for them to enable it for you)
If specifying a --repo be aware the API is case sensitive for repo names
As a convenience you may supply either a job id or a repo as an argument without any switch and it'll be inferred as a repo
if it contains a slash but no url (eg. HariSekhon/Nagios-Plugins); otherwise it'll be assumed to be a job id, with any leading
URL stripped, so you can simply paste the path to a failing build and it'll just work. The switch versions of --job-id and --repo
take priority as they're more explicit
Travis CI doc on debug builds:
https://docs.travis-ci.com/user/running-build-in-debug-mode/
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
#from __future__ import unicode_literals
import json
import logging
import os
import re
import sys
import time
import traceback
import git
try:
import requests
except ImportError:
print(traceback.format_exc(), end='')
sys.exit(4)
srcdir = os.path.abspath(os.path.dirname(__file__))
libdir = os.path.join(srcdir, 'pylib')
sys.path.append(libdir)
try:
# pylint: disable=wrong-import-position
from harisekhon.utils import prog, log, support_msg_api, jsonpp, qquit, isInt, isStr
from harisekhon.utils import CriticalError, UnknownError, code_error
from harisekhon.utils import validate_chars, validate_alnum, host_regex
from harisekhon import CLI
from harisekhon import RequestHandler
except ImportError as _:
print(traceback.format_exc(), end='')
sys.exit(4)
__author__ = 'Hari Sekhon'
__version__ = '0.9.1'
class TravisDebugSession(CLI):
def __init__(self):
# Python 2.x
super(TravisDebugSession, self).__init__()
# Python 3.x
# super().__init__()
self.timeout_default = 600
self.verbose_default = 2
self.job_id = None
self.travis_token = None
self.repo = None
self.headers = {
'Content-Type': 'application/json',
'Accept': 'application/json',
'Travis-API-Version': '3',
'User-Agent': prog
}
self.request_handler = RequestHandler()
def check_job_launch_response_code(self, req):
if req.status_code == 409:
error_message = self.parse_travis_error(req)
error_message += " (if you've just retriggered this you can avoid this error " + \
"using the --ignore-running switch)"
if self.get_opt('ignore_running'):
log.info('job already running (ignoring)')
else:
log.info('job already running')
raise CriticalError('{0} {1}: {2}'.format(req.status_code, req.reason, error_message))
elif req.status_code != 202:
error_message = self.parse_travis_error(req)
raise CriticalError("{0} {1}: {2}".format(req.status_code, req.reason, error_message))
def add_options(self):
self.add_opt('-J', '--job-id', default=os.getenv('JOB_ID'),
help='Travis Job ID to initiate the debug session ($JOB_ID)')
self.add_opt('-T', '--travis-token', default=os.getenv('TRAVIS_TOKEN'),
help='Travis token required to authenticate to the API ($TRAVIS_TOKEN)')
self.add_opt('-R', '--repo', default=os.getenv('TRAVIS_REPO'),
help='Travis CI repo to find last failed build and re-execute a job from it ($TRAVIS_REPO)' + \
', easier alternative to specifying a specific --job-id' + \
', convenient if working with the same repo over and over and don\'t want to copy the ' + \
'job id each time (--job-id takes priority if given as it\'s more specific). ' + \
'Be aware if running this quickly in succession you will get older and older failed ' + \
'builds as the last one will still be running, will only re-trigger finished failed builds')
self.add_opt('-i', '--ignore-running', action='store_true',
help='Ignore job already running error (avoids 409 error if you try to restart debug job)')
def process_options(self):
self.job_id = self.get_opt('job_id')
self.travis_token = self.get_opt('travis_token')
self.repo = self.get_opt('repo')
#if travis_token is None:
# self.usage('--travis-token option or ' +
# '$TRAVIS_TOKEN environment variable required to authenticate to the API')
if self.args:
# assume arg is a repo in form of HariSekhon/Nagios-Plugins but do not use url which we are more likely to
# have pasted a travis-ci url to a job, see a few lines further down
if '/' in self.args[0]:
if not self.repo:
log.info('using argument as --repo')
self.repo = self.args[0]
elif not self.job_id:
log.info('using argument as --job-id')
self.job_id = self.args[0]
if self.job_id:
# convenience to be able to lazily paste a URL like the following and still have it extract the job_id
# https://travis-ci.org/HariSekhon/Nagios-Plugins/jobs/283840596#L1079
self.job_id = self.job_id.split('/')[-1].split('#')[0]
validate_chars(self.job_id, 'job id', '0-9')
elif self.repo:
self.repo = re.sub(r'https?://travis-ci\.org/', '', self.repo)
travis_user = os.getenv('TRAVIS_USER')
if '/' not in self.repo:
self.repo = '/' + self.repo
if self.repo[0] == '/' and travis_user:
self.repo = travis_user + self.repo
validate_chars(self.repo, 'repo', r'\/\w\.-')
else:
self.repo = self.get_local_repo_name()
if not self.repo:
self.usage('--job-id / --repo not specified')
validate_alnum(self.travis_token, 'travis token', is_secret=True)
self.headers['Authorization'] = 'token {0}'.format(self.travis_token)
@staticmethod
def get_local_repo_name():
try:
_ = git.Repo('.')
for remote in _.remotes:
for url in remote.urls:
repo = '/'.join(url.split('/')[-2:])
log.debug('determined repo to be {} from remotes'.format(repo))
return repo
except git.InvalidGitRepositoryError as exc:
log.debug('failed to determine git repository locally: %s', exc)
def run(self):
if not self.job_id:
if self.repo:
latest_failed_build = self.get_latest_failed_build()
self.job_id = self.get_failing_job_id_from_build(latest_failed_build)
else:
code_error('--job-id / --repo not specified, caught late')
if self.job_id is None:
raise UnknownError('no job id was found, aborting getting SSH address')
self.launch_job()
ssh_address = self.get_ssh_address(job_id=self.job_id)
log.info('Executing: ssh -- {0}'.format(ssh_address))
sys.stdout.flush()
sys.stderr.flush()
self.disable_timeout()
os.execvp('ssh', ['--', ssh_address])
def launch_job(self):
log.info('triggering debug job {job_id}'.format(job_id=self.job_id))
url = 'https://api.travis-ci.org/job/{job_id}/debug'.format(job_id=self.job_id)
self.request_handler.check_response_code = self.check_job_launch_response_code
self.request_handler.post(url, headers=self.headers)
@staticmethod
def parse_travis_error(req):
error_message = ''
try:
_ = json.loads(req.content)
error_message = _['error_message']
except ValueError:
if isStr(req.content) and len(req.content.split('\n')) == 1:
error_message = req.content
return error_message
def get_latest_failed_build(self):
log.info('getting latest failed build')
# gets 404 unless replacing the slash
url = 'https://api.travis-ci.org/repo/{repo}/builds'.format(repo=self.repo.replace('/', '%2F'))
# request returns blank without authorization header
req = self.request_handler.get(url, headers=self.headers)
if log.isEnabledFor(logging.DEBUG):
log.debug("\n%s", jsonpp(req.content))
try:
latest_build = self.parse_latest_failed_build(req.content)
except (KeyError, ValueError):
exception = traceback.format_exc().split('\n')[-2]
# this covers up the traceback info and makes it harder to debug
#raise UnknownError('failed to parse expected json response from Travis CI API: {0}'.format(exception))
qquit('UNKNOWN', 'failed to parse expected json response from Travis CI API: {0}. {1}'.
format(exception, support_msg_api()))
return latest_build
def parse_latest_failed_build(self, content):
log.debug('parsing latest failed build info')
build = None
json_data = json.loads(content)
if not json_data or \
'builds' not in json_data or \
not json_data['builds']:
qquit('UNKNOWN', "no Travis CI builds returned by the Travis API."
+ " Either the specified repo '{0}' doesn't exist".format(self.repo)
+ " or no builds have happened yet?"
+ " Also remember the repo is case sensitive, for example 'harisekhon/nagios-plugins' returns this"
+ " blank build set whereas 'HariSekhon/Nagios-Plugins' succeeds"
+ " in returning latest builds information"
)
builds = json_data['builds']
# get latest finished failed build
last_build_number = None
found_newer_passing_build = False
for _ in builds:
# API returns most recent build first so just take the first one that is completed
# extra check to make sure we're getting the very latest build number and API hasn't changed
build_number = _['number']
if not isInt(build_number):
raise UnknownError('build number returned is not an integer!')
build_number = int(build_number)
if last_build_number is None:
last_build_number = int(build_number) + 1
if build_number >= last_build_number:
raise UnknownError('build number returned is out of sequence, cannot be >= last build returned' + \
'{0}'.format(support_msg_api()))
last_build_number = build_number
if _['state'] == 'passed':
if build is None and not found_newer_passing_build:
log.warning("found more recent successful build #%s with state = '%s'" + \
", you may not need to debug this build any more", _['number'], _['state'])
found_newer_passing_build = True
elif _['state'] in ('failed', 'errored'):
if build is None:
build = _
# by continuing to iterate through the rest of the builds we can check
# their last_build numbers are descending for extra sanity checking
#break
if build is None:
qquit('UNKNOWN', 'no recent failed builds found' + \
', you may need to specify the --job-id explicitly as shown in the Travis CI UI')
if log.isEnabledFor(logging.DEBUG):
log.debug("latest failed build:\n%s", jsonpp(build))
return build
def get_failing_job_id_from_build(self, build):
log.info('getting failed job id for build #%s', build['number'])
if 'jobs' not in build:
raise UnknownError('no jobs field found in build, {0}'.format(support_msg_api()))
for _ in build['jobs']:
_id = _['id']
url = 'https://api.travis-ci.org/jobs/{id}'.format(id=_id)
req = self.request_handler.get(url)
# if this raises ValueError it'll be caught by run handler
job = json.loads(req.content)
if log.isEnabledFor(logging.DEBUG):
log.debug("job id %s status:\n%s", _id, jsonpp(job))
if job['state'] == 'finished' and job['status'] in (None, 1, '1'):
return _id
raise UnknownError('no failed job found in build {0}'.format(build['number']))
def get_ssh_address(self, job_id):
log.info('getting SSH address from triggered debug build')
max_tries = int(self.timeout / 4)
ssh_address = None  # ensure the name is bound even if max_tries < 1
for i in range(1, max_tries + 1):
log.info('try {0}/{1}: checking job log for ssh address...'.format(i, max_tries))
ssh_address = self.get_ssh_address_attempt(job_id=job_id)
if ssh_address:
return ssh_address
time.sleep(3)
if ssh_address is None:
raise CriticalError('ssh address not found in output from Travis API. {0}'.format(support_msg_api()))
def get_ssh_address_attempt(self, job_id):
#url = 'https://travis-ci.org/{repo}/jobs/{job_id}'.format(repo=repo, job_id=job_id)
url = 'https://api.travis-ci.org/jobs/{job_id}/log.txt?deansi=true'.format(job_id=job_id)
log.debug('GET %s' % url)
try:
req = requests.get(url)
except requests.exceptions.RequestException as _:
raise CriticalError(_)
log.debug("response: %s %s", req.status_code, req.reason)
log.debug("content:\n%s\n%s\n%s", '=' * 80, req.content.strip(), '=' * 80)
# Travis CI behaviour has changed from 200 with no content indicating build log empty, not started yet
# to now returning "500 Internal Server Error", content: "Sorry, we experienced an error."
if req.status_code == 500:
# don't output 500 it will confuse users in to thinking there is a real error which 500 usually indicates
#log.info('500 internal server error, build not started yet')
log.info('build not started yet')
return None
if req.status_code != 200:
error_message = self.parse_travis_error(req)
raise CriticalError('{0} {1}: {2}'.format(req.status_code, req.reason, error_message))
content = req.content
if not content:
log.info('build log empty, build not started yet')
return None
# find last non-blank line - do this after checking for no content otherwise will hit StopIteration
last_line = next(_ for _ in reversed(content.split('\n')) if _)
#log.debug('last line: %s', last_line)
# 'Done: Job Cancelled'
if 'Job Cancelled' in last_line:
raise CriticalError(last_line)
elif 'Your build has been stopped' in last_line:
raise CriticalError(last_line)
# Done. Your build exited with 0
elif 'build exited with' in last_line:
raise CriticalError(last_line)
# The build has been terminated
elif 'build has been terminated' in last_line:
raise CriticalError(last_line)
ssh_address = None
regex_ssh = re.compile(r'^\s*ssh\s+(\w+\@{host_regex})\s*$'.format(host_regex=host_regex), re.I)
for line in content.split('\n'):
match = regex_ssh.match(line)
if match:
ssh_address = match.group(1)
break
return ssh_address
if __name__ == '__main__':
TravisDebugSession().main()
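# Hedged invocation sketch: with $TRAVIS_TOKEN exported, the repo can be
# omitted entirely and get_local_repo_name() above infers it from the local
# git remotes (values illustrative):
#   export TRAVIS_TOKEN=xxxx
#   ./travis_debug_session.py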
| 46.816384
| 120
| 0.612442
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import logging
import os
import re
import sys
import time
import traceback
import git
try:
import requests
except ImportError:
print(traceback.format_exc(), end='')
sys.exit(4)
srcdir = os.path.abspath(os.path.dirname(__file__))
libdir = os.path.join(srcdir, 'pylib')
sys.path.append(libdir)
try:
from harisekhon.utils import prog, log, support_msg_api, jsonpp, qquit, isInt, isStr
from harisekhon.utils import CriticalError, UnknownError, code_error
from harisekhon.utils import validate_chars, validate_alnum, host_regex
from harisekhon import CLI
from harisekhon import RequestHandler
except ImportError as _:
print(traceback.format_exc(), end='')
sys.exit(4)
__author__ = 'Hari Sekhon'
__version__ = '0.9.1'
class TravisDebugSession(CLI):
def __init__(self):
super(TravisDebugSession, self).__init__()
self.timeout_default = 600
self.verbose_default = 2
self.job_id = None
self.travis_token = None
self.repo = None
self.headers = {
'Content-Type': 'application/json',
'Accept': 'application/json',
'Travis-API-Version': '3',
'User-Agent': prog
}
self.request_handler = RequestHandler()
def check_job_launch_response_code(self, req):
if req.status_code == 409:
error_message = self.parse_travis_error(req)
error_message += " (if you've just retriggered this you can avoid this error " + \
"using the --ignore-running switch)"
if self.get_opt('ignore_running'):
log.info('job already running (ignoring)')
else:
log.info('job already running')
raise CriticalError('{0} {1}: {2}'.format(req.status_code, req.reason, error_message))
elif req.status_code != 202:
error_message = self.parse_travis_error(req)
raise CriticalError("{0} {1}: {2}".format(req.status_code, req.reason, error_message))
def add_options(self):
self.add_opt('-J', '--job-id', default=os.getenv('JOB_ID'),
help='Travis Job ID to initiate the debug session ($JOB_ID)')
self.add_opt('-T', '--travis-token', default=os.getenv('TRAVIS_TOKEN'),
help='Travis token required to authenticate to the API ($TRAVIS_TOKEN)')
self.add_opt('-R', '--repo', default=os.getenv('TRAVIS_REPO'),
help='Travis CI repo to find last failed build and re-execute a job from it ($TRAVIS_REPO)' + \
', easier alternative to specifying a specific --job-id' + \
', convenient if working with the same repo over and over and don\'t want to copy the ' + \
'job id each time (--job-id takes priority if given as it\'s more specific). ' + \
'Be aware if running this quickly in succession you will get older and older failed ' + \
'builds as the last one will still be running, will only re-trigger finished failed builds')
self.add_opt('-i', '--ignore-running', action='store_true',
help='Ignore job already running error (avoids 409 error if you try to restart debug job)')
def process_options(self):
self.job_id = self.get_opt('job_id')
self.travis_token = self.get_opt('travis_token')
self.repo = self.get_opt('repo')
#if travis_token is None:
# self.usage('--travis-token option or ' +
# '$TRAVIS_TOKEN environment variable required to authenticate to the API')
if self.args:
# assume arg is a repo in form of HariSekhon/Nagios-Plugins but do not use url which we are more likely to
# have pasted a travis-ci url to a job, see a few lines further down
if '/' in self.args[0]:
if not self.repo:
log.info('using argument as --repo')
self.repo = self.args[0]
elif not self.job_id:
log.info('using argument as --job-id')
self.job_id = self.args[0]
if self.job_id:
# convenience to be able to lazily paste a URL like the following and still have it extract the job_id
# https://travis-ci.org/HariSekhon/Nagios-Plugins/jobs/283840596#L1079
self.job_id = self.job_id.split('/')[-1].split('#')[0]
validate_chars(self.job_id, 'job id', '0-9')
elif self.repo:
self.repo = re.sub(r'https?://travis-ci\.org/', '', self.repo)
travis_user = os.getenv('TRAVIS_USER')
if '/' not in self.repo:
self.repo = '/' + self.repo
if self.repo[0] == '/' and travis_user:
self.repo = travis_user + self.repo
validate_chars(self.repo, 'repo', r'\/\w\.-')
else:
self.repo = self.get_local_repo_name()
if not self.repo:
self.usage('--job-id / --repo not specified')
validate_alnum(self.travis_token, 'travis token', is_secret=True)
self.headers['Authorization'] = 'token {0}'.format(self.travis_token)
@staticmethod
def get_local_repo_name():
try:
_ = git.Repo('.')
for remote in _.remotes:
for url in remote.urls:
repo = '/'.join(url.split('/')[-2:])
log.debug('determined repo to be {} from remotes'.format(repo))
return repo
except git.InvalidGitRepositoryError as exc:
log.debug('failed to determine git repository locally: %s', exc)
def run(self):
if not self.job_id:
if self.repo:
latest_failed_build = self.get_latest_failed_build()
self.job_id = self.get_failing_job_id_from_build(latest_failed_build)
else:
code_error('--job-id / --repo not specified, caught late')
if self.job_id is None:
raise UnknownError('no job id was found, aborting getting SSH address')
self.launch_job()
ssh_address = self.get_ssh_address(job_id=self.job_id)
log.info('Executing: ssh -- {0}'.format(ssh_address))
sys.stdout.flush()
sys.stderr.flush()
self.disable_timeout()
os.execvp('ssh', ['--', ssh_address])
def launch_job(self):
log.info('triggering debug job {job_id}'.format(job_id=self.job_id))
url = 'https://api.travis-ci.org/job/{job_id}/debug'.format(job_id=self.job_id)
self.request_handler.check_response_code = self.check_job_launch_response_code
self.request_handler.post(url, headers=self.headers)
@staticmethod
def parse_travis_error(req):
error_message = ''
try:
_ = json.loads(req.content)
error_message = _['error_message']
except ValueError:
if isStr(req.content) and len(req.content.split('\n')) == 1:
error_message = req.content
return error_message
def get_latest_failed_build(self):
log.info('getting latest failed build')
# gets 404 unless replacing the slash
url = 'https://api.travis-ci.org/repo/{repo}/builds'.format(repo=self.repo.replace('/', '%2F'))
# request returns blank without authorization header
req = self.request_handler.get(url, headers=self.headers)
if log.isEnabledFor(logging.DEBUG):
log.debug("\n%s", jsonpp(req.content))
try:
latest_build = self.parse_latest_failed_build(req.content)
except (KeyError, ValueError):
exception = traceback.format_exc().split('\n')[-2]
# this covers up the traceback info and makes it harder to debug
#raise UnknownError('failed to parse expected json response from Travis CI API: {0}'.format(exception))
qquit('UNKNOWN', 'failed to parse expected json response from Travis CI API: {0}. {1}'.
format(exception, support_msg_api()))
return latest_build
def parse_latest_failed_build(self, content):
log.debug('parsing latest failed build info')
build = None
json_data = json.loads(content)
if not json_data or \
'builds' not in json_data or \
not json_data['builds']:
qquit('UNKNOWN', "no Travis CI builds returned by the Travis API."
+ " Either the specified repo '{0}' doesn't exist".format(self.repo)
+ " or no builds have happened yet?"
+ " Also remember the repo is case sensitive, for example 'harisekhon/nagios-plugins' returns this"
+ " blank build set whereas 'HariSekhon/Nagios-Plugins' succeeds"
+ " in returning latest builds information"
)
builds = json_data['builds']
last_build_number = None
found_newer_passing_build = False
for _ in builds:
build_number = _['number']
if not isInt(build_number):
raise UnknownError('build number returned is not an integer!')
build_number = int(build_number)
if last_build_number is None:
last_build_number = int(build_number) + 1
if build_number >= last_build_number:
raise UnknownError('build number returned is out of sequence, cannot be >= last build returned' + \
'{0}'.format(support_msg_api()))
last_build_number = build_number
if _['state'] == 'passed':
if build is None and not found_newer_passing_build:
log.warning("found more recent successful build #%s with state = '%s'" + \
", you may not need to debug this build any more", _['number'], _['state'])
found_newer_passing_build = True
elif _['state'] in ('failed', 'errored'):
if build is None:
build = _
if build is None:
qquit('UNKNOWN', 'no recent failed builds found' + \
', you may need to specify the --job-id explicitly as shown in the Travis CI UI')
if log.isEnabledFor(logging.DEBUG):
log.debug("latest failed build:\n%s", jsonpp(build))
return build
def get_failing_job_id_from_build(self, build):
log.info('getting failed job id for build #%s', build['number'])
if 'jobs' not in build:
raise UnknownError('no jobs field found in build, {0}'.format(support_msg_api()))
for _ in build['jobs']:
_id = _['id']
url = 'https://api.travis-ci.org/jobs/{id}'.format(id=_id)
req = self.request_handler.get(url)
job = json.loads(req.content)
if log.isEnabledFor(logging.DEBUG):
log.debug("job id %s status:\n%s", _id, jsonpp(job))
if job['state'] == 'finished' and job['status'] in (None, 1, '1'):
return _id
raise UnknownError('no failed job found in build {0}'.format(build['number']))
def get_ssh_address(self, job_id):
log.info('getting SSH address from triggered debug build')
max_tries = int(self.timeout / 4)
ssh_address = None
for i in range(1, max_tries + 1):
log.info('try {0}/{1}: checking job log for ssh address...'.format(i, max_tries))
ssh_address = self.get_ssh_address_attempt(job_id=job_id)
if ssh_address:
return ssh_address
time.sleep(3)
        # the loop above returns as soon as an address is found, so reaching
        # this point means none was found (and avoids a possibly-unbound name)
        raise CriticalError('ssh address not found in output from Travis API. {0}'.format(support_msg_api()))
def get_ssh_address_attempt(self, job_id):
#url = 'https://travis-ci.org/{repo}/jobs/{job_id}'.format(repo=repo, job_id=job_id)
url = 'https://api.travis-ci.org/jobs/{job_id}/log.txt?deansi=true'.format(job_id=job_id)
        log.debug('GET %s', url)
try:
req = requests.get(url)
except requests.exceptions.RequestException as _:
raise CriticalError(_)
log.debug("response: %s %s", req.status_code, req.reason)
log.debug("content:\n%s\n%s\n%s", '=' * 80, req.content.strip(), '=' * 80)
# Travis CI behaviour has changed from 200 with no content indicating build log empty, not started yet
# to now returning "500 Internal Server Error", content: "Sorry, we experienced an error."
if req.status_code == 500:
            # don't output the 500, as it would confuse users into thinking there is a real error, which a 500 usually indicates
log.info('build not started yet')
return None
if req.status_code != 200:
error_message = self.parse_travis_error(req)
raise CriticalError('{0} {1}: {2}'.format(req.status_code, req.reason, error_message))
content = req.content
if not content:
log.info('build log empty, build not started yet')
return None
last_line = next(_ for _ in reversed(content.split('\n')) if _)
if 'Job Cancelled' in last_line:
raise CriticalError(last_line)
elif 'Your build has been stopped' in last_line:
raise CriticalError(last_line)
elif 'build exited with' in last_line:
raise CriticalError(last_line)
elif 'build has been terminated' in last_line:
raise CriticalError(last_line)
ssh_address = None
regex_ssh = re.compile(r'^\s*ssh\s+(\w+\@{host_regex})\s*$'.format(host_regex=host_regex), re.I)
for line in content.split('\n'):
match = regex_ssh.match(line)
if match:
ssh_address = match.group(1)
break
return ssh_address
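    @staticmethod
    def _sketch_extract_ssh_address(log_text):
        # Standalone sketch of the extraction above, with a simplified host
        # pattern (assumption: the real `host_regex` from the harisekhon lib
        # is stricter than this [A-Za-z0-9.-]+ stand-in).
        regex = re.compile(r'^\s*ssh\s+(\w+@[A-Za-z0-9.-]+)\s*$', re.I | re.M)
        match = regex.search(log_text)
        return match.group(1) if match else None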
if __name__ == '__main__':
TravisDebugSession().main()
| true
| true
|
1c487c37c3fcbb9af30f2325f91729c3ffc8cac4
| 286
|
py
|
Python
|
tests/_support/docstrings.py
|
uttamrc/invoke
|
61a580fc9919700305411e492f6fbfee7f4912dc
|
[
"BSD-2-Clause"
] | 3,187
|
2015-01-02T13:41:50.000Z
|
2022-03-28T19:22:49.000Z
|
tests/_support/docstrings.py
|
uttamrc/invoke
|
61a580fc9919700305411e492f6fbfee7f4912dc
|
[
"BSD-2-Clause"
] | 648
|
2015-01-02T23:13:21.000Z
|
2022-03-30T23:32:13.000Z
|
tests/_support/docstrings.py
|
uttamrc/invoke
|
61a580fc9919700305411e492f6fbfee7f4912dc
|
[
"BSD-2-Clause"
] | 347
|
2015-01-03T23:04:05.000Z
|
2022-03-25T17:35:24.000Z
|
from invoke import task
@task
def no_docstring(c):
pass
@task
def one_line(c):
"""foo
"""
@task
def two_lines(c):
"""foo
bar
"""
@task
def leading_whitespace(c):
"""
foo
"""
@task(aliases=("a", "b"))
def with_aliases(c):
"""foo
"""
| 8.666667
| 26
| 0.513986
|
from invoke import task
@task
def no_docstring(c):
pass
@task
def one_line(c):
@task
def two_lines(c):
@task
def leading_whitespace(c):
@task(aliases=("a", "b"))
def with_aliases(c):
| true
| true
|
1c487c83405b4cc960c8239970d09f8563952a6b
| 2,233
|
py
|
Python
|
Cura/Cura/plugins/PreviewStage/PreviewStage.py
|
TIAO-JI-FU/3d-printing-with-moveo-1
|
100ecfd1208fe1890f8bada946145d716b2298eb
|
[
"MIT"
] | null | null | null |
Cura/Cura/plugins/PreviewStage/PreviewStage.py
|
TIAO-JI-FU/3d-printing-with-moveo-1
|
100ecfd1208fe1890f8bada946145d716b2298eb
|
[
"MIT"
] | null | null | null |
Cura/Cura/plugins/PreviewStage/PreviewStage.py
|
TIAO-JI-FU/3d-printing-with-moveo-1
|
100ecfd1208fe1890f8bada946145d716b2298eb
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2018 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
import os.path
from UM.Qt.QtApplication import QtApplication
from cura.Stages.CuraStage import CuraStage
from typing import TYPE_CHECKING, Optional
if TYPE_CHECKING:
from UM.View.View import View
## Displays a preview of what you're about to print.
#
# The Python component of this stage just loads PreviewMain.qml for display
# when the stage is selected, and makes sure that it reverts to the previous
# view when the previous stage is activated.
class PreviewStage(CuraStage):
def __init__(self, application: QtApplication, parent = None) -> None:
super().__init__(parent)
self._application = application
self._application.engineCreatedSignal.connect(self._engineCreated)
self._previously_active_view = None # type: Optional[View]
## When selecting the stage, remember which was the previous view so that
# we can revert to that view when we go out of the stage later.
def onStageSelected(self) -> None:
self._previously_active_view = self._application.getController().getActiveView()
## Called when going to a different stage (away from the Preview Stage).
#
# When going to a different stage, the view should be reverted to what it
# was before. Normally, that just reverts it to solid view.
def onStageDeselected(self) -> None:
if self._previously_active_view is not None:
self._application.getController().setActiveView(self._previously_active_view.getPluginId())
self._previously_active_view = None
## Delayed load of the QML files.
#
# We need to make sure that the QML engine is running before we can load
# these.
def _engineCreated(self) -> None:
plugin_path = self._application.getPluginRegistry().getPluginPath(self.getPluginId())
if plugin_path is not None:
menu_component_path = os.path.join(plugin_path, "PreviewMenu.qml")
main_component_path = os.path.join(plugin_path, "PreviewMain.qml")
self.addDisplayComponent("menu", menu_component_path)
self.addDisplayComponent("main", main_component_path)
| 42.942308
| 103
| 0.717868
|
import os.path
from UM.Qt.QtApplication import QtApplication
from cura.Stages.CuraStage import CuraStage
from typing import TYPE_CHECKING, Optional
if TYPE_CHECKING:
from UM.View.View import View
## Displays a preview of what you're about to print.
#
# The Python component of this stage just loads PreviewMain.qml for display
# when the stage is selected, and makes sure that it reverts to the previous
# view when the previous stage is activated.
class PreviewStage(CuraStage):
def __init__(self, application: QtApplication, parent = None) -> None:
super().__init__(parent)
self._application = application
self._application.engineCreatedSignal.connect(self._engineCreated)
self._previously_active_view = None # type: Optional[View]
## When selecting the stage, remember which was the previous view so that
# we can revert to that view when we go out of the stage later.
def onStageSelected(self) -> None:
self._previously_active_view = self._application.getController().getActiveView()
## Called when going to a different stage (away from the Preview Stage).
#
# When going to a different stage, the view should be reverted to what it
# was before. Normally, that just reverts it to solid view.
def onStageDeselected(self) -> None:
if self._previously_active_view is not None:
self._application.getController().setActiveView(self._previously_active_view.getPluginId())
self._previously_active_view = None
## Delayed load of the QML files.
#
# We need to make sure that the QML engine is running before we can load
# these.
def _engineCreated(self) -> None:
plugin_path = self._application.getPluginRegistry().getPluginPath(self.getPluginId())
if plugin_path is not None:
menu_component_path = os.path.join(plugin_path, "PreviewMenu.qml")
main_component_path = os.path.join(plugin_path, "PreviewMain.qml")
self.addDisplayComponent("menu", menu_component_path)
self.addDisplayComponent("main", main_component_path)
| true
| true
|
1c487dc61b9ee3171cbff46d329fc9b97936f78e
| 386
|
py
|
Python
|
profiles_api/urls.py
|
doglzz0806/profiles-rest-api
|
11f9ee0ee6e278570b1edab30e27d0a41382ffca
|
[
"MIT"
] | 1
|
2021-03-17T00:21:20.000Z
|
2021-03-17T00:21:20.000Z
|
profiles_api/urls.py
|
doglzz0806/profiles-rest-api
|
11f9ee0ee6e278570b1edab30e27d0a41382ffca
|
[
"MIT"
] | null | null | null |
profiles_api/urls.py
|
doglzz0806/profiles-rest-api
|
11f9ee0ee6e278570b1edab30e27d0a41382ffca
|
[
"MIT"
] | null | null | null |
from django.urls import path, include
from rest_framework.routers import DefaultRouter
from profiles_api import views
router = DefaultRouter()
router.register('hello-viewset', views.HelloViewSet, base_name='hello-viewset')
router.register('profile', views.UserProfileViewSet)
urlpatterns = [
path('hello-view/',views.HelloApiView.as_view()),
path('', include(router.urls))
]
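# Note: Django REST Framework renamed `base_name` to `basename` in DRF 3.9 (the
# old spelling was later removed), so on current DRF the first registration is:
# router.register('hello-viewset', views.HelloViewSet, basename='hello-viewset')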
| 27.571429
| 79
| 0.772021
|
from django.urls import path, include
from rest_framework.routers import DefaultRouter
from profiles_api import views
router = DefaultRouter()
router.register('hello-viewset', views.HelloViewSet, base_name='hello-viewset')
router.register('profile', views.UserProfileViewSet)
urlpatterns = [
path('hello-view/',views.HelloApiView.as_view()),
path('', include(router.urls))
]
| true
| true
|
1c487e4f6dde2b90b006bc713d842ceeb8176a70
| 4,484
|
py
|
Python
|
aiotruenas_client/websockets/jail.py
|
colemamd/aiotruenas-client
|
26f754fcceadbacbfc87a19465d5a8d035e4bd00
|
[
"MIT"
] | 11
|
2020-12-30T00:33:01.000Z
|
2022-01-25T07:56:55.000Z
|
aiotruenas_client/websockets/jail.py
|
colemamd/aiotruenas-client
|
26f754fcceadbacbfc87a19465d5a8d035e4bd00
|
[
"MIT"
] | 35
|
2020-09-29T07:45:49.000Z
|
2022-03-29T15:02:52.000Z
|
aiotruenas_client/websockets/jail.py
|
colemamd/aiotruenas-client
|
26f754fcceadbacbfc87a19465d5a8d035e4bd00
|
[
"MIT"
] | 3
|
2020-12-30T18:19:03.000Z
|
2021-09-18T17:32:22.000Z
|
from __future__ import annotations
from typing import Any, Dict, List
from ..jail import Jail, JailStatus
from .interfaces import StateFetcher, WebsocketMachine
class CachingJail(Jail):
def __init__(self, fetcher: CachingJailStateFetcher, name: str) -> None:
super().__init__(name=name)
self._fetcher = fetcher
self._cached_state = self._state
async def start(self) -> bool:
"""Starts a stopped jail."""
return await self._fetcher.start_jail(self)
async def stop(self, force: bool = False) -> bool:
"""Stops a running jail."""
return await self._fetcher.stop_jail(self, force)
async def restart(self) -> bool:
"""Restarts a running jail."""
return await self._fetcher.restart_jail(self)
@property
def available(self) -> bool:
"""If the jail exists on the server."""
return self._name in self._fetcher._state # type: ignore
@property
def status(self) -> JailStatus:
"""The status of the jail."""
assert self.available
return JailStatus.fromValue(self._state["state"])
@property
def _state(self) -> Dict[str, Any]:
"""The state of the jail, according to the Machine."""
return self._fetcher.get_cached_state(self)
class CachingJailStateFetcher(StateFetcher):
def __init__(self, machine: WebsocketMachine) -> None:
self._parent = machine
self._state: Dict[str, Dict[str, Any]] = {}
self._cached_jails: List[CachingJail] = []
@classmethod
async def create(
cls,
machine: WebsocketMachine,
) -> CachingJailStateFetcher:
cjsf = CachingJailStateFetcher(machine=machine)
return cjsf
async def get_jails(self) -> List[CachingJail]:
"""Returns a list of jails on the host."""
self._state = await self._fetch_jails()
self._update_properties_from_state()
return self.jails
@property
def jails(self) -> List[CachingJail]:
"""Returns a list of jails on the host."""
return self._cached_jails
async def start_jail(self, jail: Jail) -> bool:
if jail.status != JailStatus.DOWN:
raise RuntimeError(f"Jail {jail.name} is already running.")
job_id = await self._parent.invoke_method(
"jail.start",
[jail.name],
)
job = await self._parent.wait_for_job(id=job_id)
if job.result:
self._state[jail.name]["state"] = JailStatus.UP.value
return job.result_or_raise_error
async def stop_jail(self, jail: Jail, force: bool = False) -> bool:
if jail.status != JailStatus.UP:
raise RuntimeError(f"Jail {jail.name} is not running.")
job_id = await self._parent.invoke_method("jail.stop", [jail.name, force])
job = await self._parent.wait_for_job(id=job_id)
if job.result:
self._state[jail.name]["state"] = JailStatus.DOWN.value
# Stop seems to return `None`, so check for that if we are not throwing.
        return job.result_or_raise_error is None
async def restart_jail(self, jail: Jail) -> bool:
if jail.status != JailStatus.UP:
raise RuntimeError(f"Jail {jail.name} is not running.")
job_id = await self._parent.invoke_method("jail.restart", [jail.name])
job = await self._parent.wait_for_job(id=job_id)
# TODO: update cached state
return job.result_or_raise_error
def get_cached_state(self, jail: Jail) -> Dict[str, Any]:
return self._state[jail.name]
async def _fetch_jails(self) -> Dict[str, Dict[str, Any]]:
jails = await self._parent.invoke_method(
"jail.query",
[
[],
{
"select": [
"id",
"state",
],
},
],
)
return {jail["id"]: jail for jail in jails}
def _update_properties_from_state(self) -> None:
available_jails_by_name = {
jail.name: jail for jail in self._cached_jails if jail.available
}
current_jail_names = {jail_name for jail_name in self._state}
jail_names_to_add = current_jail_names - set(available_jails_by_name)
self._cached_jails = [*available_jails_by_name.values()] + [
CachingJail(fetcher=self, name=jail_name) for jail_name in jail_names_to_add
]
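# A minimal usage sketch (assumption: `machine` is an already-connected
# WebsocketMachine from this package; obtaining one is out of scope here).
async def _example_list_jails(machine: WebsocketMachine) -> None:
    fetcher = await CachingJailStateFetcher.create(machine=machine)
    for jail in await fetcher.get_jails():
        # each jail is a CachingJail reading from the fetcher's cached state
        print(jail.name, jail.status)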
| 34.75969
| 88
| 0.615076
|
from __future__ import annotations
from typing import Any, Dict, List
from ..jail import Jail, JailStatus
from .interfaces import StateFetcher, WebsocketMachine
class CachingJail(Jail):
def __init__(self, fetcher: CachingJailStateFetcher, name: str) -> None:
super().__init__(name=name)
self._fetcher = fetcher
self._cached_state = self._state
async def start(self) -> bool:
return await self._fetcher.start_jail(self)
async def stop(self, force: bool = False) -> bool:
return await self._fetcher.stop_jail(self, force)
async def restart(self) -> bool:
return await self._fetcher.restart_jail(self)
@property
def available(self) -> bool:
return self._name in self._fetcher._state
@property
def status(self) -> JailStatus:
assert self.available
return JailStatus.fromValue(self._state["state"])
@property
def _state(self) -> Dict[str, Any]:
return self._fetcher.get_cached_state(self)
class CachingJailStateFetcher(StateFetcher):
def __init__(self, machine: WebsocketMachine) -> None:
self._parent = machine
self._state: Dict[str, Dict[str, Any]] = {}
self._cached_jails: List[CachingJail] = []
@classmethod
async def create(
cls,
machine: WebsocketMachine,
) -> CachingJailStateFetcher:
cjsf = CachingJailStateFetcher(machine=machine)
return cjsf
async def get_jails(self) -> List[CachingJail]:
self._state = await self._fetch_jails()
self._update_properties_from_state()
return self.jails
@property
def jails(self) -> List[CachingJail]:
return self._cached_jails
async def start_jail(self, jail: Jail) -> bool:
if jail.status != JailStatus.DOWN:
raise RuntimeError(f"Jail {jail.name} is already running.")
job_id = await self._parent.invoke_method(
"jail.start",
[jail.name],
)
job = await self._parent.wait_for_job(id=job_id)
if job.result:
self._state[jail.name]["state"] = JailStatus.UP.value
return job.result_or_raise_error
async def stop_jail(self, jail: Jail, force: bool = False) -> bool:
if jail.status != JailStatus.UP:
raise RuntimeError(f"Jail {jail.name} is not running.")
job_id = await self._parent.invoke_method("jail.stop", [jail.name, force])
job = await self._parent.wait_for_job(id=job_id)
if job.result:
self._state[jail.name]["state"] = JailStatus.DOWN.value
        return job.result_or_raise_error is None
async def restart_jail(self, jail: Jail) -> bool:
if jail.status != JailStatus.UP:
raise RuntimeError(f"Jail {jail.name} is not running.")
job_id = await self._parent.invoke_method("jail.restart", [jail.name])
job = await self._parent.wait_for_job(id=job_id)
return job.result_or_raise_error
def get_cached_state(self, jail: Jail) -> Dict[str, Any]:
return self._state[jail.name]
async def _fetch_jails(self) -> Dict[str, Dict[str, Any]]:
jails = await self._parent.invoke_method(
"jail.query",
[
[],
{
"select": [
"id",
"state",
],
},
],
)
return {jail["id"]: jail for jail in jails}
def _update_properties_from_state(self) -> None:
available_jails_by_name = {
jail.name: jail for jail in self._cached_jails if jail.available
}
current_jail_names = {jail_name for jail_name in self._state}
jail_names_to_add = current_jail_names - set(available_jails_by_name)
self._cached_jails = [*available_jails_by_name.values()] + [
CachingJail(fetcher=self, name=jail_name) for jail_name in jail_names_to_add
]
| true
| true
|
1c4880339b3def518db0fff6ca51864258ccf0db
| 1,059
|
py
|
Python
|
cari-pakar web/caripakar_app/migrations/0020_auto_20180706_0736.py
|
eightfold28/Expert-Finding
|
09ebbad9162eb4c3481eb98d5f6a47e37820be6f
|
[
"MIT"
] | null | null | null |
cari-pakar web/caripakar_app/migrations/0020_auto_20180706_0736.py
|
eightfold28/Expert-Finding
|
09ebbad9162eb4c3481eb98d5f6a47e37820be6f
|
[
"MIT"
] | 5
|
2021-03-31T19:02:32.000Z
|
2022-03-02T14:56:02.000Z
|
cari-pakar web/caripakar_app/migrations/0020_auto_20180706_0736.py
|
eightfold28/Expert-Finding
|
09ebbad9162eb4c3481eb98d5f6a47e37820be6f
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.0.6 on 2018-07-06 07:36
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('caripakar_app', '0019_auto_20180705_2120'),
]
operations = [
migrations.AlterField(
model_name='dosenskor',
name='skor_pendidikan',
field=models.FloatField(null=True),
),
migrations.AlterField(
model_name='dosenskor',
name='skor_penelitian',
field=models.FloatField(null=True),
),
migrations.AlterField(
model_name='dosenskor',
name='skor_pengajaran',
field=models.FloatField(null=True),
),
migrations.AlterField(
model_name='jabatanfungsional',
name='skor_jabatanfungsional',
field=models.FloatField(null=True),
),
migrations.AlterField(
model_name='jenjang',
name='skor_jenjang',
field=models.FloatField(null=True),
),
]
| 27.153846
| 53
| 0.570349
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('caripakar_app', '0019_auto_20180705_2120'),
]
operations = [
migrations.AlterField(
model_name='dosenskor',
name='skor_pendidikan',
field=models.FloatField(null=True),
),
migrations.AlterField(
model_name='dosenskor',
name='skor_penelitian',
field=models.FloatField(null=True),
),
migrations.AlterField(
model_name='dosenskor',
name='skor_pengajaran',
field=models.FloatField(null=True),
),
migrations.AlterField(
model_name='jabatanfungsional',
name='skor_jabatanfungsional',
field=models.FloatField(null=True),
),
migrations.AlterField(
model_name='jenjang',
name='skor_jenjang',
field=models.FloatField(null=True),
),
]
| true
| true
|
1c4880371ff1c3a6905dd35eaf4f8627f756e035
| 205
|
py
|
Python
|
tests/test_dummy.py
|
tyty999/py3startapp
|
199766f7b7a96fd16cb04a58eb381946337b568f
|
[
"MIT"
] | 13
|
2018-10-10T03:31:13.000Z
|
2022-03-27T22:44:37.000Z
|
tests/test_dummy.py
|
tyty999/py3startapp
|
199766f7b7a96fd16cb04a58eb381946337b568f
|
[
"MIT"
] | 6
|
2019-11-16T17:11:27.000Z
|
2021-05-30T12:33:11.000Z
|
tests/test_dummy.py
|
tyty999/py3startapp
|
199766f7b7a96fd16cb04a58eb381946337b568f
|
[
"MIT"
] | 2
|
2020-10-02T07:01:12.000Z
|
2021-08-06T08:21:31.000Z
|
import unittest
from app import dummy
class DummyTest(unittest.TestCase):
"""Replace this with a real unit test class."""
def test_dummy(self):
self.assertEqual('dummy', dummy.dummy())
| 18.636364
| 51
| 0.692683
|
import unittest
from app import dummy
class DummyTest(unittest.TestCase):
def test_dummy(self):
self.assertEqual('dummy', dummy.dummy())
| true
| true
|
1c4880dea3d807aef22bc60eb07879ed3ad7d5c1
| 2,130
|
py
|
Python
|
lib/node_modules/@stdlib/random/base/randu/benchmark/python/benchmark.py
|
andbmme/stdlib
|
c1994db25727c32cd0065cdc5f0c019dd4fc855c
|
[
"Apache-2.0"
] | 1
|
2020-03-02T15:44:24.000Z
|
2020-03-02T15:44:24.000Z
|
lib/node_modules/@stdlib/random/base/randu/benchmark/python/benchmark.py
|
andbmme/stdlib
|
c1994db25727c32cd0065cdc5f0c019dd4fc855c
|
[
"Apache-2.0"
] | null | null | null |
lib/node_modules/@stdlib/random/base/randu/benchmark/python/benchmark.py
|
andbmme/stdlib
|
c1994db25727c32cd0065cdc5f0c019dd4fc855c
|
[
"Apache-2.0"
] | 1
|
2020-11-23T03:46:24.000Z
|
2020-11-23T03:46:24.000Z
|
#!/usr/bin/env python
#
# @license Apache-2.0
#
# Copyright (c) 2018 The Stdlib Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Benchmark random."""
from __future__ import print_function
import timeit
NAME = "random"
REPEATS = 3
ITERATIONS = 1000000
def print_version():
"""Print the TAP version."""
print("TAP version 13")
def print_summary(total, passing):
"""Print the benchmark summary.
# Arguments
* `total`: total number of tests
* `passing`: number of passing tests
"""
print("#")
print("1.." + str(total)) # TAP plan
print("# total " + str(total))
print("# pass " + str(passing))
print("#")
print("# ok")
def print_results(elapsed):
"""Print benchmark results.
# Arguments
* `elapsed`: elapsed time (in seconds)
# Examples
``` python
python> print_results(0.131009101868)
```
"""
rate = ITERATIONS / elapsed
print(" ---")
print(" iterations: " + str(ITERATIONS))
print(" elapsed: " + str(elapsed))
print(" rate: " + str(rate))
print(" ...")
def benchmark():
"""Run the benchmark and print benchmark results."""
setup = "from random import random;"
stmt = "y = random()"
t = timeit.Timer(stmt, setup=setup)
print_version()
    for i in range(REPEATS):
print("# python::" + NAME)
elapsed = t.timeit(number=ITERATIONS)
print_results(elapsed)
print("ok " + str(i+1) + " benchmark finished")
print_summary(REPEATS, REPEATS)
def main():
"""Run the benchmark."""
benchmark()
if __name__ == "__main__":
main()
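# For reference, one repeat of the TAP stream printed above looks roughly like
# the following (elapsed/rate values are illustrative, not measured):
#
#   TAP version 13
#   # python::random
#     ---
#     iterations: 1000000
#     elapsed: 0.131009101868
#     rate: 7633057.2
#     ...
#   ok 1 benchmark finished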
| 21.734694
| 74
| 0.635211
|
from __future__ import print_function
import timeit
NAME = "random"
REPEATS = 3
ITERATIONS = 1000000
def print_version():
print("TAP version 13")
def print_summary(total, passing):
print("#")
print("1.." + str(total))
print("# total " + str(total))
print("# pass " + str(passing))
print("#")
print("# ok")
def print_results(elapsed):
rate = ITERATIONS / elapsed
print(" ---")
print(" iterations: " + str(ITERATIONS))
print(" elapsed: " + str(elapsed))
print(" rate: " + str(rate))
print(" ...")
def benchmark():
setup = "from random import random;"
stmt = "y = random()"
t = timeit.Timer(stmt, setup=setup)
print_version()
    for i in range(REPEATS):
print("# python::" + NAME)
elapsed = t.timeit(number=ITERATIONS)
print_results(elapsed)
print("ok " + str(i+1) + " benchmark finished")
print_summary(REPEATS, REPEATS)
def main():
benchmark()
if __name__ == "__main__":
main()
| true
| true
|
1c48820a1288bf08c53464e11ff1f0382ae8b770
| 4,907
|
py
|
Python
|
bilibili/data_extractor.py
|
DANancy/Web-Scraper-Starter
|
bfde0c67dd004bd065f084b57040ed644bfab2fd
|
[
"MIT"
] | 1
|
2020-04-26T05:27:55.000Z
|
2020-04-26T05:27:55.000Z
|
bilibili/data_extractor.py
|
DANancy/Web-Scraper-Starter
|
bfde0c67dd004bd065f084b57040ed644bfab2fd
|
[
"MIT"
] | null | null | null |
bilibili/data_extractor.py
|
DANancy/Web-Scraper-Starter
|
bfde0c67dd004bd065f084b57040ed644bfab2fd
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# load .env variables
import os
from dotenv import load_dotenv
from pathlib import Path
# load libs
import sys
import time
import random
from datetime import datetime
import re
import requests
from pymongo import MongoClient
# load self-defined modules
import bilibili.helper as h
import proxy.proxy_manager as PM
def get_urls(starttime, endtime, h_dict, c_dict):
pageNum = 1
urllst = []
testTotal = 1000
while True:
time.sleep(random.random())
r = h.api_call(pageNum, starttime, endtime, h_dict, c_dict)
# print(r)
totalPages = r['numPages']
pageSize = r['pagesize']
for j in range(pageSize):
url = r['result'][j]['arcurl']
urllst.append(url)
if (pageNum < min(totalPages, testTotal)):
pageNum += 1
else:
break
return urllst
def get_info(starttime, endtime, h_dict, c_dict, table):
pageNum = 1
testTotal = 1000
while True:
time.sleep(random.random())
r = h.api_call(pageNum, starttime, endtime, h_dict, c_dict)
totalPages = r['numPages']
pageSize = r['pagesize']
n = 0
for j in range(pageSize):
infos = {}
infos['url'] = r['result'][j]['arcurl']
infos['title'] = r['result'][j]['title']
infos['video_id'] = r['result'][j]['id']
infos['type'] = r['result'][j]['type']
infos['tag'] = r['result'][j]['tag']
infos['video_review'] = r['result'][j]['video_review']
infos['is_pay'] = r['result'][j]['is_pay']
infos['description'] = r['result'][j]['description']
infos['play'] = r['result'][j]['play']
infos['favorites'] = r['result'][j]['favorites']
infos['rank_score'] = r['result'][j]['rank_score']
infos['duration'] = r['result'][j]['duration']
infos['pubdate'] = datetime.strptime(r['result'][j]['pubdate'] + ':+0800',
'%Y-%m-%d %H:%M:%S:%z').isoformat()
infos['author'] = r['result'][j]['author']
infos['insert time'] = datetime.now().isoformat()
print(infos)
n += 1
table.insert_one(infos)
if (pageNum < min(totalPages, testTotal)):
pageNum += 1
else:
break
return n
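def _sketch_parse_pubdate():
    # Isolated version of the pubdate handling above: the API returns naive
    # 'YYYY-MM-DD HH:MM:SS' strings, so a fixed +0800 (CST) offset is appended
    # before parsing (illustrative timestamp, not real API output).
    raw = '2020-01-01 12:00:00'
    return datetime.strptime(raw + ':+0800', '%Y-%m-%d %H:%M:%S:%z').isoformat()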
def get_danmaku(search_url, h_dict, c_dict, table):
r = requests.get(url=search_url, headers=h_dict, cookies=c_dict, proxies=PM.myProxy.get_proxy())
    if r.status_code == 200:
r.encoding = r.apparent_encoding
cid = re.search(r'"cid":(\d*)', r.text).group(1)
danmaku_url = "https://comment.bilibili.com/{}.xml".format(cid)
r2 = requests.get(danmaku_url)
r2.encoding = r2.apparent_encoding
n = 0
items = re.findall(r'<d p=.*?</d>', r2.text)
for i in items:
details = {}
details['video_id'] = re.search(r'av(\d.*)', search_url).group(1)
details['cid'] = cid
details['comment'] = re.search(r'>(.*)</d>', i).group(1)
details['other info'] = re.search(r'<d p="(.*)"', i).group(1)
details['Insert Time'] = datetime.now().isoformat()
print(details)
table.insert_one(details)
n += 1
        return n
    # the request failed, so no danmaku could be fetched for this video
    return 0
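def _sketch_parse_danmaku_item():
    # Standalone sketch of the per-item parsing above, run on a hand-written
    # sample danmaku XML element (illustrative data, not a real API response).
    item = '<d p="12.3,1,25,16777215,1577836800,0,abc123,456">hello world</d>'
    comment = re.search(r'>(.*)</d>', item).group(1)       # -> 'hello world'
    other_info = re.search(r'<d p="(.*)"', item).group(1)  # -> the p attribute
    return comment, other_info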
if __name__ == "__main__":
env_path = Path('..') / '.env'
load_dotenv(dotenv_path=env_path)
starttime = input("Start Time: ")
endtime = input("End Time: ")
url = "https://www.bilibili.com/v/douga/mad/?spm_id_from=333.5.b_646f7567615f6d6164.38#/all/click/0/1"
h_dict = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.92 Safari/537.36'}
cookies = os.getenv('BILICOOKIES')
c_dict = h.get_cookies(cookies)
myClient = MongoClient(os.getenv("DBCONNECT"))
db = myClient['bilibili']
table_infos = db['infos']
table_details = db['details']
urls = get_urls(starttime, endtime, h_dict, c_dict)
try:
timestart = time.time()
count = 0
count += get_info(starttime, endtime, h_dict, c_dict, table_infos)
print("Insert {} videos with {}s".format(count, (time.time() - timestart)))
except ValueError as err:
print(err)
except PM.NoProxyException as err:
print(err)
except:
print("Unexpected error:", sys.exc_info())
for u in urls:
count = 0
time.sleep(random.random())
try:
timestart = time.time()
count += get_danmaku(u, h_dict, c_dict, table_details)
print("Insert {} danmakus with {}s".format(count, (time.time() - timestart)))
except ValueError as err:
print(err)
except PM.NoProxyException as err:
print(err)
except:
print("Unexpected error:", sys.exc_info())
| 32.713333
| 139
| 0.567557
|
import os
from dotenv import load_dotenv
from pathlib import Path
import sys
import time
import random
from datetime import datetime
import re
import requests
from pymongo import MongoClient
import bilibili.helper as h
import proxy.proxy_manager as PM
def get_urls(starttime, endtime, h_dict, c_dict):
pageNum = 1
urllst = []
testTotal = 1000
while True:
time.sleep(random.random())
r = h.api_call(pageNum, starttime, endtime, h_dict, c_dict)
totalPages = r['numPages']
pageSize = r['pagesize']
for j in range(pageSize):
url = r['result'][j]['arcurl']
urllst.append(url)
if (pageNum < min(totalPages, testTotal)):
pageNum += 1
else:
break
return urllst
def get_info(starttime, endtime, h_dict, c_dict, table):
pageNum = 1
testTotal = 1000
while True:
time.sleep(random.random())
r = h.api_call(pageNum, starttime, endtime, h_dict, c_dict)
totalPages = r['numPages']
pageSize = r['pagesize']
n = 0
for j in range(pageSize):
infos = {}
infos['url'] = r['result'][j]['arcurl']
infos['title'] = r['result'][j]['title']
infos['video_id'] = r['result'][j]['id']
infos['type'] = r['result'][j]['type']
infos['tag'] = r['result'][j]['tag']
infos['video_review'] = r['result'][j]['video_review']
infos['is_pay'] = r['result'][j]['is_pay']
infos['description'] = r['result'][j]['description']
infos['play'] = r['result'][j]['play']
infos['favorites'] = r['result'][j]['favorites']
infos['rank_score'] = r['result'][j]['rank_score']
infos['duration'] = r['result'][j]['duration']
infos['pubdate'] = datetime.strptime(r['result'][j]['pubdate'] + ':+0800',
'%Y-%m-%d %H:%M:%S:%z').isoformat()
infos['author'] = r['result'][j]['author']
infos['insert time'] = datetime.now().isoformat()
print(infos)
n += 1
table.insert_one(infos)
if (pageNum < min(totalPages, testTotal)):
pageNum += 1
else:
break
return n
def get_danmaku(search_url, h_dict, c_dict, table):
r = requests.get(url=search_url, headers=h_dict, cookies=c_dict, proxies=PM.myProxy.get_proxy())
    if r.status_code == 200:
r.encoding = r.apparent_encoding
cid = re.search(r'"cid":(\d*)', r.text).group(1)
danmaku_url = "https://comment.bilibili.com/{}.xml".format(cid)
r2 = requests.get(danmaku_url)
r2.encoding = r2.apparent_encoding
n = 0
items = re.findall(r'<d p=.*?</d>', r2.text)
for i in items:
details = {}
details['video_id'] = re.search(r'av(\d.*)', search_url).group(1)
details['cid'] = cid
details['comment'] = re.search(r'>(.*)</d>', i).group(1)
details['other info'] = re.search(r'<d p="(.*)"', i).group(1)
details['Insert Time'] = datetime.now().isoformat()
print(details)
table.insert_one(details)
n += 1
        return n
    return 0
if __name__ == "__main__":
env_path = Path('..') / '.env'
load_dotenv(dotenv_path=env_path)
starttime = input("Start Time: ")
endtime = input("End Time: ")
url = "https://www.bilibili.com/v/douga/mad/?spm_id_from=333.5.b_646f7567615f6d6164.38#/all/click/0/1"
h_dict = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.92 Safari/537.36'}
cookies = os.getenv('BILICOOKIES')
c_dict = h.get_cookies(cookies)
myClient = MongoClient(os.getenv("DBCONNECT"))
db = myClient['bilibili']
table_infos = db['infos']
table_details = db['details']
urls = get_urls(starttime, endtime, h_dict, c_dict)
try:
timestart = time.time()
count = 0
count += get_info(starttime, endtime, h_dict, c_dict, table_infos)
print("Insert {} videos with {}s".format(count, (time.time() - timestart)))
except ValueError as err:
print(err)
except PM.NoProxyException as err:
print(err)
except:
print("Unexpected error:", sys.exc_info())
for u in urls:
count = 0
time.sleep(random.random())
try:
timestart = time.time()
count += get_danmaku(u, h_dict, c_dict, table_details)
print("Insert {} danmakus with {}s".format(count, (time.time() - timestart)))
except ValueError as err:
print(err)
except PM.NoProxyException as err:
print(err)
except:
print("Unexpected error:", sys.exc_info())
| true
| true
|
1c48833ff1861f379326aee5c2cd912a1565f34c
| 57,720
|
py
|
Python
|
tools/lambda-gen/microGateway.py
|
brandonkgarner/CEDAR
|
2fb1ac69d1b888214e03b7cd8e756a44cbb9cd2c
|
[
"MIT"
] | 2
|
2020-10-22T20:04:28.000Z
|
2021-01-16T15:59:26.000Z
|
tools/lambda-gen/microGateway.py
|
brandonkgarner/CEDAR
|
2fb1ac69d1b888214e03b7cd8e756a44cbb9cd2c
|
[
"MIT"
] | null | null | null |
tools/lambda-gen/microGateway.py
|
brandonkgarner/CEDAR
|
2fb1ac69d1b888214e03b7cd8e756a44cbb9cd2c
|
[
"MIT"
] | 1
|
2021-02-25T17:40:12.000Z
|
2021-02-25T17:40:12.000Z
|
# This code is used to create Ansible files for deploying Lambdas.
# All that is needed is a target Lambda and its tests; it will do the rest:
# finds associated roles and policies
# creates Ansible modules based on those policies and roles
# defines the Lambdas and creates them with tests
# finds api-gateways or other events
# if api found defines the security needed. creates modules for deployment with templates
import re
import os
# import ValueError
# import time
# import random
# from time import sleep
# from datetime import datetime, date
import json
# import shutil
# import boto3
from botocore.exceptions import ClientError
# import sys
from shutil import copyfile
# import fileinput
# import logging
# import urllib
import distutils
# from distutils import dir_util
# import awsconnect
# from awsconnect import awsConnect
from microUtils import writeYaml, account_replace, loadServicesMap, ansibleSetup, serviceID
from microUtils import describe_role, roleCleaner
# sudo ansible-playbook -i windows-servers CR-Admin-Users.yml -vvvv
dir_path = os.path.dirname(__file__)
class ApiGatewayMolder():
origin = None
temp = None
def Cooker(self, target):
lambda_describe(target)
def __init__(self, directory, islambda=False):
global dir_path
if not islambda:
temp = "%s/%s" % (dir_path, directory)
else:
temp = '/tmp'
self.temp = temp
if not os.path.exists(temp):
os.makedirs(temp)
else:
print(" directory %s already exists... remove or change name." % temp)
#############################################
# ##### [ USAGE PLAN KEYs ]#################
#############################################
#############################################
def describe_apiKey(self, client, apiKey, position=None):
rlist = [] # client.get_api_key(apiKey='9j5xhi7ene')
        # get_api_key() is the single-key call and takes no paging args; the
        # paginated plural call is what the 'items'/position handling expects
        if position is None:
            response = client.get_api_keys(limit=500)
        else:
            response = client.get_api_keys(position=position, limit=500)
baseList = response['items']
if "position" in response:
rlist = self.describe_apiKey(client, apiKey, response['position'])
keys = baseList + rlist
return keys
    def keysMapAPI(self, client, baseList):
        # placeholder mapping hook; must return a list so callers can
        # concatenate it with the recursively fetched pages
        # for base in baseList:
        return baseList
def describe_planKey(self, client, usagePlanID, position=None):
rlist = []
if position is None:
response = client.get_usage_plan_keys(
usagePlanId=usagePlanID, limit=500)
else:
response = client.get_usage_plan_keys(
usagePlanId=usagePlanID, position=position, limit=500)
temp = response['items']
baseList = self.keysMapAPI(client, temp)
if "position" in response:
rlist = self.describe_planKey(
client, usagePlanID, response['position'])
keys = baseList + rlist
return keys
def describe_apiUsage(self, client, restApiId, position=None):
rlist = []
if position is None:
response = client.get_usage_plans(keyId=restApiId, limit=500)
else:
response = client.get_usage_plans(
keyId=restApiId, position=position, limit=500)
baseList = response['items']
if "position" in response:
rlist = self.describe_apiUsage(
client, restApiId, response['position'])
usagePlans = baseList + rlist
return usagePlans
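    @staticmethod
    def _sketch_paginate(fetch_page):
        # Generic, iterative form of the position-based pagination used by the
        # describe_* helpers above; `fetch_page` is a hypothetical callable
        # taking the current position (or None) and returning a raw response.
        items, position = [], None
        while True:
            response = fetch_page(position)
            items += response['items']
            position = response.get('position')
            if position is None:
                return items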
def put_UsagePlan(self, client, usagePlans):
for plans in usagePlans:
# delete first if exists... or ignore....
# response = client.delete_usage_plan(
# usagePlanId='string'
# )
client.create_usage_plan(name=plans['name'], description=plans['description'],
apiStages=plans['apiStages'],
throttle=plans['throttle'],
quota=plans['quota']
)
def put_PlanKeys(self, client, keys, usagePlanId):
for key in keys:
# delete first if exists... or ignore....
# response = client.delete_usage_plan(
# usagePlanId='string'
# )
response = client.create_usage_plan_key(usagePlanId=usagePlanId, keyId=key['id'],
keyType=key['type']
)
def put_APIKeys(self, client, keys, usagePlanId):
for key in keys:
response = client.create_api_key(name=key['name'],
description=key['description'],
enabled=key['enabled'],
generateDistinctId=key['generateDistinctId'],
value=key['value'],
# customerId=key['customerId']
)
#############################################
###### [ USAGE PLAN KEYs ]#################
#############################################
#############################################
#############################################
###### [ API AUTHORIZERS ]#################
#############################################
#############################################
def describe_authorizers(self, client, restApiId, name, auths):
auth_list = self.describe_Allauths(client, restApiId)
aList = []
for auth in auth_list:
auth.update({'state': 'present', 'apiName': name})
auths.append(auth)
# auths.update({apiStage:{'stage':stageLabel,'api':apiName, 'state':'present'}})
return auths
def describe_Allauths(self, client, restApiId, position=None):
rlist = []
if position is None:
response = client.get_authorizers(restApiId=restApiId, limit=500)
else:
response = client.get_authorizers(
restApiId=restApiId, position=position, limit=500)
baseList = response['items']
if "position" in response:
rlist = self.describe_Allmodels(
client, restApiId, response['position'])
models = baseList + rlist
return models
#############################################
#### E N D [ API AUTHORIZERS ]##############
#############################################
#############################################
#############################################
###### [ API MODELS ]#################
#############################################
#############################################
def describe_Allmodels(self, client, restApiId, position=None):
rlist = []
if position is None:
response = client.get_models(restApiId=restApiId, limit=500)
else:
response = client.get_models(
restApiId=restApiId, position=position, limit=500)
baseList = response['items']
if "position" in response:
rlist = self.describe_Allmodels(
client, restApiId, response['position'])
models = baseList + rlist
return models
def get_attrModel(self, client, target, restApiId, init=False):
rawSchema = client.get_model(
restApiId=restApiId, modelName=target, flatten=True)['schema']
rr = json.loads(rawSchema)
mType = rr['type']
# print(" - - - - START attrModel - - - - -")
# print(rr)
if 'properties' in rr:
response = rr['properties']
else:
response = rr['items']
definitions = rr['definitions']
oFinal = {}
for k, v in response.items():
value = v
name = k
print(" key: %s, value: %s" % (k, v))
if "$ref" in k:
vmodel = os.path.basename(v)
oValue = {'items': None}
value = self.get_attrModel(client, vmodel, restApiId)
name = target
oValue['items'] = value
# print(" - - - - START attrModel - - - -%s -"%target)
# print(response)
# print(oValue)
# raise
if init:
return {'type': mType, 'items': value['items']}
elif len(v) > 1 and type(v) is dict:
# 'type': 'object', 'additionalProperties': {'$ref': '#/definitions/address'}}
print(v)
oType = v['type']
# if 'items' in v:
for oK, oV in v.items():
if 'type' in oK:
continue
# itemKey = oK
# oValue=v['items']
if "$ref" in oV:
vmodel = os.path.basename(oV['$ref'])
value = self.get_attrModel(client, vmodel, restApiId)
# oValue['items']=value
oFinal.update({name: {'type': oType, 'items': oV}})
oFinal.update({name: value})
return {'type': mType, 'items': oFinal}
def describe_modelInTarget(self, client, apiName, target, svcType='dynamodb'):
apis = self.getAll_rest_apis(client)
restApiId = None
for api in apis:
name = api['name']
if name in apiName:
restApiId = api['id']
break
if restApiId is None:
return None
mapped = None
if svcType in 'dynamodb':
obj = {}
begin = ""
mapped = self.get_attrModel(client, target, restApiId, True)
# print("--------COMPLETED----------")
# print(" ----------------------")
# print(" ----------------")
# print(mapped)
return mapped
# def models_filter(self,models):
# return models
def describe_models(self, client, restApiId, name, models):
model_list = self.describe_Allmodels(client, restApiId)
print("START")
print(restApiId)
basic = []
advanced = []
advTemp = []
models.update({'basic': basic, 'dependant': advanced})
basicNames = []
for model in model_list:
rawSchema = model['schema'].replace(
restApiId, "%s_id" % (name)) # gets converted in real time
# model['schema']=model['schema'].replace("\n", "" ).replace( "\\", "" )
# model['schema'] =model['schema'].replace('{', "{'$schema':'http://json-schema.org/draft-04/schema#', 'title': '%s' ,"%(model['name']), 1)
model['schema'] = json.loads(rawSchema)
model.update({'api': name, 'state': 'present'})
apiModel = "%s_%s" % (name, model['name'])
if '$ref' in rawSchema:
advTemp.append(model)
else:
basic.append(model)
basicNames.append(model['name'])
# models.update({apiModel:model})
# print(advTemp)
# raise ValueError('A very specific bad thing happened.')
# print("-------000-----~~~----~~~----~~~")
# print(advTemp)
# print("-------000b-----~~~----~~~----~~~")
advTemp = self.relocRefsOnDependencies(advTemp, basicNames)
advanced.extend(advTemp)
# raise
# advanced=advTemp
print(advanced)
print("-------000b-----~~~----~~~----~~~")
# print(models)
# print("--------001----~~~----~~~----~~~")
# raise ValueError('A very specific bad thing happened.')
return models
def describeRefs(self, key, value):
refs = []
if 'type' in key:
return None
if isinstance(value, (list, tuple, dict)):
# print("%s:%s"%(key, value))
if '$ref' in value:
refs.append(value['$ref'])
else:
for k, v in value.items():
dd = self.describeRefs(k, v)
if dd:
refs = refs + dd
else:
return None
return refs
##################################################
####### BELOW REPOSITION ########################
###### BASED ON DEPENDENCIES ##################
##################################################
def modelRefs(self, schema):
refs = []
if 'properties' in schema:
for k, v in schema['properties'].items():
tempRefs = self.describeRefs(k, v)
if tempRefs is None:
continue
for ref in tempRefs:
refs.append(os.path.basename(ref))
elif 'items' in schema:
for k, v in schema['items'].items():
if "$ref" in k:
refs.append(os.path.basename(v))
if refs:
refs = list(set(refs))
return refs
#
def relocRefsOnDependencies(self, array, basicNames):
additionali = 0
for i, adv in enumerate(array):
if additionali > 0:
i = additionali + i
if i >= len(array):
break
# print("--= %s"%( array[i] ) )
sSchema = array[i]['schema']
if 'properties' in sSchema or 'items' in sSchema:
refsIn = []
model = array[i]
IName = model['name']
if 'items' in sSchema:
refsIn.append(os.path.basename(sSchema['items']['$ref']))
else:
props = sSchema['properties']
# print(props)
for k, v in props.items():
refs = self.describeRefs(k, v)
if refs is None:
continue
for ref in refs:
refM = os.path.basename(ref)
refsIn.append(refM)
if refsIn:
for rName in refsIn:
if rName in basicNames:
print(" --: continue %s" % (rName))
continue
else:
for num, atemp in enumerate(array):
# aprops = atemp['schema']#['properties']
mname = atemp['name']
# print(mname)
if mname == rName: # found match for reference
if num > i: # reference is AFTER i and MUST be before
additionali = additionali + 1
del array[num]
# Item now just before reference
array.insert(i, atemp)
if 'client' in mname and 'clients' in IName:
print(i)
print(num)
print(model)
print("### client &*&*&*&****&&&&&")
# raise
if additionali > 0:
print("additional found ... rerun")
self.relocRefsOnDependencies(array, basicNames)
return array
##################################################
####### ABOVE REPOSITION ########################
###### BASED ON DEPENDENCIES ##################
##################################################
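    @staticmethod
    def _sketch_dependency_order(models_by_name):
        # Toy illustration of what relocRefsOnDependencies above is after:
        # every model must appear later in the list than the models it
        # $ref-erences. `models_by_name` is a hypothetical
        # {name: [referenced names]} mapping.
        ordered, seen = [], set()

        def visit(name):
            for dep in models_by_name.get(name, []):
                if dep not in seen:
                    visit(dep)
            if name not in seen:
                seen.add(name)
                ordered.append(name)

        for name in models_by_name:
            visit(name)
        return ordered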
def put_models(self, client, models):
modelsAdded = []
for model in models:
# current= client.get_model(restApiId=restApiId,modelName=modelName)
# if model.name == current.modelName:
# delete model first
modelName = model.name
try:
response = client.delete_model(
restApiId=model.restApiId, modelName=modelName)
print("[W] found MODEL %s and deleted..." % modelName)
except ClientError as e:
print(" -[W]- NOT found MODEL %s .." % modelName)
print(e.response['Error']['Message'])
try:
response = client.create_model(restApiId=model.restApiId, name=modelName, description=model.description,
schema=model.schema, contentType=model.contentType)
                modelsAdded.append(response)
except ClientError as e:
print(e.response['Error']['Message'])
return modelsAdded
#############################################
##### [ API MODELS ]#### ABOVE ##########
#############################################
#############################################
def put_resources(self, client, resources):
modelsAdded = []
        for model in resources:
# current= client.get_model(restApiId=restApiId,modelName=modelName)
# if model.name == current.modelName:
# delete model first
modelName = model.name
try:
response = client.delete_model(
restApiId=model.restApiId, modelName=modelName)
print("[W] found MODEL %s and deleted..." % modelName)
except ClientError as e:
print(" -[W]- NOT found MODEL %s .." % modelName)
print(e.response['Error']['Message'])
try:
response = client.create_model(restApiId=model.restApiId, name=modelName, description=model.description,
schema=model.schema, contentType=model.contentType)
                modelsAdded.append(response)
except ClientError as e:
print(e.response['Error']['Message'])
return modelsAdded
def describe_stages(self, client, apiID, apiName, stages):
# client = aconnect.__get_client__('apigateway')
# client = boto3.client('apigateway')
usage = client.get_usage_plans()['items']
for use in usage:
for stage in use['apiStages']:
if apiID in stage['apiId']:
stageLabel = stage['stage']
apiStage = "%s_%s" % (apiName, stageLabel)
if apiStage in stages:
continue
stages.update(
{apiStage: {'stage': stageLabel, 'api': apiName, 'state': 'present'}})
return stages
def getAllResources(self, client, restApiId, position=None):
rlist = []
if position is None:
response = client.get_resources(restApiId=restApiId, limit=500)
else:
response = client.get_resources(
restApiId=restApiId, position=position, limit=500)
baseList = response['items']
if "position" in response:
rlist = self.getAllResources(
client, restApiId, response['position'])
final = baseList + rlist
return final
def getAll_rest_apis(self, client, position=None):
rlist = []
if position is None:
response = client.get_rest_apis(limit=500)
else:
response = client.get_rest_apis(position=position, limit=500)
baseList = response['items']
if "position" in response:
rlist = self.getAll_rest_apis(client, response['position'])
final = baseList + rlist
return final
# get api, resource, method, integration and responses, models
def describe_gateway(self, resourceNname, resourceType, aconnect, resourceRole=None, targetAPI=None):
DEBUG = False
client = aconnect.__get_client__('apigateway')
# client = boto3.client('apigateway')
# apis=client.get_rest_apis()
apis = self.getAll_rest_apis(client)
integratedAPIs = []
stages = {}
models = {}
auths = []
addedResource = {}
possibleOptions = {}
print("*********************************************************************")
print(" API GATEWAY ")
print("*********************************************************************")
print(apis)
print("*********************************************************************")
print(targetAPI)
GREEDY = False
if '*' == resourceNname:
GREEDY = True
# for api in apis['items']:
for api in apis:
id = api['id']
name = api['name']
if targetAPI is not None:
if name.lower() != targetAPI.lower():
continue
# resources = client.get_resources( restApiId=id,limit=500)
resources = self.getAllResources(client, id)
final_MODELS = {'basic': [], 'dependant': []}
final_AUTHS = []
# api id, stage id, model id
stages = self.describe_stages(client, id, name, stages)
models = self.describe_models(client, id, name, models)
auths = self.describe_authorizers(client, id, name, auths)
if GREEDY:
final_MODELS = models
final_AUTHS = auths
print(
" API GATEWAY resources ")
print(" resources %s " % resources)
# for rest in resources['items']:
for rest in resources:
path = rest['path']
mId = rest['id']
parentId = None
if 'parentId' in rest:
parentId = rest['parentId']
if 'resourceMethods' not in rest:
parent = rest
continue
if "xxx/" in path:
print(path)
DEBUG = True
else:
DEBUG = False
pathString = path
resourceString = resourceNname
pathString = roleCleaner(pathString)
if '_*' not in resourceNname:
resourceString = roleCleaner(resourceString)
if (resourceString != pathString and resourceType != 'lambda' and not GREEDY):
continue
else:
tempString = resourceString[:-2]
resourceString = roleCleaner(tempString)
if (resourceString not in pathString) and resourceType != 'lambda' and not GREEDY:
continue
print(rest['resourceMethods'])
for key, value in rest['resourceMethods'].items():
tempResourceRole = resourceRole
# ONLY FOR TESTINNG
# if not 'GET' in key:
# continue
# DELETE ABOVE !!!!!!!!!
if DEBUG:
print(" @#@#. 002. #@#@ %s %s client.get_method(restApiId=%s,resourceId=%s,httpMethod=%s)" % (
name, path, id, mId, key))
# integrated = None
# mInfo = value
print(value)
# if (resourceString != pathString and resourceType != 'lambda' and not GREEDY):
if DEBUG:
print(" *API* [%s][%s]" %
(resourceNname, pathString))
# try:
# integrated = client.get_integration(restApiId=id, resourceId=mId, httpMethod=key)
# del integrated['ResponseMetadata']
# except ClientError as e:
# print(e.response['Error']['Message'])
# print integrated
method = client.get_method(
restApiId=id, resourceId=mId, httpMethod=key)
del method['ResponseMetadata']
authType = method['authorizationType']
keyRequired = method['apiKeyRequired']
function = method['httpMethod']
print(". %s. [%s]" % (function, resourceType))
if function.lower() not in resourceType.lower() and resourceType != 'lambda' and resourceType != '*':
if not 'options' in function.lower():
continue
integratedType = None
if not 'methodIntegration' in method:
continue
methodIntegration = method['methodIntegration']
print(" method:")
# print (method)
print("----====-----====----====----")
# method = client.get_method( restApiId='20iv84vxh9',resourceId='i8b9of',httpMethod='GET')
# if not 'uri' in methodIntegration:
# # print(". - -- - 002-- - ?")
# # print ( methodIntegration )
# continue
operationName = rModels = sModels = authName = requestParameters = requestValidator = authScope = None
methodResponse = None
if 'methodResponses' in method:
methodResponse = method['methodResponses']
# print("~~~~~1")
# print (methodResponse)
# print("~~~~~1bb")
if '200' in methodResponse:
if 'responseModels' in methodResponse['200']:
mm = methodResponse['200']['responseModels']
sModels = {}
for mkey, mvalue in mm.items():
sModels.update({mkey: mvalue})
# if 'GET' in key:
# raise
if 'requestModels' in method:
sm = method['requestModels']
rModels = {}
# print("~~~~~2")
# print(sm)
# print("~~~~~2bb")
# raise
# FIND MODELS AND ADD ONLY WHATS NEEDED IF NOT GREEDY
for rkey, rvalue in sm.items():
# print(rkey)
# raise
if 'empty' in rvalue.lower():
continue
rModels.update({rkey: rvalue})
if not GREEDY: # ONLY ADD MODELS NEEDED FOR API
found = False
heritage = []
for md in models['dependant']:
if md['name'] == rvalue:
if md not in final_MODELS['dependant']:
final_MODELS['dependant'].append(
md)
heritage = self.modelRefs(
md['schema'])
found = True
break
if not found or heritage:
for md in models['basic']:
if md['name'] == rvalue:
if md not in final_MODELS['basic']:
final_MODELS['basic'].append(
md)
break
if heritage: # IF MODEL HAS REFS GRAB THEM HERE
for md in models['basic']:
if md['name'] in heritage:
if md not in final_MODELS['basic']:
final_MODELS['basic'].append(
md)
print(final_MODELS)
# raise
integration = None
if 'uri' in methodIntegration:
integration = methodIntegration['uri']
if 'credentials' in methodIntegration:
tempResourceRole = methodIntegration['credentials']
if rModels is None:
rModels = {}
if sModels is None:
sModels = {}
if requestParameters is None:
requestParameters = {}
if requestValidator is None:
requestValidator = {}
if methodResponse is None:
methodResponse = {}
if integration is None:
integration = {}
if 'requestValidatorId' in method:
requestValidator = client.get_request_validator(restApiId=id,
requestValidatorId=method['requestValidatorId'])
del requestValidator['ResponseMetadata']
if 'requestParameters' in method:
requestParameters = method['requestParameters']
if 'authorizerId' in method:
auth = client.get_authorizer(
restApiId=id, authorizerId=method['authorizerId'])
authName = auth['name']
if 'authorizationScopes' in method:
authScope = method['authorizationScopes']
                    add = False
print("%s[%s] type:[%s]. name:[%s]" % (
integration, function, resourceType, resourceNname))
if DEBUG:
print('if (%s in %s and %s in %s) or %s == "*"' % (resourceType,
integration, resourceNname, integration, resourceNname))
print("-------------------")
print(". %s == %s" % (resourceString, pathString))
# raise
if (resourceType in integration and resourceNname in integration) or resourceNname == "*":
add = True
elif '_*' in resourceNname and resourceType == '*' and pathString.startswith(resourceString):
# print('if (%s in %s and %s in %s) or %s == "*"' % (resourceType, integration, resourceNname, integration, resourceNname))
# print("-------------------")
# print(". %s == %s" % (resourceString , pathString))
# raise
add = True
elif '_*' in resourceNname and resourceType.lower() == function.lower() and pathString.startswith(resourceString):
# print('if (%s in %s and %s in %s) or %s == "*"' % (resourceType, integration, resourceNname, integration, resourceNname))
# print("-------------------")
# print(". %s == %s" % (resourceString , pathString))
add = True
elif resourceType == '*' and resourceString == pathString:
add = True
elif resourceType.lower() == function.lower() and resourceString == pathString:
add = True
elif key == "OPTIONS":
possibleOptions.update({path: {'name': name,
'parentid': parentId,
'credentials': None,
'state': 'present',
'id': mId,
'operationlabel': operationName,
'requestparameters': requestParameters,
'requestvalidator': requestValidator,
'authscope': authScope,
'requestmodels': rModels,
'responsemodels': sModels,
'authorizationType': authType,
'authName': authName,
'apiKeyRequired': keyRequired,
'type': integratedType,
'path': path,
'httpMethod': function,
'methodIn': methodIntegration,
'methodResponse': methodResponse}})
# if 'COGNITO' in authType:
# print (" . . .1 . . ")
# print (method)
# print (" . . .2 . . ")
if add:
addedResource.update({path: mId})
if not 'credentials' in methodIntegration:
methodIntegration.update(
{'credentials': tempResourceRole})
integratedAPIs.append(
{'name': name,
'parentid': parentId,
'credentials': tempResourceRole,
'state': 'present',
'id': mId,
'operationlabel': operationName,
'requestparameters': requestParameters,
'requestvalidator': requestValidator,
'authscope': authScope,
'requestmodels': rModels,
'responsemodels': sModels,
'authorizationType': authType,
'authName': authName,
'apiKeyRequired': keyRequired,
'type': integratedType,
'path': path,
'httpMethod': function,
'methodIn': methodIntegration,
'methodResponse': methodResponse})
# print(addedResource)
# raise
for rK, rV in addedResource.items(): # Ensure OPTIONS picked up for Methods gathered
if rK in possibleOptions:
integratedAPIs.append(possibleOptions[rK])
print("=====>>> !!")
print("completed!!")
print("completed!!")
print(integratedAPIs)
# print (addedResource)
# raise ValueError(" stopping now for check...")
if len(integratedAPIs) == 0:
return None, None, None, None
return integratedAPIs, stages, final_MODELS, final_AUTHS
def summary_gateway(self, client, targetAPI):
apis = self.getAll_rest_apis(client)
id = None
for api in apis:
if targetAPI in api['name']:
id = api['id']
break
if id is None:
print("[E] no api tree found with name: %s" % (targetAPI))
raise
resources = self.getAllResources(client, id)
integratedAPIs = {}
print(" API GATEWAY resources ")
print(" resources %s " % resources)
# for rest in resources['items']:
for rest in resources:
path = rest['path']
mId = rest['id']
if not 'resourceMethods' in rest:
parent = rest
continue
for key, value in rest['resourceMethods'].items():
# print integrated
method = client.get_method(
restApiId=id, resourceId=mId, httpMethod=key)
del method['ResponseMetadata']
authType = method['authorizationType']
keyRequired = method['apiKeyRequired']
integratedType = None
if not 'methodIntegration' in method:
continue
            requestParameters = {}
            authScope = None
            if 'requestParameters' in method:
                requestParameters = method['requestParameters']
            if 'authorizationScopes' in method:
                authScope = method['authorizationScopes']
            # the original filter here referenced names that are undefined in
            # this scope, so the summary simply includes every method found
            add = True
            # elif key == "OPTIONS":
            # continue
if add:
integratedAPIs.update({path:
{
'id': mId,
'requestparameters': requestParameters,
'authscope': authScope,
'authorizationType': authType,
'apiKeyRequired': keyRequired,
'path': path,
'httpMethod': method['httpMethod']}})
return integratedAPIs
def describe_GatewayALL(self, target, aconnect, accountOrigin, accounts=[], types=[], sendto=None, targetAPI=None, isFullUpdate=False, needDirs=False):
print("describe_GatewayALL")
# describe_gateway(self, resourceNname, resourceType, aconnect , resourceRole=None,targetAPI=None):
allAccounts = True
directorysNeeded = needDirs
skipFiles = True
acctTitle = None
if directorysNeeded:
skipFiles = False
# tmp="/tmp"
#
self.origin = accountOrigin
acctID = accountOrigin['account']
assumeRole = accountOrigin['assume_role']
NETWORK_MAP = loadServicesMap(accountOrigin['services_map'], 'RDS')
COGNITO_MAP = loadServicesMap(accountOrigin['services_map'], 'cognito')
BUCKET_MAP = loadServicesMap(accountOrigin['services_map'], 'S3')
# self.origin['account']
iamRole = "CR-Lambda-VPC"
print("### CREATING IAM ROLE: %s" % (iamRole))
targetString = roleCleaner(target)
roles, resourceRole = describe_role(
iamRole, aconnect, acctID, True if 'api' in types else False)
# (target,'lambda', aconnect, resourceRole, targetAPI)
apis, stages, models, auths = self.describe_gateway(
'*', '*', aconnect, resourceRole, targetAPI)
taskMain, rootFolder, targetLabel = ansibleSetup(
self.temp, target, isFullUpdate, skipFiles)
taskMain = taskMain[0:2]
taskMain.append({"import_tasks": "../aws/agw_authorizer.yml",
"vars": {"project": '{{ project }}'}})
taskMain.append({"import_tasks": "../aws/agw_model.yml",
"vars": {"project": '{{ project }}'}})
taskMain.append({"import_tasks": "../aws/_agw.yml",
"vars": {"project": '{{ project }}'}})
skipping = error_path = None
if 'error_path' in accountOrigin:
error_path = accountOrigin['error_path']
if 'skipping' in accountOrigin:
skipping = accountOrigin['skipping']
# error_path: /Users/astro_sk/Documents/TFS/Ansible_Deployer
if not skipping:
skipping = {
"methods": False,
"options": False,
"models": False,
"stage": False,
"resources": False
}
# ONLY needs two files default definition and tasks
#############################################
#############################################
######## write YAML to file in tasks #######
#############################################
#############################################
# rootFolder=tmp
option = "main"
if directorysNeeded:
mainIn = "%s/%s/%s" % (rootFolder, 'tasks', option)
else:
option = "tasks_main"
mainIn = "%s/%s" % (rootFolder, option)
# mainIn = "%s/%s" % (rootFolder, option)
writeYaml(taskMain, mainIn)
file_tasks = "%s.yaml" % mainIn
file_defaults = None
if 'services_map' in accountOrigin:
mapfile = accountOrigin['services_map']
serviceMap = loadServicesMap(mapfile, None)
for akey, account in accounts.items():
# if not account in acctID:
if acctID == akey:
acctTitle = account['title']
            if not allAccounts:
                if acctID not in akey:
                    continue
eID = serviceID(akey, None, account['all'], serviceMap)
accDetail = {
"account_id": akey,
"error_path": error_path,
"skipping": skipping,
"env": account['title'],
"role_duration": 3600,
"region": "us-east-1",
"eid": eID,
"roles": [],
"policies": []
}
if assumeRole:
accDetail.update({"cross_acct_role": account['role']})
defaultVar = {targetLabel: accDetail}
########################################################
############# API GATEWAY METHODS ###################
########################################################
# print (" A P I. see below. ......===---->>>")
api_list = []
stage_list = [] #
model_list = models # [] #
# stages.update({apiStage:{'stage':stageLabel,'api':apiName}})
            if apis is not None:
# for mk,mv in models.items():
# model_list.append(mv)
for sk, sv in stages.items():
stage_list.append(sv)
for api in apis:
oApi = {
'name': api['name'],
'id': api['id'],
'credentials': "%s" % api['credentials'],
'authorizationType': api['authorizationType'],
'apiKeyRequired': api['apiKeyRequired'],
'type': api['type'],
'path': api['path'],
'operational_name': api['operationlabel'],
'request_valid': api['requestvalidator'],
'request_params': api['requestparameters'],
'auth_scope': api['authscope'],
'authName': api['authName'],
'request_models': api['requestmodels'],
'response_models': api['responsemodels'],
'httpMethod': api['httpMethod'],
'parentid': api['parentid'],
'method_response': api['methodResponse'],
'method_integration': api['methodIn'],
'state': api['state']
}
api_list.append(oApi)
defaultVar[targetLabel].update({"api_gw": api_list})
defaultVar[targetLabel].update({"api_stages": stage_list})
defaultVar[targetLabel].update({"api_models": model_list})
defaultVar[targetLabel].update({"api_authorizers": auths})
# defaultVar[targetLabel].update({ "api_domains": stage_list })
# defaultVar[targetLabel].update({ "api_usage": stage_list })
# option = "defaults_main%s"%account['all']
# option = "defaults_main"
# mainIn = "%s/%s" % (rootFolder, option)
# mainIn = "%s/%s/%s"%(rootFolder,'defaults',option)
# # mainIn = "%s/%s/%s"%(rootFolder,'defaults',option)
# file_defaults = "%s.yaml" % mainIn
# # CREATE default with all vars
# writeYaml(defaultVar, mainIn)
# account_replace(file_defaults, str(acctID), str(akey))
#
if directorysNeeded:
networkObj = NETWORK_MAP[akey]
bucketObj = BUCKET_MAP[akey]
cognitoObj = COGNITO_MAP[akey]
option = "main_%s" % account['all']
mainIn = "%s/%s/%s" % (rootFolder, 'defaults', option)
writeYaml(defaultVar, mainIn)
print("----> file: %s" % (mainIn))
account_replace("%s.yaml" % mainIn, str(acctID), str(akey))
for key, value in BUCKET_MAP[acctID].items():
account_replace("%s.yaml" % mainIn, str(
value), str(bucketObj[key]))
for key, value in NETWORK_MAP[acctID].items():
account_replace("%s.yaml" % mainIn, str(
value), str(networkObj[key]))
for key, value in COGNITO_MAP[acctID].items():
account_replace("%s.yaml" % mainIn, str(
value), str(cognitoObj[key]))
if directorysNeeded:
                if sendto is not None:
print(" .... creating a main.yaml for ansible using dev")
opt = "main_%s.yaml" % accountOrigin['all']
src = "%s/%s/%s" % (rootFolder, 'defaults', opt)
opt2 = "main.yaml"
dst = "%s/%s/%s" % (rootFolder, 'defaults', opt2)
print("----> src: %s" % (src))
print("----> dst: %s" % (dst))
copyfile(src, dst)
print(" -------==------===---- COPY ALL. START....")
print(" sending to %s. from %s" % (sendto, rootFolder))
distutils.dir_util.copy_tree(rootFolder, sendto)
print(" -------==------===---- FINAL YAML file....")
ansibleRoot = sendto.split('roles/')[0]
targets = ['%s' % targetString]
rootYML = [{"name": "micro modler for ALL gateways resource -%s" % target,
"hosts": "dev",
"remote_user": "root",
"roles": targets}]
# ansibleRoot
writeYaml(rootYML, ansibleRoot, targetString)
else:
option = "defaults_main"
mainIn = "%s/%s" % (rootFolder, option)
# mainIn = "%s/%s/%s"%(rootFolder,'defaults',option)
file_defaults = "%s.yaml" % mainIn
# CREATE default with all vars
writeYaml(defaultVar, mainIn)
account_replace(file_defaults, str(acctID), str(akey))
print(file_tasks)
# return file_tasks, file_defaults
return acctID, target, acctTitle, True
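    # The per-account defaults file produced above is roughly shaped like this
    # (illustrative sketch, not a verbatim dump; the account id is a placeholder):
    #
    #   <targetLabel>:
    #     account_id: "123456789012"
    #     region: us-east-1
    #     eid: ...
    #     api_gw: [...]        # one entry per method from describe_gateway
    #     api_stages: [...]
    #     api_models: {basic: [...], dependant: [...]}
    #     api_authorizers: [...]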
    def describe_GwResource(self, target, aconnect, accountOrigin, accounts=None, types=None, sendto=None, targetAPI=None, isFullUpdate=False, needDirs=False):
        print("describe_GwResource for target deployments")
        # describe_gateway(self, resourceNname, resourceType, aconnect , resourceRole=None,targetAPI=None):
        # avoid mutable default arguments; `accounts` is iterated as a dict below
        accounts = accounts if accounts is not None else {}
        types = types if types is not None else []
        # isFullUpdate = False
        directorysNeeded = needDirs
skipFiles = True
if directorysNeeded:
skipFiles = False
acctTitle = None
# tmp="/tmp"
self.origin = accountOrigin
acctID = accountOrigin['account']
assumeRole = accountOrigin['assume_role']
NETWORK_MAP = loadServicesMap(accountOrigin['services_map'], 'RDS')
COGNITO_MAP = loadServicesMap(accountOrigin['services_map'], 'cognito')
BUCKET_MAP = loadServicesMap(accountOrigin['services_map'], 'S3')
# self.origin['account']
iamRole = "CR-Lambda-VPC"
print("### CREATING IAM ROLE: %s" % (iamRole))
roles, resourceRole = describe_role(
iamRole, aconnect, acctID, True if 'api' in types else False)
targetString = roleCleaner(target)
if not "[" in target:
msg = "[E] arguments givent do not contain methods for resource %s" % (
target)
print(msg)
raise
method = re.search(r'\[(.*?)\]', target).group(1)
print("==-=-=-===-=--=-==--=-=>>>>> YMB")
# (target,'lambda', aconnect, resourceRole, targetAPI)
if '/*[' in target: # this means we must recursively find all lower paths
apis, stages, models, auths = self.describe_gateway(
targetString, method, aconnect, resourceRole, targetAPI)
else:
apis, stages, models, auths = self.describe_gateway(
targetString, method, aconnect, resourceRole, targetAPI)
print("======================")
print(len(apis))
print("======================")
taskMain, rootFolder, targetLabel = ansibleSetup(
self.temp, targetString, isFullUpdate, skipFiles)
taskMain = taskMain[0:2]
taskMain.append({"import_tasks": "../aws/agw_model.yml",
"vars": {"project": '{{ project }}'}})
taskMain.append({"import_tasks": "../aws/_agw.yml",
"vars": {"project": '{{ project }}'}})
skipping = error_path = None
if 'error_path' in accountOrigin:
error_path = accountOrigin['error_path']
if 'skipping' in accountOrigin:
skipping = accountOrigin['skipping']
# error_path: /Users/astro_sk/Documents/TFS/Ansible_Deployer
        if not skipping:  # original tested `if skipping:`, clobbering caller-supplied values
            skipping = {
                "methods": False,
                "options": False,
                "models": False,
                "stage": False,
                "resources": False
            }
        if not apis:
            msg = "[E] missing apis, please fix"
            print(msg)
            raise ValueError(msg)
# ONLY needs two files default definition and tasks
#############################################
#############################################
######## write YAML to file in tasks #######
#############################################
#############################################
# rootFolder=tmp
option = "main"
# mainIn = "%s/%s/%s"%(rootFolder,'tasks',option)
if directorysNeeded:
mainIn = "%s/%s/%s" % (rootFolder, 'tasks', option)
else:
option = "tasks_main"
mainIn = "%s/%s" % (rootFolder, option)
writeYaml(taskMain, mainIn)
file_tasks = "%s.yaml" % mainIn
file_defaults = None
for akey, account in accounts.items():
# if not account in acctID:
if acctID == akey:
acctTitle = account['title']
accDetail = {
"account_id": akey,
"error_path": error_path,
"skipping": skipping,
"env": account['title'],
"role_duration": 3600,
"region": "us-east-1",
"eid": account['eID'],
"roles": [],
"policies": []
}
if assumeRole:
accDetail.update({"cross_acct_role": account['role']})
defaultVar = {targetLabel: accDetail}
########################################################
############# API GATEWAY METHODS ###################
########################################################
# print (" A P I. see below. ......===---->>>")
api_list = []
stage_list = [] #
model_list = models # [] #
# stages.update({apiStage:{'stage':stageLabel,'api':apiName}})
            if apis is not None:
# for mk,mv in models.items():
# model_list.append(mv)
for sk, sv in stages.items():
stage_list.append(sv)
for api in apis:
oApi = {
'name': api['name'],
'id': api['id'],
'credentials': "%s" % api['credentials'],
'authorizationType': api['authorizationType'],
'apiKeyRequired': api['apiKeyRequired'],
'type': api['type'],
'path': api['path'],
'operational_name': api['operationlabel'],
'request_valid': api['requestvalidator'],
'request_params': api['requestparameters'],
'auth_scope': api['authscope'],
'authName': api['authName'],
'request_models': api['requestmodels'],
'response_models': api['responsemodels'],
'httpMethod': api['httpMethod'],
'parentid': api['parentid'],
'method_response': api['methodResponse'],
'method_integration': api['methodIn'],
'state': api['state']
}
api_list.append(oApi)
defaultVar[targetLabel].update({"api_gw": api_list})
defaultVar[targetLabel].update({"api_stages": stage_list})
defaultVar[targetLabel].update({"api_models": model_list})
defaultVar[targetLabel].update({"api_authorizers": auths})
# defaultVar[targetLabel].update({ "api_domains": stage_list })
# defaultVar[targetLabel].update({ "api_usage": stage_list })
#
if directorysNeeded:
networkObj = NETWORK_MAP[akey]
bucketObj = BUCKET_MAP[akey]
cognitoObj = COGNITO_MAP[akey]
option = "main_%s" % account['all']
mainIn = "%s/%s/%s" % (rootFolder, 'defaults', option)
writeYaml(defaultVar, mainIn)
account_replace("%s.yaml" % mainIn, str(acctID), str(akey))
for key, value in BUCKET_MAP[acctID].items():
account_replace("%s.yaml" % mainIn, str(
value), str(bucketObj[key]))
for key, value in NETWORK_MAP[acctID].items():
account_replace("%s.yaml" % mainIn, str(
value), str(networkObj[key]))
for key, value in COGNITO_MAP[acctID].items():
account_replace("%s.yaml" % mainIn, str(
value), str(cognitoObj[key]))
# option = "defaults_main%s"%account['all']
if directorysNeeded:
                if sendto is not None:
print(" .... creating a main.yaml for ansible using dev")
opt = "main_%s.yaml" % accountOrigin['all']
src = "%s/%s/%s" % (rootFolder, 'defaults', opt)
opt2 = "main.yaml"
dst = "%s/%s/%s" % (rootFolder, 'defaults', opt2)
copyfile(src, dst)
print(" -------==------===---- COPY START....")
print(" sending to %s. from %s" % (sendto, rootFolder))
distutils.dir_util.copy_tree(rootFolder, sendto)
print(" -------==------===---- FINAL YAML file....")
ansibleRoot = sendto.split('roles/')[0]
targets = ['%s' % targetString]
rootYML = [{"name": "micro modler for gateways resource -%s" % target,
"hosts": "dev",
"remote_user": "root",
"roles": targets}]
# ansibleRoot
writeYaml(rootYML, ansibleRoot, targetString)
else:
option = "defaults_main"
mainIn = "%s/%s" % (rootFolder, option)
# mainIn = "%s/%s/%s"%(rootFolder,'defaults',option)
file_defaults = "%s.yaml" % mainIn
# CREATE default with all vars
writeYaml(defaultVar, mainIn)
account_replace(file_defaults, str(acctID), str(akey))
print(file_tasks)
# return file_tasks, file_defaults
return acctID, targetString, acctTitle, True
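    # describe_GwResource expects the HTTP method embedded in the target, e.g.
    # (sketch; connection/account objects elided, names are placeholders):
    #
    #   molder.describe_GwResource('/pets/{petId}[GET]', aconnect, origin,
    #                              accounts=accts, types=['api'],
    #                              targetAPI='PetStore')
    #   # a target like '/pets/*[GET]' matches every path under /pets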
1c4883629d9f8ead68e5a641c661b4fe1a8fcefa | 17,266 | py | Python | webrecorder/webrecorder/models/recording.py | awesome-archive/webrecorder | 51ff3adab2c383025984570ab916ca0f03f2c92e | ["Apache-2.0"]
import json
import hashlib
import os
import base64
import shutil
from six.moves.urllib.parse import urlsplit
from pywb.utils.canonicalize import calc_search_range
from pywb.warcserver.index.cdxobject import CDXObject
from pywb.utils.loaders import BlockLoader
from webrecorder.utils import redis_pipeline
from webrecorder.models.base import RedisUniqueComponent, RedisUnorderedList
from webrecorder.models.stats import Stats
from webrecorder.rec.storage.storagepaths import strip_prefix, add_local_store_prefix
from webrecorder.rec.storage import LocalFileStorage
from warcio.timeutils import timestamp_now, sec_to_timestamp, timestamp20_now
# ============================================================================
class Recording(RedisUniqueComponent):
"""Recording Redis building block.
:cvar str MY_TYPE: type of building block
:cvar str INFO_KEY: building block information Redis key
:cvar str ALL_KEYS: building block key pattern Redis key
:cvar str OPEN_REC_KEY: ongoing recording Redis key
    :cvar str CDXJ_KEY: CDXJ index Redis key
:cvar str RA_KEY: remote archives Redis key
:cvar str PENDING_SIZE_KEY: outstanding size Redis key
:cvar str PENDING_COUNT_KEY: outstanding CDX index lines Redis key
    :cvar int PENDING_TTL: TTL in seconds for pending size/count keys
:cvar str REC_WARC_KEY: WARC Redis key (recording)
:cvar str COLL_WARC_KEY: WARC Redis key (collection)
:cvar str COMMIT_LOCK_KEY: storage lock Redis key
:cvar str INDEX_FILE_KEY: CDX index file
:cvar str INDEX_NAME_TEMPL: CDX index filename template
:cvar str DELETE_RETRY: delete/retry Redis key
    :cvar int OPEN_REC_TTL: TTL in seconds for an ongoing recording
"""
MY_TYPE = 'rec'
INFO_KEY = 'r:{rec}:info'
ALL_KEYS = 'r:{rec}:*'
OPEN_REC_KEY = 'r:{rec}:open'
CDXJ_KEY = 'r:{rec}:cdxj'
RA_KEY = 'r:{rec}:ra'
PENDING_SIZE_KEY = 'r:{rec}:_ps'
PENDING_COUNT_KEY = 'r:{rec}:_pc'
PENDING_TTL = 90
REC_WARC_KEY = 'r:{rec}:wk'
COLL_WARC_KEY = 'c:{coll}:warc'
COMMIT_LOCK_KEY = 'r:{rec}:lock'
INDEX_FILE_KEY = '@index_file'
INDEX_NAME_TEMPL = 'index-{timestamp}-{random}.cdxj'
DELETE_RETRY = 'q:delete_retry'
# overridable
OPEN_REC_TTL = 5400
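    # For a recording with id 'abc123' the templates above expand to keys such
    # as the following (sketch of the naming scheme, not an exhaustive list):
    #
    #   Recording.INFO_KEY.format(rec='abc123')    -> 'r:abc123:info'
    #   Recording.CDXJ_KEY.format(rec='abc123')    -> 'r:abc123:cdxj'
    #   Recording.COLL_WARC_KEY.format(coll='c1')  -> 'c:c1:warc'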
@classmethod
def init_props(cls, config):
"""Initialize class variables.
:param dict config: Webrecorder configuration
"""
cls.OPEN_REC_TTL = int(config['open_rec_ttl'])
#cls.INDEX_FILE_KEY = config['info_index_key']
cls.CDXJ_KEY = config.get('cdxj_key_templ', cls.CDXJ_KEY)
#cls.INDEX_NAME_TEMPL = config['index_name_templ']
#cls.COMMIT_WAIT_TEMPL = config['commit_wait_templ']
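    # A minimal config accepted by init_props might look like this (hedged
    # sketch; the real Webrecorder config carries many more keys):
    #
    #   Recording.init_props({'open_rec_ttl': '5400',
    #                         'cdxj_key_templ': 'r:{rec}:cdxj'})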
@property
def name(self):
"""Read-only attribute name."""
return self.my_id
def init_new(self, title='', desc='', rec_type=None, ra_list=None):
"""Initialize new recording Redis building block.
:param str title: title
:param str desc: description
:param rec_type: type of recording
:type: str or None
:param ra_list: remote archives
:type: list or None
:returns: component ID
:rtype: str
"""
rec = self._create_new_id()
open_rec_key = self.OPEN_REC_KEY.format(rec=rec)
self.data = {
'title': title,
'desc': desc,
'size': 0,
}
if rec_type:
self.data['rec_type'] = rec_type
with redis_pipeline(self.redis) as pi:
self._init_new(pi)
if ra_list:
ra_key = self.RA_KEY.format(rec=self.my_id)
pi.sadd(ra_key, *ra_list)
pi.setex(open_rec_key, self.OPEN_REC_TTL, 1)
return rec
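    # Typical creation flow (sketch; how the `recording` instance is obtained
    # depends on RedisUniqueComponent, which is defined elsewhere):
    #
    #   rec_id = recording.init_new(title='My crawl', rec_type='recording',
    #                               ra_list=['ia'])
    #   assert recording.is_open(extend=False)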
def is_open(self, extend=True):
"""Return whether the recording is ongoing. Optionally extend
TTL of recording.
:param bool extend: whether to extend TTL of recording
:returns: whether recording is ongoing
:rtype: bool
"""
open_rec_key = self.OPEN_REC_KEY.format(rec=self.my_id)
if extend:
return self.redis.expire(open_rec_key, self.OPEN_REC_TTL)
else:
return self.redis.exists(open_rec_key)
def set_closed(self):
"""Close recording."""
open_rec_key = self.OPEN_REC_KEY.format(rec=self.my_id)
self.redis.delete(open_rec_key)
def is_fully_committed(self):
"""Return whether the CDX index file has been fully committed
to storage.
:returns: whether the CDX index file is fully committed
:rtype: bool
"""
if self.get_pending_count() > 0:
return False
cdxj_key = self.CDXJ_KEY.format(rec=self.my_id)
        return not self.redis.exists(cdxj_key)
def get_pending_count(self):
"""Return outstanding CDX index lines.
:returns: outstanding CDX index lines
:rtype: int
"""
pending_count = self.PENDING_COUNT_KEY.format(rec=self.my_id)
return int(self.redis.get(pending_count) or 0)
def get_pending_size(self):
"""Return outstanding size.
:returns: outstanding size
:rtype: int
"""
pending_size = self.PENDING_SIZE_KEY.format(rec=self.my_id)
return int(self.redis.get(pending_size) or 0)
def inc_pending_count(self):
"""Increase outstanding CDX index lines."""
if not self.is_open(extend=False):
return
pending_count = self.PENDING_COUNT_KEY.format(rec=self.my_id)
self.redis.incrby(pending_count, 1)
self.redis.expire(pending_count, self.PENDING_TTL)
def inc_pending_size(self, size):
"""Increase outstanding size.
:param int size: size
"""
if not self.is_open(extend=False):
return
pending_size = self.PENDING_SIZE_KEY.format(rec=self.my_id)
self.redis.incrby(pending_size, size)
self.redis.expire(pending_size, self.PENDING_TTL)
def dec_pending_count_and_size(self, size):
"""Decrease outstanding CDX index lines and size.
:param int size: size
"""
# return if rec no longer exists (deleted while transfer is pending)
if not self.redis.exists(self.info_key):
return
pending_count = self.PENDING_COUNT_KEY.format(rec=self.my_id)
self.redis.incrby(pending_count, -1)
pending_size = self.PENDING_SIZE_KEY.format(rec=self.my_id)
self.redis.incrby(pending_size, -size)
def serialize(self,
include_pages=False,
convert_date=True,
export_filter=False,
include_files=False):
"""Serialize Redis entries.
:param bool include_pages: whether to include pages
        :param bool convert_date: whether to convert dates
        :param bool export_filter: export filter flag (not used by this implementation)
        :param bool include_files: whether to include
            WARC and CDX index file filenames
        :returns: serialized recording data
        :rtype: dict
"""
data = super(Recording, self).serialize(include_duration=True,
convert_date=convert_date)
if include_pages:
data['pages'] = self.get_owner().list_rec_pages(self)
# add any remote archive sources
ra_key = self.RA_KEY.format(rec=self.my_id)
data['ra_sources'] = list(self.redis.smembers(ra_key))
if include_files:
files = {}
files['warcs'] = [n for n, v in self.iter_all_files(include_index=False)]
index_file = self.get_prop(self.INDEX_FILE_KEY)
if index_file:
files['indexes'] = [os.path.basename(index_file)]
data['files'] = files
data.pop(self.INDEX_FILE_KEY, '')
return data
def delete_me(self, storage, pages=True):
"""Delete recording.
:param BaseStorage storage: Webrecorder storage
:param bool pages: whether to delete pages
:returns: result
:rtype: dict
"""
res = self.delete_files(storage)
Stats(self.redis).incr_delete(self)
# if deleting collection, no need to remove pages for each recording
# they'll be deleted with the collection
if pages:
self.get_owner().delete_rec_pages(self)
if not self.delete_object():
res['error'] = 'not_found'
return res
def _coll_warc_key(self):
"""Return WARC Redis key (collection).
:returns: Redis key
:rtype: str
"""
return self.COLL_WARC_KEY.format(coll=self.get_prop('owner'))
def iter_all_files(self, include_index=False):
"""Return filenames (generator).
:param bool include_index: whether to include index files
:returns: Redis key and filename
:rtype: str and str
"""
warc_key = self.REC_WARC_KEY.format(rec=self.my_id)
rec_warc_keys = self.redis.smembers(warc_key)
if rec_warc_keys:
all_files = self.redis.hmget(self._coll_warc_key(), rec_warc_keys)
for n, v in zip(rec_warc_keys, all_files):
yield n, v
if include_index:
index_file = self.get_prop(self.INDEX_FILE_KEY)
if index_file:
yield self.INDEX_FILE_KEY, index_file
def delete_files(self, storage):
"""Delete files (WARC and CDX index files).
:param BaseStorage storage: Webrecorder storage
:returns: result
:rtype: dict
"""
errs = []
coll_warc_key = self._coll_warc_key()
local_storage = LocalFileStorage(self.redis)
for n, v in self.iter_all_files(include_index=True):
success = storage.delete_file(v)
            # if deleting with the default storage failed, this may be a local,
            # uncommitted file that must be deleted via local storage
if not success:
success = local_storage.delete_file(v)
if not success:
errs.append(v)
# queue file to retry deletion later
self.redis.rpush(self.DELETE_RETRY, v)
else:
self.redis.hdel(coll_warc_key, n)
if errs:
return {'error_delete_files': errs}
else:
return {}
def track_remote_archive(self, pi, source_id):
"""Add remote archive.
:param StrictRedis pi: Redis interface (pipeline)
:param str source_id: remote archive ID
"""
ra_key = self.RA_KEY.format(rec=self.my_id)
pi.sadd(ra_key, source_id)
def set_patch_recording(self, patch_recording, update_ts=True):
"""Set recording patch.
:param RedisUniqueComponent patch_recording: recording building block
:param bool update_ts: whether to update timestamp
"""
if patch_recording:
self.set_prop('patch_rec', patch_recording.my_id, update_ts=update_ts)
def get_patch_recording(self):
"""Get recording patch.
:returns: recording patch
:rtype: RedisUniqueComponent
"""
patch_rec = self.get_prop('patch_rec')
if patch_rec:
return self.get_owner().get_recording(patch_rec)
def write_cdxj(self, user, cdxj_key):
"""Write CDX index lines to file.
:param RedisUniqueComponent user: user
:param str cdxj_key: CDX index file Redis key
        :returns: CDX index filename (basename) and full path
:rtype: str and str
"""
#full_filename = self.redis.hget(warc_key, self.INDEX_FILE_KEY)
full_filename = self.get_prop(self.INDEX_FILE_KEY)
if full_filename:
cdxj_filename = os.path.basename(strip_prefix(full_filename))
return cdxj_filename, full_filename
dirname = user.get_user_temp_warc_path()
randstr = base64.b32encode(os.urandom(5)).decode('utf-8')
timestamp = timestamp_now()
cdxj_filename = self.INDEX_NAME_TEMPL.format(timestamp=timestamp,
random=randstr)
os.makedirs(dirname, exist_ok=True)
full_filename = os.path.join(dirname, cdxj_filename)
cdxj_list = self.redis.zrange(cdxj_key, 0, -1)
with open(full_filename, 'wt') as out:
for cdxj in cdxj_list:
out.write(cdxj + '\n')
out.flush()
full_url = add_local_store_prefix(full_filename.replace(os.path.sep, '/'))
#self.redis.hset(warc_key, self.INDEX_FILE_KEY, full_url)
self.set_prop(self.INDEX_FILE_KEY, full_url)
return cdxj_filename, full_filename
def commit_to_storage(self, storage=None):
"""Commit WARCs and CDX files to storage.
:param storage: Webrecorder storage
:type: BaseStorage or None
"""
commit_lock = self.COMMIT_LOCK_KEY.format(rec=self.my_id)
if not self.redis.setnx(commit_lock, '1'):
return
collection = self.get_owner()
user = collection.get_owner()
if not storage and not user.is_anon():
storage = collection.get_storage()
info_key = self.INFO_KEY.format(rec=self.my_id)
cdxj_key = self.CDXJ_KEY.format(rec=self.my_id)
warc_key = self.COLL_WARC_KEY.format(coll=collection.my_id)
self.redis.publish('close_rec', info_key)
cdxj_filename, full_cdxj_filename = self.write_cdxj(user, cdxj_key)
all_done = True
if storage:
all_done = collection.commit_file(cdxj_filename, full_cdxj_filename, 'indexes',
info_key, self.INDEX_FILE_KEY, direct_delete=True)
for warc_filename, warc_full_filename in self.iter_all_files():
done = collection.commit_file(warc_filename, warc_full_filename, 'warcs', warc_key)
all_done = all_done and done
if all_done:
print('Deleting Redis Key: ' + cdxj_key)
self.redis.delete(cdxj_key)
self.redis.delete(commit_lock)
def _copy_prop(self, source, name):
"""Copy attribute value from given building block.
:param RedisUniqueComponent source: Redis building block
:param str name: attribute name
"""
prop = source.get_prop(name)
if prop:
self.set_prop(name, prop)
def copy_data_from_recording(self, source, delete_source=False):
"""Copy given recording building block entries.
:param RedisUniqueComponent source: building block
:param bool delete_source: whether to delete source building block
:returns: whether successful or not
:rtype: bool
"""
if self == source:
return False
if not self.is_open():
return False
errored = False
self._copy_prop(source, 'title')
self._copy_prop(source, 'desc')
self._copy_prop(source, 'rec_type')
self._copy_prop(source, 'recorded_at')
#self._copy_prop(source, 'patch_rec')
collection = self.get_owner()
user = collection.get_owner()
target_dirname = user.get_user_temp_warc_path()
target_warc_key = self.COLL_WARC_KEY.format(coll=collection.my_id)
# Copy WARCs
loader = BlockLoader()
for n, url in source.iter_all_files(include_index=True):
local_filename = n + '.' + timestamp20_now()
target_file = os.path.join(target_dirname, local_filename)
src = loader.load(url)
try:
with open(target_file, 'wb') as dest:
print('Copying {0} -> {1}'.format(url, target_file))
shutil.copyfileobj(src, dest)
size = dest.tell()
if n != self.INDEX_FILE_KEY:
self.incr_size(size)
self.redis.hset(target_warc_key, n, add_local_store_prefix(target_file))
else:
self.set_prop(n, target_file)
            except Exception:
import traceback
traceback.print_exc()
errored = True
# COPY cdxj, if exists
source_key = self.CDXJ_KEY.format(rec=source.my_id)
target_key = self.CDXJ_KEY.format(rec=self.my_id)
self.redis.zunionstore(target_key, [source_key])
# recreate pages, if any, in new recording
source_coll = source.get_owner()
source_pages = source_coll.list_rec_pages(source)
collection.import_pages(source_pages, self)
# COPY remote archives, if any
self.redis.sunionstore(self.RA_KEY.format(rec=self.my_id),
self.RA_KEY.format(rec=source.my_id))
# COPY recording warc keys
self.redis.sunionstore(self.REC_WARC_KEY.format(rec=self.my_id),
self.REC_WARC_KEY.format(rec=source.my_id))
# sync collection cdxj, if exists
collection.sync_coll_index(exists=True, do_async=True)
if not errored and delete_source:
collection = source.get_owner()
collection.remove_recording(source, delete=True)
return not errored
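# A minimal, hedged usage sketch (not part of the original module). It shows
# the intended lifecycle of a Recording: create it, confirm it is open, then
# commit it to storage. It assumes a running Redis instance and a fully wired
# Webrecorder object graph (user/collection/recording), which this sketch
# does not construct.
def _recording_lifecycle_sketch(recording):
    """Drive one recording through open -> close -> commit."""
    rec_id = recording.init_new(title='My capture', rec_type='record')
    assert recording.is_open(extend=False)
    # ... WARC writing and CDX indexing happen elsewhere while the rec is open ...
    recording.set_closed()
    recording.commit_to_storage()  # storage is resolved from the owning collection
    return rec_id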
| 31.856089
| 99
| 0.618788
|
import json
import hashlib
import os
import base64
import shutil
from six.moves.urllib.parse import urlsplit
from pywb.utils.canonicalize import calc_search_range
from pywb.warcserver.index.cdxobject import CDXObject
from pywb.utils.loaders import BlockLoader
from webrecorder.utils import redis_pipeline
from webrecorder.models.base import RedisUniqueComponent, RedisUnorderedList
from webrecorder.models.stats import Stats
from webrecorder.rec.storage.storagepaths import strip_prefix, add_local_store_prefix
from webrecorder.rec.storage import LocalFileStorage
from warcio.timeutils import timestamp_now, sec_to_timestamp, timestamp20_now
class Recording(RedisUniqueComponent):
MY_TYPE = 'rec'
INFO_KEY = 'r:{rec}:info'
ALL_KEYS = 'r:{rec}:*'
OPEN_REC_KEY = 'r:{rec}:open'
CDXJ_KEY = 'r:{rec}:cdxj'
RA_KEY = 'r:{rec}:ra'
PENDING_SIZE_KEY = 'r:{rec}:_ps'
PENDING_COUNT_KEY = 'r:{rec}:_pc'
PENDING_TTL = 90
REC_WARC_KEY = 'r:{rec}:wk'
COLL_WARC_KEY = 'c:{coll}:warc'
COMMIT_LOCK_KEY = 'r:{rec}:lock'
INDEX_FILE_KEY = '@index_file'
INDEX_NAME_TEMPL = 'index-{timestamp}-{random}.cdxj'
DELETE_RETRY = 'q:delete_retry'
OPEN_REC_TTL = 5400
@classmethod
def init_props(cls, config):
cls.OPEN_REC_TTL = int(config['open_rec_ttl'])
cls.CDXJ_KEY = config.get('cdxj_key_templ', cls.CDXJ_KEY)
@property
def name(self):
return self.my_id
def init_new(self, title='', desc='', rec_type=None, ra_list=None):
rec = self._create_new_id()
open_rec_key = self.OPEN_REC_KEY.format(rec=rec)
self.data = {
'title': title,
'desc': desc,
'size': 0,
}
if rec_type:
self.data['rec_type'] = rec_type
with redis_pipeline(self.redis) as pi:
self._init_new(pi)
if ra_list:
ra_key = self.RA_KEY.format(rec=self.my_id)
pi.sadd(ra_key, *ra_list)
pi.setex(open_rec_key, self.OPEN_REC_TTL, 1)
return rec
def is_open(self, extend=True):
open_rec_key = self.OPEN_REC_KEY.format(rec=self.my_id)
if extend:
return self.redis.expire(open_rec_key, self.OPEN_REC_TTL)
else:
return self.redis.exists(open_rec_key)
def set_closed(self):
open_rec_key = self.OPEN_REC_KEY.format(rec=self.my_id)
self.redis.delete(open_rec_key)
def is_fully_committed(self):
if self.get_pending_count() > 0:
return False
cdxj_key = self.CDXJ_KEY.format(rec=self.my_id)
        return not self.redis.exists(cdxj_key)
def get_pending_count(self):
pending_count = self.PENDING_COUNT_KEY.format(rec=self.my_id)
return int(self.redis.get(pending_count) or 0)
def get_pending_size(self):
pending_size = self.PENDING_SIZE_KEY.format(rec=self.my_id)
return int(self.redis.get(pending_size) or 0)
def inc_pending_count(self):
if not self.is_open(extend=False):
return
pending_count = self.PENDING_COUNT_KEY.format(rec=self.my_id)
self.redis.incrby(pending_count, 1)
self.redis.expire(pending_count, self.PENDING_TTL)
def inc_pending_size(self, size):
if not self.is_open(extend=False):
return
pending_size = self.PENDING_SIZE_KEY.format(rec=self.my_id)
self.redis.incrby(pending_size, size)
self.redis.expire(pending_size, self.PENDING_TTL)
def dec_pending_count_and_size(self, size):
if not self.redis.exists(self.info_key):
return
pending_count = self.PENDING_COUNT_KEY.format(rec=self.my_id)
self.redis.incrby(pending_count, -1)
pending_size = self.PENDING_SIZE_KEY.format(rec=self.my_id)
self.redis.incrby(pending_size, -size)
def serialize(self,
include_pages=False,
convert_date=True,
export_filter=False,
include_files=False):
data = super(Recording, self).serialize(include_duration=True,
convert_date=convert_date)
if include_pages:
data['pages'] = self.get_owner().list_rec_pages(self)
ra_key = self.RA_KEY.format(rec=self.my_id)
data['ra_sources'] = list(self.redis.smembers(ra_key))
if include_files:
files = {}
files['warcs'] = [n for n, v in self.iter_all_files(include_index=False)]
index_file = self.get_prop(self.INDEX_FILE_KEY)
if index_file:
files['indexes'] = [os.path.basename(index_file)]
data['files'] = files
data.pop(self.INDEX_FILE_KEY, '')
return data
def delete_me(self, storage, pages=True):
res = self.delete_files(storage)
Stats(self.redis).incr_delete(self)
if pages:
self.get_owner().delete_rec_pages(self)
if not self.delete_object():
res['error'] = 'not_found'
return res
def _coll_warc_key(self):
return self.COLL_WARC_KEY.format(coll=self.get_prop('owner'))
def iter_all_files(self, include_index=False):
warc_key = self.REC_WARC_KEY.format(rec=self.my_id)
rec_warc_keys = self.redis.smembers(warc_key)
if rec_warc_keys:
all_files = self.redis.hmget(self._coll_warc_key(), rec_warc_keys)
for n, v in zip(rec_warc_keys, all_files):
yield n, v
if include_index:
index_file = self.get_prop(self.INDEX_FILE_KEY)
if index_file:
yield self.INDEX_FILE_KEY, index_file
def delete_files(self, storage):
errs = []
coll_warc_key = self._coll_warc_key()
local_storage = LocalFileStorage(self.redis)
for n, v in self.iter_all_files(include_index=True):
success = storage.delete_file(v)
            # if deleting with the default storage failed, this may be a local,
            # uncommitted file that must be deleted via local storage
if not success:
success = local_storage.delete_file(v)
if not success:
errs.append(v)
# queue file to retry deletion later
self.redis.rpush(self.DELETE_RETRY, v)
else:
self.redis.hdel(coll_warc_key, n)
if errs:
return {'error_delete_files': errs}
else:
return {}
def track_remote_archive(self, pi, source_id):
ra_key = self.RA_KEY.format(rec=self.my_id)
pi.sadd(ra_key, source_id)
def set_patch_recording(self, patch_recording, update_ts=True):
if patch_recording:
self.set_prop('patch_rec', patch_recording.my_id, update_ts=update_ts)
def get_patch_recording(self):
patch_rec = self.get_prop('patch_rec')
if patch_rec:
return self.get_owner().get_recording(patch_rec)
def write_cdxj(self, user, cdxj_key):
#full_filename = self.redis.hget(warc_key, self.INDEX_FILE_KEY)
full_filename = self.get_prop(self.INDEX_FILE_KEY)
if full_filename:
cdxj_filename = os.path.basename(strip_prefix(full_filename))
return cdxj_filename, full_filename
dirname = user.get_user_temp_warc_path()
randstr = base64.b32encode(os.urandom(5)).decode('utf-8')
timestamp = timestamp_now()
cdxj_filename = self.INDEX_NAME_TEMPL.format(timestamp=timestamp,
random=randstr)
os.makedirs(dirname, exist_ok=True)
full_filename = os.path.join(dirname, cdxj_filename)
cdxj_list = self.redis.zrange(cdxj_key, 0, -1)
with open(full_filename, 'wt') as out:
for cdxj in cdxj_list:
out.write(cdxj + '\n')
out.flush()
full_url = add_local_store_prefix(full_filename.replace(os.path.sep, '/'))
#self.redis.hset(warc_key, self.INDEX_FILE_KEY, full_url)
self.set_prop(self.INDEX_FILE_KEY, full_url)
return cdxj_filename, full_filename
def commit_to_storage(self, storage=None):
commit_lock = self.COMMIT_LOCK_KEY.format(rec=self.my_id)
if not self.redis.setnx(commit_lock, '1'):
return
collection = self.get_owner()
user = collection.get_owner()
if not storage and not user.is_anon():
storage = collection.get_storage()
info_key = self.INFO_KEY.format(rec=self.my_id)
cdxj_key = self.CDXJ_KEY.format(rec=self.my_id)
warc_key = self.COLL_WARC_KEY.format(coll=collection.my_id)
self.redis.publish('close_rec', info_key)
cdxj_filename, full_cdxj_filename = self.write_cdxj(user, cdxj_key)
all_done = True
if storage:
all_done = collection.commit_file(cdxj_filename, full_cdxj_filename, 'indexes',
info_key, self.INDEX_FILE_KEY, direct_delete=True)
for warc_filename, warc_full_filename in self.iter_all_files():
done = collection.commit_file(warc_filename, warc_full_filename, 'warcs', warc_key)
all_done = all_done and done
if all_done:
print('Deleting Redis Key: ' + cdxj_key)
self.redis.delete(cdxj_key)
self.redis.delete(commit_lock)
def _copy_prop(self, source, name):
prop = source.get_prop(name)
if prop:
self.set_prop(name, prop)
def copy_data_from_recording(self, source, delete_source=False):
if self == source:
return False
if not self.is_open():
return False
errored = False
self._copy_prop(source, 'title')
self._copy_prop(source, 'desc')
self._copy_prop(source, 'rec_type')
self._copy_prop(source, 'recorded_at')
#self._copy_prop(source, 'patch_rec')
collection = self.get_owner()
user = collection.get_owner()
target_dirname = user.get_user_temp_warc_path()
target_warc_key = self.COLL_WARC_KEY.format(coll=collection.my_id)
# Copy WARCs
loader = BlockLoader()
for n, url in source.iter_all_files(include_index=True):
local_filename = n + '.' + timestamp20_now()
target_file = os.path.join(target_dirname, local_filename)
src = loader.load(url)
try:
with open(target_file, 'wb') as dest:
print('Copying {0} -> {1}'.format(url, target_file))
shutil.copyfileobj(src, dest)
size = dest.tell()
if n != self.INDEX_FILE_KEY:
self.incr_size(size)
self.redis.hset(target_warc_key, n, add_local_store_prefix(target_file))
else:
self.set_prop(n, target_file)
            except Exception:
import traceback
traceback.print_exc()
errored = True
# COPY cdxj, if exists
source_key = self.CDXJ_KEY.format(rec=source.my_id)
target_key = self.CDXJ_KEY.format(rec=self.my_id)
self.redis.zunionstore(target_key, [source_key])
# recreate pages, if any, in new recording
source_coll = source.get_owner()
source_pages = source_coll.list_rec_pages(source)
collection.import_pages(source_pages, self)
# COPY remote archives, if any
self.redis.sunionstore(self.RA_KEY.format(rec=self.my_id),
self.RA_KEY.format(rec=source.my_id))
# COPY recording warc keys
self.redis.sunionstore(self.REC_WARC_KEY.format(rec=self.my_id),
self.REC_WARC_KEY.format(rec=source.my_id))
# sync collection cdxj, if exists
collection.sync_coll_index(exists=True, do_async=True)
if not errored and delete_source:
collection = source.get_owner()
collection.remove_recording(source, delete=True)
return not errored
| true
| true
|
1c488389c60c45a3e89c0d110cc34e9ff1cf71ed
| 1,792
|
py
|
Python
|
app/auth/forms.py
|
benzhang13/microblog
|
c5bfea71c9832e67bfb585ac90268b2cd651f5c8
|
[
"MIT"
] | null | null | null |
app/auth/forms.py
|
benzhang13/microblog
|
c5bfea71c9832e67bfb585ac90268b2cd651f5c8
|
[
"MIT"
] | 1
|
2021-06-02T00:00:55.000Z
|
2021-06-02T00:00:55.000Z
|
app/auth/forms.py
|
benzhang13/microblog
|
c5bfea71c9832e67bfb585ac90268b2cd651f5c8
|
[
"MIT"
] | null | null | null |
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, SubmitField
from wtforms.validators import DataRequired, EqualTo, ValidationError, Email
from flask_babel import _, lazy_gettext as _l
from app.models import User
class LoginForm(FlaskForm):
username = StringField(_l('Username'), validators=[DataRequired()])
password = PasswordField(_l('Password'), validators=[DataRequired()])
remember_me = BooleanField(_l('Remember Me'))
submit = SubmitField(_l('Sign In'))
class RegistrationForm(FlaskForm):
username = StringField(_l('Username'), validators=[DataRequired()])
email = StringField(_l('E-Mail'), validators=[DataRequired(), Email()])
password = PasswordField(_l('Password'), validators=[DataRequired()])
confirm_password = PasswordField(_l('Confirm Password'), validators=[DataRequired(), EqualTo('password')])
submit = SubmitField(_l('Register'))
def validate_username(self, username):
user = User.query.filter_by(username=username.data).first()
if user is not None:
raise ValidationError(_('That username is already in use.'))
def validate_email(self, email):
user = User.query.filter_by(email=email.data).first()
if user is not None:
raise ValidationError(_('That email address is already in use.'))
class ResetPasswordRequestForm(FlaskForm):
email = StringField(_l('Email'), validators=[DataRequired(), Email()])
submit = SubmitField(_l('Request Password Reset'))
class ResetPasswordForm(FlaskForm):
password = PasswordField(_l('Password'), validators=[DataRequired()])
password2 = PasswordField(_l('Confirm Password'), validators=[DataRequired(), EqualTo('password')])
submit = SubmitField(_l('Reset Password'))
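# A hedged sketch of how WTForms invokes the inline validate_<fieldname>
# hooks defined above: any method named validate_<field> runs as an extra
# validator for that field. This standalone demo uses plain wtforms.Form (no
# Flask request context or database); the in-memory set below is a
# hypothetical stand-in for the User table query.
from wtforms import Form

_TAKEN_USERNAMES = {'alice'}  # hypothetical data, not a real user store

class _DemoRegistrationForm(Form):
    username = StringField('Username', validators=[DataRequired()])

    def validate_username(self, username):
        # same pattern as RegistrationForm.validate_username above
        if username.data in _TAKEN_USERNAMES:
            raise ValidationError('That username is already in use.')

if __name__ == '__main__':
    form = _DemoRegistrationForm(data={'username': 'alice'})
    assert not form.validate()
    print(form.errors)  # {'username': ['That username is already in use.']}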
| 43.707317
| 110
| 0.72154
|
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, SubmitField
from wtforms.validators import DataRequired, EqualTo, ValidationError, Email
from flask_babel import _, lazy_gettext as _l
from app.models import User
class LoginForm(FlaskForm):
username = StringField(_l('Username'), validators=[DataRequired()])
password = PasswordField(_l('Password'), validators=[DataRequired()])
remember_me = BooleanField(_l('Remember Me'))
submit = SubmitField(_l('Sign In'))
class RegistrationForm(FlaskForm):
username = StringField(_l('Username'), validators=[DataRequired()])
email = StringField(_l('E-Mail'), validators=[DataRequired(), Email()])
password = PasswordField(_l('Password'), validators=[DataRequired()])
confirm_password = PasswordField(_l('Confirm Password'), validators=[DataRequired(), EqualTo('password')])
submit = SubmitField(_l('Register'))
def validate_username(self, username):
user = User.query.filter_by(username=username.data).first()
if user is not None:
raise ValidationError(_('That username is already in use.'))
def validate_email(self, email):
user = User.query.filter_by(email=email.data).first()
if user is not None:
raise ValidationError(_('That email address is already in use.'))
class ResetPasswordRequestForm(FlaskForm):
email = StringField(_l('Email'), validators=[DataRequired(), Email()])
submit = SubmitField(_l('Request Password Reset'))
class ResetPasswordForm(FlaskForm):
password = PasswordField(_l('Password'), validators=[DataRequired()])
password2 = PasswordField(_l('Confirm Password'), validators=[DataRequired(), EqualTo('password')])
submit = SubmitField(_l('Reset Password'))
| true
| true
|
1c4883c265a51d61f247b212020ad34acbb8f4fd
| 3,740
|
py
|
Python
|
src/neon_paths.py
|
Ritesh313/DeepTreeAttention
|
b1095831acdb98b84c7ea93ab529e519145c89ad
|
[
"MIT"
] | null | null | null |
src/neon_paths.py
|
Ritesh313/DeepTreeAttention
|
b1095831acdb98b84c7ea93ab529e519145c89ad
|
[
"MIT"
] | null | null | null |
src/neon_paths.py
|
Ritesh313/DeepTreeAttention
|
b1095831acdb98b84c7ea93ab529e519145c89ad
|
[
"MIT"
] | null | null | null |
# Utility functions for searching for NEON schema data given bounds or a filename, optionally generating .tif files from .h5 hyperspec files.
import os
import math
import re
import h5py
from src import Hyperspectral
def bounds_to_geoindex(bounds):
"""Convert an extent into NEONs naming schema
Args:
bounds: list of top, left, bottom, right bounds, usually from geopandas.total_bounds
Return:
geoindex: str {easting}_{northing}
"""
easting = min(bounds[0], bounds[2])
northing = min(bounds[1], bounds[3])
easting = math.floor(easting / 1000) * 1000
northing = math.floor(northing / 1000) * 1000
geoindex = "{}_{}".format(easting, northing)
return geoindex
def find_sensor_path(lookup_pool, shapefile=None, bounds=None):
"""Find a hyperspec path based on the shapefile using NEONs schema
Args:
bounds: Optional: list of top, left, bottom, right bounds, usually from geopandas.total_bounds. Instead of providing a shapefile
lookup_pool: glob string to search for matching files for geoindex
Returns:
year_match: full path to sensor tile
"""
if shapefile is None:
geo_index = bounds_to_geoindex(bounds=bounds)
match = [x for x in lookup_pool if geo_index in x]
match.sort()
try:
year_match = match[-1]
except Exception as e:
raise ValueError("No matches for geoindex {} in sensor pool with bounds {}".format(geo_index, bounds))
else:
#Get file metadata from name string
basename = os.path.splitext(os.path.basename(shapefile))[0]
        geo_index = re.search(r"(\d+_\d+)_image", basename).group(1)
match = [x for x in lookup_pool if geo_index in x]
match.sort()
try:
year_match = match[-1]
except Exception as e:
raise ValueError("No matches for geoindex {} in sensor pool".format(geo_index))
return year_match
def convert_h5(hyperspectral_h5_path, rgb_path, savedir):
tif_basename = os.path.splitext(os.path.basename(rgb_path))[0] + "_hyperspectral.tif"
tif_path = "{}/{}".format(savedir, tif_basename)
Hyperspectral.generate_raster(h5_path=hyperspectral_h5_path,
rgb_filename=rgb_path,
bands="All",
save_dir=savedir)
return tif_path
def lookup_and_convert(rgb_pool, hyperspectral_pool, savedir, bounds = None, shapefile=None):
hyperspectral_h5_path = find_sensor_path(shapefile=shapefile,lookup_pool=hyperspectral_pool, bounds=bounds)
rgb_path = find_sensor_path(shapefile=shapefile, lookup_pool=rgb_pool, bounds=bounds)
#convert .h5 hyperspec tile if needed
tif_basename = os.path.splitext(os.path.basename(rgb_path))[0] + "_hyperspectral.tif"
tif_path = "{}/{}".format(savedir, tif_basename)
if not os.path.exists(tif_path):
tif_path = convert_h5(hyperspectral_h5_path, rgb_path, savedir)
return tif_path
def site_from_path(path):
basename = os.path.splitext(os.path.basename(path))[0]
    site_name = re.search(r"NEON_D\d+_(\w+)_D", basename).group(1)
return site_name
def domain_from_path(path):
basename = os.path.splitext(os.path.basename(path))[0]
    domain_name = re.search(r"NEON_(D\d+)_\w+_D", basename).group(1)
return domain_name
def elevation_from_tile(path):
try:
h5 = h5py.File(path, 'r')
elevation = h5[list(h5.keys())[0]]["Reflectance"]["Metadata"]["Ancillary_Imagery"]["Smooth_Surface_Elevation"].value.mean()
h5.close()
except Exception as e:
        raise IOError("{} failed to read elevation from tile: {}".format(path, e))
return elevation
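# A hedged example of bounds_to_geoindex (the coordinates below are invented):
# the minimum easting/northing of the extent is snapped down to the nearest
# 1 km to produce NEON's {easting}_{northing} tile name.
if __name__ == '__main__':
    demo_bounds = [404211.5, 3285102.9, 404790.2, 3285800.0]  # (left, bottom, right, top) in meters
    print(bounds_to_geoindex(demo_bounds))  # -> 404000_3285000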
| 36.666667
| 139
| 0.669786
|
import os
import math
import re
import h5py
from src import Hyperspectral
def bounds_to_geoindex(bounds):
easting = min(bounds[0], bounds[2])
northing = min(bounds[1], bounds[3])
easting = math.floor(easting / 1000) * 1000
northing = math.floor(northing / 1000) * 1000
geoindex = "{}_{}".format(easting, northing)
return geoindex
def find_sensor_path(lookup_pool, shapefile=None, bounds=None):
if shapefile is None:
geo_index = bounds_to_geoindex(bounds=bounds)
match = [x for x in lookup_pool if geo_index in x]
match.sort()
try:
year_match = match[-1]
except Exception as e:
raise ValueError("No matches for geoindex {} in sensor pool with bounds {}".format(geo_index, bounds))
else:
basename = os.path.splitext(os.path.basename(shapefile))[0]
        geo_index = re.search(r"(\d+_\d+)_image", basename).group(1)
match = [x for x in lookup_pool if geo_index in x]
match.sort()
try:
year_match = match[-1]
except Exception as e:
raise ValueError("No matches for geoindex {} in sensor pool".format(geo_index))
return year_match
def convert_h5(hyperspectral_h5_path, rgb_path, savedir):
tif_basename = os.path.splitext(os.path.basename(rgb_path))[0] + "_hyperspectral.tif"
tif_path = "{}/{}".format(savedir, tif_basename)
Hyperspectral.generate_raster(h5_path=hyperspectral_h5_path,
rgb_filename=rgb_path,
bands="All",
save_dir=savedir)
return tif_path
def lookup_and_convert(rgb_pool, hyperspectral_pool, savedir, bounds = None, shapefile=None):
hyperspectral_h5_path = find_sensor_path(shapefile=shapefile,lookup_pool=hyperspectral_pool, bounds=bounds)
rgb_path = find_sensor_path(shapefile=shapefile, lookup_pool=rgb_pool, bounds=bounds)
tif_basename = os.path.splitext(os.path.basename(rgb_path))[0] + "_hyperspectral.tif"
tif_path = "{}/{}".format(savedir, tif_basename)
if not os.path.exists(tif_path):
tif_path = convert_h5(hyperspectral_h5_path, rgb_path, savedir)
return tif_path
def site_from_path(path):
basename = os.path.splitext(os.path.basename(path))[0]
    site_name = re.search(r"NEON_D\d+_(\w+)_D", basename).group(1)
return site_name
def domain_from_path(path):
basename = os.path.splitext(os.path.basename(path))[0]
    domain_name = re.search(r"NEON_(D\d+)_\w+_D", basename).group(1)
return domain_name
def elevation_from_tile(path):
try:
h5 = h5py.File(path, 'r')
elevation = h5[list(h5.keys())[0]]["Reflectance"]["Metadata"]["Ancillary_Imagery"]["Smooth_Surface_Elevation"].value.mean()
h5.close()
except Exception as e:
        raise IOError("{} failed to read elevation from tile: {}".format(path, e))
return elevation
| true
| true
|
1c4884286d5cca9042d1e66e455a44b672e6671c
| 2,340
|
py
|
Python
|
tests/test_user_settings.py
|
fearsd/django-logging-middleware
|
6eb95774c1bcb1829aa1a94223d9e2c39217d8f9
|
[
"MIT"
] | 4
|
2021-04-08T14:14:04.000Z
|
2021-09-08T07:57:38.000Z
|
tests/test_user_settings.py
|
fearsd/django-logging-middleware
|
6eb95774c1bcb1829aa1a94223d9e2c39217d8f9
|
[
"MIT"
] | null | null | null |
tests/test_user_settings.py
|
fearsd/django-logging-middleware
|
6eb95774c1bcb1829aa1a94223d9e2c39217d8f9
|
[
"MIT"
] | null | null | null |
import pytest
from django.conf import settings as _settings
from django.test import override_settings
from logging_middleware.settings import DEFAULTS, Settings
@override_settings(DJANGO_LOGGING_MIDDLEWARE={})
def test_check_settings_if_user_didnt_set_settings():
settings = Settings(_settings)
assert settings.DEFAULT_FORMAT == DEFAULTS['DEFAULT_FORMAT']
assert settings.MESSAGE_FORMAT == DEFAULTS['MESSAGE_FORMAT']
@override_settings(DJANGO_LOGGING_MIDDLEWARE={'DEFAULT_FORMAT': True, 'MESSAGE_FORMAT': '{message}'})
def test_check_settings_if_user_set_default_format_true_and_set_message_format():
settings = Settings(_settings)
assert settings.DEFAULT_FORMAT == DEFAULTS['DEFAULT_FORMAT']
assert settings.MESSAGE_FORMAT == DEFAULTS['MESSAGE_FORMAT']
@override_settings(DJANGO_LOGGING_MIDDLEWARE={'DEFAULT_FORMAT': False, 'MESSAGE_FORMAT': '{message}'})
def test_check_settings_if_user_set_default_format_false_and_set_message_format():
settings = Settings(_settings)
    assert settings.DEFAULT_FORMAT is False
assert settings.MESSAGE_FORMAT == '{message}'
@override_settings(DJANGO_LOGGING_MIDDLEWARE={'DEFAULT_FORMAT': False})
def test_check_settings_if_user_set_default_format_false_but_didnt_set_message_format():
settings = Settings(_settings)
    assert settings.DEFAULT_FORMAT is False
assert settings.MESSAGE_FORMAT == DEFAULTS['MESSAGE_FORMAT']
@override_settings(DJANGO_LOGGING_MIDDLEWARE={'DEFAULT_FORMAT': True})
def test_check_settings_if_user_set_default_format_true_and_didnt_set_message_format():
settings = Settings(_settings)
assert settings.DEFAULT_FORMAT == DEFAULTS['DEFAULT_FORMAT']
assert settings.MESSAGE_FORMAT == DEFAULTS['MESSAGE_FORMAT']
@override_settings(DJANGO_LOGGING_MIDDLEWARE={'MESSAGE_FORMAT': '{message}'})
def test_check_settings_if_user_didnt_set_default_format_but_set_message_format():
settings = Settings(_settings)
assert settings.DEFAULT_FORMAT == DEFAULTS['DEFAULT_FORMAT']
assert settings.MESSAGE_FORMAT == DEFAULTS['MESSAGE_FORMAT']
@override_settings(DJANGO_LOGGING_MIDDLEWARE={'DEFAULT_FORMAT': ''})
def test_check_settings_if_user_set_wrong_data():
settings = Settings(_settings)
assert settings.DEFAULT_FORMAT == DEFAULTS['DEFAULT_FORMAT']
assert settings.MESSAGE_FORMAT == DEFAULTS['MESSAGE_FORMAT']
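# A hedged sketch of the fallback contract these tests pin down. The real
# logging_middleware.Settings is not reproduced here; this minimal stand-in
# only illustrates the behavior the assertions above encode: user values win
# only when DEFAULT_FORMAT is explicitly False, otherwise DEFAULTS win.
class _FallbackSettingsSketch:
    def __init__(self, user_settings, defaults):
        self.DEFAULT_FORMAT = defaults['DEFAULT_FORMAT']
        self.MESSAGE_FORMAT = defaults['MESSAGE_FORMAT']
        if user_settings.get('DEFAULT_FORMAT') is False:
            self.DEFAULT_FORMAT = False
            self.MESSAGE_FORMAT = user_settings.get('MESSAGE_FORMAT',
                                                    defaults['MESSAGE_FORMAT'])

def test_fallback_sketch_matches_expected_contract():
    defaults = {'DEFAULT_FORMAT': True, 'MESSAGE_FORMAT': 'default'}
    s = _FallbackSettingsSketch({'DEFAULT_FORMAT': False, 'MESSAGE_FORMAT': '{message}'}, defaults)
    assert s.DEFAULT_FORMAT is False and s.MESSAGE_FORMAT == '{message}'
    s = _FallbackSettingsSketch({'DEFAULT_FORMAT': ''}, defaults)  # invalid value: defaults win
    assert s.DEFAULT_FORMAT is True and s.MESSAGE_FORMAT == 'default'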
| 48.75
| 102
| 0.814103
|
import pytest
from django.conf import settings as _settings
from django.test import override_settings
from logging_middleware.settings import DEFAULTS, Settings
@override_settings(DJANGO_LOGGING_MIDDLEWARE={})
def test_check_settings_if_user_didnt_set_settings():
settings = Settings(_settings)
assert settings.DEFAULT_FORMAT == DEFAULTS['DEFAULT_FORMAT']
assert settings.MESSAGE_FORMAT == DEFAULTS['MESSAGE_FORMAT']
@override_settings(DJANGO_LOGGING_MIDDLEWARE={'DEFAULT_FORMAT': True, 'MESSAGE_FORMAT': '{message}'})
def test_check_settings_if_user_set_default_format_true_and_set_message_format():
settings = Settings(_settings)
assert settings.DEFAULT_FORMAT == DEFAULTS['DEFAULT_FORMAT']
assert settings.MESSAGE_FORMAT == DEFAULTS['MESSAGE_FORMAT']
@override_settings(DJANGO_LOGGING_MIDDLEWARE={'DEFAULT_FORMAT': False, 'MESSAGE_FORMAT': '{message}'})
def test_check_settings_if_user_set_default_format_false_and_set_message_format():
settings = Settings(_settings)
    assert settings.DEFAULT_FORMAT is False
assert settings.MESSAGE_FORMAT == '{message}'
@override_settings(DJANGO_LOGGING_MIDDLEWARE={'DEFAULT_FORMAT': False})
def test_check_settings_if_user_set_default_format_false_but_didnt_set_message_format():
settings = Settings(_settings)
    assert settings.DEFAULT_FORMAT is False
assert settings.MESSAGE_FORMAT == DEFAULTS['MESSAGE_FORMAT']
@override_settings(DJANGO_LOGGING_MIDDLEWARE={'DEFAULT_FORMAT': True})
def test_check_settings_if_user_set_default_format_true_and_didnt_set_message_format():
settings = Settings(_settings)
assert settings.DEFAULT_FORMAT == DEFAULTS['DEFAULT_FORMAT']
assert settings.MESSAGE_FORMAT == DEFAULTS['MESSAGE_FORMAT']
@override_settings(DJANGO_LOGGING_MIDDLEWARE={'MESSAGE_FORMAT': '{message}'})
def test_check_settings_if_user_didnt_set_default_format_but_set_message_format():
settings = Settings(_settings)
assert settings.DEFAULT_FORMAT == DEFAULTS['DEFAULT_FORMAT']
assert settings.MESSAGE_FORMAT == DEFAULTS['MESSAGE_FORMAT']
@override_settings(DJANGO_LOGGING_MIDDLEWARE={'DEFAULT_FORMAT': ''})
def test_check_settings_if_user_set_wrong_data():
settings = Settings(_settings)
assert settings.DEFAULT_FORMAT == DEFAULTS['DEFAULT_FORMAT']
assert settings.MESSAGE_FORMAT == DEFAULTS['MESSAGE_FORMAT']
| true
| true
|
1c488578f4656560111adedec4ff3808053e0d81
| 6,143
|
py
|
Python
|
nemo/collections/nlp/data/datasets/joint_intent_slot_dataset/joint_intent_slot_descriptor.py
|
Dannynis/NeMo
|
0d703d2c48158ec271d84cca76c3f423195327b2
|
[
"Apache-2.0"
] | null | null | null |
nemo/collections/nlp/data/datasets/joint_intent_slot_dataset/joint_intent_slot_descriptor.py
|
Dannynis/NeMo
|
0d703d2c48158ec271d84cca76c3f423195327b2
|
[
"Apache-2.0"
] | null | null | null |
nemo/collections/nlp/data/datasets/joint_intent_slot_dataset/joint_intent_slot_descriptor.py
|
Dannynis/NeMo
|
0d703d2c48158ec271d84cca76c3f423195327b2
|
[
"Apache-2.0"
] | null | null | null |
# =============================================================================
# Copyright 2020 NVIDIA. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
import itertools
from nemo import logging
from nemo.collections.nlp.data.datasets.datasets_utils import calc_class_weights, get_label_stats, if_exist
__all__ = ['JointIntentSlotDataDesc']
class JointIntentSlotDataDesc:
""" Convert the raw data to the standard format supported by
JointIntentSlotDataset.
By default, the None label for slots is 'O'.
JointIntentSlotDataset requires two files:
        input_file: tab-separated file of sentences and intent labels.
            The first line is a header (sentence [tab] label);
            each subsequent line is [sentence][tab][label].
        slot_file: file of slot labels, one line per sentence
            in input_file. No header.
To keep the mapping from label index to label consistent during
    training and inference, we require the following files:
        dict.intents.csv: each line is an intent. The first line
            corresponds to intent label 0, the second line
            to intent label 1, and so on.
        dict.slots.csv: each line is a slot. The first line
            corresponds to slot label 0, the second line
            to slot label 1, and so on.
Args:
data_dir (str): the directory of the dataset
        none_slot_label (str): the label for slots that aren't identified,
            defaults to 'O'
        pad_label (int): the int used for padding. If set to -1,
            it is set to the index of none_slot_label.
"""
def __init__(self, data_dir, none_slot_label='O', pad_label=-1):
if not if_exist(data_dir, ['dict.intents.csv', 'dict.slots.csv']):
raise FileNotFoundError(
"Make sure that your data follows the standard format "
"supported by JointIntentSlotDataset. Your data must "
"contain dict.intents.csv and dict.slots.csv."
)
self.data_dir = data_dir
self.intent_dict_file = self.data_dir + '/dict.intents.csv'
self.slot_dict_file = self.data_dir + '/dict.slots.csv'
self.intents_label_ids = JointIntentSlotDataDesc.label2idx(self.intent_dict_file)
self.num_intents = len(self.intents_label_ids)
self.slots_label_ids = JointIntentSlotDataDesc.label2idx(self.slot_dict_file)
self.num_slots = len(self.slots_label_ids)
for mode in ['train', 'test', 'dev']:
if not if_exist(self.data_dir, [f'{mode}.tsv']):
logging.info(f' Stats calculation for {mode} mode' f' is skipped as {mode}.tsv was not found.')
continue
logging.info(f' Stats calculating for {mode} mode...')
slot_file = f'{self.data_dir}/{mode}_slots.tsv'
with open(slot_file, 'r') as f:
slot_lines = f.readlines()
input_file = f'{self.data_dir}/{mode}.tsv'
with open(input_file, 'r') as f:
input_lines = f.readlines()[1:] # Skipping headers at index 0
if len(slot_lines) != len(input_lines):
raise ValueError(
"Make sure that the number of slot lines match the "
"number of intent lines. There should be a 1-1 "
"correspondence between every slot and intent lines."
)
dataset = list(zip(slot_lines, input_lines))
raw_slots, queries, raw_intents = [], [], []
for slot_line, input_line in dataset:
slot_list = [int(slot) for slot in slot_line.strip().split()]
raw_slots.append(slot_list)
parts = input_line.strip().split()
raw_intents.append(int(parts[-1]))
queries.append(' '.join(parts[:-1]))
infold = self.data_dir
logging.info(f'Three most popular intents in {mode} mode:')
total_intents, intent_label_freq = get_label_stats(raw_intents, infold + f'/{mode}_intent_stats.tsv')
merged_slots = itertools.chain.from_iterable(raw_slots)
logging.info(f'Three most popular slots in {mode} mode:')
slots_total, slots_label_freq = get_label_stats(merged_slots, infold + f'/{mode}_slot_stats.tsv')
if mode == 'train':
self.slot_weights = calc_class_weights(slots_label_freq)
logging.info(f'Slot weights are - {self.slot_weights}')
self.intent_weights = calc_class_weights(intent_label_freq)
logging.info(f'Intent weights are - {self.intent_weights}')
logging.info(f'Total intents - {total_intents}')
logging.info(f'Intent label frequency - {intent_label_freq}')
logging.info(f'Total Slots - {slots_total}')
logging.info(f'Slots label frequency - {slots_label_freq}')
if pad_label != -1:
self.pad_label = pad_label
else:
if none_slot_label not in self.slots_label_ids:
raise ValueError(f'none_slot_label {none_slot_label} not ' f'found in {self.slot_dict_file}.')
self.pad_label = self.slots_label_ids[none_slot_label]
@staticmethod
def label2idx(file):
        with open(file, 'r') as f:
            lines = f.readlines()
lines = [line.strip() for line in lines if line.strip()]
labels = {lines[i]: i for i in range(len(lines))}
return labels
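# A hedged example of the dict.*.csv format consumed by label2idx. The label
# names below are invented; the snippet writes a throwaway file and prints the
# resulting label -> index mapping.
if __name__ == '__main__':
    import tempfile
    with tempfile.NamedTemporaryFile('w', suffix='.csv', delete=False) as tmp:
        tmp.write('weather.query\nalarm.set\nmusic.play\n')
        demo_path = tmp.name
    print(JointIntentSlotDataDesc.label2idx(demo_path))
    # -> {'weather.query': 0, 'alarm.set': 1, 'music.play': 2}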
| 44.194245
| 113
| 0.621195
|
import itertools
from nemo import logging
from nemo.collections.nlp.data.datasets.datasets_utils import calc_class_weights, get_label_stats, if_exist
__all__ = ['JointIntentSlotDataDesc']
class JointIntentSlotDataDesc:
def __init__(self, data_dir, none_slot_label='O', pad_label=-1):
if not if_exist(data_dir, ['dict.intents.csv', 'dict.slots.csv']):
raise FileNotFoundError(
"Make sure that your data follows the standard format "
"supported by JointIntentSlotDataset. Your data must "
"contain dict.intents.csv and dict.slots.csv."
)
self.data_dir = data_dir
self.intent_dict_file = self.data_dir + '/dict.intents.csv'
self.slot_dict_file = self.data_dir + '/dict.slots.csv'
self.intents_label_ids = JointIntentSlotDataDesc.label2idx(self.intent_dict_file)
self.num_intents = len(self.intents_label_ids)
self.slots_label_ids = JointIntentSlotDataDesc.label2idx(self.slot_dict_file)
self.num_slots = len(self.slots_label_ids)
for mode in ['train', 'test', 'dev']:
if not if_exist(self.data_dir, [f'{mode}.tsv']):
logging.info(f' Stats calculation for {mode} mode' f' is skipped as {mode}.tsv was not found.')
continue
logging.info(f' Stats calculating for {mode} mode...')
slot_file = f'{self.data_dir}/{mode}_slots.tsv'
with open(slot_file, 'r') as f:
slot_lines = f.readlines()
input_file = f'{self.data_dir}/{mode}.tsv'
with open(input_file, 'r') as f:
input_lines = f.readlines()[1:]
if len(slot_lines) != len(input_lines):
raise ValueError(
"Make sure that the number of slot lines match the "
"number of intent lines. There should be a 1-1 "
"correspondence between every slot and intent lines."
)
dataset = list(zip(slot_lines, input_lines))
raw_slots, queries, raw_intents = [], [], []
for slot_line, input_line in dataset:
slot_list = [int(slot) for slot in slot_line.strip().split()]
raw_slots.append(slot_list)
parts = input_line.strip().split()
raw_intents.append(int(parts[-1]))
queries.append(' '.join(parts[:-1]))
infold = self.data_dir
logging.info(f'Three most popular intents in {mode} mode:')
total_intents, intent_label_freq = get_label_stats(raw_intents, infold + f'/{mode}_intent_stats.tsv')
merged_slots = itertools.chain.from_iterable(raw_slots)
logging.info(f'Three most popular slots in {mode} mode:')
slots_total, slots_label_freq = get_label_stats(merged_slots, infold + f'/{mode}_slot_stats.tsv')
if mode == 'train':
self.slot_weights = calc_class_weights(slots_label_freq)
logging.info(f'Slot weights are - {self.slot_weights}')
self.intent_weights = calc_class_weights(intent_label_freq)
logging.info(f'Intent weights are - {self.intent_weights}')
logging.info(f'Total intents - {total_intents}')
logging.info(f'Intent label frequency - {intent_label_freq}')
logging.info(f'Total Slots - {slots_total}')
logging.info(f'Slots label frequency - {slots_label_freq}')
if pad_label != -1:
self.pad_label = pad_label
else:
if none_slot_label not in self.slots_label_ids:
raise ValueError(f'none_slot_label {none_slot_label} not ' f'found in {self.slot_dict_file}.')
self.pad_label = self.slots_label_ids[none_slot_label]
@staticmethod
def label2idx(file):
        with open(file, 'r') as f:
            lines = f.readlines()
lines = [line.strip() for line in lines if line.strip()]
labels = {lines[i]: i for i in range(len(lines))}
return labels
| true
| true
|
1c4886295743926485b008c1564437e2c3341689
| 4,556
|
py
|
Python
|
five_chess_game/five.py
|
richard-ma/weekendProject
|
43379bc8a2854132f52e5fc007ef7c721b0e7f4e
|
[
"MIT"
] | null | null | null |
five_chess_game/five.py
|
richard-ma/weekendProject
|
43379bc8a2854132f52e5fc007ef7c721b0e7f4e
|
[
"MIT"
] | null | null | null |
five_chess_game/five.py
|
richard-ma/weekendProject
|
43379bc8a2854132f52e5fc007ef7c721b0e7f4e
|
[
"MIT"
] | null | null | null |
# Created by Ricky
from tkinter import *
# The board is a 2-D array of user-defined size.
# Array indices represent board positions;
# cell values encode the state: -1 = no stone, 0 = black stone, 1 = white stone.
def callback(event):
global tag, tagx, tagy, a
color = ["black", "white"]
# 将点击位置转换为棋盘坐标(第几格)
x = round(event.x / mesh) - 1
y = round(event.y / mesh) - 1
# 计算点击位置与棋盘坐标的距离
errorX = mesh * (x + 1) - event.x
errorY = mesh * (y + 1) - event.y
dis = (errorX ** 2 + errorY ** 2) ** 0.5 # sqrt(x^2 + y^2) 计算原点到这一点的距离
# 如果棋盘这一点没有子,点击位置误差在允许范围内,也没有任何一方获胜,则执行
if QP[x][y] == -1 and dis < K / 2 * mesh and stop == 0:
a.config(text=key[(tag + 1) % 2], fg=color[(tag + 1) % 2]) # 切换为对方落子
QP[x][y] = tag #在这个格子上落子
canvas.create_oval(mesh * (x + 1) - Qr, mesh * (y + 1) - Qr, mesh * (x + 1) + Qr, mesh * (y + 1) + Qr,
fill=color[tag]) # 在界面上画出这个子
# 方向向量
# 0, 1 为纵向
# 1, 0 为横向
# 1, 1 为右上左下斜向
# 1, -1为右下左上斜向
v = [[0, 1], [1, 0], [1, 1], [1, -1]]
# 开始检查是否有一方获胜
for i in v:
x1, y1 = x, y
while x1 < num - 1 and x1 > 0 and y1 > 0 and y1 < num - 1:
x1 += i[0]
y1 += i[1]
# 执行到这里,x1和y1已经放到了这个方向的最后一个点
# 横向为例就是这一行最右侧的那个点
count = 0 # 连续子计数器清零,准备开始计数
while x1 <= num - 1 and x1 >= 0 and y1 >= 0 and y1 <= num - 1:
# 处理当前扫描到的子
if QP[x1][y1] == tag: # 遇到本方的子
count += 1 # 计数器加一
if count == count_num: # 计数器达到获胜需要的连续子个数,则本方获胜
win() # 本方获胜,停止游戏
else:
count = 0 # 遇到对方的子计数器清零,连续的子被打断
# 从最后一个格向回扫描棋盘上的棋子
# 横向为例就是从右向左扫描棋盘这一行
x1 -= i[0]
y1 -= i[1]
tag = (tag + 1) % 2 # 切换到另一方走
tagx, tagy = x, y # 这句没什么用
def restart():
global QP, tag, a, b, stop
QP = []
for i in range(num):
QP.append([-1] * num)
canvas.create_rectangle(mesh - 20, mesh - 20, mesh * num + 20, mesh * num + 20, fill="yellow")
for i in range(num):
canvas.create_line(mesh, mesh * (i + 1), mesh * num, mesh * (i + 1))
canvas.create_line(mesh * (i + 1), mesh, mesh * (i + 1), mesh * num)
tag = 0
stop = 0
a.config(text=key[tag], fg=color[tag])
b.config(text="走棋", fg=color[tag])
# Declare the winner and stop the game
def win():
global stop
a.config(text=key[tag], fg=color[tag])
b.config(text="获胜", fg='red')
stop = 1
if __name__ == '__main__':
    num = input('请输入自定义网格大小,建议在 5 - 25 之间\n')  # grid size; num - 3 cells per row actually hold stones
    count_num = int(input('请输入获胜的条件,例如五个棋子获胜或者三个棋子获胜\n'))  # stones in a row required to win
    tag = 0  # whose turn it is: 0 = black, 1 = white
    stop = 0
    #print(type(num))
    num = int(num) + 3
    K = 0.9  # click sensitivity, between 0 and 1
    px = 5
    py = 50
    wide = 60
    high = 30
    mesh = round(400 / num)  # size of each grid cell; the board is 400 px across
    Qr = 0.45 * mesh  # stone radius; the factor should be between 0 and 0.5
    key = ["黑方", "白方"]  # player names (black side, white side)
    color = ["black", "white"]
    # Initialize the board
    QP = []
    for i in range(num):
        QP.append([-1] * num)  # every cell set to -1: an empty board
    tk = Tk()
    tk.geometry(str((num + 1) * mesh + 2 * px) + 'x' + str((num + 1) * mesh + py + px))
    tk.title('五子棋')
    # Build the board UI
    suen = Canvas(tk, width=(num + 1) * mesh + 2 * px, height=(num + 1) * mesh + py + px)
    suen.place(x=0, y=0)
    suen.create_rectangle(0, 0, (num + 1) * mesh + 2 * px, (num + 1) * mesh + py + px, fill="green")  # background of the upper area
    canvas = Canvas(tk, width=str((num + 1) * mesh), height=str((num + 1) * mesh))
    canvas.place(x=px, y=py)
    canvas.create_rectangle(mesh - 20, mesh - 20, mesh * num + 20, mesh * num + 20, fill="yellow")  # board background
    for i in range(num):
        canvas.create_line(mesh, mesh * (i + 1), mesh * num, mesh * (i + 1))
        canvas.create_line(mesh * (i + 1), mesh, mesh * (i + 1), mesh * num)
    canvas.bind("<Button-1>", callback)  # bind the left mouse button: a click calls callback
    # "Start" button: restart() resets the board and begins a new game
    Button(tk, text='开始', command=restart).place(x=2 * px, y=(py - high) / 2, width=wide, height=high)
    # Status labels in the middle of the upper area
    a = Label(tk, text=key[tag], fg=color[tag], bg='green', font=("Times", "14", "bold"))
    b = Label(tk, text=" ", fg=color[tag], bg='green', font=("Times", "14", "bold"))
    a.place(x=2 * px + 60 + 10 + 90, y=(py - high) / 2 + 4)
    b.place(x=(num + 1) * mesh + px - wide - px - 10 - 42 - 90, y=(py - high) / 2 + 4)
    # Enter the Tk main loop; the game starts here
    tk.mainloop()
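# A hedged, standalone restatement of the win-detection scan used in
# callback(), so the algorithm can be exercised without the Tk UI. Board
# values follow the same convention: -1 empty, 0 black, 1 white. The helper
# name _has_run is an editorial choice, not part of the original game; when
# run as a script, the demo below executes after the game window is closed.
def _has_run(board, player, run_len):
    n = len(board)
    for dx, dy in ((0, 1), (1, 0), (1, 1), (1, -1)):  # same direction vectors as callback()
        for sx in range(n):
            for sy in range(n):
                count, cx, cy = 0, sx, sy
                while 0 <= cx < n and 0 <= cy < n and board[cx][cy] == player:
                    count += 1
                    if count == run_len:
                        return True
                    cx += dx
                    cy += dy
    return False

if __name__ == '__main__':
    demo = [[-1] * 7 for _ in range(7)]
    for k in range(5):
        demo[1 + k][2] = 0  # five black stones in a row
    assert _has_run(demo, 0, 5) and not _has_run(demo, 1, 5)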
| 36.15873
| 111
| 0.490342
|
from tkinter import *
def callback(event):
global tag, tagx, tagy, a
color = ["black", "white"]
x = round(event.x / mesh) - 1
y = round(event.y / mesh) - 1
errorX = mesh * (x + 1) - event.x
errorY = mesh * (y + 1) - event.y
dis = (errorX ** 2 + errorY ** 2) ** 0.5
if QP[x][y] == -1 and dis < K / 2 * mesh and stop == 0:
a.config(text=key[(tag + 1) % 2], fg=color[(tag + 1) % 2])
QP[x][y] = tag
canvas.create_oval(mesh * (x + 1) - Qr, mesh * (y + 1) - Qr, mesh * (x + 1) + Qr, mesh * (y + 1) + Qr,
fill=color[tag])
v = [[0, 1], [1, 0], [1, 1], [1, -1]]
for i in v:
x1, y1 = x, y
while x1 < num - 1 and x1 > 0 and y1 > 0 and y1 < num - 1:
x1 += i[0]
y1 += i[1]
count = 0
while x1 <= num - 1 and x1 >= 0 and y1 >= 0 and y1 <= num - 1:
if QP[x1][y1] == tag:
count += 1
if count == count_num:
win()
else:
count = 0
x1 -= i[0]
y1 -= i[1]
tag = (tag + 1) % 2
tagx, tagy = x, y
def restart():
global QP, tag, a, b, stop
QP = []
for i in range(num):
QP.append([-1] * num)
canvas.create_rectangle(mesh - 20, mesh - 20, mesh * num + 20, mesh * num + 20, fill="yellow")
for i in range(num):
canvas.create_line(mesh, mesh * (i + 1), mesh * num, mesh * (i + 1))
canvas.create_line(mesh * (i + 1), mesh, mesh * (i + 1), mesh * num)
tag = 0
stop = 0
a.config(text=key[tag], fg=color[tag])
b.config(text="走棋", fg=color[tag])
def win():
global stop
a.config(text=key[tag], fg=color[tag])
b.config(text="获胜", fg='red')
stop = 1
if __name__ == '__main__':
num = input('请输入自定义网格大小,建议在 5 - 25 之间\n')
count_num = int(input('请输入获胜的条件,例如五个棋子获胜或者三个棋子获胜\n'))
tag = 0
stop = 0
num = int(num) + 3
K = 0.9
px = 5
py = 50
wide = 60
high = 30
mesh = round(400 / num)
Qr = 0.45 * mesh
key = ["黑方", "白方"]
color = ["black", "white"]
QP = []
for i in range(num):
QP.append([-1] * num)
tk = Tk()
tk.geometry(str((num + 1) * mesh + 2 * px) + 'x' + str((num + 1) * mesh + py + px))
tk.title('五子棋')
suen = Canvas(tk, width=(num + 1) * mesh + 2 * px, height=(num + 1) * mesh + py + px)
suen.place(x=0, y=0)
suen.create_rectangle(0, 0, (num + 1) * mesh + 2 * px, (num + 1) * mesh + py + px, fill="green")
canvas = Canvas(tk, width=str((num + 1) * mesh), height=str((num + 1) * mesh))
canvas.place(x=px, y=py)
canvas.create_rectangle(mesh - 20, mesh - 20, mesh * num + 20, mesh * num + 20, fill="yellow")
for i in range(num):
canvas.create_line(mesh, mesh * (i + 1), mesh * num, mesh * (i + 1))
canvas.create_line(mesh * (i + 1), mesh, mesh * (i + 1), mesh * num)
canvas.bind("<Button-1>", callback)
    Button(tk, text='开始', command=restart).place(x=2 * px, y=(py - high) / 2, width=wide, height=high)
a = Label(tk, text=key[tag], fg=color[tag], bg='green', font=("Times", "14", "bold"))
b = Label(tk, text=" ", fg=color[tag], bg='green', font=("Times", "14", "bold"))
a.place(x=2 * px + 60 + 10 + 90, y=(py - high) / 2 + 4)
b.place(x=(num + 1) * mesh + px - wide - px - 10 - 42 - 90, y=(py - high) / 2 + 4)
tk.mainloop()
| true
| true
|
1c48862fc9b77064b6c1d653315072c6ffd5acd3
| 1,176
|
py
|
Python
|
tools/nntool/importer/tflite/tflite_schema_head/SplitVOptions.py
|
danieldennett/gap_sdk
|
5667c899025a3a152dbf91e5c18e5b3e422d4ea6
|
[
"Apache-2.0"
] | 1
|
2020-01-29T15:39:31.000Z
|
2020-01-29T15:39:31.000Z
|
tools/nntool/importer/tflite/tflite_schema_head/SplitVOptions.py
|
danieldennett/gap_sdk
|
5667c899025a3a152dbf91e5c18e5b3e422d4ea6
|
[
"Apache-2.0"
] | null | null | null |
tools/nntool/importer/tflite/tflite_schema_head/SplitVOptions.py
|
danieldennett/gap_sdk
|
5667c899025a3a152dbf91e5c18e5b3e422d4ea6
|
[
"Apache-2.0"
] | null | null | null |
# automatically generated by the FlatBuffers compiler, do not modify
# namespace: tflite_schema_head
import flatbuffers
class SplitVOptions(object):
__slots__ = ['_tab']
@classmethod
def GetRootAsSplitVOptions(cls, buf, offset):
n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
x = SplitVOptions()
x.Init(buf, n + offset)
return x
@classmethod
def SplitVOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
# SplitVOptions
def Init(self, buf, pos):
self._tab = flatbuffers.table.Table(buf, pos)
# SplitVOptions
def NumSplits(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
return 0
def SplitVOptionsStart(builder): builder.StartObject(1)
def SplitVOptionsAddNumSplits(builder, numSplits): builder.PrependInt32Slot(0, numSplits, 0)
def SplitVOptionsEnd(builder): return builder.EndObject()
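# A hedged round-trip example appended for illustration (not generated code):
# build a SplitVOptions table with the helpers above and read num_splits back.
# Builder/Finish/Output are the standard flatbuffers Python calls, but verify
# against your installed flatbuffers version.
if __name__ == '__main__':
    builder = flatbuffers.Builder(0)
    SplitVOptionsStart(builder)
    SplitVOptionsAddNumSplits(builder, 3)
    opts = SplitVOptionsEnd(builder)
    builder.Finish(opts)
    parsed = SplitVOptions.GetRootAsSplitVOptions(builder.Output(), 0)
    assert parsed.NumSplits() == 3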
| 33.6
| 114
| 0.713435
|
import flatbuffers
class SplitVOptions(object):
__slots__ = ['_tab']
@classmethod
def GetRootAsSplitVOptions(cls, buf, offset):
n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
x = SplitVOptions()
x.Init(buf, n + offset)
return x
@classmethod
def SplitVOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
def Init(self, buf, pos):
self._tab = flatbuffers.table.Table(buf, pos)
def NumSplits(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
return 0
def SplitVOptionsStart(builder): builder.StartObject(1)
def SplitVOptionsAddNumSplits(builder, numSplits): builder.PrependInt32Slot(0, numSplits, 0)
def SplitVOptionsEnd(builder): return builder.EndObject()
| true
| true
|
1c488633cadf893d188e25b37e09bbb17d11d9ba
| 2,033
|
py
|
Python
|
common.py
|
mtonxbjss/bucket-antivirus-function
|
5a4c96329d420d2c0754d566f53370249c89b64a
|
[
"Apache-2.0"
] | null | null | null |
common.py
|
mtonxbjss/bucket-antivirus-function
|
5a4c96329d420d2c0754d566f53370249c89b64a
|
[
"Apache-2.0"
] | null | null | null |
common.py
|
mtonxbjss/bucket-antivirus-function
|
5a4c96329d420d2c0754d566f53370249c89b64a
|
[
"Apache-2.0"
] | null | null | null |
# Upside Travel, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import boto3
import errno
import os
AV_DEFINITION_S3_BUCKET = os.getenv("AV_DEFINITION_S3_BUCKET")
AV_DEFINITION_S3_PREFIX = os.getenv("AV_DEFINITION_S3_PREFIX", "clamav_defs")
AV_DEFINITION_PATH = os.getenv("AV_DEFINITION_PATH", "/tmp/clamav_defs")
AV_SCAN_START_SNS_ARN = os.getenv("AV_SCAN_START_SNS_ARN")
AV_SCAN_START_METADATA = os.getenv("AV_SCAN_START_METADATA", "av-scan-start")
AV_STATUS_CLEAN = os.getenv("AV_STATUS_CLEAN", "CLEAN")
AV_STATUS_INFECTED = os.getenv("AV_STATUS_INFECTED", "INFECTED")
AV_STATUS_METADATA = os.getenv("AV_STATUS_METADATA", "av-status")
AV_STATUS_SNS_ARN = os.getenv("AV_STATUS_SNS_ARN")
AV_TIMESTAMP_METADATA = os.getenv("AV_TIMESTAMP_METADATA", "av-timestamp")
CLAMAVLIB_PATH = os.getenv("CLAMAVLIB_PATH", "./bin")
CLAMSCAN_PATH = os.getenv("CLAMSCAN_PATH", "./bin/clamscan")
FRESHCLAM_PATH = os.getenv("FRESHCLAM_PATH", "./bin/freshclam")
AV_PROCESS_ORIGINAL_VERSION_ONLY = os.getenv("AV_PROCESS_ORIGINAL_VERSION_ONLY", "False")
AV_DELETE_INFECTED_FILES = os.getenv("AV_DELETE_INFECTED_FILES", "False")
AV_DEFINITION_FILE_PREFIXES = ["main", "daily", "bytecode"]
AV_DEFINITION_FILE_SUFFIXES = ["cld", "cvd"]
s3 = boto3.resource('s3')
s3_client = boto3.client('s3')
def create_dir(path):
if not os.path.exists(path):
try:
print("Attempting to create directory %s.\n" % path)
os.makedirs(path)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
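# A hedged illustration of how the prefix/suffix constants above combine into
# the ClamAV definition filenames the scanner presumably looks for; the
# combination logic here is editorial, not taken from the scanner code.
if __name__ == '__main__':
    names = ["%s.%s" % (p, s)
             for p in AV_DEFINITION_FILE_PREFIXES
             for s in AV_DEFINITION_FILE_SUFFIXES]
    print(names)  # ['main.cld', 'main.cvd', 'daily.cld', 'daily.cvd', 'bytecode.cld', 'bytecode.cvd']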
| 40.66
| 89
| 0.749631
|
import boto3
import errno
import os
AV_DEFINITION_S3_BUCKET = os.getenv("AV_DEFINITION_S3_BUCKET")
AV_DEFINITION_S3_PREFIX = os.getenv("AV_DEFINITION_S3_PREFIX", "clamav_defs")
AV_DEFINITION_PATH = os.getenv("AV_DEFINITION_PATH", "/tmp/clamav_defs")
AV_SCAN_START_SNS_ARN = os.getenv("AV_SCAN_START_SNS_ARN")
AV_SCAN_START_METADATA = os.getenv("AV_SCAN_START_METADATA", "av-scan-start")
AV_STATUS_CLEAN = os.getenv("AV_STATUS_CLEAN", "CLEAN")
AV_STATUS_INFECTED = os.getenv("AV_STATUS_INFECTED", "INFECTED")
AV_STATUS_METADATA = os.getenv("AV_STATUS_METADATA", "av-status")
AV_STATUS_SNS_ARN = os.getenv("AV_STATUS_SNS_ARN")
AV_TIMESTAMP_METADATA = os.getenv("AV_TIMESTAMP_METADATA", "av-timestamp")
CLAMAVLIB_PATH = os.getenv("CLAMAVLIB_PATH", "./bin")
CLAMSCAN_PATH = os.getenv("CLAMSCAN_PATH", "./bin/clamscan")
FRESHCLAM_PATH = os.getenv("FRESHCLAM_PATH", "./bin/freshclam")
AV_PROCESS_ORIGINAL_VERSION_ONLY = os.getenv("AV_PROCESS_ORIGINAL_VERSION_ONLY", "False")
AV_DELETE_INFECTED_FILES = os.getenv("AV_DELETE_INFECTED_FILES", "False")
AV_DEFINITION_FILE_PREFIXES = ["main", "daily", "bytecode"]
AV_DEFINITION_FILE_SUFFIXES = ["cld", "cvd"]
s3 = boto3.resource('s3')
s3_client = boto3.client('s3')
def create_dir(path):
if not os.path.exists(path):
try:
print("Attempting to create directory %s.\n" % path)
os.makedirs(path)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
| true
| true
|
1c48866e70ed37e9e61b13ba25081074ce52cbf5
| 364
|
py
|
Python
|
synth.py
|
anguillanneuf/sloth
|
5b1aaec81e1074f523d77c3d9b82097aa853910b
|
[
"Apache-2.0"
] | null | null | null |
synth.py
|
anguillanneuf/sloth
|
5b1aaec81e1074f523d77c3d9b82097aa853910b
|
[
"Apache-2.0"
] | null | null | null |
synth.py
|
anguillanneuf/sloth
|
5b1aaec81e1074f523d77c3d9b82097aa853910b
|
[
"Apache-2.0"
] | null | null | null |
import synthtool as s
import synthtool.gcp as gcp
import synthtool.languages.node as node
import logging
logging.basicConfig(level=logging.DEBUG)
AUTOSYNTH_MULTIPLE_COMMITS = True
common_templates = gcp.CommonTemplates()
templates = common_templates.node_library()
s.copy(templates, excludes=["README.md", ".github/workflows/ci.yaml"])
node.install()
node.fix()
| 26
| 70
| 0.804945
|
import synthtool as s
import synthtool.gcp as gcp
import synthtool.languages.node as node
import logging
logging.basicConfig(level=logging.DEBUG)
AUTOSYNTH_MULTIPLE_COMMITS = True
common_templates = gcp.CommonTemplates()
templates = common_templates.node_library()
s.copy(templates, excludes=["README.md", ".github/workflows/ci.yaml"])
node.install()
node.fix()
| true
| true
|
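The synth script above regenerates Node.js library templates and keeps hand-maintained files out of the sync via `excludes`. A hedged sketch of extending that list (the extra `renovate.json` entry is illustrative, not part of the original script):

s.copy(templates, excludes=[
    "README.md",
    ".github/workflows/ci.yaml",
    "renovate.json",  # hypothetical: keep another hand-maintained file out of the sync
])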
1c4886b81dc852c6d976f50f05da92a184e83d41
| 1,813
|
py
|
Python
|
storage_formats/apache_parquet/python/fastparquet_test/fastparquet_test.py
|
statisticsnorway/microdata-testing
|
3b14a2f6f08791a666c659bbdc9cdf9d41a4c23d
|
[
"Apache-2.0"
] | null | null | null |
storage_formats/apache_parquet/python/fastparquet_test/fastparquet_test.py
|
statisticsnorway/microdata-testing
|
3b14a2f6f08791a666c659bbdc9cdf9d41a4c23d
|
[
"Apache-2.0"
] | null | null | null |
storage_formats/apache_parquet/python/fastparquet_test/fastparquet_test.py
|
statisticsnorway/microdata-testing
|
3b14a2f6f08791a666c659bbdc9cdf9d41a4c23d
|
[
"Apache-2.0"
] | null | null | null |
import os
import sys
from time import time
from fastparquet import ParquetFile
from fastparquet import write
from fastparquet.parquet_thrift.parquet.ttypes import RowGroup
from timer import timeblock
def run_test(input_file: str, output_dir: str, filters: list):
print('Using fastparquet')
pf = ParquetFile(input_file)
print('Parquet metadata: ' + str(pf.info))
print('Parquet schema: ' + str(pf.schema))
print('Parquet columns: ' + str(pf.columns))
print('Parquet count (total number of rows): ' + str(pf.count))
print('Parquet dtypes: ' + str(pf.dtypes))
print('Parquet statistics: ' + str(pf.statistics))
print('Parquet cats: ' + str(pf.cats)) # possible values of each partitioning field
print('Parquet row_groups number: ' + str(len(pf.row_groups)))
# print('Parquet row_groups: ' + str(pf.row_groups))
with timeblock('fastparquet read and filter'):
data = pf.to_pandas(filters=filters)
# data: RowGroup = pf.filter_row_groups(filters=filters)
# for df in pf.iter_row_groups():
# print(df.shape)
size = sys.getsizeof(data)
print('Size of filtered Pandas dataframe in memory: ' + str(size) + ' bytes (' + str(size / 1000000) + ' MB)')
milliseconds_since_epoch = int(time() * 1000)
output_file = output_dir + str(milliseconds_since_epoch) + '.parquet'
print('Output file name: ' + output_file)
    with timeblock('fastparquet write()'):
write(output_file, data, compression='SNAPPY')
pf = ParquetFile(output_file)
    print('Parquet metadata of output: ' + str(pf.info))
print('Parquet schema of output: ' + str(pf.schema))
print('Size of output file on disk: ' + str(os.path.getsize(output_file)) + ' bytes ('
+ str(os.path.getsize(output_file) / 1000000) + ' MB)')
| 39.413043
| 114
| 0.678434
|
import os
import sys
from time import time
from fastparquet import ParquetFile
from fastparquet import write
from fastparquet.parquet_thrift.parquet.ttypes import RowGroup
from timer import timeblock
def run_test(input_file: str, output_dir: str, filters: list):
print('Using fastparquet')
pf = ParquetFile(input_file)
print('Parquet metadata: ' + str(pf.info))
print('Parquet schema: ' + str(pf.schema))
print('Parquet columns: ' + str(pf.columns))
print('Parquet count (total number of rows): ' + str(pf.count))
print('Parquet dtypes: ' + str(pf.dtypes))
print('Parquet statistics: ' + str(pf.statistics))
print('Parquet cats: ' + str(pf.cats))
print('Parquet row_groups number: ' + str(len(pf.row_groups)))
with timeblock('fastparquet read and filter'):
data = pf.to_pandas(filters=filters)
size = sys.getsizeof(data)
print('Size of filtered Pandas dataframe in memory: ' + str(size) + ' bytes (' + str(size / 1000000) + ' MB)')
milliseconds_since_epoch = int(time() * 1000)
output_file = output_dir + str(milliseconds_since_epoch) + '.parquet'
print('Output file name: ' + output_file)
    with timeblock('fastparquet write()'):
write(output_file, data, compression='SNAPPY')
pf = ParquetFile(output_file)
    print('Parquet metadata of output: ' + str(pf.info))
print('Parquet schema of output: ' + str(pf.schema))
print('Size of output file on disk: ' + str(os.path.getsize(output_file)) + ' bytes ('
+ str(os.path.getsize(output_file) / 1000000) + ' MB)')
| true
| true
|
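`run_test` above accepts fastparquet-style row-group filters: a list of `(column, op, value)` tuples passed through to `ParquetFile.to_pandas(filters=...)`. A hedged usage sketch (the paths and the `year` column are assumptions for illustration):

run_test(
    input_file='/data/input.parquet',   # assumed path
    output_dir='/tmp/fp_out/',          # assumed path; needs a trailing slash,
                                        # since run_test concatenates strings
    filters=[('year', '>=', 2020)],     # assumed column
)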
1c48882c69189e1136485f2dbf88602c9088f55f
| 6,642
|
py
|
Python
|
pull_vegas_slot_machine_v9.py
|
weaselers/candy_cane_contest
|
1d619529cd8640c20b534ec9a3f6d5f786bb78aa
|
[
"BSD-3-Clause"
] | null | null | null |
pull_vegas_slot_machine_v9.py
|
weaselers/candy_cane_contest
|
1d619529cd8640c20b534ec9a3f6d5f786bb78aa
|
[
"BSD-3-Clause"
] | null | null | null |
pull_vegas_slot_machine_v9.py
|
weaselers/candy_cane_contest
|
1d619529cd8640c20b534ec9a3f6d5f786bb78aa
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
import pandas as pd
import random, os, datetime, math
from random import shuffle
from collections import OrderedDict
from collections import defaultdict
total_reward = 0
bandit_dict = {}
def set_seed(my_seed=42):
os.environ["PYTHONHASHSEED"] = str(my_seed)
random.seed(my_seed)
np.random.seed(my_seed)
def get_next_bandit(exception=None):
'''
Choose best next bandit
'''
# init best bandit number and expectation score
best_bandit = 0
best_bandit_expected = 0
# shuffle bandit_dict to not explore bandits in order
b = list(bandit_dict.items())
shuffle(b)
a = OrderedDict(b)
for bnd in dict(a):
expect = (
(
bandit_dict[bnd]["win"] # from nb of win
- bandit_dict[bnd]["loss"] # remove nb of loss
+ (bandit_dict[bnd]["loss"] > 0)
+ bandit_dict[bnd]["opp"] # add nb of opponant pull
- (bandit_dict[bnd]["opp"] > 0) * 1.5 # minus a bonus if opponant did pulled
+ bandit_dict[bnd]["op_continue"] # add nb of times opponant continued to pull
)
/ ( # divided by
bandit_dict[bnd]["win"] # nb of win
+ bandit_dict[bnd]["loss"] # plus number of loss
+ bandit_dict[bnd]["opp"] # nb of times opponant used it
) # times
* math.pow(
0.97, # decay to the power of
bandit_dict[bnd]["win"]
+ bandit_dict[bnd]["loss"]
+ bandit_dict[bnd]["opp"], # total number of pull on this bandit
)
)
if expect > best_bandit_expected:
if bnd != exception:
best_bandit_expected = expect
best_bandit = bnd
return best_bandit
def get_a_virgin_bandit():
'''
    return a bandit never explored by me or the opponent
'''
l = list(bandit_dict.items())
random.shuffle(l)
d = dict(l)
for bnd in d:
if (d[bnd]["win"] == 1) and (
d[bnd]["loss"] == 0) and (
d[bnd]["opp"] == 0):
return bnd
def is_still_virgin_bandit_present():
'''
    return 1 if some bandit is still unexplored by both me and the opponent, else 0
'''
count_virgin_bandit = 0
for bnd in bandit_dict:
if (bandit_dict[bnd]["win"] == 1) and (
bandit_dict[bnd]["loss"] == 0) and (
bandit_dict[bnd]["opp"] == 0):
count_virgin_bandit += 1
if count_virgin_bandit > 0:
return 1
else:
return 0
my_action_list = []
op_action_list = []
op_continue_cnt_dict = defaultdict(int)
def multi_armed_probabilities(observation, configuration):
global total_reward, bandit_dict
# initialise randomly
my_pull = random.randrange(configuration["banditCount"])
# first step: initialise bandit_dict with default values
if 0 == observation["step"]:
set_seed()
total_reward = 0
bandit_dict = {}
for i in range(configuration["banditCount"]):
bandit_dict[i] = {
"win": 1,
"loss": 0,
"opp": 0,
"my_continue": 0,
"op_continue": 0,
}
else:
# update total reward (starting at 0)
last_reward = observation["reward"] - total_reward
total_reward = observation["reward"]
# update (last) action lists
my_idx = observation["agentIndex"]
my_last_action = observation["lastActions"][my_idx]
op_last_action = observation["lastActions"][1 - my_idx]
my_action_list.append(my_last_action)
op_action_list.append(op_last_action)
# update bandit dict
if 0 < last_reward:
            # update nb of wins if we won on the last used bandit
bandit_dict[my_last_action]["win"] = bandit_dict[my_last_action]["win"] + 1
else:
            # update nb of losses if we lost on the last used bandit
bandit_dict[my_last_action]["loss"] = (
bandit_dict[my_last_action]["loss"] + 1
)
        # update opponent action count on bandit
bandit_dict[op_last_action]["opp"] = bandit_dict[op_last_action]["opp"] + 1
        # once at least 3 steps have been played
if observation["step"] >= 3:
if my_action_list[-1] == my_action_list[-2]:
# update 'my_continue' since I played the same bandit two times in a row
bandit_dict[my_last_action]["my_continue"] += 1
else:
bandit_dict[my_last_action]["my_continue"] = 0
if op_action_list[-1] == op_action_list[-2]:
                # update 'op_continue' since the opponent played the same bandit two times in a row
bandit_dict[op_last_action]["op_continue"] += 1
else:
bandit_dict[op_last_action]["op_continue"] = 0
        # if fewer than 4 steps have been played so far
if observation["step"] < 4:
return get_a_virgin_bandit()
if (observation["step"] < 100) and (op_action_list[-1] != op_action_list[-2]):
if is_still_virgin_bandit_present() == 1:
return get_a_virgin_bandit()
        # if the opponent stayed on the same bandit 2 times in a row
if (op_action_list[-1] == op_action_list[-2]):
# if I wasn't on his bandit
if my_action_list[-1] != op_action_list[-1]:
# I go there
my_pull = op_action_list[-1]
# else if I was there
elif my_action_list[-1] == op_action_list[-1]:
# if I just won
if last_reward > 0:
my_pull = my_last_action
else:
my_pull = get_next_bandit()
# else if I won
elif last_reward > 0:
my_pull = get_next_bandit(my_action_list[-1])
else:
            # if I was winning 3 times in a row but lost last time
if (my_action_list[-1] == my_action_list[-2]) and (
my_action_list[-1] == my_action_list[-3]
):
# then I choose 50/50 if I continue
if random.random() < 0.5:
                # random tells me to stay on the same bandit
my_pull = my_action_list[-1]
else:
# I choose another one
my_pull = get_next_bandit()
# As I wasn't on the same bandit 3 times in a row, I move
else:
my_pull = get_next_bandit()
return my_pull
| 34.061538
| 95
| 0.552695
|
import numpy as np
import pandas as pd
import random, os, datetime, math
from random import shuffle
from collections import OrderedDict
from collections import defaultdict
total_reward = 0
bandit_dict = {}
def set_seed(my_seed=42):
os.environ["PYTHONHASHSEED"] = str(my_seed)
random.seed(my_seed)
np.random.seed(my_seed)
def get_next_bandit(exception=None):
best_bandit = 0
best_bandit_expected = 0
b = list(bandit_dict.items())
shuffle(b)
a = OrderedDict(b)
for bnd in dict(a):
expect = (
(
bandit_dict[bnd]["win"]
- bandit_dict[bnd]["loss"]
+ (bandit_dict[bnd]["loss"] > 0)
+ bandit_dict[bnd]["opp"]
- (bandit_dict[bnd]["opp"] > 0) * 1.5
+ bandit_dict[bnd]["op_continue"]
)
/ (
bandit_dict[bnd]["win"]
+ bandit_dict[bnd]["loss"]
+ bandit_dict[bnd]["opp"]
)
* math.pow(
0.97,
bandit_dict[bnd]["win"]
+ bandit_dict[bnd]["loss"]
+ bandit_dict[bnd]["opp"],
)
)
if expect > best_bandit_expected:
if bnd != exception:
best_bandit_expected = expect
best_bandit = bnd
return best_bandit
def get_a_virgin_bandit():
l = list(bandit_dict.items())
random.shuffle(l)
d = dict(l)
for bnd in d:
if (d[bnd]["win"] == 1) and (
d[bnd]["loss"] == 0) and (
d[bnd]["opp"] == 0):
return bnd
def is_still_virgin_bandit_present():
count_virgin_bandit = 0
for bnd in bandit_dict:
if (bandit_dict[bnd]["win"] == 1) and (
bandit_dict[bnd]["loss"] == 0) and (
bandit_dict[bnd]["opp"] == 0):
count_virgin_bandit += 1
if count_virgin_bandit > 0:
return 1
else:
return 0
my_action_list = []
op_action_list = []
op_continue_cnt_dict = defaultdict(int)
def multi_armed_probabilities(observation, configuration):
global total_reward, bandit_dict
my_pull = random.randrange(configuration["banditCount"])
if 0 == observation["step"]:
set_seed()
total_reward = 0
bandit_dict = {}
for i in range(configuration["banditCount"]):
bandit_dict[i] = {
"win": 1,
"loss": 0,
"opp": 0,
"my_continue": 0,
"op_continue": 0,
}
else:
last_reward = observation["reward"] - total_reward
total_reward = observation["reward"]
my_idx = observation["agentIndex"]
my_last_action = observation["lastActions"][my_idx]
op_last_action = observation["lastActions"][1 - my_idx]
my_action_list.append(my_last_action)
op_action_list.append(op_last_action)
if 0 < last_reward:
bandit_dict[my_last_action]["win"] = bandit_dict[my_last_action]["win"] + 1
else:
bandit_dict[my_last_action]["loss"] = (
bandit_dict[my_last_action]["loss"] + 1
)
bandit_dict[op_last_action]["opp"] = bandit_dict[op_last_action]["opp"] + 1
if observation["step"] >= 3:
if my_action_list[-1] == my_action_list[-2]:
bandit_dict[my_last_action]["my_continue"] += 1
else:
bandit_dict[my_last_action]["my_continue"] = 0
if op_action_list[-1] == op_action_list[-2]:
bandit_dict[op_last_action]["op_continue"] += 1
else:
bandit_dict[op_last_action]["op_continue"] = 0
if observation["step"] < 4:
return get_a_virgin_bandit()
if (observation["step"] < 100) and (op_action_list[-1] != op_action_list[-2]):
if is_still_virgin_bandit_present() == 1:
return get_a_virgin_bandit()
if (op_action_list[-1] == op_action_list[-2]):
if my_action_list[-1] != op_action_list[-1]:
# I go there
my_pull = op_action_list[-1]
# else if I was there
elif my_action_list[-1] == op_action_list[-1]:
# if I just won
if last_reward > 0:
my_pull = my_last_action
else:
my_pull = get_next_bandit()
# else if I won
elif last_reward > 0:
my_pull = get_next_bandit(my_action_list[-1])
else:
            # if I was winning 3 times in a row but lost last time
if (my_action_list[-1] == my_action_list[-2]) and (
my_action_list[-1] == my_action_list[-3]
):
# then I choose 50/50 if I continue
if random.random() < 0.5:
                # random tells me to stay on the same bandit
my_pull = my_action_list[-1]
else:
# I choose another one
my_pull = get_next_bandit()
# As I wasn't on the same bandit 3 times in a row, I move
else:
my_pull = get_next_bandit()
return my_pull
| true
| true
|
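The score computed in `get_next_bandit` above is easier to audit written out. With w, l, o, c the win, loss, opponent-pull, and opponent-continue counts for a bandit, the code evaluates:

\mathrm{expect} \;=\; \frac{w - l + [\,l>0\,] + o - 1.5\,[\,o>0\,] + c}{w + l + o} \;\cdot\; 0.97^{\,w+l+o}

The decay factor 0.97^(w+l+o) discounts heavily-pulled arms, nudging the agent toward less-explored bandits; opponent pulls both raise the numerator (evidence the arm pays out) and grow the decay exponent.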
1c4889fed63df332eb3bfa47a1614a45f7d74ea9
| 377
|
py
|
Python
|
smoke_long_only.py
|
axey733/pylivetrader
|
a33ddd3bbf2fe6e57d016e98c77d84c39401f9ec
|
[
"Apache-2.0"
] | null | null | null |
smoke_long_only.py
|
axey733/pylivetrader
|
a33ddd3bbf2fe6e57d016e98c77d84c39401f9ec
|
[
"Apache-2.0"
] | null | null | null |
smoke_long_only.py
|
axey733/pylivetrader
|
a33ddd3bbf2fe6e57d016e98c77d84c39401f9ec
|
[
"Apache-2.0"
] | null | null | null |
import long_only as algo
from pylivetrader.testing.smoke import harness
def test_algo():
pipe = harness.DefaultPipelineHooker()
harness.run_smoke(algo,
pipeline_hook=pipe,
)
if __name__ == '__main__':
import sys
from logbook import StreamHandler
StreamHandler(sys.stdout).push_application()
test_algo()
| 19.842105
| 48
| 0.655172
|
import long_only as algo
from pylivetrader.testing.smoke import harness
def test_algo():
pipe = harness.DefaultPipelineHooker()
harness.run_smoke(algo,
pipeline_hook=pipe,
)
if __name__ == '__main__':
import sys
from logbook import StreamHandler
StreamHandler(sys.stdout).push_application()
test_algo()
| true
| true
|
1c488b64db5fd0207e4b64efdc4fa1da6d8f39d4
| 8,957
|
py
|
Python
|
pymanopt/solvers/conjugate_gradient.py
|
antoinecollas/pymanopt
|
8586302c80aa5885f91b094bed85b6ffe849a5bf
|
[
"BSD-3-Clause"
] | 1
|
2021-02-09T10:24:15.000Z
|
2021-02-09T10:24:15.000Z
|
pymanopt/solvers/conjugate_gradient.py
|
antoinecollas/pymanopt
|
8586302c80aa5885f91b094bed85b6ffe849a5bf
|
[
"BSD-3-Clause"
] | null | null | null |
pymanopt/solvers/conjugate_gradient.py
|
antoinecollas/pymanopt
|
8586302c80aa5885f91b094bed85b6ffe849a5bf
|
[
"BSD-3-Clause"
] | 1
|
2021-04-30T08:49:02.000Z
|
2021-04-30T08:49:02.000Z
|
import time
from copy import deepcopy
import numpy as np
from pymanopt import tools
from pymanopt.solvers.linesearch import LineSearchAdaptive
from pymanopt.solvers.solver import Solver
# TODO: Use Python's enum module.
BetaTypes = tools.make_enum(
"BetaTypes",
"FletcherReeves PolakRibiere HestenesStiefel HagerZhang".split())
class ConjugateGradient(Solver):
"""
    Conjugate gradient algorithm, based on conjugategradient.m from the
    manopt MATLAB package.
"""
def __init__(self, beta_type=BetaTypes.HestenesStiefel, orth_value=np.inf,
linesearch=None, *args, **kwargs):
"""
Instantiate gradient solver class.
Variable attributes (defaults in brackets):
- beta_type (BetaTypes.HestenesStiefel)
Conjugate gradient beta rule used to construct the new search
direction
- orth_value (numpy.inf)
Parameter for Powell's restart strategy. An infinite
            value disables this strategy. See the in-code formula for
the specific criterion used.
- linesearch (LineSearchAdaptive)
            The linesearch method to use.
"""
super().__init__(*args, **kwargs)
self._beta_type = beta_type
self._orth_value = orth_value
if linesearch is None:
self._linesearch = LineSearchAdaptive()
else:
self._linesearch = linesearch
self.linesearch = None
def solve(self, problem, x=None, reuselinesearch=False):
"""
Perform optimization using nonlinear conjugate gradient method with
linesearch.
This method first computes the gradient of obj w.r.t. arg, and then
optimizes by moving in a direction that is conjugate to all previous
search directions.
Arguments:
- problem
Pymanopt problem setup using the Problem class, this must
have a .manifold attribute specifying the manifold to optimize
over, as well as a cost and enough information to compute
the gradient of that cost.
- x=None
Optional parameter. Starting point on the manifold. If none
then a starting point will be randomly generated.
- reuselinesearch=False
            Whether to reuse the previous linesearch object, allowing
            information from a previous solve run to be reused.
Returns:
- x
            Local minimum of obj or, if the algorithm terminated before
            convergence, the point at which it terminated.
"""
man = problem.manifold
verbosity = problem.verbosity
objective = problem.cost
gradient = problem.grad
if not reuselinesearch or self.linesearch is None:
self.linesearch = deepcopy(self._linesearch)
linesearch = self.linesearch
# If no starting point is specified, generate one at random.
if x is None:
x = man.rand()
# Initialize iteration counter and timer
iter = 0
stepsize = np.nan
time0 = time.time()
if verbosity >= 1:
print("Optimizing...")
if verbosity >= 2:
print(" iter\t\t cost val\t grad. norm")
# Calculate initial cost-related quantities
cost = objective(x)
grad = gradient(x)
gradnorm = man.norm(x, grad)
Pgrad = problem.precon(x, grad)
gradPgrad = man.inner(x, grad, Pgrad)
# Initial descent direction is the negative gradient
desc_dir = -Pgrad
self._start_optlog(extraiterfields=['gradnorm'],
solverparams={'beta_type': self._beta_type,
'orth_value': self._orth_value,
'linesearcher': linesearch})
while True:
if verbosity >= 2:
print("%5d\t%+.16e\t%.8e" % (iter, cost, gradnorm))
if self._logverbosity >= 2:
self._append_optlog(iter, x, cost, gradnorm=gradnorm)
stop_reason = self._check_stopping_criterion(
time0, gradnorm=gradnorm, iter=iter + 1, stepsize=stepsize)
if stop_reason:
if verbosity >= 1:
print(stop_reason)
print('')
break
# The line search algorithms require the directional derivative of
# the cost at the current point x along the search direction.
df0 = man.inner(x, grad, desc_dir)
# If we didn't get a descent direction: restart, i.e., switch to
# the negative gradient. Equivalent to resetting the CG direction
# to a steepest descent step, which discards the past information.
if df0 >= 0:
# Or we switch to the negative gradient direction.
if verbosity >= 3:
print("Conjugate gradient info: got an ascent direction "
"(df0 = %.2f), reset to the (preconditioned) "
"steepest descent direction." % df0)
# Reset to negative gradient: this discards the CG memory.
desc_dir = -Pgrad
df0 = -gradPgrad
# Execute line search
stepsize, newx = linesearch.search(objective, man, x, desc_dir,
cost, df0)
# Compute the new cost-related quantities for newx
newcost = objective(newx)
newgrad = gradient(newx)
newgradnorm = man.norm(newx, newgrad)
Pnewgrad = problem.precon(newx, newgrad)
newgradPnewgrad = man.inner(newx, newgrad, Pnewgrad)
# Apply the CG scheme to compute the next search direction
oldgrad = man.transp(x, newx, grad)
orth_grads = man.inner(newx, oldgrad, Pnewgrad) / newgradPnewgrad
# Powell's restart strategy (see page 12 of Hager and Zhang's
# survey on conjugate gradient methods, for example)
if abs(orth_grads) >= self._orth_value:
beta = 0
desc_dir = -Pnewgrad
else:
desc_dir = man.transp(x, newx, desc_dir)
if self._beta_type == BetaTypes.FletcherReeves:
beta = newgradPnewgrad / gradPgrad
elif self._beta_type == BetaTypes.PolakRibiere:
diff = newgrad - oldgrad
ip_diff = man.inner(newx, Pnewgrad, diff)
beta = max(0, ip_diff / gradPgrad)
elif self._beta_type == BetaTypes.HestenesStiefel:
diff = newgrad - oldgrad
ip_diff = man.inner(newx, Pnewgrad, diff)
try:
beta = max(0,
ip_diff / man.inner(newx, diff, desc_dir))
# if ip_diff = man.inner(newx, diff, desc_dir) = 0
except ZeroDivisionError:
beta = 1
elif self._beta_type == BetaTypes.HagerZhang:
diff = newgrad - oldgrad
Poldgrad = man.transp(x, newx, Pgrad)
Pdiff = Pnewgrad - Poldgrad
deno = man.inner(newx, diff, desc_dir)
numo = man.inner(newx, diff, Pnewgrad)
numo -= (2 * man.inner(newx, diff, Pdiff) *
man.inner(newx, desc_dir, newgrad) / deno)
beta = numo / deno
# Robustness (see Hager-Zhang paper mentioned above)
desc_dir_norm = man.norm(newx, desc_dir)
eta_HZ = -1 / (desc_dir_norm * min(0.01, gradnorm))
beta = max(beta, eta_HZ)
else:
types = ", ".join(
["BetaTypes.%s" % t for t in BetaTypes._fields])
raise ValueError(
"Unknown beta_type %s. Should be one of %s." % (
self._beta_type, types))
if type(beta) != float:
beta = float(beta)
desc_dir = -Pnewgrad + beta * desc_dir
# Update the necessary variables for the next iteration.
x = newx
cost = newcost
grad = newgrad
Pgrad = Pnewgrad
gradnorm = newgradnorm
gradPgrad = newgradPnewgrad
iter += 1
if self._logverbosity <= 0:
return x
else:
self._stop_optlog(x, cost, stop_reason, time0,
stepsize=stepsize, gradnorm=gradnorm,
iter=iter)
return x, self._optlog
| 40.165919
| 78
| 0.54449
|
import time
from copy import deepcopy
import numpy as np
from pymanopt import tools
from pymanopt.solvers.linesearch import LineSearchAdaptive
from pymanopt.solvers.solver import Solver
BetaTypes = tools.make_enum(
"BetaTypes",
"FletcherReeves PolakRibiere HestenesStiefel HagerZhang".split())
class ConjugateGradient(Solver):
def __init__(self, beta_type=BetaTypes.HestenesStiefel, orth_value=np.inf,
linesearch=None, *args, **kwargs):
super().__init__(*args, **kwargs)
self._beta_type = beta_type
self._orth_value = orth_value
if linesearch is None:
self._linesearch = LineSearchAdaptive()
else:
self._linesearch = linesearch
self.linesearch = None
def solve(self, problem, x=None, reuselinesearch=False):
man = problem.manifold
verbosity = problem.verbosity
objective = problem.cost
gradient = problem.grad
if not reuselinesearch or self.linesearch is None:
self.linesearch = deepcopy(self._linesearch)
linesearch = self.linesearch
# If no starting point is specified, generate one at random.
if x is None:
x = man.rand()
# Initialize iteration counter and timer
iter = 0
stepsize = np.nan
time0 = time.time()
if verbosity >= 1:
print("Optimizing...")
if verbosity >= 2:
print(" iter\t\t cost val\t grad. norm")
# Calculate initial cost-related quantities
cost = objective(x)
grad = gradient(x)
gradnorm = man.norm(x, grad)
Pgrad = problem.precon(x, grad)
gradPgrad = man.inner(x, grad, Pgrad)
# Initial descent direction is the negative gradient
desc_dir = -Pgrad
self._start_optlog(extraiterfields=['gradnorm'],
solverparams={'beta_type': self._beta_type,
'orth_value': self._orth_value,
'linesearcher': linesearch})
while True:
if verbosity >= 2:
print("%5d\t%+.16e\t%.8e" % (iter, cost, gradnorm))
if self._logverbosity >= 2:
self._append_optlog(iter, x, cost, gradnorm=gradnorm)
stop_reason = self._check_stopping_criterion(
time0, gradnorm=gradnorm, iter=iter + 1, stepsize=stepsize)
if stop_reason:
if verbosity >= 1:
print(stop_reason)
print('')
break
# The line search algorithms require the directional derivative of
# the cost at the current point x along the search direction.
df0 = man.inner(x, grad, desc_dir)
            # If we didn't get a descent direction: restart, i.e., switch to the negative gradient.
if df0 >= 0:
if verbosity >= 3:
print("Conjugate gradient info: got an ascent direction "
"(df0 = %.2f), reset to the (preconditioned) "
"steepest descent direction." % df0)
desc_dir = -Pgrad
df0 = -gradPgrad
stepsize, newx = linesearch.search(objective, man, x, desc_dir,
cost, df0)
newcost = objective(newx)
newgrad = gradient(newx)
newgradnorm = man.norm(newx, newgrad)
Pnewgrad = problem.precon(newx, newgrad)
newgradPnewgrad = man.inner(newx, newgrad, Pnewgrad)
oldgrad = man.transp(x, newx, grad)
orth_grads = man.inner(newx, oldgrad, Pnewgrad) / newgradPnewgrad
if abs(orth_grads) >= self._orth_value:
beta = 0
desc_dir = -Pnewgrad
else:
desc_dir = man.transp(x, newx, desc_dir)
if self._beta_type == BetaTypes.FletcherReeves:
beta = newgradPnewgrad / gradPgrad
elif self._beta_type == BetaTypes.PolakRibiere:
diff = newgrad - oldgrad
ip_diff = man.inner(newx, Pnewgrad, diff)
beta = max(0, ip_diff / gradPgrad)
elif self._beta_type == BetaTypes.HestenesStiefel:
diff = newgrad - oldgrad
ip_diff = man.inner(newx, Pnewgrad, diff)
try:
beta = max(0,
ip_diff / man.inner(newx, diff, desc_dir))
except ZeroDivisionError:
beta = 1
elif self._beta_type == BetaTypes.HagerZhang:
diff = newgrad - oldgrad
Poldgrad = man.transp(x, newx, Pgrad)
Pdiff = Pnewgrad - Poldgrad
deno = man.inner(newx, diff, desc_dir)
numo = man.inner(newx, diff, Pnewgrad)
numo -= (2 * man.inner(newx, diff, Pdiff) *
man.inner(newx, desc_dir, newgrad) / deno)
beta = numo / deno
desc_dir_norm = man.norm(newx, desc_dir)
eta_HZ = -1 / (desc_dir_norm * min(0.01, gradnorm))
beta = max(beta, eta_HZ)
else:
types = ", ".join(
["BetaTypes.%s" % t for t in BetaTypes._fields])
raise ValueError(
"Unknown beta_type %s. Should be one of %s." % (
self._beta_type, types))
if type(beta) != float:
beta = float(beta)
desc_dir = -Pnewgrad + beta * desc_dir
x = newx
cost = newcost
grad = newgrad
Pgrad = Pnewgrad
gradnorm = newgradnorm
gradPgrad = newgradPnewgrad
iter += 1
if self._logverbosity <= 0:
return x
else:
self._stop_optlog(x, cost, stop_reason, time0,
stepsize=stepsize, gradnorm=gradnorm,
iter=iter)
return x, self._optlog
| true
| true
|
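For reference, the beta rules implemented in `ConjugateGradient.solve` above, in the preconditioned notation of the code (g_k the gradient at x_k, P the preconditioner, y_k = g_{k+1} - g_k after transport, d_k the search direction):

\beta_{\mathrm{FR}} = \frac{\langle g_{k+1}, P g_{k+1}\rangle}{\langle g_k, P g_k\rangle},\qquad
\beta_{\mathrm{PR}} = \max\!\Bigl(0,\; \frac{\langle P g_{k+1},\, y_k\rangle}{\langle g_k, P g_k\rangle}\Bigr),\qquad
\beta_{\mathrm{HS}} = \max\!\Bigl(0,\; \frac{\langle P g_{k+1},\, y_k\rangle}{\langle y_k,\, d_k\rangle}\Bigr)

A hedged usage sketch, assuming a `pymanopt.Problem` named `problem` is already set up:

solver = ConjugateGradient(beta_type=BetaTypes.PolakRibiere)
xopt = solver.solve(problem)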
1c488bf03fa2dc3897aa74a2e2eaf075900ba316
| 724
|
py
|
Python
|
tools/vig_focus_dist.py
|
jtniehof/photo
|
313c6f5d450404a3f402a2646526abf55a4920b7
|
[
"CC0-1.0"
] | null | null | null |
tools/vig_focus_dist.py
|
jtniehof/photo
|
313c6f5d450404a3f402a2646526abf55a4920b7
|
[
"CC0-1.0"
] | null | null | null |
tools/vig_focus_dist.py
|
jtniehof/photo
|
313c6f5d450404a3f402a2646526abf55a4920b7
|
[
"CC0-1.0"
] | null | null | null |
#!/usr/bin/env python
"""Sort vignetting images by focal distance"""
import glob
import os
import os.path
import subprocess
for f in sorted(list(glob.glob('*.CR2'))):
exiv2_process = subprocess.Popen(
["exiv2", "-PEkt", "-g", "Exif.CanonFi.FocusDistanceUpper", "-g", "Exif.CanonFi.FocusDistanceLower", f], stdout=subprocess.PIPE)
lines = exiv2_process.communicate()[0].splitlines()
upper = float(lines[0].split()[-2])
lower = float(lines[1].split()[-2])
dist = (upper + lower) / 2.
if dist > 2.:
subdir = 'vignetting'
else:
subdir = 'vignetting_{0:.2f}'.format(dist)
if not os.path.isdir(subdir):
os.mkdir(subdir)
os.rename(f, os.path.join(subdir, f))
| 28.96
| 136
| 0.632597
|
import glob
import os
import os.path
import subprocess
for f in sorted(list(glob.glob('*.CR2'))):
exiv2_process = subprocess.Popen(
["exiv2", "-PEkt", "-g", "Exif.CanonFi.FocusDistanceUpper", "-g", "Exif.CanonFi.FocusDistanceLower", f], stdout=subprocess.PIPE)
lines = exiv2_process.communicate()[0].splitlines()
upper = float(lines[0].split()[-2])
lower = float(lines[1].split()[-2])
dist = (upper + lower) / 2.
if dist > 2.:
subdir = 'vignetting'
else:
subdir = 'vignetting_{0:.2f}'.format(dist)
if not os.path.isdir(subdir):
os.mkdir(subdir)
os.rename(f, os.path.join(subdir, f))
| true
| true
|
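The script above shells out to exiv2 once per raw file via `Popen`. On Python 3.7+ the same metadata read is a little clearer with `subprocess.run`; a minimal sketch (it assumes exiv2's `-PEkt` lines end in `value unit`, exactly as the original parsing does):

import subprocess

def focus_distance(path):
    # One exiv2 call returning both focus-distance tags, one per line.
    out = subprocess.run(
        ["exiv2", "-PEkt",
         "-g", "Exif.CanonFi.FocusDistanceUpper",
         "-g", "Exif.CanonFi.FocusDistanceLower", path],
        capture_output=True, text=True, check=True).stdout.splitlines()
    upper = float(out[0].split()[-2])
    lower = float(out[1].split()[-2])
    return (upper + lower) / 2.0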
1c488c509a425b91566922bd03cb5f216b1c1b6b
| 30,847
|
py
|
Python
|
netharn/initializers/functional.py
|
Erotemic/netharn
|
bc4a6d75445c949e709e5ab903ba72813ec68b79
|
[
"Apache-2.0"
] | 38
|
2018-06-18T07:47:31.000Z
|
2021-10-31T23:18:58.000Z
|
netharn/initializers/functional.py
|
Erotemic/netharn
|
bc4a6d75445c949e709e5ab903ba72813ec68b79
|
[
"Apache-2.0"
] | 10
|
2018-06-08T01:21:58.000Z
|
2020-01-11T02:13:36.000Z
|
netharn/initializers/functional.py
|
Erotemic/netharn
|
bc4a6d75445c949e709e5ab903ba72813ec68b79
|
[
"Apache-2.0"
] | 6
|
2018-04-17T22:06:02.000Z
|
2019-12-13T03:04:53.000Z
|
import numpy as np
import torch
import ubelt as ub
def trainable_layers(model, names=False):
"""
Returns all layers containing trainable parameters
Notes:
It may be better to simply use model.named_parameters() instead in most
situation. This is useful when you need the classes that contains the
parameters instead of the parameters themselves.
Example:
>>> import torchvision
>>> model = torchvision.models.AlexNet()
>>> list(trainable_layers(model, names=True))
"""
if names:
stack = [('', '', model)]
while stack:
prefix, basename, item = stack.pop()
name = '.'.join([p for p in [prefix, basename] if p])
if isinstance(item, torch.nn.modules.conv._ConvNd):
yield name, item
elif isinstance(item, torch.nn.modules.batchnorm._BatchNorm):
yield name, item
elif hasattr(item, 'reset_parameters'):
yield name, item
child_prefix = name
for child_basename, child_item in list(item.named_children())[::-1]:
stack.append((child_prefix, child_basename, child_item))
else:
queue = [model]
while queue:
item = queue.pop(0)
# TODO: need to put all trainable layer types here
# (I think this is just everything with reset_parameters)
if isinstance(item, torch.nn.modules.conv._ConvNd):
yield item
elif isinstance(item, torch.nn.modules.batchnorm._BatchNorm):
yield item
elif hasattr(item, 'reset_parameters'):
yield item
# if isinstance(input, torch.nn.modules.Linear):
# yield item
# if isinstance(input, torch.nn.modules.Bilinear):
# yield item
# if isinstance(input, torch.nn.modules.Embedding):
# yield item
# if isinstance(input, torch.nn.modules.EmbeddingBag):
# yield item
for child in item.children():
queue.append(child)
def apply_initializer(input, func, funckw):
"""
Recursively initializes the input using a torch.nn.init function.
If the input is a model, then only known layer types are initialized.
Args:
input (Tensor | Module): can be a model, layer, or tensor
func (callable): initialization function
funckw (dict):
Example:
>>> from torch import nn
>>> import torch
>>> class DummyNet(nn.Module):
>>> def __init__(self, n_channels=1, n_classes=10):
>>> super(DummyNet, self).__init__()
>>> self.conv = nn.Conv2d(n_channels, 10, kernel_size=5)
>>> self.norm = nn.BatchNorm2d(10)
>>> self.param = torch.nn.Parameter(torch.rand(3))
>>> self = DummyNet()
>>> func = nn.init.kaiming_normal_
>>> apply_initializer(self, func, {})
>>> func = nn.init.constant_
>>> apply_initializer(self, func, {'val': 42})
>>> assert np.all(self.conv.weight.detach().numpy() == 42)
>>> assert np.all(self.conv.bias.detach().numpy() == 0), 'bias is always init to zero'
>>> assert np.all(self.norm.bias.detach().numpy() == 0), 'bias is always init to zero'
>>> assert np.all(self.norm.weight.detach().numpy() == 1)
>>> assert np.all(self.norm.running_mean.detach().numpy() == 0.0)
>>> assert np.all(self.norm.running_var.detach().numpy() == 1.0)
"""
if getattr(input, 'bias', None) is not None:
# print('zero input bias')
# zero all biases
input.bias.data.zero_()
if isinstance(input, (torch.Tensor)):
# assert False, ('input is tensor? does this make sense?')
# print('input is tensor')
func(input, **funckw)
# data = input
elif isinstance(input, (torch.nn.modules.conv._ConvNd)):
# print('input is convnd')
func(input.weight, **funckw)
# elif isinstance(input, (torch.nn.modules.linear.Linear)):
# func(input.weight, **funckw)
elif isinstance(input, torch.nn.modules.batchnorm._BatchNorm):
# Use default batch norm
input.reset_parameters()
# elif isinstance(input, torch.nn.modules.Linear):
# input.reset_parameters()
elif hasattr(input, 'reset_parameters'):
# print('unknown input type fallback on reset_params')
input.reset_parameters()
else:
# input is a torch module
model = input
# print('recurse input')
layers = list(trainable_layers(model))
# print('layers = {!r}'.format(layers))
for item in layers:
apply_initializer(item, func, funckw)
def load_partial_state(model, model_state_dict, leftover=None,
ignore_unset=False, verbose=2,
mangle=True, association=None,
initializer=None):
"""
CommandLine:
python -m netharn.initializers.nninit_base load_partial_state
Args:
model (torch.nn.Module): module to initialize
model_state_dict (dict): state dict we wish to transfer
leftover (callable): fallback method for initializing incompatible
areas, if none then those areas are left as-is.
association (str): controls how we search for the association between
the two model states. Can be strict, module-hack, prefix-hack, or
            embedding. Default is: module-hack.
mangle (bool, default=True): If True, mangles tensors that have the
            same key but different shapes, forcing them to fit. This might
            destroy information when forcing a larger tensor into a smaller
            tensor, or leave extra uninitialized room when a small tensor is
            placed in a larger one. Be careful when mangling a
classification layer if class indexes are not aligned.
verbose (int): verbosity level
Returns:
Dict: info - summary of actions taken
TODO:
- [ ] Allow user to specify how incompatible layers are handled.
Notes:
        Suppose you have a torch model with a state
        dict whose keys look like: `mymodel.detector.layer1.conv.weight`,
        but a pretrained weight file whose keys look like:
        `module.layer1.conv.weight`.
The latest version of
        `netharn.initializers.functional.load_partial_state` can handle this by
solving a maximum-common-subtree-isomorphism problem. This computes the
largest possible mapping between the two state dictionaries that share
consistent suffixes.
>>> # This means you can load an off-the-shelf unmodified pretrained resnet50
>>> # where the keys might look something like this:
>>> resnet_keys = {
>>> 'conv1.weight',
>>> 'layer1.0.conv1.weight',
>>> 'layer1.0.conv2.weight',
>>> 'layer1.0.conv3.weight',
>>> 'layer1.0.downsample.0.weight',
>>> 'layer2.0.conv1.weight',
>>> 'layer2.0.conv2.weight',
>>> 'layer2.0.conv3.weight',
>>> 'layer3.0.conv1.weight',
>>> 'layer4.0.conv1.weight',
>>> 'fc.weight',
>>> 'fc.bias',
>>> }
>>> #
>>> # And perhaps you have a model that has a state dict where keys
>>> # look like this:
>>> model_keys = {
>>> 'preproc.conv1.weight'
>>> 'backbone.layer1.0.conv1.weight',
>>> 'backbone.layer1.0.conv2.weight',
>>> 'backbone.layer1.0.conv3.weight',
>>> 'backbone.layer1.0.downsample.0.weight',
>>> 'backbone.layer2.0.conv1.weight',
>>> 'backbone.layer2.0.conv2.weight',
>>> 'backbone.layer2.0.conv3.weight',
>>> 'backbone.layer3.0.conv1.weight',
>>> 'backbone.layer4.0.conv1.weight',
>>> 'head.conv1'
>>> 'head.conv2'
>>> 'head.fc.weight'
>>> 'head.fc.bias'
>>> }
>>> #
>>> # We can compute a partial mapping between them
>>> subpaths1, subpaths2 = maximum_common_ordered_subpaths(resnet_keys, model_keys)
>>> print(ub.repr2(ub.dzip(subpaths1, subpaths2)))
{
'layer1.0.conv2.weight': 'backbone.layer1.0.conv2.weight',
'layer1.0.conv3.weight': 'backbone.layer1.0.conv3.weight',
'layer1.0.downsample.0.weight': 'backbone.layer1.0.downsample.0.weight',
'layer2.0.conv1.weight': 'backbone.layer2.0.conv1.weight',
'layer2.0.conv2.weight': 'backbone.layer2.0.conv2.weight',
'layer2.0.conv3.weight': 'backbone.layer2.0.conv3.weight',
'layer3.0.conv1.weight': 'backbone.layer3.0.conv1.weight',
'layer4.0.conv1.weight': 'backbone.layer4.0.conv1.weight',
}
        Also, if the sizes of the tensors don't quite fit, they will be
mangled, i.e. "shoved-in" as best as possible.
Example:
>>> import netharn as nh
>>> self1 = nh.models.ToyNet2d(input_channels=1, num_classes=10)
>>> self2 = nh.models.ToyNet2d(input_channels=3, num_classes=2)
>>> self1.hack_param1 = torch.nn.Parameter(torch.rand(1))
>>> self2.hack_param1 = torch.nn.Parameter(torch.rand(3))
>>> self2.hack_param2 = torch.nn.Parameter(torch.rand(3))
>>> model_state_dict = self1.state_dict()
>>> load_partial_state(self2, model_state_dict)
>>> load_partial_state(self2, model_state_dict, leftover=torch.nn.init.kaiming_normal_)
Example:
>>> import netharn as nh
>>> xpu = nh.XPU(None)
>>> self1 = nh.models.ToyNet2d()
>>> self2 = xpu.mount(self1)
>>> load_partial_state(self2, self1.state_dict())
>>> load_partial_state(self1, self2.state_dict())
>>> # Add extra nonsense to state-dict
>>> extra_state_dict = {'extra.' + k: v for k, v in self1.state_dict().items()}
>>> extra_state_dict['stats'] = ub.peek(extra_state_dict.values()).clone()
>>> model = self2
>>> model_state_dict = extra_state_dict
>>> load_partial_state(self2, extra_state_dict)
Example:
>>> # xdoctest: +REQUIRES(--slow)
>>> from netharn.initializers.functional import * # NOQA
>>> import torchvision
>>> import torch
>>> resnet50 = torchvision.models.resnet50()
>>> class CustomModel(torch.nn.Module):
>>> def __init__(self):
>>> super().__init__()
>>> self.module = resnet50
>>> self.extra = torch.nn.Linear(1, 1)
>>> model = CustomModel()
>>> model_state_dict = resnet50.state_dict()
>>> model_state_dict2 = {'prefix.' + k: v for k, v in model_state_dict.items()}
>>> import ubelt as ub
>>> with ub.Timer(verbose=2, label='strict'):
>>> load_partial_state(model, model_state_dict, association='strict', verbose=0)
>>> with ub.Timer(verbose=2, label='prefix-hack'):
>>> load_partial_state(model, model_state_dict, association='prefix-hack', verbose=0)
>>> with ub.Timer(verbose=2, label='module-hack'):
>>> load_partial_state(model, model_state_dict, association='module-hack', verbose=0)
>>> with ub.Timer(verbose=2, label='embedding'):
>>> load_partial_state(model, model_state_dict, association='embedding', verbose=0)
>>> load_partial_state(model, model_state_dict, association='prefix-hack', verbose=1)
>>> load_partial_state(model, model_state_dict, association='module-hack', verbose=1)
CommandLine:
xdoctest -m /home/joncrall/code/netharn/netharn/initializers/functional.py load_partial_state:2 --slow
"""
if association is None:
association = 'module-hack' # old default
# association = 'prefix-hack' # new default
if initializer is not None:
import warnings
warnings.warn('initializer is deprecated use leftover')
leftover = initializer
self_state = model.state_dict()
def _fix_keys(model_state_dict):
"""
Hack around DataParallel wrapper. If there is nothing in common between
the two models check to see if prepending 'module.' to other keys fixes
it.
"""
other_keys = set(model_state_dict)
self_keys = set(self_state)
common_keys = other_keys.intersection(self_keys)
if not common_keys:
if association == 'strict':
pass
elif association == 'module-hack':
# If there are no common keys try a hack
prefix = 'module.'
def smap(f, ss):
return set(map(f, ss))
def fix1(k):
return prefix + k
def fix2(k):
if k.startswith(prefix):
return k[len(prefix):]
if smap(fix1, other_keys).intersection(self_keys):
model_state_dict = ub.map_keys(fix1, model_state_dict)
elif smap(fix2, other_keys).intersection(self_keys):
model_state_dict = ub.map_keys(fix2, model_state_dict)
elif association == 'prefix-hack':
import functools
def add_prefix(k, prefix):
return prefix + k
def remove_prefix(k, prefix):
if k.startswith(prefix):
return k[len(prefix):]
# set1 = other_keys
# target_set2 = self_keys
found = _best_prefix_transform(other_keys, self_keys)
if found is not None:
for action, prefix in found['transform']:
if action == 'add':
func = functools.partial(add_prefix, prefix=prefix)
elif action == 'remove':
func = functools.partial(remove_prefix, prefix=prefix)
else:
raise AssertionError
model_state_dict = ub.map_keys(func, model_state_dict)
elif association == 'embedding':
if verbose > 1:
                    print('Using subpath embedding association, may take some time')
# I believe this is the correct way to solve the problem
paths1 = sorted(other_keys)
paths2 = sorted(self_state)
subpaths1, subpaths2 = maximum_common_ordered_subpaths(paths1, paths2)
mapping = ub.dzip(subpaths1, subpaths2)
if verbose > 1:
print('mapping = {}'.format(ub.repr2(mapping, nl=1)))
model_state_dict = ub.map_keys(lambda k: mapping.get(k, k), model_state_dict)
else:
raise KeyError(association)
return model_state_dict
other_state = _fix_keys(model_state_dict)
self_unset_keys = set(self_state.keys()) # will end up as keys in our that were not set
other_unused_keys = set(other_state.keys()) # will end up as keys in the other model that were not used
seen_keys = ub.ddict(set)
for key, other_value in other_state.items():
if key not in self_state:
if verbose > 0:
print('Skipping {} because it does not exist'.format(key))
seen_keys['skipped'].add(key)
else:
self_value = self_state[key]
if other_value.size() == self_value.size():
self_state[key] = other_value
self_unset_keys.remove(key)
other_unused_keys.remove(key)
seen_keys['full_add'].add(key)
elif len(other_value.size()) == len(self_value.size()):
if not mangle:
if verbose > 0:
                        print('Skipping {} due to incompatible size and mangle=False'.format(key))
print(' * self = {!r}'.format(self_value.size()))
print(' * other = {!r}'.format(other_value.size()))
seen_keys['skipped'].add(key)
elif key.endswith('bias'):
if verbose > 0:
                        print('Skipping {} due to incompatible size'.format(key))
print(' * self = {!r}'.format(self_value.size()))
print(' * other = {!r}'.format(other_value.size()))
seen_keys['skipped'].add(key)
else:
if leftover is None:
if verbose > 0:
                            print('Skipping {} due to incompatible size and no default initializer'.format(key))
print(' * self = {!r}'.format(self_value.size()))
print(' * other = {!r}'.format(other_value.size()))
seen_keys['skipped'].add(key)
else:
if verbose > 0:
                            print('Partially add {} with incompatible size'.format(key))
print(' * self = {!r}'.format(self_value.size()))
print(' * other = {!r}'.format(other_value.size()))
# Initialize all weights in case any are unspecified
                        if leftover is not None:
try:
leftover(self_state[key])
except Exception:
if verbose > 0:
print('Unable to init {} with {}'.format(key, leftover))
# Transfer as much as possible
min_size = np.minimum(self_state[key].shape,
other_value.shape)
sl = tuple([slice(0, s) for s in min_size])
self_state[key][sl] = other_value[sl]
# if shock_partial:
# # Shock weights because we are doing something weird
# # might help the network recover in case this is
# # not a good idea
# shock(self_state[key], func=leftover)
self_unset_keys.remove(key)
other_unused_keys.remove(key)
if self_state[key].numel() < other_value.numel():
seen_keys['partial_add_some'].add(key)
else:
seen_keys['partial_add_all'].add(key)
else:
if verbose > 0:
                    print('Skipping {} due to incompatible size'.format(key))
print(' * self = {!r}'.format(self_value.size()))
print(' * other = {!r}'.format(other_value.size()))
seen_keys['skipped'].add(key)
if ignore_unset is True:
self_unset_keys = []
elif ignore_unset:
self_unset_keys = list(ub.oset(self_unset_keys) - set(ignore_unset))
if (self_unset_keys or other_unused_keys or
seen_keys['partial_add_some'] or seen_keys['partial_add_all']):
if verbose > 0:
if seen_keys:
print('Pretrained weights are a partial fit')
else:
print('Pretrained weights do not fit!')
if verbose > 1:
print('Seen Keys: {}'.format(ub.repr2(seen_keys, nl=2)))
print('Self Unset Keys: {}'.format(ub.repr2(self_unset_keys, nl=1)))
print('Other Unused keys: {}'.format(ub.repr2(other_unused_keys, nl=1)))
print('summary:')
seen_sum = ub.map_vals(len, seen_keys)
print('Seen Num: {}'.format(ub.repr2(seen_sum, nl=2)))
print('Self Unset Num: {}'.format(ub.repr2(len(self_unset_keys), nl=1)))
print('Other Unused Num: {}'.format(ub.repr2(len(other_unused_keys), nl=1)))
if leftover:
if verbose > 0:
print('Initializing unused keys using {}'.format(leftover))
for key in self_unset_keys:
if key.endswith('.num_batches_tracked'):
pass # ignore num_batches_tracked
elif key.endswith('.bias'):
self_state[key].fill_(0)
else:
try:
leftover(self_state[key])
except Exception:
if verbose > 0:
print('Unable to init {} with {}'.format(key, leftover))
else:
if verbose > 0:
print('Pretrained weights are a perfect fit')
model.load_state_dict(self_state)
info = {
'seen': seen_keys,
'self_unset': self_unset_keys,
'other_unused': other_unused_keys
}
return info
def _best_prefix_transform(set1, target_set2):
"""
Find a way to transform prefixes of items in set1 to match target_set2
Example:
>>> set1 = {'mod.f.0.w',
>>> 'mod.f.1.b',
>>> 'mod.f.1.n',
>>> 'mod.f.1.rm',
>>> 'mod.f.1.rv',}
>>> #
>>> target_set2 = {
>>> 'bar.foo.extra.f.1.b',
>>> 'bar.foo.extra.f.1.n',
>>> 'bar.foo.extra.f.1.w',
>>> 'bar.foo.extra.f.3.w',
>>> }
>>> _best_prefix_transform(set1, target_set2)
>>> target_set2.add('JUNK')
>>> _best_prefix_transform(set1, target_set2)
"""
# probably an efficient way to do this with a trie
# NOTE: In general this is a graph-isomorphism problem or a maximum common
# subgraph problem. However, we can look only at the special case of
# "maximum common subtrees". Given two directory structures (as trees)
# we find the common bits.
# https://perso.ensta-paris.fr/~diam/ro/online/viggo_wwwcompendium/node168.html
# We can approximate to O(log log n / log^2 n)
# Can get algorithm from maximum independent set
# https://arxiv.org/abs/1602.07210
# The most efficient algorithm here would be for solving
# "Maximum common labeled subtrees"
    # APX-hard for unordered trees, but polytime solvable for ordered trees.
    # For directory structures we can induce an order, and hence obtain a
# polytime solution
# #
# On the Maximum Common Embedded Subtree Problem for Ordered Trees
# https://pdfs.semanticscholar.org/0b6e/061af02353f7d9b887f9a378be70be64d165.pdf
from os.path import commonprefix
prefixes1 = commonprefix(list(set1)).split('.')
prefixes2 = commonprefix(list(target_set2)).split('.')
# Remove the trailing prefixes that are the same
num_same = 0
for i in range(1, min(len(prefixes1), len(prefixes2))):
if prefixes1[-i] == prefixes2[-i]:
num_same = i
else:
break
    if num_same:  # guard: a slice of [:-0] would wrongly empty the lists
        prefixes1 = prefixes1[:-num_same]
        prefixes2 = prefixes2[:-num_same]
ALLOW_FUZZY = 1
if ALLOW_FUZZY and len(prefixes2) == 0:
# SUPER HACK FOR CASE WHERE THERE IS JUST ONE SPOILER ELEMENT IN THE
# TARGET SET. THE ALGORITHM NEEDS TO BE RETHOUGHT FOR THAT CASE
possible_prefixes = [k.split('.') for k in target_set2]
prefix_hist = ub.ddict(lambda: 0)
for item in possible_prefixes:
for i in range(1, len(item)):
prefix_hist[tuple(item[0:i])] += 1
prefixes2 = ['.'.join(ub.argmax(prefix_hist))]
def add_prefix(items, prefix):
return {prefix + k for k in items}
def remove_prefix(items, prefix):
return {k[len(prefix):] if k.startswith(prefix) else k for k in items}
import itertools as it
found_cand = []
for i1, i2 in it.product(range(len(prefixes1) + 1), range(len(prefixes2) + 1)):
if i1 == 0 and i2 == 0:
continue
# Very inefficient, we should be able to do better
prefix1 = '.'.join(prefixes1[:i1])
prefix2 = '.'.join(prefixes2[:i2])
if prefix1:
prefix1 = prefix1 + '.'
if prefix2:
prefix2 = prefix2 + '.'
# We are allowed to remove a prefix from a set, add the other
# prefix to the set, or remove and then add.
set1_cand1 = remove_prefix(set1, prefix1)
set1_cand2 = add_prefix(set1, prefix2)
set1_cand3 = add_prefix(set1_cand1, prefix2)
common1 = set1_cand1 & target_set2
common2 = set1_cand2 & target_set2
common3 = set1_cand3 & target_set2
if common1:
found_cand.append({
'transform': [('remove', prefix1)],
'value': len(common1),
})
if common2:
found_cand.append({
'transform': [('add', prefix2)],
'value': len(common2),
})
if common3:
found_cand.append({
'transform': [('remove', prefix1), ('add', prefix2)],
'value': len(common3),
})
if len(found_cand):
found = max(found_cand, key=lambda x: x['value'])
else:
found = None
return found
def maximum_common_ordered_subpaths(paths1, paths2, sep='.'):
"""
CommandLine:
xdoctest -m /home/joncrall/code/netharn/netharn/initializers/functional.py maximum_common_ordered_subpaths:0 --profile && cat profile_output.txt
xdoctest -m /home/joncrall/code/netharn/netharn/initializers/functional.py maximum_common_ordered_subpaths:0
Example:
>>> import torchvision
>>> resnet50 = torchvision.models.resnet50()
>>> paths1 = sorted(resnet50.state_dict().keys())[0:100]
>>> paths2 = ['prefix.' + k for k in paths1]
>>> paths2.append('extra_key')
>>> subpaths1, subpaths2 = maximum_common_ordered_subpaths(paths1, paths2)
>>> mapping = ub.dzip(subpaths1, subpaths2)
>>> print('mapping = {}'.format(ub.repr2(mapping, nl=1)))
Example:
>>> rng = None
>>> import kwarray
>>> rng = kwarray.ensure_rng(rng)
>>> def random_paths(rng, max_depth=10):
>>> depth = rng.randint(1, max_depth)
>>> parts = list(map(chr, rng.randint(ord('a'), ord('z'), size=depth)))
>>> path = '.'.join(parts)
>>> return path
>>> n = 50
>>> paths1 = sorted({random_paths(rng) for _ in range(n)})
>>> paths2 = sorted({random_paths(rng) for _ in range(n)})
>>> paths1 = paths1 + ['a.' + k for k in paths2[0:n // 3]]
>>> subpaths1, subpaths2 = maximum_common_ordered_subpaths(paths1, paths2)
>>> mapping = ub.dzip(subpaths1, subpaths2)
>>> print('mapping = {}'.format(ub.repr2(mapping, nl=1)))
Example:
>>> from netharn.initializers.functional import * # NOQA
>>> paths1 = [
>>> 'stats',
>>> 'z.mod.f.0.w',
>>> 'a.z.mod.f.0.b',
>>> 'z.mod.f.1.b',
>>> 'z.mod.f.1.n',
>>> 'z.mod.f.1.m',
>>> 'z.mod.f.1.v',
>>> 'z.mod.f.2.m',
>>> 'z.mod.z.q'
>>> ]
>>> # paths1 = ['mod']
>>> #
>>> paths2 = [
>>> 'stats',
>>> 'bar.f.0.w',
>>> 'bar.foo.extra.z.q',
>>> 'bar.foo.extra',
>>> 'bar.foo.extra.f.1.b',
>>> 'bar.foo.extra.f.1.n',
>>> 'bar.foo.extra.f.1.w',
        >>> 'bar.foo.extra.f.3.z', # FIXME we need to handle label comparison operators
>>> # I think we allow labels to match if they have the same suffix
>>> ]
>>> sep = '.'
>>> subpaths1, subpaths2 = maximum_common_ordered_subpaths(paths1, paths2, sep)
>>> mapping = ub.dzip(subpaths1, subpaths2)
>>> print('mapping = {}'.format(ub.repr2(mapping, nl=1)))
Example:
>>> sep = '.'
>>> paths1 = ['a.b']
>>> paths2 = ['a.b']
>>> subpaths1, subpaths2 = maximum_common_ordered_subpaths(paths1, paths2, sep)
>>> mapping = ub.dzip(subpaths1, subpaths2)
>>> print('mapping = {}'.format(ub.repr2(mapping, nl=1)))
>>> paths1 = ['c.a.b']
>>> paths2 = ['a.b']
>>> subpaths1, subpaths2 = maximum_common_ordered_subpaths(paths1, paths2, sep)
>>> mapping = ub.dzip(subpaths1, subpaths2)
>>> print('mapping = {}'.format(ub.repr2(mapping, nl=1)))
>>> paths1 = ['c.a.b', 'c.a.e', 'c.a.q']
>>> paths2 = ['a.b', 'c.e', 'c.a', 'a.q']
>>> subpaths1, subpaths2 = maximum_common_ordered_subpaths(paths1, paths2, sep)
>>> mapping = ub.dzip(subpaths1, subpaths2)
>>> print('mapping = {}'.format(ub.repr2(mapping, nl=1)))
"""
import networkx as nx
# the longest common balanced sequence problem
def _affinity(tok1, tok2):
score = 0
for t1, t2 in zip(tok1[::-1], tok2[::-1]):
if t1 == t2:
score += 1
else:
break
return score
# return tok1[-1] == tok2[-1]
node_affinity = _affinity
# import operator
# eq = operator.eq
def paths_to_tree(paths):
tree = nx.OrderedDiGraph()
for path in sorted(paths):
parts = tuple(path.split(sep))
node_path = []
for i in range(1, len(parts) + 1):
node = parts[0:i]
tree.add_node(node)
tree.nodes[node]['label'] = node[-1]
node_path.append(node)
for u, v in ub.iter_window(node_path, 2):
tree.add_edge(u, v)
return tree
tree1 = paths_to_tree(paths1)
tree2 = paths_to_tree(paths2)
# _print_forest(tree1)
# _print_forest(tree2)
# if 0:
# DiGM = isomorphism.DiGraphMatcher(tree1, tree2)
# DiGM.is_isomorphic()
# list(DiGM.subgraph_isomorphisms_iter())
from netharn.initializers import _nx_extensions
subtree1, subtree2 = _nx_extensions.maximum_common_ordered_tree_embedding(tree1, tree2, node_affinity=node_affinity)
# subtree1, subtree2 = _nx_extensions.maximum_common_ordered_subtree_isomorphism(tree1, tree2, node_affinity=node_affinity)
subpaths1 = [sep.join(node) for node in subtree1.nodes if subtree1.out_degree[node] == 0]
subpaths2 = [sep.join(node) for node in subtree2.nodes if subtree2.out_degree[node] == 0]
return subpaths1, subpaths2
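
Tying the pieces above together, a hedged end-to-end sketch of partial-state transfer (the `MyDetector` wrapper is illustrative, not from this module; `load_partial_state` is the function defined above):

import torch
import torchvision

class MyDetector(torch.nn.Module):
    # Illustrative wrapper: the backbone keys gain a 'backbone.' prefix.
    def __init__(self):
        super().__init__()
        self.backbone = torchvision.models.resnet50()
        self.head = torch.nn.Linear(1000, 2)

model = MyDetector()
pretrained = torchvision.models.resnet50().state_dict()
# 'embedding' association solves the ordered common-subtree matching, so the
# 'backbone.'-prefixed keys still receive the pretrained weights; anything
# left unset falls back to Kaiming initialization (biases are zero-filled).
info = load_partial_state(model, pretrained, association='embedding',
                          leftover=torch.nn.init.kaiming_normal_)
print(sorted(info['seen'].keys()))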
| 42.372253
| 152
| 0.555224
|
import numpy as np
import torch
import ubelt as ub
def trainable_layers(model, names=False):
if names:
stack = [('', '', model)]
while stack:
prefix, basename, item = stack.pop()
name = '.'.join([p for p in [prefix, basename] if p])
if isinstance(item, torch.nn.modules.conv._ConvNd):
yield name, item
elif isinstance(item, torch.nn.modules.batchnorm._BatchNorm):
yield name, item
elif hasattr(item, 'reset_parameters'):
yield name, item
child_prefix = name
for child_basename, child_item in list(item.named_children())[::-1]:
stack.append((child_prefix, child_basename, child_item))
else:
queue = [model]
while queue:
item = queue.pop(0)
if isinstance(item, torch.nn.modules.conv._ConvNd):
yield item
elif isinstance(item, torch.nn.modules.batchnorm._BatchNorm):
yield item
elif hasattr(item, 'reset_parameters'):
yield item
for child in item.children():
queue.append(child)
def apply_initializer(input, func, funckw):
if getattr(input, 'bias', None) is not None:
input.bias.data.zero_()
if isinstance(input, (torch.Tensor)):
func(input, **funckw)
elif isinstance(input, (torch.nn.modules.conv._ConvNd)):
func(input.weight, **funckw)
elif isinstance(input, torch.nn.modules.batchnorm._BatchNorm):
input.reset_parameters()
elif hasattr(input, 'reset_parameters'):
input.reset_parameters()
else:
model = input
layers = list(trainable_layers(model))
for item in layers:
apply_initializer(item, func, funckw)
def load_partial_state(model, model_state_dict, leftover=None,
ignore_unset=False, verbose=2,
mangle=True, association=None,
initializer=None):
if association is None:
association = 'module-hack'
    if initializer is not None:
import warnings
warnings.warn('initializer is deprecated use leftover')
leftover = initializer
self_state = model.state_dict()
def _fix_keys(model_state_dict):
other_keys = set(model_state_dict)
self_keys = set(self_state)
common_keys = other_keys.intersection(self_keys)
if not common_keys:
if association == 'strict':
pass
elif association == 'module-hack':
prefix = 'module.'
def smap(f, ss):
return set(map(f, ss))
def fix1(k):
return prefix + k
def fix2(k):
if k.startswith(prefix):
return k[len(prefix):]
if smap(fix1, other_keys).intersection(self_keys):
model_state_dict = ub.map_keys(fix1, model_state_dict)
elif smap(fix2, other_keys).intersection(self_keys):
model_state_dict = ub.map_keys(fix2, model_state_dict)
elif association == 'prefix-hack':
import functools
def add_prefix(k, prefix):
return prefix + k
def remove_prefix(k, prefix):
if k.startswith(prefix):
return k[len(prefix):]
found = _best_prefix_transform(other_keys, self_keys)
if found is not None:
for action, prefix in found['transform']:
if action == 'add':
func = functools.partial(add_prefix, prefix=prefix)
elif action == 'remove':
func = functools.partial(remove_prefix, prefix=prefix)
else:
raise AssertionError
model_state_dict = ub.map_keys(func, model_state_dict)
elif association == 'embedding':
if verbose > 1:
                    print('Using subpath embedding association, may take some time')
paths1 = sorted(other_keys)
paths2 = sorted(self_state)
subpaths1, subpaths2 = maximum_common_ordered_subpaths(paths1, paths2)
mapping = ub.dzip(subpaths1, subpaths2)
if verbose > 1:
print('mapping = {}'.format(ub.repr2(mapping, nl=1)))
model_state_dict = ub.map_keys(lambda k: mapping.get(k, k), model_state_dict)
else:
raise KeyError(association)
return model_state_dict
other_state = _fix_keys(model_state_dict)
self_unset_keys = set(self_state.keys())
other_unused_keys = set(other_state.keys())
seen_keys = ub.ddict(set)
for key, other_value in other_state.items():
if key not in self_state:
if verbose > 0:
print('Skipping {} because it does not exist'.format(key))
seen_keys['skipped'].add(key)
else:
self_value = self_state[key]
if other_value.size() == self_value.size():
self_state[key] = other_value
self_unset_keys.remove(key)
other_unused_keys.remove(key)
seen_keys['full_add'].add(key)
elif len(other_value.size()) == len(self_value.size()):
if not mangle:
if verbose > 0:
                        print('Skipping {} due to incompatible size and mangle=False'.format(key))
print(' * self = {!r}'.format(self_value.size()))
print(' * other = {!r}'.format(other_value.size()))
seen_keys['skipped'].add(key)
elif key.endswith('bias'):
if verbose > 0:
                        print('Skipping {} due to incompatible size'.format(key))
print(' * self = {!r}'.format(self_value.size()))
print(' * other = {!r}'.format(other_value.size()))
seen_keys['skipped'].add(key)
else:
if leftover is None:
if verbose > 0:
                            print('Skipping {} due to incompatible size and no default initializer'.format(key))
print(' * self = {!r}'.format(self_value.size()))
print(' * other = {!r}'.format(other_value.size()))
seen_keys['skipped'].add(key)
else:
if verbose > 0:
                            print('Partially add {} with incompatible size'.format(key))
print(' * self = {!r}'.format(self_value.size()))
print(' * other = {!r}'.format(other_value.size()))
                        if leftover is not None:
try:
leftover(self_state[key])
except Exception:
if verbose > 0:
print('Unable to init {} with {}'.format(key, leftover))
min_size = np.minimum(self_state[key].shape,
other_value.shape)
sl = tuple([slice(0, s) for s in min_size])
self_state[key][sl] = other_value[sl]
                        self_unset_keys.remove(key)
other_unused_keys.remove(key)
if self_state[key].numel() < other_value.numel():
seen_keys['partial_add_some'].add(key)
else:
seen_keys['partial_add_all'].add(key)
else:
if verbose > 0:
                    print('Skipping {} due to incompatible size'.format(key))
print(' * self = {!r}'.format(self_value.size()))
print(' * other = {!r}'.format(other_value.size()))
seen_keys['skipped'].add(key)
if ignore_unset is True:
self_unset_keys = []
elif ignore_unset:
self_unset_keys = list(ub.oset(self_unset_keys) - set(ignore_unset))
if (self_unset_keys or other_unused_keys or
seen_keys['partial_add_some'] or seen_keys['partial_add_all']):
if verbose > 0:
if seen_keys:
print('Pretrained weights are a partial fit')
else:
print('Pretrained weights do not fit!')
if verbose > 1:
print('Seen Keys: {}'.format(ub.repr2(seen_keys, nl=2)))
print('Self Unset Keys: {}'.format(ub.repr2(self_unset_keys, nl=1)))
print('Other Unused keys: {}'.format(ub.repr2(other_unused_keys, nl=1)))
print('summary:')
seen_sum = ub.map_vals(len, seen_keys)
print('Seen Num: {}'.format(ub.repr2(seen_sum, nl=2)))
print('Self Unset Num: {}'.format(ub.repr2(len(self_unset_keys), nl=1)))
print('Other Unused Num: {}'.format(ub.repr2(len(other_unused_keys), nl=1)))
if leftover:
if verbose > 0:
print('Initializing unused keys using {}'.format(leftover))
for key in self_unset_keys:
if key.endswith('.num_batches_tracked'):
pass
elif key.endswith('.bias'):
self_state[key].fill_(0)
else:
try:
leftover(self_state[key])
except Exception:
if verbose > 0:
print('Unable to init {} with {}'.format(key, leftover))
else:
if verbose > 0:
print('Pretrained weights are a perfect fit')
model.load_state_dict(self_state)
info = {
'seen': seen_keys,
'self_unset': self_unset_keys,
'other_unused': other_unused_keys
}
return info
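
# Illustrative usage sketch, not part of the original module. It assumes the
# enclosing function above is netharn's load_partial_state(model,
# model_state_dict, leftover=None, ignore_unset=False, verbose=0, mangle=True,
# association=None, initializer=None); that name and signature are inferred
# from the body and may differ in the real source.
def _demo_partial_load():
    import torch.nn as nn
    src = nn.Sequential(nn.Linear(3, 5), nn.ReLU(), nn.Linear(5, 2))
    dst = nn.Sequential(nn.Linear(3, 5), nn.ReLU(), nn.Linear(5, 2))
    # simulate a checkpoint saved from a DataParallel model ('module.' prefix);
    # the 'module-hack' association strips/adds the prefix so the keys line up
    state = {'module.' + k: v for k, v in src.state_dict().items()}
    info = load_partial_state(dst, state, association='module-hack', verbose=1)
    assert not info['self_unset'] and not info['other_unused']
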
def _best_prefix_transform(set1, target_set2):
from os.path import commonprefix
prefixes1 = commonprefix(list(set1)).split('.')
prefixes2 = commonprefix(list(target_set2)).split('.')
num_same = 0
for i in range(1, min(len(prefixes1), len(prefixes2))):
if prefixes1[-i] == prefixes2[-i]:
num_same = i
else:
break
    # drop the common trailing components; guard num_same == 0 because
    # list[:-0] evaluates to an empty list
    if num_same:
        prefixes1 = prefixes1[:-num_same]
        prefixes2 = prefixes2[:-num_same]
ALLOW_FUZZY = 1
if ALLOW_FUZZY and len(prefixes2) == 0:
possible_prefixes = [k.split('.') for k in target_set2]
prefix_hist = ub.ddict(lambda: 0)
for item in possible_prefixes:
for i in range(1, len(item)):
prefix_hist[tuple(item[0:i])] += 1
prefixes2 = ['.'.join(ub.argmax(prefix_hist))]
def add_prefix(items, prefix):
return {prefix + k for k in items}
def remove_prefix(items, prefix):
return {k[len(prefix):] if k.startswith(prefix) else k for k in items}
import itertools as it
found_cand = []
for i1, i2 in it.product(range(len(prefixes1) + 1), range(len(prefixes2) + 1)):
if i1 == 0 and i2 == 0:
continue
prefix1 = '.'.join(prefixes1[:i1])
prefix2 = '.'.join(prefixes2[:i2])
if prefix1:
prefix1 = prefix1 + '.'
if prefix2:
prefix2 = prefix2 + '.'
set1_cand1 = remove_prefix(set1, prefix1)
set1_cand2 = add_prefix(set1, prefix2)
set1_cand3 = add_prefix(set1_cand1, prefix2)
common1 = set1_cand1 & target_set2
common2 = set1_cand2 & target_set2
common3 = set1_cand3 & target_set2
if common1:
found_cand.append({
'transform': [('remove', prefix1)],
'value': len(common1),
})
if common2:
found_cand.append({
'transform': [('add', prefix2)],
'value': len(common2),
})
if common3:
found_cand.append({
'transform': [('remove', prefix1), ('add', prefix2)],
'value': len(common3),
})
if len(found_cand):
found = max(found_cand, key=lambda x: x['value'])
else:
found = None
return found
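
# Small, self-contained sanity check for _best_prefix_transform; the key names
# below are made up for illustration.
def _demo_best_prefix_transform():
    found = _best_prefix_transform(
        {'module.backbone.weight', 'module.backbone.bias'},
        {'backbone.weight', 'backbone.bias'})
    # expected: strip the DataParallel-style 'module.' prefix from set1
    assert found is not None and ('remove', 'module.') in found['transform']
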
def maximum_common_ordered_subpaths(paths1, paths2, sep='.'):
import networkx as nx
def _affinity(tok1, tok2):
score = 0
for t1, t2 in zip(tok1[::-1], tok2[::-1]):
if t1 == t2:
score += 1
else:
break
return score
node_affinity = _affinity
def paths_to_tree(paths):
        # nx.OrderedDiGraph was removed in networkx >= 3.0; plain nx.DiGraph
        # preserves insertion order there and can be used instead
        tree = nx.OrderedDiGraph()
for path in sorted(paths):
parts = tuple(path.split(sep))
node_path = []
for i in range(1, len(parts) + 1):
node = parts[0:i]
tree.add_node(node)
tree.nodes[node]['label'] = node[-1]
node_path.append(node)
for u, v in ub.iter_window(node_path, 2):
tree.add_edge(u, v)
return tree
tree1 = paths_to_tree(paths1)
tree2 = paths_to_tree(paths2)
from netharn.initializers import _nx_extensions
subtree1, subtree2 = _nx_extensions.maximum_common_ordered_tree_embedding(tree1, tree2, node_affinity=node_affinity)
subpaths1 = [sep.join(node) for node in subtree1.nodes if subtree1.out_degree[node] == 0]
subpaths2 = [sep.join(node) for node in subtree2.nodes if subtree2.out_degree[node] == 0]
return subpaths1, subpaths2
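
# Illustrative usage of maximum_common_ordered_subpaths (the path names are
# made up; running it requires networkx and netharn's _nx_extensions):
def _demo_subpath_embedding():
    paths1 = ['stem.conv.weight', 'head.fc.weight']
    paths2 = ['backbone.stem.conv.weight', 'backbone.head.fc.weight']
    subpaths1, subpaths2 = maximum_common_ordered_subpaths(paths1, paths2)
    # the paired leaf paths become a key-renaming mapping, as in the
    # 'embedding' association above
    print(dict(zip(subpaths1, subpaths2)))
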

record 1c488c990f526cb757a984ce2af68fd635fd0898: popcorn_gallery/popcorn/tests/integration_tests.py
repo: Koenkk/popcorn_maker @ 0978b9f98dacd4e8eb753404b24eb584f410aa11 | license: BSD-3-Clause
size: 30002 | ext: py | lang: Python
stars: 15 (2015-03-23 to 2021-01-12) | issues: null | forks: 16 (2015-02-18 to 2021-11-09)

from django.contrib.auth.models import User
from django.core import mail
from django.core.urlresolvers import reverse
from django_extensions.db.fields import json
from funfactory.middleware import LocaleURLMiddleware
from test_utils import TestCase
from mock import patch
from nose.tools import ok_, eq_
from .fixtures import (create_user, create_project, create_project_category,
create_template, create_template_category,
create_external_project)
from ..forms import ProjectEditForm, ExternalProjectEditForm
from ..models import (Project, Template, TemplateCategory, ProjectCategory,
ProjectCategoryMembership)
suppress_locale_middleware = patch.object(LocaleURLMiddleware,
'process_request',
lambda *args: None)
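# patch.object used as a decorator: each decorated test runs with
# LocaleURLMiddleware.process_request replaced by a no-op, so requests are not
# redirected to locale-prefixed URLs.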
class PopcornIntegrationTestCase(TestCase):
def setUp(self):
self.user = create_user('bob', with_profile=True)
def tearDown(self):
for model in [Project, User, Template]:
model.objects.all().delete()
def get_url(self, name, user, project):
kwargs = {
'username': user.username,
'shortcode': project.shortcode
}
return reverse(name, kwargs=kwargs)
def assertContextMessage(self, context, message_status):
ok_('messages' in context)
for item in list(context['messages']):
eq_(item.tags, message_status)
class ProjectIntegrationTest(PopcornIntegrationTestCase):
def setUp(self):
super(ProjectIntegrationTest, self).setUp()
self.category = create_project_category(is_featured=True)
def tearDown(self):
super(ProjectIntegrationTest, self).tearDown()
ProjectCategory.objects.all().delete()
@suppress_locale_middleware
def test_project_list(self):
project = create_project(author=self.user, status=Project.LIVE,
is_shared=True)
project.categories.add(self.category)
response = self.client.get(reverse('project_list'))
context = response.context
eq_(len(context['project_list']), 1)
eq_(len(context['category_list']), 1)
eq_(context['category'], None)
@suppress_locale_middleware
def test_project_list_category(self):
project = create_project(author=self.user, status=Project.LIVE,
is_shared=True)
project.categories.add(self.category)
response = self.client.get(reverse('project_list_category',
args=[self.category.slug]))
context = response.context
eq_(len(context['project_list']), 1)
eq_(len(context['category_list']), 1)
eq_(context['category'], self.category)
@suppress_locale_middleware
def test_project_list_invalid_category(self):
project = create_project(author=self.user)
response = self.client.get(reverse('project_list_category',
args=['invalid']))
eq_(response.status_code, 404)
class DetailIntegrationTest(PopcornIntegrationTestCase):
@suppress_locale_middleware
def test_project_detail(self):
project = create_project(author=self.user, status=Project.LIVE)
url = self.get_url('user_project', self.user, project)
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('<!DOCTYPE html>' in response.content)
@suppress_locale_middleware
def test_unpublished_project_anon(self):
project = create_project(author=self.user, status=Project.HIDDEN)
url = self.get_url('user_project', self.user, project)
response = self.client.get(url)
eq_(response.status_code, 404)
@suppress_locale_middleware
def test_unpublished_project_user(self):
alex = create_user('alex', with_profile=True)
project = create_project(author=self.user, status=Project.HIDDEN)
url = self.get_url('user_project', self.user, project)
self.client.login(username=alex.username, password='alex')
response = self.client.get(url)
eq_(response.status_code, 404)
self.client.logout()
@suppress_locale_middleware
def test_unpublished_project_owner(self):
project = create_project(author=self.user, status=Project.HIDDEN)
url = self.get_url('user_project', self.user, project)
self.client.login(username=self.user.username, password='bob')
response = self.client.get(url)
eq_(response.status_code, 200)
ok_('<!DOCTYPE html>' in response.content)
self.client.logout()
@suppress_locale_middleware
def test_removed_project(self):
project = create_project(author=self.user, status=Project.REMOVED)
url = self.get_url('user_project', self.user, project)
response = self.client.get(url)
eq_(response.status_code, 404)
class EditIntegrationTest(PopcornIntegrationTestCase):
valid_data = {
'is_shared': False,
'is_forkable': False,
'name': 'Changed!',
'status': Project.HIDDEN,
'description': 'Description of the project',
}
@suppress_locale_middleware
def test_edited_project_anon(self):
project = create_project(author=self.user)
url = self.get_url('user_project_edit', self.user, project)
response = self.client.get(url)
eq_(response.status_code, 302)
@suppress_locale_middleware
def test_edited_project_anon_post(self):
project = create_project(author=self.user)
url = self.get_url('user_project_edit', self.user, project)
response = self.client.post(url, self.valid_data)
eq_(response.status_code, 302)
@suppress_locale_middleware
def test_edited_project_user(self):
project = create_project(author=self.user)
alex = create_user('alex', with_profile=True)
url = self.get_url('user_project_edit', self.user, project)
self.client.login(username=alex.username, password='alex')
response = self.client.get(url)
eq_(response.status_code, 404)
self.client.logout()
@suppress_locale_middleware
def test_edited_project_user_post(self):
project = create_project(author=self.user)
alex = create_user('alex', with_profile=True)
url = self.get_url('user_project_edit', self.user, project)
self.client.login(username=alex.username, password='alex')
response = self.client.post(url, self.valid_data)
eq_(response.status_code, 404)
self.client.logout()
@suppress_locale_middleware
def test_edited_project_owner(self):
project = create_project(author=self.user)
url = self.get_url('user_project_edit', self.user, project)
self.client.login(username=self.user.username, password='bob')
response = self.client.get(url)
eq_(response.status_code, 200)
context = response.context
eq_(context['project'], project)
eq_(context['form'].instance, project)
ok_(isinstance(context['form'], ProjectEditForm))
self.client.logout()
@suppress_locale_middleware
def test_edited_project_owner_post(self):
project = create_project(author=self.user)
url = self.get_url('user_project_edit', self.user, project)
self.client.login(username=self.user.username, password='bob')
response = self.client.post(url, self.valid_data)
self.assertRedirects(response, project.get_absolute_url())
project = Project.objects.get()
eq_(project.name, 'Changed!')
@suppress_locale_middleware
def test_edited_project_owner_post_removed(self):
project = create_project(author=self.user, status=Project.REMOVED)
url = self.get_url('user_project_edit', self.user, project)
self.client.login(username=self.user.username, password='bob')
response = self.client.post(url, self.valid_data)
eq_(response.status_code, 404)
class EditProjectCategoryIntegrationTest(PopcornIntegrationTestCase):
def setUp(self):
super(EditProjectCategoryIntegrationTest, self).setUp()
self.project = create_project(author=self.user, status=Project.LIVE)
self.category = create_project_category()
self.data = {
'name': 'Awesome project!',
'description': 'Hello world!',
'status': Project.LIVE,
'is_shared': True,
'is_forkable': True,
'categories': [self.category.pk]
}
self.client.login(username=self.user.username, password='bob')
self.url = self.get_url('user_project_edit', self.user, self.project)
def tearDown(self):
super(EditProjectCategoryIntegrationTest, self).tearDown()
for model in [ProjectCategoryMembership, ProjectCategory, User]:
model.objects.all().delete()
self.client.logout()
def add_membership(self, status):
data = {
'user': self.user.profile,
'project_category': self.category,
'status': getattr(ProjectCategoryMembership, status)
}
return ProjectCategoryMembership.objects.create(**data)
@suppress_locale_middleware
def test_edit_project_category_get(self):
self.add_membership('APPROVED')
response = self.client.get(self.url)
eq_(response.status_code, 200)
ok_(response.context['form'].has_categories)
@suppress_locale_middleware
def test_edit_project_category_denied_get(self):
self.add_membership('DENIED')
response = self.client.get(self.url)
eq_(response.status_code, 200)
eq_(response.context['form'].has_categories, False)
@suppress_locale_middleware
def test_edit_project_category_pending_get(self):
self.add_membership('PENDING')
response = self.client.get(self.url)
eq_(response.status_code, 200)
eq_(response.context['form'].has_categories, False)
@suppress_locale_middleware
def test_edit_project_category_post(self):
self.add_membership('APPROVED')
response = self.client.post(self.url, self.data, follow=True)
self.assertContextMessage(response.context, 'success')
@suppress_locale_middleware
def test_edit_project_category_denied_post(self):
self.add_membership('DENIED')
response = self.client.post(self.url, self.data)
eq_(response.status_code, 200)
ok_(response.context['form'].errors)
@suppress_locale_middleware
def test_edit_project_category_pending_post(self):
self.add_membership('PENDING')
response = self.client.post(self.url, self.data)
eq_(response.status_code, 200)
ok_(response.context['form'].errors)
class MetadataIntegrationTest(PopcornIntegrationTestCase):
@suppress_locale_middleware
def test_project_detail(self):
project = create_project(author=self.user, status=Project.LIVE)
url = self.get_url('user_project_meta', self.user, project)
response = self.client.get(url)
eq_(response.status_code, 200)
data = json.loads(response.content)
eq_(data['project'], project.name)
@suppress_locale_middleware
def test_unpublished_project_anon(self):
project = create_project(author=self.user, status=Project.HIDDEN)
url = self.get_url('user_project_meta', self.user, project)
response = self.client.get(url)
eq_(response.status_code, 404)
@suppress_locale_middleware
def test_unpublished_project_user(self):
alex = create_user('alex', with_profile=True)
project = create_project(author=self.user, status=Project.HIDDEN)
url = self.get_url('user_project_meta', self.user, project)
self.client.login(username=alex.username, password='alex')
response = self.client.get(url)
eq_(response.status_code, 404)
self.client.logout()
@suppress_locale_middleware
def test_unpublished_project_owner(self):
project = create_project(author=self.user, status=Project.HIDDEN)
url = self.get_url('user_project_meta', self.user, project)
self.client.login(username=self.user.username, password='bob')
response = self.client.get(url)
eq_(response.status_code, 200)
data = json.loads(response.content)
eq_(data['project'], project.name)
self.client.logout()
@suppress_locale_middleware
def test_removed_project(self):
project = create_project(author=self.user, status=Project.REMOVED)
url = self.get_url('user_project_meta', self.user, project)
response = self.client.get(url)
eq_(response.status_code, 404)
class DataIntegrationTest(PopcornIntegrationTestCase):
@suppress_locale_middleware
def test_project_detail(self):
project = create_project(author=self.user, status=Project.LIVE)
url = self.get_url('user_project_data', self.user, project)
response = self.client.get(url)
eq_(response.status_code, 200)
data = json.loads(response.content)
ok_('data' in data)
        ok_('projectID' not in data)
        ok_('name' not in data)
@suppress_locale_middleware
def test_unpublished_project_anon(self):
project = create_project(author=self.user, status=Project.HIDDEN)
url = self.get_url('user_project_data', self.user, project)
response = self.client.get(url)
eq_(response.status_code, 404)
@suppress_locale_middleware
def test_unpublished_project_user(self):
alex = create_user('alex', with_profile=True)
project = create_project(author=self.user, status=Project.HIDDEN)
url = self.get_url('user_project_data', self.user, project)
self.client.login(username=alex.username, password='alex')
response = self.client.get(url)
eq_(response.status_code, 404)
self.client.logout()
@suppress_locale_middleware
def test_unpublished_project_owner(self):
project = create_project(author=self.user, status=Project.HIDDEN)
url = self.get_url('user_project_data', self.user, project)
self.client.login(username=self.user.username, password='bob')
response = self.client.get(url)
eq_(response.status_code, 200)
data = json.loads(response.content)
ok_('data' in data)
ok_('projectID' in data)
ok_('name' in data)
self.client.logout()
@suppress_locale_middleware
def test_published_project_not_owner(self):
other = create_user('other')
project = create_project(author=other, status=Project.LIVE)
url = self.get_url('user_project_data', other, project)
self.client.login(username=self.user.username, password='bob')
response = self.client.get(url)
eq_(response.status_code, 200)
data = json.loads(response.content)
ok_('data' in data)
        ok_('projectID' not in data)
        ok_('name' not in data)
self.client.logout()
@suppress_locale_middleware
def test_removed_project(self):
project = create_project(author=self.user, status=Project.REMOVED)
url = self.get_url('user_project_data', self.user, project)
response = self.client.get(url)
eq_(response.status_code, 404)
class DeleteIntegrationTest(PopcornIntegrationTestCase):
def setUp(self):
super(DeleteIntegrationTest, self).setUp()
category = create_project_category(name='Special')
self.project = create_project(author=self.user)
self.project.categories.add(category)
def tearDown(self):
super(DeleteIntegrationTest, self).tearDown()
ProjectCategory.objects.all().delete()
self.client.logout()
@suppress_locale_middleware
def test_delete_get(self):
url = self.get_url('user_project_delete', self.user, self.project)
self.client.login(username=self.user.username, password='bob')
response = self.client.get(url)
eq_(response.status_code, 200)
eq_(response.context['project'], self.project)
@suppress_locale_middleware
def test_delete_post(self):
url = self.get_url('user_project_delete', self.user, self.project)
self.client.login(username=self.user.username, password='bob')
response = self.client.post(url, {})
eq_(response.status_code, 302)
        ok_(reverse('users_dashboard') in response['Location'])
eq_(Project.objects.all().count(), 0)
eq_(ProjectCategory.objects.all().count(), 1)
eq_(User.objects.filter(id=self.user.id).count(), 1)
@suppress_locale_middleware
def test_delete_not_owner_get(self):
alex = create_user('alex', with_profile=True)
url = self.get_url('user_project_delete', self.user, self.project)
self.client.login(username=alex.username, password='alex')
response = self.client.get(url)
eq_(response.status_code, 404)
@suppress_locale_middleware
def test_delete_not_owner_post(self):
alex = create_user('alex', with_profile=True)
url = self.get_url('user_project_delete', self.user, self.project)
self.client.login(username=alex.username, password='alex')
response = self.client.post(url)
eq_(response.status_code, 404)
@suppress_locale_middleware
def test_delete_anon_get(self):
url = self.get_url('user_project_delete', self.user, self.project)
response = self.client.get(url)
# Redirects to login
eq_(response.status_code, 302)
@suppress_locale_middleware
def test_delete_anon_post(self):
url = self.get_url('user_project_delete', self.user, self.project)
        response = self.client.post(url, {})
        # Redirects to login
eq_(response.status_code, 302)
class CategoryIntegrationTest(TestCase):
def setUp(self):
self.category = create_project_category(is_featured=True)
self.user = create_user('bob', with_profile=True)
def tearDown(self):
for model in [Project, User, Template, ProjectCategory]:
model.objects.all().delete()
@suppress_locale_middleware
def test_project_category_detail(self):
project = create_project(author=self.user, status=Project.LIVE,
is_shared=True)
project.categories.add(self.category)
response = self.client.get(self.category.get_absolute_url())
context = response.context
eq_(context['category'], self.category)
eq_(len(context['project_list']), 1)
eq_(len(context['category_list']), 1)
@suppress_locale_middleware
def test_project_category_detail_non_shared(self):
project = create_project(author=self.user, is_shared=False)
project.categories.add(self.category)
response = self.client.get(self.category.get_absolute_url())
context = response.context
eq_(context['category'], self.category)
eq_(len(context['project_list']), 0)
@suppress_locale_middleware
def test_category_detail_removed(self):
project = create_project(author=self.user, status=Project.REMOVED)
project.categories.add(self.category)
response = self.client.get(self.category.get_absolute_url())
context = response.context
eq_(context['category'], self.category)
eq_(len(context['project_list']), 0)
class TemplateIntegrationTest(TestCase):
def setUp(self):
self.category = create_template_category(is_featured=True)
def tearDown(self):
for model in [Template, TemplateCategory, User]:
model.objects.all().delete()
@suppress_locale_middleware
def test_template_list(self):
template = create_template(is_featured=True)
response = self.client.get(reverse('template_list'))
context = response.context
eq_(len(context['template_list']), 1)
eq_(len(context['category_list']), 1)
@suppress_locale_middleware
def test_template_list_category(self):
category = create_template_category()
template = create_template(is_featured=True)
template.categories.add(category)
response = self.client.get(reverse('template_list_category',
args=[category.slug]))
context = response.context
eq_(len(context['template_list']), 1)
eq_(len(context['category_list']), 1)
eq_(context['category'], category)
@suppress_locale_middleware
def test_template_list_hidden(self):
template = create_template(status=Template.HIDDEN)
response = self.client.get(reverse('template_list'))
context = response.context
eq_(len(context['template_list']), 0)
eq_(len(context['category_list']), 1)
@suppress_locale_middleware
def test_template_list_category_hidden(self):
category = create_template_category()
template = create_template(status=Template.HIDDEN)
template.categories.add(category)
response = self.client.get(reverse('template_list_category',
args=[category.slug]))
context = response.context
eq_(len(context['template_list']), 0)
eq_(len(context['category_list']), 1)
eq_(context['category'], category)
@suppress_locale_middleware
def test_template_detail_hidden(self):
template = create_template(status=Template.HIDDEN)
response = self.client.get(reverse('template_detail',
args=[template.slug]))
eq_(response.status_code, 404)
@suppress_locale_middleware
def test_template_detail(self):
template = create_template()
response = self.client.get(reverse('template_detail',
args=[template.slug]))
eq_(response.status_code, 200)
ok_('<!DOCTYPE html>' in response.content)
@suppress_locale_middleware
def test_template_summary_hidden(self):
template = create_template(status=Template.HIDDEN)
response = self.client.get(reverse('template_summary',
args=[template.slug]))
eq_(response.status_code, 404)
@suppress_locale_middleware
def test_template_summary(self):
template = create_template()
response = self.client.get(reverse('template_summary',
args=[template.slug]))
eq_(response.status_code, 200)
eq_(response.context['template'], template)
ok_('object' in response.context)
ok_('project_list' in response.context)
ok_('tag_list' in response.context)
@suppress_locale_middleware
def test_template_config_hidden(self):
template = create_template(status=Template.HIDDEN)
response = self.client.get(reverse('template_config',
args=[template.slug]))
eq_(response.status_code, 404)
@suppress_locale_middleware
def test_template_config(self):
template = create_template(status=Template.LIVE)
response = self.client.get(reverse('template_config',
args=[template.slug]))
eq_(response.status_code, 200)
response = json.loads(response.content)
ok_('savedDataUrl' in response)
ok_('baseDir' in response)
ok_('name' in response)
class TemplateAuthorIntegrationTest(TestCase):
def setUp(self):
self.user = create_user('bob', with_profile=True)
self.client.login(username=self.user.username, password='bob')
self.template = create_template(author=self.user, status=Project.HIDDEN)
def tearDown(self):
self.client.logout()
for model in [Template, User]:
model.objects.all().delete()
@suppress_locale_middleware
def test_template_summary(self):
response = self.client.get(reverse('template_summary',
args=[self.template.slug]))
eq_(response.status_code, 200)
eq_(response.context['template'], self.template)
@suppress_locale_middleware
def test_template_detail(self):
response = self.client.get(reverse('template_detail',
args=[self.template.slug]))
eq_(response.status_code, 200)
ok_('<!DOCTYPE html>' in response.content)
@suppress_locale_middleware
def test_template_config(self):
response = self.client.get(reverse('template_config',
args=[self.template.slug]))
eq_(response.status_code, 200)
class TestCategoryMembershipIntegrationTest(TestCase):
def setUp(self):
self.category = create_project_category()
self.user = create_user('bob', with_profile=True)
self.client.login(username=self.user.username, password='bob')
self.url = reverse('project_category_join', args=[self.category.slug])
def tearDown(self):
for model in [ProjectCategoryMembership, ProjectCategory, User]:
model.objects.all().delete()
self.client.logout()
def assertContextMessage(self, context, message_status):
ok_('messages' in context)
for item in list(context['messages']):
eq_(item.tags, message_status)
@suppress_locale_middleware
def test_membership_request_get(self):
response = self.client.get(self.url)
eq_(response.status_code, 200)
eq_(response.context['category'], self.category)
@suppress_locale_middleware
def test_membership_request_post(self):
response = self.client.post(self.url, {}, follow=True)
self.assertContextMessage(response.context, 'success')
@suppress_locale_middleware
def test_membership_request_post_admin_notification(self):
admin = create_user('admin', with_profile=True)
admin.is_staff = True
admin.is_superuser = True
admin.save()
response = self.client.post(self.url, {}, follow=True)
self.assertContextMessage(response.context, 'success')
eq_(len(mail.outbox), 1)
@suppress_locale_middleware
def test_duplicate_membership_request_get(self):
ProjectCategoryMembership.objects.create(user=self.user.profile,
project_category=self.category)
response = self.client.get(self.url, follow=True)
self.assertContextMessage(response.context, 'error')
@suppress_locale_middleware
def test_duplicate_membership_request_post(self):
ProjectCategoryMembership.objects.create(user=self.user.profile,
project_category=self.category)
response = self.client.post(self.url, {}, follow=True)
self.assertContextMessage(response.context, 'error')
class TestExternalProjectIntegrationTest(PopcornIntegrationTestCase):
def setUp(self):
super(TestExternalProjectIntegrationTest, self).setUp()
self.project = create_external_project(author=self.user,
status=Project.LIVE)
self.client.login(username=self.user.username, password='bob')
def tearDown(self):
super(TestExternalProjectIntegrationTest, self).tearDown()
self.client.logout()
@suppress_locale_middleware
def test_detail_user_project(self):
url = self.get_url('user_project', self.user, self.project)
response = self.client.get(url)
eq_(response.status_code, 404)
@suppress_locale_middleware
def test_detail_user_project_config(self):
url = self.get_url('user_project_config', self.user, self.project)
response = self.client.get(url)
eq_(response.status_code, 404)
@suppress_locale_middleware
def test_detail_user_project_meta(self):
url = self.get_url('user_project_meta', self.user, self.project)
response = self.client.get(url)
eq_(response.status_code, 404)
@suppress_locale_middleware
def test_detail_user_data(self):
url = self.get_url('user_project_data', self.user, self.project)
response = self.client.get(url)
eq_(response.status_code, 404)
@suppress_locale_middleware
def test_detail_user_project_fork(self):
url = self.get_url('user_project_fork', self.user, self.project)
response = self.client.get(url)
eq_(response.status_code, 404)
@suppress_locale_middleware
def test_detail_user_project_fork_post(self):
url = self.get_url('user_project_fork', self.user, self.project)
response = self.client.post(url, {})
eq_(response.status_code, 404)
@suppress_locale_middleware
def test_edit_project_owner(self):
url = self.get_url('user_project_edit', self.user, self.project)
response = self.client.get(url)
eq_(response.status_code, 200)
context = response.context
eq_(context['project'], self.project)
eq_(context['form'].instance, self.project)
ok_(isinstance(context['form'], ExternalProjectEditForm))
self.client.logout()
@suppress_locale_middleware
def test_edit_project_owner_post(self):
data = {
'is_shared': False,
'name': 'Changed!',
'status': Project.HIDDEN,
'description': 'Description of the project',
}
url = self.get_url('user_project_edit', self.user, self.project)
response = self.client.post(url, data)
self.assertRedirects(response, self.project.get_absolute_url())
project = Project.objects.get()
eq_(project.name, 'Changed!')
stats: avg_line_length 39.632761 | max_line_length 80 | alphanum_fraction 0.665822

record 1c488e2c187ea0a6436843c1c848f0714d8fb4c7: blackbox/mixins.py
repo: kosayoda/blackbox @ 6c3d85fb916a05273d7a2f8d9add65340fcc52b2 | license: MIT
size: 3137 | ext: py | lang: Python
stars: null | issues: null | forks: null

import re
from itertools import chain
from typing import Optional
from blackbox.config import Blackbox
from blackbox.exceptions import ImproperlyConfigured
PARAMS_REGEX = r"(?:\?|&|;)([^=]+)=([^&|;]+)"
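# e.g. re.findall(PARAMS_REGEX, "s3://user:password?fruit=lemon&dino=saurus")
# -> [("fruit", "lemon"), ("dino", "saurus")]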
class ConnstringParserMixin:
"""A mixin class for handlers that depend on connstrings."""
# How will the connstring be parsed? Subclasses must define this expression.
connstring_regex = r""
# What are the valid URI protocols for this connstring?
valid_prefixes = []
def __init__(self):
"""Ensure that the connstrings are set up correctly."""
self.connstring = self._get_connstring()
@staticmethod
def _get_all_connstrings() -> list:
"""Get all connstrings in the config."""
return list(chain(
Blackbox.databases,
Blackbox.loggers,
Blackbox.notifiers,
Blackbox.storage
))
def _get_connstring(self) -> Optional[str]:
"""Ensure we only have a single connstring configured, and return it."""
# Get valid connstrings
connstrings = []
for connstring in self._get_all_connstrings():
for prefix in self.valid_prefixes:
if connstring.startswith(prefix):
connstrings.append(connstring)
# No connstrings configured
if len(connstrings) == 0:
return ""
# More than one connstring configured! Fail hard.
elif len(connstrings) > 1:
raise ImproperlyConfigured(
"You cannot configure more than one connstring of the same type at a time!"
)
# If only a single connstring is configured, return it!
return connstrings[0]
@property
def config(self) -> dict:
"""
Parse the connstring and return its constituent parts.
Uses the connstring_regex defined in the subclass, but also parses out any
URL-style additional parameters and adds those to the dictionary.
So, if you've got a connstring like `stuff://internet:dingdong?fire=ice&magic=blue`,
and you're working with a connstring_regex like `stuff://(?P<user>.+):(?P<password>.+)`,
self.config will look like this:
{
"user": "internet",
"password": "dingdong",
"fire": "ice",
"magic": "blue,
}
"""
config = {}
if self.enabled:
config = re.search(self.connstring_regex, self.connstring).groupdict()
# Now, let's parse out any params specified behind the connstring,
# like fruit and dino in `s3://user:password?fruit=lemon&dino=saurus`
for param, value in re.findall(PARAMS_REGEX, self.connstring):
config[param] = value
return config
@property
def enabled(self) -> bool:
"""
A property that tells us whether the handler is enabled or not.
This only has to be overridden if you need some sort of custom logic for it.
"""
if self.connstring and self.connstring_regex:
return True
return False
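# Hedged usage sketch (the Postgres regex, prefix, and connstring below are
# illustrative assumptions, not part of blackbox). A concrete handler
# subclasses the mixin and supplies its own expression and prefixes:
#
#   class PostgresHandler(ConnstringParserMixin):
#       connstring_regex = r"postgres://(?P<user>.+):(?P<password>.+)@(?P<host>.+):(?P<port>\d+)"
#       valid_prefixes = ["postgres"]
#
# With `postgres://admin:hunter2@db:5432?fruit=lemon` configured, `.config`
# yields {'user': 'admin', 'password': 'hunter2', 'host': 'db',
# 'port': '5432', 'fruit': 'lemon'}: the named groups plus trailing params.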
| 33.021053
| 96
| 0.611093
|
import re
from itertools import chain
from typing import Optional
from blackbox.config import Blackbox
from blackbox.exceptions import ImproperlyConfigured
PARAMS_REGEX = r"(?:\?|&|;)([^=]+)=([^&|;]+)"
class ConnstringParserMixin:
connstring_regex = r""
valid_prefixes = []
def __init__(self):
self.connstring = self._get_connstring()
@staticmethod
def _get_all_connstrings() -> list:
return list(chain(
Blackbox.databases,
Blackbox.loggers,
Blackbox.notifiers,
Blackbox.storage
))
def _get_connstring(self) -> Optional[str]:
connstrings = []
for connstring in self._get_all_connstrings():
for prefix in self.valid_prefixes:
if connstring.startswith(prefix):
connstrings.append(connstring)
if len(connstrings) == 0:
return ""
elif len(connstrings) > 1:
raise ImproperlyConfigured(
"You cannot configure more than one connstring of the same type at a time!"
)
return connstrings[0]
@property
def config(self) -> dict:
config = {}
if self.enabled:
config = re.search(self.connstring_regex, self.connstring).groupdict()
# like fruit and dino in `s3://user:password?fruit=lemon&dino=saurus`
for param, value in re.findall(PARAMS_REGEX, self.connstring):
config[param] = value
return config
@property
def enabled(self) -> bool:
if self.connstring and self.connstring_regex:
return True
return False
| true
| true
|
1c488f912f74fdb0e0dfff57d4935598a701f341
| 1,613
|
py
|
Python
|
nipype/interfaces/slicer/diffusion/tests/test_auto_DWIRicianLMMSEFilter.py
|
PAmcconnell/nipype
|
39fbd5411a844ce7c023964d3295eb7643b95af5
|
[
"Apache-2.0"
] | null | null | null |
nipype/interfaces/slicer/diffusion/tests/test_auto_DWIRicianLMMSEFilter.py
|
PAmcconnell/nipype
|
39fbd5411a844ce7c023964d3295eb7643b95af5
|
[
"Apache-2.0"
] | 2
|
2018-04-26T12:09:32.000Z
|
2018-04-27T06:36:49.000Z
|
nipype/interfaces/slicer/diffusion/tests/test_auto_DWIRicianLMMSEFilter.py
|
PAmcconnell/nipype
|
39fbd5411a844ce7c023964d3295eb7643b95af5
|
[
"Apache-2.0"
] | 1
|
2019-11-14T14:16:57.000Z
|
2019-11-14T14:16:57.000Z
|
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from ..diffusion import DWIRicianLMMSEFilter
def test_DWIRicianLMMSEFilter_inputs():
input_map = dict(
args=dict(argstr='%s', ),
compressOutput=dict(argstr='--compressOutput ', ),
environ=dict(
nohash=True,
usedefault=True,
),
hrf=dict(argstr='--hrf %f', ),
inputVolume=dict(
argstr='%s',
extensions=None,
position=-2,
),
iter=dict(argstr='--iter %d', ),
maxnstd=dict(argstr='--maxnstd %d', ),
minnstd=dict(argstr='--minnstd %d', ),
mnve=dict(argstr='--mnve %d', ),
mnvf=dict(argstr='--mnvf %d', ),
outputVolume=dict(
argstr='%s',
hash_files=False,
position=-1,
),
re=dict(
argstr='--re %s',
sep=',',
),
rf=dict(
argstr='--rf %s',
sep=',',
),
uav=dict(argstr='--uav ', ),
)
inputs = DWIRicianLMMSEFilter.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(inputs.traits()[key], metakey) == value
def test_DWIRicianLMMSEFilter_outputs():
output_map = dict(outputVolume=dict(
extensions=None,
position=-1,
), )
outputs = DWIRicianLMMSEFilter.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(outputs.traits()[key], metakey) == value
| 29.87037
| 67
| 0.536268
|
from ..diffusion import DWIRicianLMMSEFilter
def test_DWIRicianLMMSEFilter_inputs():
input_map = dict(
args=dict(argstr='%s', ),
compressOutput=dict(argstr='--compressOutput ', ),
environ=dict(
nohash=True,
usedefault=True,
),
hrf=dict(argstr='--hrf %f', ),
inputVolume=dict(
argstr='%s',
extensions=None,
position=-2,
),
iter=dict(argstr='--iter %d', ),
maxnstd=dict(argstr='--maxnstd %d', ),
minnstd=dict(argstr='--minnstd %d', ),
mnve=dict(argstr='--mnve %d', ),
mnvf=dict(argstr='--mnvf %d', ),
outputVolume=dict(
argstr='%s',
hash_files=False,
position=-1,
),
re=dict(
argstr='--re %s',
sep=',',
),
rf=dict(
argstr='--rf %s',
sep=',',
),
uav=dict(argstr='--uav ', ),
)
inputs = DWIRicianLMMSEFilter.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(inputs.traits()[key], metakey) == value
def test_DWIRicianLMMSEFilter_outputs():
output_map = dict(outputVolume=dict(
extensions=None,
position=-1,
), )
outputs = DWIRicianLMMSEFilter.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(outputs.traits()[key], metakey) == value
| true
| true
|
1c4890b97f1a232ff24ff19737f4a56f3a253a7e
| 1,801
|
py
|
Python
|
src/python/grpcio_tests/tests/unit/_grpc_shutdown_test.py
|
mpminardi/grpc
|
ce9e6eeded3b9d4d7f17a5432bc79008929cb8b7
|
[
"Apache-2.0"
] | 9
|
2020-12-04T07:34:08.000Z
|
2022-03-07T21:10:35.000Z
|
src/python/grpcio_tests/tests/unit/_grpc_shutdown_test.py
|
mpminardi/grpc
|
ce9e6eeded3b9d4d7f17a5432bc79008929cb8b7
|
[
"Apache-2.0"
] | 62
|
2020-02-27T00:53:36.000Z
|
2021-02-05T06:10:53.000Z
|
src/python/grpcio_tests/tests/unit/_grpc_shutdown_test.py
|
mpminardi/grpc
|
ce9e6eeded3b9d4d7f17a5432bc79008929cb8b7
|
[
"Apache-2.0"
] | 12
|
2020-07-14T23:59:57.000Z
|
2022-03-22T09:59:18.000Z
|
# Copyright 2019 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests the gRPC Core shutdown path."""
import time
import threading
import unittest
import datetime
import grpc
_TIMEOUT_FOR_SEGFAULT = datetime.timedelta(seconds=10)
class GrpcShutdownTest(unittest.TestCase):
def test_channel_close_with_connectivity_watcher(self):
"""Originated by https://github.com/grpc/grpc/issues/20299.
The grpc_shutdown happens synchronously, but there might be Core object
references left in Cython which might lead to ABORT or SIGSEGV.
"""
connection_failed = threading.Event()
def on_state_change(state):
if state in (grpc.ChannelConnectivity.TRANSIENT_FAILURE,
grpc.ChannelConnectivity.SHUTDOWN):
connection_failed.set()
        # Connects to a void address and subscribes to state changes.
channel = grpc.insecure_channel("0.1.1.1:12345")
channel.subscribe(on_state_change, True)
deadline = datetime.datetime.now() + _TIMEOUT_FOR_SEGFAULT
while datetime.datetime.now() < deadline:
time.sleep(0.1)
if connection_failed.is_set():
channel.close()
if __name__ == '__main__':
unittest.main(verbosity=2)
| 32.745455
| 79
| 0.704053
|
import time
import threading
import unittest
import datetime
import grpc
_TIMEOUT_FOR_SEGFAULT = datetime.timedelta(seconds=10)
class GrpcShutdownTest(unittest.TestCase):
def test_channel_close_with_connectivity_watcher(self):
connection_failed = threading.Event()
def on_state_change(state):
if state in (grpc.ChannelConnectivity.TRANSIENT_FAILURE,
grpc.ChannelConnectivity.SHUTDOWN):
connection_failed.set()
channel = grpc.insecure_channel("0.1.1.1:12345")
channel.subscribe(on_state_change, True)
deadline = datetime.datetime.now() + _TIMEOUT_FOR_SEGFAULT
while datetime.datetime.now() < deadline:
time.sleep(0.1)
if connection_failed.is_set():
channel.close()
if __name__ == '__main__':
unittest.main(verbosity=2)
| true
| true
|
1c489148634f4f40e71c6fc0526e789a306304a1
| 10,199
|
py
|
Python
|
tools/test.py
|
nytbliang/siamattnat
|
880643ee09e7e4fa6a0af9631a9a8b32dd06c94d
|
[
"Apache-2.0"
] | null | null | null |
tools/test.py
|
nytbliang/siamattnat
|
880643ee09e7e4fa6a0af9631a9a8b32dd06c94d
|
[
"Apache-2.0"
] | null | null | null |
tools/test.py
|
nytbliang/siamattnat
|
880643ee09e7e4fa6a0af9631a9a8b32dd06c94d
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) SenseTime. All Rights Reserved.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import os
import cv2
import torch
import numpy as np
from pysot.core.config import cfg
from pysot.models.model_builder import ModelBuilder
from pysot.tracker.tracker_builder import build_tracker
from pysot.utils.bbox import get_axis_aligned_bbox
from pysot.utils.model_load import load_pretrain
from toolkit.datasets import DatasetFactory
from toolkit.utils.region import vot_overlap, vot_float2str
parser = argparse.ArgumentParser(description='siamrpn tracking')
parser.add_argument('--dataset', type=str,
help='datasets')
parser.add_argument('--config', default='', type=str,
help='config file')
parser.add_argument('--snapshot', default='', type=str,
help='snapshot of models to eval')
parser.add_argument('--video', default='', type=str,
help='eval one special video')
parser.add_argument('--vis', action='store_true',
                    help='whether to visualize the result')
args = parser.parse_args()
torch.set_num_threads(1)
def main():
# load config
cfg.merge_from_file(args.config)
cur_dir = os.path.dirname(os.path.realpath(__file__))
dataset_root = os.path.join(cur_dir, '../testing_dataset', args.dataset)
# create model
model = ModelBuilder()
# load model
model = load_pretrain(model, args.snapshot).cuda().eval()
# build tracker
tracker = build_tracker(model)
# create dataset
dataset = DatasetFactory.create_dataset(name=args.dataset,
dataset_root=dataset_root,
load_img=False)
model_name = args.snapshot.split('/')[-1].split('.')[0]
total_lost = 0
if args.dataset in ['VOT2016', 'VOT2018', 'VOT2019']:
# restart tracking
for v_idx, video in enumerate(dataset):
if args.video != '':
# test one special video
if video.name != args.video:
continue
frame_counter = 0
lost_number = 0
toc = 0
pred_bboxes = []
for idx, (img, gt_bbox) in enumerate(video):
if len(gt_bbox) == 4:
gt_bbox = [gt_bbox[0], gt_bbox[1],
gt_bbox[0], gt_bbox[1]+gt_bbox[3]-1,
gt_bbox[0]+gt_bbox[2]-1, gt_bbox[1]+gt_bbox[3]-1,
gt_bbox[0]+gt_bbox[2]-1, gt_bbox[1]]
tic = cv2.getTickCount()
if idx == frame_counter:
cx, cy, w, h = get_axis_aligned_bbox(np.array(gt_bbox))
gt_bbox_ = [cx-(w-1)/2, cy-(h-1)/2, w, h]
tracker.init(img, gt_bbox_)
pred_bbox = gt_bbox_
pred_bboxes.append(1)
elif idx > frame_counter:
outputs = tracker.track(img)
pred_bbox = outputs['bbox']
if cfg.MASK.MASK:
pred_bbox = outputs['polygon']
overlap = vot_overlap(pred_bbox, gt_bbox, (img.shape[1], img.shape[0]))
if overlap > 0:
# not lost
pred_bboxes.append(pred_bbox)
else:
# lost object
pred_bboxes.append(2)
frame_counter = idx + 5 # skip 5 frames
lost_number += 1
else:
pred_bboxes.append(0)
toc += cv2.getTickCount() - tic
if idx == 0:
cv2.destroyAllWindows()
if args.vis and idx > frame_counter:
                    cv2.polylines(img, [np.array(gt_bbox, int).reshape((-1, 1, 2))],
                                  True, (0, 255, 0), 3)
if cfg.MASK.MASK:
                        cv2.polylines(img, [np.array(pred_bbox, int).reshape((-1, 1, 2))],
                                      True, (0, 255, 255), 3)
else:
bbox = list(map(int, pred_bbox))
cv2.rectangle(img, (bbox[0], bbox[1]),
(bbox[0]+bbox[2], bbox[1]+bbox[3]), (0, 255, 255), 3)
cv2.putText(img, str(idx), (40, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 2)
cv2.putText(img, str(lost_number), (40, 80), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2)
cv2.imshow(video.name, img)
cv2.waitKey(1)
toc /= cv2.getTickFrequency()
# save results
video_path = os.path.join('results', args.dataset, model_name,
'baseline', video.name)
if not os.path.isdir(video_path):
os.makedirs(video_path)
result_path = os.path.join(video_path, '{}_001.txt'.format(video.name))
with open(result_path, 'w') as f:
for x in pred_bboxes:
if isinstance(x, int):
f.write("{:d}\n".format(x))
else:
f.write(','.join([vot_float2str("%.4f", i) for i in x])+'\n')
print('({:3d}) Video: {:12s} Time: {:4.1f}s Speed: {:3.1f}fps Lost: {:d}'.format(
v_idx+1, video.name, toc, idx / toc, lost_number))
total_lost += lost_number
print("{:s} total lost: {:d}".format(model_name, total_lost))
else:
# OPE tracking
for v_idx, video in enumerate(dataset):
if args.video != '':
# test one special video
if video.name != args.video:
continue
toc = 0
pred_bboxes = []
scores = []
track_times = []
for idx, (img, gt_bbox) in enumerate(video):
tic = cv2.getTickCount()
if idx == 0:
cx, cy, w, h = get_axis_aligned_bbox(np.array(gt_bbox))
gt_bbox_ = [cx-(w-1)/2, cy-(h-1)/2, w, h]
tracker.init(img, gt_bbox_)
pred_bbox = gt_bbox_
scores.append(None)
if 'VOT2018-LT' == args.dataset:
pred_bboxes.append([1])
else:
pred_bboxes.append(pred_bbox)
else:
outputs = tracker.track(img)
pred_bbox = outputs['bbox']
pred_bboxes.append(pred_bbox)
scores.append(outputs['best_score'])
# print(outputs['best_score'])
toc += cv2.getTickCount() - tic
track_times.append((cv2.getTickCount() - tic)/cv2.getTickFrequency())
if idx == 0:
cv2.destroyAllWindows()
if args.vis and idx > 0:
gt_bbox = list(map(int, gt_bbox))
pred_bbox = list(map(int, pred_bbox))
cv2.rectangle(img, (gt_bbox[0], gt_bbox[1]),
(gt_bbox[0]+gt_bbox[2], gt_bbox[1]+gt_bbox[3]), (0, 255, 0), 3)
cv2.rectangle(img, (pred_bbox[0], pred_bbox[1]),
(pred_bbox[0]+pred_bbox[2], pred_bbox[1]+pred_bbox[3]), (0, 255, 255), 3)
cv2.putText(img, str(idx), (40, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 2)
cv2.imshow(video.name, img)
cv2.waitKey(1)
toc /= cv2.getTickFrequency()
# save results
if 'VOT2018-LT' == args.dataset:
video_path = os.path.join('results', args.dataset, model_name,
'longterm', video.name)
if not os.path.isdir(video_path):
os.makedirs(video_path)
result_path = os.path.join(video_path,
'{}_001.txt'.format(video.name))
with open(result_path, 'w') as f:
for x in pred_bboxes:
f.write(','.join([str(i) for i in x])+'\n')
result_path = os.path.join(video_path,
'{}_001_confidence.value'.format(video.name))
with open(result_path, 'w') as f:
for x in scores:
f.write('\n') if x is None else f.write("{:.6f}\n".format(x))
result_path = os.path.join(video_path,
'{}_time.txt'.format(video.name))
with open(result_path, 'w') as f:
for x in track_times:
f.write("{:.6f}\n".format(x))
elif 'GOT-10k' == args.dataset:
video_path = os.path.join('results', args.dataset, model_name, video.name)
if not os.path.isdir(video_path):
os.makedirs(video_path)
result_path = os.path.join(video_path, '{}_001.txt'.format(video.name))
with open(result_path, 'w') as f:
for x in pred_bboxes:
f.write(','.join([str(i) for i in x])+'\n')
result_path = os.path.join(video_path,
'{}_time.txt'.format(video.name))
with open(result_path, 'w') as f:
for x in track_times:
f.write("{:.6f}\n".format(x))
else:
model_path = os.path.join('results', args.dataset, model_name)
if not os.path.isdir(model_path):
os.makedirs(model_path)
result_path = os.path.join(model_path, '{}.txt'.format(video.name))
with open(result_path, 'w') as f:
for x in pred_bboxes:
f.write(','.join([str(i) for i in x])+'\n')
print('({:3d}) Video: {:12s} Time: {:5.1f}s Speed: {:3.1f}fps'.format(
v_idx+1, video.name, toc, idx / toc))
if __name__ == '__main__':
main()
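# Hedged usage sketch (paths are illustrative assumptions; datasets are
# looked up under ../testing_dataset relative to this script):
#   python tools/test.py --dataset VOT2018 --config experiments/config.yaml \
#       --snapshot snapshot/model.pth --vis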
| 44.732456
| 109
| 0.494656
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import os
import cv2
import torch
import numpy as np
from pysot.core.config import cfg
from pysot.models.model_builder import ModelBuilder
from pysot.tracker.tracker_builder import build_tracker
from pysot.utils.bbox import get_axis_aligned_bbox
from pysot.utils.model_load import load_pretrain
from toolkit.datasets import DatasetFactory
from toolkit.utils.region import vot_overlap, vot_float2str
parser = argparse.ArgumentParser(description='siamrpn tracking')
parser.add_argument('--dataset', type=str,
help='datasets')
parser.add_argument('--config', default='', type=str,
help='config file')
parser.add_argument('--snapshot', default='', type=str,
help='snapshot of models to eval')
parser.add_argument('--video', default='', type=str,
help='eval one special video')
parser.add_argument('--vis', action='store_true',
                    help='whether to visualize the result')
args = parser.parse_args()
torch.set_num_threads(1)
def main():
cfg.merge_from_file(args.config)
cur_dir = os.path.dirname(os.path.realpath(__file__))
dataset_root = os.path.join(cur_dir, '../testing_dataset', args.dataset)
model = ModelBuilder()
model = load_pretrain(model, args.snapshot).cuda().eval()
tracker = build_tracker(model)
dataset = DatasetFactory.create_dataset(name=args.dataset,
dataset_root=dataset_root,
load_img=False)
model_name = args.snapshot.split('/')[-1].split('.')[0]
total_lost = 0
if args.dataset in ['VOT2016', 'VOT2018', 'VOT2019']:
for v_idx, video in enumerate(dataset):
if args.video != '':
if video.name != args.video:
continue
frame_counter = 0
lost_number = 0
toc = 0
pred_bboxes = []
for idx, (img, gt_bbox) in enumerate(video):
if len(gt_bbox) == 4:
gt_bbox = [gt_bbox[0], gt_bbox[1],
gt_bbox[0], gt_bbox[1]+gt_bbox[3]-1,
gt_bbox[0]+gt_bbox[2]-1, gt_bbox[1]+gt_bbox[3]-1,
gt_bbox[0]+gt_bbox[2]-1, gt_bbox[1]]
tic = cv2.getTickCount()
if idx == frame_counter:
cx, cy, w, h = get_axis_aligned_bbox(np.array(gt_bbox))
gt_bbox_ = [cx-(w-1)/2, cy-(h-1)/2, w, h]
tracker.init(img, gt_bbox_)
pred_bbox = gt_bbox_
pred_bboxes.append(1)
elif idx > frame_counter:
outputs = tracker.track(img)
pred_bbox = outputs['bbox']
if cfg.MASK.MASK:
pred_bbox = outputs['polygon']
overlap = vot_overlap(pred_bbox, gt_bbox, (img.shape[1], img.shape[0]))
if overlap > 0:
pred_bboxes.append(pred_bbox)
else:
pred_bboxes.append(2)
frame_counter = idx + 5
lost_number += 1
else:
pred_bboxes.append(0)
toc += cv2.getTickCount() - tic
if idx == 0:
cv2.destroyAllWindows()
if args.vis and idx > frame_counter:
                    cv2.polylines(img, [np.array(gt_bbox, int).reshape((-1, 1, 2))],
                                  True, (0, 255, 0), 3)
if cfg.MASK.MASK:
                        cv2.polylines(img, [np.array(pred_bbox, int).reshape((-1, 1, 2))],
                                      True, (0, 255, 255), 3)
else:
bbox = list(map(int, pred_bbox))
cv2.rectangle(img, (bbox[0], bbox[1]),
(bbox[0]+bbox[2], bbox[1]+bbox[3]), (0, 255, 255), 3)
cv2.putText(img, str(idx), (40, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 2)
cv2.putText(img, str(lost_number), (40, 80), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2)
cv2.imshow(video.name, img)
cv2.waitKey(1)
toc /= cv2.getTickFrequency()
video_path = os.path.join('results', args.dataset, model_name,
'baseline', video.name)
if not os.path.isdir(video_path):
os.makedirs(video_path)
result_path = os.path.join(video_path, '{}_001.txt'.format(video.name))
with open(result_path, 'w') as f:
for x in pred_bboxes:
if isinstance(x, int):
f.write("{:d}\n".format(x))
else:
f.write(','.join([vot_float2str("%.4f", i) for i in x])+'\n')
print('({:3d}) Video: {:12s} Time: {:4.1f}s Speed: {:3.1f}fps Lost: {:d}'.format(
v_idx+1, video.name, toc, idx / toc, lost_number))
total_lost += lost_number
print("{:s} total lost: {:d}".format(model_name, total_lost))
else:
for v_idx, video in enumerate(dataset):
if args.video != '':
if video.name != args.video:
continue
toc = 0
pred_bboxes = []
scores = []
track_times = []
for idx, (img, gt_bbox) in enumerate(video):
tic = cv2.getTickCount()
if idx == 0:
cx, cy, w, h = get_axis_aligned_bbox(np.array(gt_bbox))
gt_bbox_ = [cx-(w-1)/2, cy-(h-1)/2, w, h]
tracker.init(img, gt_bbox_)
pred_bbox = gt_bbox_
scores.append(None)
if 'VOT2018-LT' == args.dataset:
pred_bboxes.append([1])
else:
pred_bboxes.append(pred_bbox)
else:
outputs = tracker.track(img)
pred_bbox = outputs['bbox']
pred_bboxes.append(pred_bbox)
scores.append(outputs['best_score'])
toc += cv2.getTickCount() - tic
track_times.append((cv2.getTickCount() - tic)/cv2.getTickFrequency())
if idx == 0:
cv2.destroyAllWindows()
if args.vis and idx > 0:
gt_bbox = list(map(int, gt_bbox))
pred_bbox = list(map(int, pred_bbox))
cv2.rectangle(img, (gt_bbox[0], gt_bbox[1]),
(gt_bbox[0]+gt_bbox[2], gt_bbox[1]+gt_bbox[3]), (0, 255, 0), 3)
cv2.rectangle(img, (pred_bbox[0], pred_bbox[1]),
(pred_bbox[0]+pred_bbox[2], pred_bbox[1]+pred_bbox[3]), (0, 255, 255), 3)
cv2.putText(img, str(idx), (40, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 255), 2)
cv2.imshow(video.name, img)
cv2.waitKey(1)
toc /= cv2.getTickFrequency()
if 'VOT2018-LT' == args.dataset:
video_path = os.path.join('results', args.dataset, model_name,
'longterm', video.name)
if not os.path.isdir(video_path):
os.makedirs(video_path)
result_path = os.path.join(video_path,
'{}_001.txt'.format(video.name))
with open(result_path, 'w') as f:
for x in pred_bboxes:
f.write(','.join([str(i) for i in x])+'\n')
result_path = os.path.join(video_path,
'{}_001_confidence.value'.format(video.name))
with open(result_path, 'w') as f:
for x in scores:
f.write('\n') if x is None else f.write("{:.6f}\n".format(x))
result_path = os.path.join(video_path,
'{}_time.txt'.format(video.name))
with open(result_path, 'w') as f:
for x in track_times:
f.write("{:.6f}\n".format(x))
elif 'GOT-10k' == args.dataset:
video_path = os.path.join('results', args.dataset, model_name, video.name)
if not os.path.isdir(video_path):
os.makedirs(video_path)
result_path = os.path.join(video_path, '{}_001.txt'.format(video.name))
with open(result_path, 'w') as f:
for x in pred_bboxes:
f.write(','.join([str(i) for i in x])+'\n')
result_path = os.path.join(video_path,
'{}_time.txt'.format(video.name))
with open(result_path, 'w') as f:
for x in track_times:
f.write("{:.6f}\n".format(x))
else:
model_path = os.path.join('results', args.dataset, model_name)
if not os.path.isdir(model_path):
os.makedirs(model_path)
result_path = os.path.join(model_path, '{}.txt'.format(video.name))
with open(result_path, 'w') as f:
for x in pred_bboxes:
f.write(','.join([str(i) for i in x])+'\n')
print('({:3d}) Video: {:12s} Time: {:5.1f}s Speed: {:3.1f}fps'.format(
v_idx+1, video.name, toc, idx / toc))
if __name__ == '__main__':
main()
| true
| true
|
1c4891605978f91a0f6833c19339ea41dd791d27
| 644
|
py
|
Python
|
txdav/carddav/__init__.py
|
backwardn/ccs-calendarserver
|
13c706b985fb728b9aab42dc0fef85aae21921c3
|
[
"Apache-2.0"
] | 462
|
2016-08-14T17:43:24.000Z
|
2022-03-17T07:38:16.000Z
|
txdav/carddav/__init__.py
|
backwardn/ccs-calendarserver
|
13c706b985fb728b9aab42dc0fef85aae21921c3
|
[
"Apache-2.0"
] | 72
|
2016-09-01T23:19:35.000Z
|
2020-02-05T02:09:26.000Z
|
txdav/carddav/__init__.py
|
backwardn/ccs-calendarserver
|
13c706b985fb728b9aab42dc0fef85aae21921c3
|
[
"Apache-2.0"
] | 171
|
2016-08-16T03:50:30.000Z
|
2022-03-26T11:49:55.000Z
|
##
# Copyright (c) 2010-2017 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
CardDAV support for Twisted.
"""
| 32.2
| 74
| 0.75
| true
| true
|
|
1c4891769e7f37cc62e27529f9c065eea2bc5e76
| 10,685
|
py
|
Python
|
nums/core/linalg.py
|
gohar94/nums
|
2d8b0d7dd7b48c5b56641d4f03279b5ce2185db5
|
[
"Apache-2.0"
] | 111
|
2020-06-16T02:52:11.000Z
|
2022-03-29T10:24:19.000Z
|
nums/core/linalg.py
|
gohar94/nums
|
2d8b0d7dd7b48c5b56641d4f03279b5ce2185db5
|
[
"Apache-2.0"
] | 160
|
2020-10-07T21:49:36.000Z
|
2022-03-11T03:06:23.000Z
|
nums/core/linalg.py
|
gohar94/nums
|
2d8b0d7dd7b48c5b56641d4f03279b5ce2185db5
|
[
"Apache-2.0"
] | 25
|
2020-11-11T17:10:26.000Z
|
2022-03-07T23:17:16.000Z
|
# coding=utf-8
# Copyright (C) 2020 NumS Development Team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from nums.core.array.application import ArrayApplication
from nums.core.array.blockarray import BlockArray
from nums.core.grid.grid import ArrayGrid
def qr(app: ArrayApplication, X: BlockArray):
return indirect_tsqr(app, X)
def _qr_tree_reduce(
app: ArrayApplication, oid_list, result_grid_entry, result_grid_shape
):
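    # Pairwise tree reduction: pop two R factors, QR-reduce them into one,
    # and re-queue the result until a single R remains; the final reduction
    # is placed at the requested result grid entry.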
if len(oid_list) == 1:
return oid_list[0][0]
q = oid_list
while len(q) > 1:
a_oid, a_ge, a_gs = q.pop(0)
b_oid, _, _ = q.pop(0)
ge, gs = (result_grid_entry, result_grid_shape) if len(q) == 0 else (a_ge, a_gs)
c_oid = app.cm.qr(
a_oid,
b_oid,
mode="r",
axis=0,
syskwargs={
"grid_entry": ge,
"grid_shape": gs,
"options": {"num_returns": 1},
},
)
q.append((c_oid, ge, gs))
r_oid, r_ge, r_gs = q.pop(0)
assert r_ge == result_grid_entry
assert r_gs == result_grid_shape
return r_oid
def indirect_tsr(app: ArrayApplication, X: BlockArray, reshape_output=True):
assert len(X.shape) == 2
    # TODO (hme): This assertion is temporary and ensures the returned
    # shape of each block's QR is correct.
assert X.block_shape[0] >= X.shape[1]
# Compute R for each block.
grid = X.grid
grid_shape = grid.grid_shape
shape = X.shape
block_shape = X.block_shape
R_oids = []
# Assume no blocking along second dim.
for i in range(grid_shape[0]):
# Select a row according to block_shape.
row = []
for j in range(grid_shape[1]):
row.append(X.blocks[i, j].oid)
ge, gs = (i, 0), (grid_shape[0], 1)
oid = app.cm.qr(
*row,
mode="r",
axis=1,
syskwargs={
"grid_entry": ge,
"grid_shape": gs,
"options": {"num_returns": 1},
}
)
R_oids.append((oid, ge, gs))
# Construct R by summing over R blocks.
# TODO (hme): Communication may be inefficient due to redundancy of data.
R_shape = (shape[1], shape[1])
R_block_shape = (block_shape[1], block_shape[1])
tsR = BlockArray(
ArrayGrid(shape=R_shape, block_shape=R_shape, dtype=X.dtype.__name__), app.cm
)
tsR.blocks[0, 0].oid = _qr_tree_reduce(app, R_oids, (0, 0), (1, 1))
# If blocking is "tall-skinny," then we're done.
if R_shape != R_block_shape:
if reshape_output:
R = tsR.reshape(R_shape, block_shape=R_block_shape)
else:
R = tsR
else:
R = tsR
return R
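def _demo_indirect_tsr_equivalence():
    # Hedged NumPy sketch (illustrative, not part of the nums API): reducing
    # per-block R factors with further QRs equals one QR of the stacked
    # factors, so the R recovered by indirect_tsr matches a dense QR of X
    # up to row signs.
    X = np.random.randn(8, 3)
    Rs = [np.linalg.qr(Xi, mode="r") for Xi in np.split(X, 2)]
    R_tree = np.linalg.qr(np.vstack(Rs), mode="r")
    assert np.allclose(np.abs(R_tree), np.abs(np.linalg.qr(X, mode="r")))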
def indirect_tsqr(app: ArrayApplication, X: BlockArray, reshape_output=True):
shape = X.shape
block_shape = X.block_shape
R_shape = (shape[1], shape[1])
R_block_shape = (block_shape[1], block_shape[1])
tsR = indirect_tsr(app, X, reshape_output=False)
# Compute inverse of R.
tsR_inverse = inv(app, tsR)
# If blocking is "tall-skinny," then we're done.
if R_shape != R_block_shape:
R_inverse = tsR_inverse.reshape(R_shape, block_shape=R_block_shape)
if reshape_output:
R = tsR.reshape(R_shape, block_shape=R_block_shape)
else:
R = tsR
else:
R_inverse = tsR_inverse
R = tsR
Q = X @ R_inverse
return Q, R
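def _demo_indirect_q():
    # Hedged NumPy sketch (illustrative, not part of the nums API): the
    # "indirect" method forms Q as X @ inv(R); for full column rank X this
    # Q is orthonormal, which is what makes Q @ R a valid factorization.
    X = np.random.randn(8, 3)
    R = np.linalg.qr(X, mode="r")
    Q = X @ np.linalg.inv(R)
    assert np.allclose(Q.T @ Q, np.eye(3))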
def direct_tsqr(app: ArrayApplication, X, reshape_output=True):
assert len(X.shape) == 2
# Compute R for each block.
shape = X.shape
grid = X.grid
grid_shape = grid.grid_shape
block_shape = X.block_shape
Q_oids = []
R_oids = []
QR_dims = []
Q2_shape = [0, shape[1]]
for i in range(grid_shape[0]):
# Select a row according to block_shape.
row = []
for j in range(grid_shape[1]):
row.append(X.blocks[i, j].oid)
# We invoke "reduced", so q, r is returned with dimensions (M, K), (K, N), K = min(M, N)
M = grid.get_block_shape((i, 0))[0]
N = shape[1]
K = min(M, N)
QR_dims.append(((M, K), (K, N)))
Q2_shape[0] += K
# Run each row on separate nodes along first axis.
# This maintains some data locality.
Q_oid, R_oid = app.cm.qr(
*row,
mode="reduced",
axis=1,
syskwargs={
"grid_entry": (i, 0),
"grid_shape": (grid_shape[0], 1),
"options": {"num_returns": 2},
}
)
R_oids.append(R_oid)
Q_oids.append(Q_oid)
# TODO (hme): This pulls several order N^2 R matrices on a single node.
# A solution is the recursive extension to direct TSQR.
Q2_oid, R2_oid = app.cm.qr(
*R_oids,
mode="reduced",
axis=0,
syskwargs={
"grid_entry": (0, 0),
"grid_shape": (1, 1),
"options": {"num_returns": 2},
}
)
Q2_shape = tuple(Q2_shape)
Q2_block_shape = (QR_dims[0][1][0], shape[1])
Q2 = app.vec_from_oids(
[Q2_oid], shape=Q2_shape, block_shape=Q2_block_shape, dtype=X.dtype
)
# The resulting Q's from this operation are N^2 (same size as above R's).
Q2_oids = list(map(lambda block: block.oid, Q2.blocks.flatten()))
# Construct Q.
Q = app.zeros(shape=shape, block_shape=(block_shape[0], shape[1]), dtype=X.dtype)
for i, grid_entry in enumerate(Q.grid.get_entry_iterator()):
Q.blocks[grid_entry].oid = app.cm.bop(
"tensordot",
Q_oids[i],
Q2_oids[i],
a1_T=False,
a2_T=False,
axes=1,
syskwargs={"grid_entry": grid_entry, "grid_shape": Q.grid.grid_shape},
)
# Construct R.
shape = X.shape
R_shape = (shape[1], shape[1])
R_block_shape = (block_shape[1], block_shape[1])
tsR = app.vec_from_oids([R2_oid], shape=R_shape, block_shape=R_shape, dtype=X.dtype)
# If blocking is "tall-skinny," then we're done.
if R_shape == R_block_shape or not reshape_output:
R = tsR
else:
R = tsR.reshape(R_shape, block_shape=R_block_shape)
if Q.shape != block_shape or not reshape_output:
Q = Q.reshape(shape, block_shape=block_shape)
return Q, R
def svd(app: ArrayApplication, X):
# TODO(hme): Optimize by merging with direct qr to compute U directly,
# to avoid wasting space storing intermediate Q.
# This may not really help until we have operator fusion.
assert len(X.shape) == 2
block_shape = X.block_shape
shape = X.shape
R_shape = (shape[1], shape[1])
R_block_shape = (block_shape[1], block_shape[1])
Q, R = direct_tsqr(app, X, reshape_output=False)
assert R.shape == R.block_shape
R_U, S, VT = app.cm.svd(
R.blocks[(0, 0)].oid, syskwargs={"grid_entry": (0, 0), "grid_shape": (1, 1)}
)
R_U: BlockArray = app.vec_from_oids([R_U], R_shape, R_block_shape, X.dtype)
S: BlockArray = app.vec_from_oids([S], R_shape[:1], R_block_shape[:1], X.dtype)
VT = app.vec_from_oids([VT], R_shape, R_block_shape, X.dtype)
U = Q @ R_U
return U, S, VT
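def _demo_qr_svd():
    # Hedged NumPy sketch (illustrative, not part of the nums API): with
    # X = Q @ R and R = R_U @ diag(S) @ VT, the factors above satisfy
    # X = (Q @ R_U) @ diag(S) @ VT, i.e. U = Q @ R_U.
    X = np.random.randn(8, 3)
    Q, R = np.linalg.qr(X)
    R_U, S, VT = np.linalg.svd(R)
    assert np.allclose((Q @ R_U) * S @ VT, X)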
def inv(app: ArrayApplication, X: BlockArray):
# TODO (hme): Implement scalable version.
block_shape = X.block_shape
assert len(X.shape) == 2
assert X.shape[0] == X.shape[1]
single_block = X.shape[0] == X.block_shape[0] and X.shape[1] == X.block_shape[1]
if single_block:
result = X.copy()
else:
result = X.reshape(block_shape=X.shape)
result.blocks[0, 0].oid = app.cm.inv(
result.blocks[0, 0].oid, syskwargs={"grid_entry": (0, 0), "grid_shape": (1, 1)}
)
if not single_block:
result = result.reshape(block_shape=block_shape)
return result
def cholesky(app: ArrayApplication, X: BlockArray):
# TODO (hme): Implement scalable version.
# Note:
# A = Q, R
# A.T @ A = R.T @ R
# A.T @ A = L @ L.T
# => R == L.T
block_shape = X.block_shape
assert len(X.shape) == 2
assert X.shape[0] == X.shape[1]
single_block = X.shape[0] == X.block_shape[0] and X.shape[1] == X.block_shape[1]
if single_block:
result = X.copy()
else:
result = X.reshape(block_shape=X.shape)
result.blocks[0, 0].oid = app.cm.cholesky(
result.blocks[0, 0].oid, syskwargs={"grid_entry": (0, 0), "grid_shape": (1, 1)}
)
if not single_block:
result = result.reshape(block_shape=block_shape)
return result
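def _demo_cholesky_qr_relation():
    # Hedged NumPy sketch (illustrative, not part of the nums API): the note
    # above holds once R is normalized to a positive diagonal, because
    # A.T @ A = R.T @ R and the Cholesky factor of A.T @ A is unique.
    A = np.random.randn(5, 3)
    R = np.linalg.qr(A, mode="r")
    R = np.sign(np.diag(R))[:, None] * R  # flip rows so diag(R) > 0
    L = np.linalg.cholesky(A.T @ A)
    assert np.allclose(R, L.T)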
def fast_linear_regression(app: ArrayApplication, X: BlockArray, y: BlockArray):
assert len(X.shape) == 2
assert len(y.shape) == 1
block_shape = X.block_shape
shape = X.shape
R_shape = (shape[1], shape[1])
R_block_shape = (block_shape[1], block_shape[1])
Q, R = indirect_tsqr(app, X, reshape_output=False)
R_inv = inv(app, R)
if R_shape != R_block_shape:
R_inv = R_inv.reshape(R_shape, block_shape=R_block_shape)
theta = R_inv @ (Q.T @ y)
return theta
def linear_regression(app: ArrayApplication, X: BlockArray, y: BlockArray):
assert len(X.shape) == 2
assert len(y.shape) == 1
block_shape = X.block_shape
shape = X.shape
R_shape = (shape[1], shape[1])
R_block_shape = (block_shape[1], block_shape[1])
Q, R = direct_tsqr(app, X, reshape_output=False)
# Invert R.
R_inv = inv(app, R)
if R_shape != R_block_shape:
R_inv = R_inv.reshape(R_shape, block_shape=R_block_shape)
theta = R_inv @ (Q.T @ y)
return theta
def ridge_regression(app: ArrayApplication, X: BlockArray, y: BlockArray, lamb: float):
assert len(X.shape) == 2
assert len(y.shape) == 1
assert lamb >= 0
block_shape = X.block_shape
shape = X.shape
R_shape = (shape[1], shape[1])
R_block_shape = (block_shape[1], block_shape[1])
R = indirect_tsr(app, X)
lamb_vec = app.array(lamb * np.eye(R_shape[0]), block_shape=R_block_shape)
# TODO (hme): A better solution exists, which inverts R by augmenting X and y.
# See Murphy 7.5.2.
theta = inv(app, lamb_vec + R.T @ R) @ (X.T @ y)
return theta
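def _demo_ridge_closed_form():
    # Hedged NumPy sketch (illustrative, not part of the nums API): since
    # R.T @ R == X.T @ X for any QR factorization of X, the solution above
    # equals the standard closed form inv(lamb*I + X.T @ X) @ X.T @ y.
    X, y, lamb = np.random.randn(10, 3), np.random.randn(10), 0.5
    R = np.linalg.qr(X, mode="r")
    lhs = np.linalg.inv(lamb * np.eye(3) + R.T @ R) @ (X.T @ y)
    rhs = np.linalg.inv(lamb * np.eye(3) + X.T @ X) @ (X.T @ y)
    assert np.allclose(lhs, rhs)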
| 32.675841
| 96
| 0.604118
|
import numpy as np
from nums.core.array.application import ArrayApplication
from nums.core.array.blockarray import BlockArray
from nums.core.grid.grid import ArrayGrid
def qr(app: ArrayApplication, X: BlockArray):
return indirect_tsqr(app, X)
def _qr_tree_reduce(
app: ArrayApplication, oid_list, result_grid_entry, result_grid_shape
):
if len(oid_list) == 1:
return oid_list[0][0]
q = oid_list
while len(q) > 1:
a_oid, a_ge, a_gs = q.pop(0)
b_oid, _, _ = q.pop(0)
ge, gs = (result_grid_entry, result_grid_shape) if len(q) == 0 else (a_ge, a_gs)
c_oid = app.cm.qr(
a_oid,
b_oid,
mode="r",
axis=0,
syskwargs={
"grid_entry": ge,
"grid_shape": gs,
"options": {"num_returns": 1},
},
)
q.append((c_oid, ge, gs))
r_oid, r_ge, r_gs = q.pop(0)
assert r_ge == result_grid_entry
assert r_gs == result_grid_shape
return r_oid
def indirect_tsr(app: ArrayApplication, X: BlockArray, reshape_output=True):
assert len(X.shape) == 2
assert X.block_shape[0] >= X.shape[1]
grid = X.grid
grid_shape = grid.grid_shape
shape = X.shape
block_shape = X.block_shape
R_oids = []
for i in range(grid_shape[0]):
row = []
for j in range(grid_shape[1]):
row.append(X.blocks[i, j].oid)
ge, gs = (i, 0), (grid_shape[0], 1)
oid = app.cm.qr(
*row,
mode="r",
axis=1,
syskwargs={
"grid_entry": ge,
"grid_shape": gs,
"options": {"num_returns": 1},
}
)
R_oids.append((oid, ge, gs))
R_shape = (shape[1], shape[1])
R_block_shape = (block_shape[1], block_shape[1])
tsR = BlockArray(
ArrayGrid(shape=R_shape, block_shape=R_shape, dtype=X.dtype.__name__), app.cm
)
tsR.blocks[0, 0].oid = _qr_tree_reduce(app, R_oids, (0, 0), (1, 1))
if R_shape != R_block_shape:
if reshape_output:
R = tsR.reshape(R_shape, block_shape=R_block_shape)
else:
R = tsR
else:
R = tsR
return R
def indirect_tsqr(app: ArrayApplication, X: BlockArray, reshape_output=True):
shape = X.shape
block_shape = X.block_shape
R_shape = (shape[1], shape[1])
R_block_shape = (block_shape[1], block_shape[1])
tsR = indirect_tsr(app, X, reshape_output=False)
# Compute inverse of R.
tsR_inverse = inv(app, tsR)
# If blocking is "tall-skinny," then we're done.
if R_shape != R_block_shape:
R_inverse = tsR_inverse.reshape(R_shape, block_shape=R_block_shape)
if reshape_output:
R = tsR.reshape(R_shape, block_shape=R_block_shape)
else:
R = tsR
else:
R_inverse = tsR_inverse
R = tsR
Q = X @ R_inverse
return Q, R
def direct_tsqr(app: ArrayApplication, X, reshape_output=True):
assert len(X.shape) == 2
shape = X.shape
grid = X.grid
grid_shape = grid.grid_shape
block_shape = X.block_shape
Q_oids = []
R_oids = []
QR_dims = []
Q2_shape = [0, shape[1]]
for i in range(grid_shape[0]):
row = []
for j in range(grid_shape[1]):
row.append(X.blocks[i, j].oid)
M = grid.get_block_shape((i, 0))[0]
N = shape[1]
K = min(M, N)
QR_dims.append(((M, K), (K, N)))
Q2_shape[0] += K
Q_oid, R_oid = app.cm.qr(
*row,
mode="reduced",
axis=1,
syskwargs={
"grid_entry": (i, 0),
"grid_shape": (grid_shape[0], 1),
"options": {"num_returns": 2},
}
)
R_oids.append(R_oid)
Q_oids.append(Q_oid)
Q2_oid, R2_oid = app.cm.qr(
*R_oids,
mode="reduced",
axis=0,
syskwargs={
"grid_entry": (0, 0),
"grid_shape": (1, 1),
"options": {"num_returns": 2},
}
)
Q2_shape = tuple(Q2_shape)
Q2_block_shape = (QR_dims[0][1][0], shape[1])
Q2 = app.vec_from_oids(
[Q2_oid], shape=Q2_shape, block_shape=Q2_block_shape, dtype=X.dtype
)
Q2_oids = list(map(lambda block: block.oid, Q2.blocks.flatten()))
Q = app.zeros(shape=shape, block_shape=(block_shape[0], shape[1]), dtype=X.dtype)
for i, grid_entry in enumerate(Q.grid.get_entry_iterator()):
Q.blocks[grid_entry].oid = app.cm.bop(
"tensordot",
Q_oids[i],
Q2_oids[i],
a1_T=False,
a2_T=False,
axes=1,
syskwargs={"grid_entry": grid_entry, "grid_shape": Q.grid.grid_shape},
)
shape = X.shape
R_shape = (shape[1], shape[1])
R_block_shape = (block_shape[1], block_shape[1])
tsR = app.vec_from_oids([R2_oid], shape=R_shape, block_shape=R_shape, dtype=X.dtype)
if R_shape == R_block_shape or not reshape_output:
R = tsR
else:
R = tsR.reshape(R_shape, block_shape=R_block_shape)
if Q.shape != block_shape or not reshape_output:
Q = Q.reshape(shape, block_shape=block_shape)
return Q, R
def svd(app: ArrayApplication, X):
# TODO(hme): Optimize by merging with direct qr to compute U directly,
# to avoid wasting space storing intermediate Q.
# This may not really help until we have operator fusion.
assert len(X.shape) == 2
block_shape = X.block_shape
shape = X.shape
R_shape = (shape[1], shape[1])
R_block_shape = (block_shape[1], block_shape[1])
Q, R = direct_tsqr(app, X, reshape_output=False)
assert R.shape == R.block_shape
R_U, S, VT = app.cm.svd(
R.blocks[(0, 0)].oid, syskwargs={"grid_entry": (0, 0), "grid_shape": (1, 1)}
)
R_U: BlockArray = app.vec_from_oids([R_U], R_shape, R_block_shape, X.dtype)
S: BlockArray = app.vec_from_oids([S], R_shape[:1], R_block_shape[:1], X.dtype)
VT = app.vec_from_oids([VT], R_shape, R_block_shape, X.dtype)
U = Q @ R_U
return U, S, VT
def inv(app: ArrayApplication, X: BlockArray):
# TODO (hme): Implement scalable version.
block_shape = X.block_shape
assert len(X.shape) == 2
assert X.shape[0] == X.shape[1]
single_block = X.shape[0] == X.block_shape[0] and X.shape[1] == X.block_shape[1]
if single_block:
result = X.copy()
else:
result = X.reshape(block_shape=X.shape)
result.blocks[0, 0].oid = app.cm.inv(
result.blocks[0, 0].oid, syskwargs={"grid_entry": (0, 0), "grid_shape": (1, 1)}
)
if not single_block:
result = result.reshape(block_shape=block_shape)
return result
def cholesky(app: ArrayApplication, X: BlockArray):
# TODO (hme): Implement scalable version.
# Note:
# A = Q, R
# A.T @ A = R.T @ R
# A.T @ A = L @ L.T
# => R == L.T
block_shape = X.block_shape
assert len(X.shape) == 2
assert X.shape[0] == X.shape[1]
single_block = X.shape[0] == X.block_shape[0] and X.shape[1] == X.block_shape[1]
if single_block:
result = X.copy()
else:
result = X.reshape(block_shape=X.shape)
result.blocks[0, 0].oid = app.cm.cholesky(
result.blocks[0, 0].oid, syskwargs={"grid_entry": (0, 0), "grid_shape": (1, 1)}
)
if not single_block:
result = result.reshape(block_shape=block_shape)
return result
def fast_linear_regression(app: ArrayApplication, X: BlockArray, y: BlockArray):
assert len(X.shape) == 2
assert len(y.shape) == 1
block_shape = X.block_shape
shape = X.shape
R_shape = (shape[1], shape[1])
R_block_shape = (block_shape[1], block_shape[1])
Q, R = indirect_tsqr(app, X, reshape_output=False)
R_inv = inv(app, R)
if R_shape != R_block_shape:
R_inv = R_inv.reshape(R_shape, block_shape=R_block_shape)
theta = R_inv @ (Q.T @ y)
return theta
def linear_regression(app: ArrayApplication, X: BlockArray, y: BlockArray):
assert len(X.shape) == 2
assert len(y.shape) == 1
block_shape = X.block_shape
shape = X.shape
R_shape = (shape[1], shape[1])
R_block_shape = (block_shape[1], block_shape[1])
Q, R = direct_tsqr(app, X, reshape_output=False)
# Invert R.
R_inv = inv(app, R)
if R_shape != R_block_shape:
R_inv = R_inv.reshape(R_shape, block_shape=R_block_shape)
theta = R_inv @ (Q.T @ y)
return theta
def ridge_regression(app: ArrayApplication, X: BlockArray, y: BlockArray, lamb: float):
assert len(X.shape) == 2
assert len(y.shape) == 1
assert lamb >= 0
block_shape = X.block_shape
shape = X.shape
R_shape = (shape[1], shape[1])
R_block_shape = (block_shape[1], block_shape[1])
R = indirect_tsr(app, X)
lamb_vec = app.array(lamb * np.eye(R_shape[0]), block_shape=R_block_shape)
# TODO (hme): A better solution exists, which inverts R by augmenting X and y.
# See Murphy 7.5.2.
theta = inv(app, lamb_vec + R.T @ R) @ (X.T @ y)
return theta
| true
| true
|
1c489265a54907a8c20c316d809f6627620e1fb5
| 443
|
py
|
Python
|
app/core/migrations/0007_recipe_image.py
|
JamesGitauM/Recipe_app_API
|
92e89e32a940bd936e330f859c90725298a5b75a
|
[
"MIT"
] | null | null | null |
app/core/migrations/0007_recipe_image.py
|
JamesGitauM/Recipe_app_API
|
92e89e32a940bd936e330f859c90725298a5b75a
|
[
"MIT"
] | null | null | null |
app/core/migrations/0007_recipe_image.py
|
JamesGitauM/Recipe_app_API
|
92e89e32a940bd936e330f859c90725298a5b75a
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.1.15 on 2020-08-08 06:54
import core.models
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0006_auto_20200729_1906'),
]
operations = [
migrations.AddField(
model_name='recipe',
name='image',
field=models.ImageField(null=True, upload_to=core.models.recipe_image_file_path),
),
]
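# Applied with the standard Django workflow, e.g. `python manage.py migrate core`.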
| 22.15
| 93
| 0.632054
|
import core.models
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0006_auto_20200729_1906'),
]
operations = [
migrations.AddField(
model_name='recipe',
name='image',
field=models.ImageField(null=True, upload_to=core.models.recipe_image_file_path),
),
]
| true
| true
|
1c489286238ff8d4a3501a9b5c36cc2ee0539d3e
| 298
|
py
|
Python
|
cerberus_collections/error_handlers/__init__.py
|
funkyfuture/cerberuse-collections
|
b6d5c9035e02375fb933c1077221659d483db348
|
[
"ISC"
] | 2
|
2017-08-04T15:01:28.000Z
|
2020-05-15T11:40:25.000Z
|
cerberus_collections/error_handlers/__init__.py
|
funkyfuture/cerberuse-collections
|
b6d5c9035e02375fb933c1077221659d483db348
|
[
"ISC"
] | 5
|
2016-09-03T15:25:00.000Z
|
2020-08-31T19:00:44.000Z
|
cerberus_collections/error_handlers/__init__.py
|
funkyfuture/cerberuse-collections
|
b6d5c9035e02375fb933c1077221659d483db348
|
[
"ISC"
] | 1
|
2020-01-03T09:37:13.000Z
|
2020-01-03T09:37:13.000Z
|
__all__ = []
from cerberus_collections.error_handlers.json import JSONErrorHandler # noqa: E402
__all__.append(JSONErrorHandler.__name__)
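# XML support is optional: the import below is allowed to fail so the package
# still works when the XML handler's third-party dependencies are absent.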
try:
from cerberus_collections.error_handlers.xml import XMLErrorHandler
except ImportError:
pass
else:
__all__.append(XMLErrorHandler.__name__)
| 24.833333
| 83
| 0.812081
|
__all__ = []
from cerberus_collections.error_handlers.json import JSONErrorHandler
__all__.append(JSONErrorHandler.__name__)
try:
from cerberus_collections.error_handlers.xml import XMLErrorHandler
except ImportError:
pass
else:
__all__.append(XMLErrorHandler.__name__)
| true
| true
|
1c4892c606ebc7ea5b49dd47c53165b2c79830eb
| 1,075
|
py
|
Python
|
src/htsql/ctl/__init__.py
|
sirex/htsql
|
52275f6a584b412c109822d2ed2a5e69ac522cdf
|
[
"Apache-2.0"
] | 15
|
2020-02-11T11:24:34.000Z
|
2022-03-03T20:46:34.000Z
|
src/htsql/ctl/__init__.py
|
sirex/htsql
|
52275f6a584b412c109822d2ed2a5e69ac522cdf
|
[
"Apache-2.0"
] | 1
|
2020-02-13T14:08:34.000Z
|
2020-02-13T14:16:04.000Z
|
src/htsql/ctl/__init__.py
|
sirex/htsql
|
52275f6a584b412c109822d2ed2a5e69ac522cdf
|
[
"Apache-2.0"
] | 2
|
2020-02-13T14:10:06.000Z
|
2021-02-25T04:36:05.000Z
|
#
# Copyright (c) 2006-2013, Prometheus Research, LLC
#
"""
:mod:`htsql.ctl`
================
This package implements the ``htsql-ctl`` script.
"""
from ..core.util import trim_doc
from .script import Script
import sys
class HTSQL_CTL(Script):
"""
Implements the ``htsql-ctl`` script.
Usage::
ctl = HTSQL_CTL(stdin, stdout, stderr)
exit_code = ctl.main(argv)
"""
routines_entry = 'htsql.routines'
hint = """HTSQL command-line administrative application"""
help = """
Run `%(executable)s help` for general usage and list of routines.
Run `%(executable)s help <routine>` for help on a specific routine.
"""
def get_copyright(self):
import htsql
return trim_doc(htsql.__copyright__)
def get_legal(self):
import htsql
return trim_doc(htsql.__legal__)
def main():
# This function is called when the `htsql-ctl` script is started.
# The return value is passed to `sys.exit()`.
ctl = HTSQL_CTL(sys.stdin, sys.stdout, sys.stderr)
return ctl.main(sys.argv)
| 21.078431
| 71
| 0.63814
|
from ..core.util import trim_doc
from .script import Script
import sys
class HTSQL_CTL(Script):
routines_entry = 'htsql.routines'
hint = """HTSQL command-line administrative application"""
help = """
Run `%(executable)s help` for general usage and list of routines.
Run `%(executable)s help <routine>` for help on a specific routine.
"""
def get_copyright(self):
import htsql
return trim_doc(htsql.__copyright__)
def get_legal(self):
import htsql
return trim_doc(htsql.__legal__)
def main():
ctl = HTSQL_CTL(sys.stdin, sys.stdout, sys.stderr)
return ctl.main(sys.argv)
| true
| true
|
1c489424a60b57cec880a829a71ef82152679fe6
| 3,472
|
py
|
Python
|
config/settings.py
|
yuyuyuhaoshi/Blog-BE
|
a485d5159076d619d4fd6019fe9b96ac04020d4d
|
[
"Apache-2.0"
] | null | null | null |
config/settings.py
|
yuyuyuhaoshi/Blog-BE
|
a485d5159076d619d4fd6019fe9b96ac04020d4d
|
[
"Apache-2.0"
] | null | null | null |
config/settings.py
|
yuyuyuhaoshi/Blog-BE
|
a485d5159076d619d4fd6019fe9b96ac04020d4d
|
[
"Apache-2.0"
] | null | null | null |
"""
Django settings for blog project.
Generated by 'django-admin startproject' using Django 2.1.4.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '6v0ogpdc=oh5c%f)3rs)p5p=#0o0$ce+y4=6(4^yb&nx-u5s*t'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# third-party apps
'rest_framework',
'corsheaders',
'django_filters',
# my app
'posts',
'website',
'tags',
'categories',
]
MIDDLEWARE = [
'corsheaders.middleware.CorsMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'config.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'config.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
# Cross-origin resource sharing (CORS)
CORS_ORIGIN_ALLOW_ALL = True
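# (In production this is typically narrowed to an explicit origin whitelist
# instead of allowing every origin.)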
# django-rest-framework settings
REST_FRAMEWORK = {
'DEFAULT_PAGINATION_CLASS': 'utils.pagination.CustomPageNumberPagination',
}
| 24.624113
| 91
| 0.693836
|
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = '6v0ogpdc=oh5c%f)3rs)p5p=#0o0$ce+y4=6(4^yb&nx-u5s*t'
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# third-party apps
'rest_framework',
'corsheaders',
'django_filters',
# my app
'posts',
'website',
'tags',
'categories',
]
MIDDLEWARE = [
'corsheaders.middleware.CorsMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'config.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'config.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
# Cross-origin resource sharing (CORS)
CORS_ORIGIN_ALLOW_ALL = True
# django-rest-framework settings
REST_FRAMEWORK = {
'DEFAULT_PAGINATION_CLASS': 'utils.pagination.CustomPageNumberPagination',
}
| true
| true
|
1c48948f08f5d589c6b7f9f5aef0879980113db8
| 1,200
|
py
|
Python
|
azure-mgmt-network/azure/mgmt/network/v2019_02_01/models/flow_log_format_parameters.py
|
xiafu-msft/azure-sdk-for-python
|
4d9560cfd519ee60667f3cc2f5295a58c18625db
|
[
"MIT"
] | 1
|
2021-09-07T18:36:04.000Z
|
2021-09-07T18:36:04.000Z
|
azure-mgmt-network/azure/mgmt/network/v2019_02_01/models/flow_log_format_parameters.py
|
xiafu-msft/azure-sdk-for-python
|
4d9560cfd519ee60667f3cc2f5295a58c18625db
|
[
"MIT"
] | 2
|
2019-10-02T23:37:38.000Z
|
2020-10-02T01:17:31.000Z
|
azure-mgmt-network/azure/mgmt/network/v2019_02_01/models/flow_log_format_parameters.py
|
xiafu-msft/azure-sdk-for-python
|
4d9560cfd519ee60667f3cc2f5295a58c18625db
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class FlowLogFormatParameters(Model):
"""Parameters that define the flow log format.
:param type: The file type of flow log. Possible values include: 'JSON'
:type type: str or
~azure.mgmt.network.v2019_02_01.models.FlowLogFormatType
    :param version: The version (revision) of the flow log. Default value: 0.
:type version: int
"""
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'int'},
}
def __init__(self, **kwargs):
super(FlowLogFormatParameters, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.version = kwargs.get('version', 0)
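# Hedged usage sketch (values are illustrative assumptions):
#   params = FlowLogFormatParameters(type='JSON', version=2)
#   assert params.version == 2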
| 35.294118
| 78
| 0.5925
|
from msrest.serialization import Model
class FlowLogFormatParameters(Model):
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'version': {'key': 'version', 'type': 'int'},
}
def __init__(self, **kwargs):
super(FlowLogFormatParameters, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.version = kwargs.get('version', 0)
| true
| true
|
1c4894e574deda4b9d4ac0a8390574ca9c03dcf1
| 1,846
|
py
|
Python
|
spanner_orm/admin/migration.py
|
germanp173/python-spanner-orm
|
a5dd49efa173ab0f370a7a13ae3983c0216c4f2b
|
[
"Apache-2.0"
] | 2
|
2020-07-12T00:43:08.000Z
|
2021-01-30T03:03:32.000Z
|
spanner_orm/admin/migration.py
|
germanp173/python-spanner-orm
|
a5dd49efa173ab0f370a7a13ae3983c0216c4f2b
|
[
"Apache-2.0"
] | 20
|
2020-07-20T21:55:52.000Z
|
2021-02-03T21:53:00.000Z
|
spanner_orm/admin/migration.py
|
germanp173/python-spanner-orm
|
a5dd49efa173ab0f370a7a13ae3983c0216c4f2b
|
[
"Apache-2.0"
] | 3
|
2020-07-20T21:13:45.000Z
|
2021-01-29T18:41:20.000Z
|
# python3
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Holds information about a specific migration."""
from typing import Callable, Optional
from spanner_orm.admin import update
def no_update_callable() -> update.SchemaUpdate:
return update.NoUpdate()
class Migration:
"""Holds information about a specific migration."""
def __init__(
self,
migration_id: str,
prev_migration_id: Optional[str],
description: str,
upgrade: Optional[Callable[[], update.SchemaUpdate]] = None,
downgrade: Optional[Callable[[], update.SchemaUpdate]] = None,
):
self._id = migration_id
self._description = description
self._prev = prev_migration_id
self._upgrade = upgrade or no_update_callable
self._downgrade = downgrade or no_update_callable
@property
def migration_id(self) -> str:
return self._id
@property
def prev_migration_id(self) -> Optional[str]:
return self._prev
@property
def description(self) -> str:
return self._description
@property
def upgrade(self) -> Optional[Callable[[], update.SchemaUpdate]]:
return self._upgrade
@property
def downgrade(self) -> Optional[Callable[[], update.SchemaUpdate]]:
return self._downgrade
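# Hedged usage sketch (ids and description are illustrative assumptions):
#   migration = Migration(
#       migration_id='0002_add_users',
#       prev_migration_id='0001_initial',
#       description='Create the users table.',
#   )
#   migration.upgrade()  # falls back to no_update_callable -> update.NoUpdate()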
| 29.774194
| 74
| 0.695016
|
from typing import Callable, Optional
from spanner_orm.admin import update
def no_update_callable() -> update.SchemaUpdate:
return update.NoUpdate()
class Migration:
def __init__(
self,
migration_id: str,
prev_migration_id: Optional[str],
description: str,
upgrade: Optional[Callable[[], update.SchemaUpdate]] = None,
downgrade: Optional[Callable[[], update.SchemaUpdate]] = None,
):
self._id = migration_id
self._description = description
self._prev = prev_migration_id
self._upgrade = upgrade or no_update_callable
self._downgrade = downgrade or no_update_callable
@property
def migration_id(self) -> str:
return self._id
@property
def prev_migration_id(self) -> Optional[str]:
return self._prev
@property
def description(self) -> str:
return self._description
@property
def upgrade(self) -> Optional[Callable[[], update.SchemaUpdate]]:
return self._upgrade
@property
def downgrade(self) -> Optional[Callable[[], update.SchemaUpdate]]:
return self._downgrade
| true
| true
|
1c48959f90f37b6e83807d14a6e7d0a1088b8ed9
| 15,302
|
py
|
Python
|
old_train.py
|
orashi/PaintsPytorch
|
41cf321722a035101758c0717f082d71c12c6cf4
|
[
"MIT"
] | 6
|
2017-10-24T06:04:57.000Z
|
2020-01-20T07:22:14.000Z
|
old_train.py
|
RheaStrike/PaintsPytorch
|
41cf321722a035101758c0717f082d71c12c6cf4
|
[
"MIT"
] | null | null | null |
old_train.py
|
RheaStrike/PaintsPytorch
|
41cf321722a035101758c0717f082d71c12c6cf4
|
[
"MIT"
] | 2
|
2018-08-05T05:12:16.000Z
|
2019-05-19T20:10:23.000Z
|
import argparse
import random
import scipy.stats as stats
import torch.backends.cudnn as cudnn
import torch.optim as optim
import torchvision.utils as vutils
from tensorboardX import SummaryWriter
from torch.autograd import grad
from data.proData import CreateDataLoader
from models.pro_model import *
parser = argparse.ArgumentParser()
parser.add_argument('--datarootC', required=True, help='path to colored dataset')
parser.add_argument('--datarootS', required=True, help='path to sketch dataset')
parser.add_argument('--workers', type=int, help='number of data loading workers', default=4)
parser.add_argument('--batchSize', type=int, default=16, help='input batch size')
parser.add_argument('--imageSize', type=int, default=512, help='the height / width of the input image to network')
parser.add_argument('--cut', type=int, default=1, help='cut backup frequency')
parser.add_argument('--niter', type=int, default=700, help='number of epochs to train for')
parser.add_argument('--ngf', type=int, default=64)
parser.add_argument('--ndf', type=int, default=64)
parser.add_argument('--lrG', type=float, default=0.0001, help='learning rate, default=0.0001')
parser.add_argument('--lrD', type=float, default=0.0001, help='learning rate, default=0.0001')
parser.add_argument('--beta1', type=float, default=0.5, help='beta1 for adam. default=0.5')
parser.add_argument('--cuda', action='store_true', help='enables cuda')
parser.add_argument('--netG', default='', help="path to netG (to continue training)")
parser.add_argument('--netD', default='', help="path to netD (to continue training)")
parser.add_argument('--optim', action='store_true', help='load optimizer\'s checkpoint')
parser.add_argument('--outf', default='.', help='folder to output images and model checkpoints')
parser.add_argument('--Diters', type=int, default=1, help='number of D iters per each G iter')
parser.add_argument('--manualSeed', type=int, default=2345, help='random seed to use. Default=1234')
parser.add_argument('--baseGeni', type=int, default=2500, help='start base of pure pair L1 loss')
parser.add_argument('--geni', type=int, default=0, help='continue gen image num')
parser.add_argument('--epoi', type=int, default=0, help='continue epoch num')
parser.add_argument('--env', type=str, default=None, help='tensorboard env')
parser.add_argument('--advW', type=float, default=0.01, help='adversarial weight, default=0.01')
parser.add_argument('--gpW', type=float, default=10, help='gradient penalty weight')
parser.add_argument('--drift', type=float, default=0.001, help='wasserstein drift weight')
parser.add_argument('--mseW', type=float, default=0.01, help='MSE loss weight')
parser.add_argument('--MSE', action='store_true', help='enables pure MSE')
parser.add_argument('--feat', action='store_true', help='enables feat test')
opt = parser.parse_args()
print(opt)
####### regular set up
if torch.cuda.is_available() and not opt.cuda:
print("WARNING: You have a CUDA device, so you should probably run with --cuda")
gen_iterations = opt.geni
try:
os.makedirs(opt.outf)
except OSError:
pass
# random seed setup # !!!!!
print("Random Seed: ", opt.manualSeed)
random.seed(opt.manualSeed)
torch.manual_seed(opt.manualSeed)
if opt.cuda:
torch.cuda.manual_seed(opt.manualSeed)
cudnn.benchmark = True
####### regular set up end
writer = SummaryWriter(log_dir=opt.env, comment='this is great')
dataloader = CreateDataLoader(opt)
netG = def_netG(ngf=opt.ngf)
if opt.netG != '':
netG.load_state_dict(torch.load(opt.netG))
print(netG)
netD = def_netD(ndf=opt.ndf)
if opt.netD != '':
netD.load_state_dict(torch.load(opt.netD))
print(netD)
netF = def_netF()
print(netF)
criterion_L1 = nn.L1Loss()
criterion_MSE = nn.MSELoss()
L2_dist = nn.PairwiseDistance(2)
one = torch.FloatTensor([1])
mone = one * -1
fixed_sketch = torch.FloatTensor()
fixed_hint = torch.FloatTensor()
saber = torch.FloatTensor([0.485 - 0.5, 0.456 - 0.5, 0.406 - 0.5]).view(1, 3, 1, 1)
diver = torch.FloatTensor([0.229, 0.224, 0.225]).view(1, 3, 1, 1)
if opt.cuda:
netD.cuda()
netG.cuda()
netF.cuda()
fixed_sketch, fixed_hint = fixed_sketch.cuda(), fixed_hint.cuda()
saber, diver = saber.cuda(), diver.cuda()
criterion_L1.cuda()
criterion_MSE.cuda()
one, mone = one.cuda(), mone.cuda()
if opt.feat:
netF2 = def_netF2().cuda()
# setup optimizer
optimizerG = optim.Adam(netG.parameters(), lr=opt.lrG, betas=(opt.beta1, 0.9))
optimizerD = optim.Adam(netD.parameters(), lr=opt.lrD, betas=(opt.beta1, 0.9))
if opt.optim:
optimizerG.load_state_dict(torch.load('%s/optimG_checkpoint.pth' % opt.outf))
optimizerD.load_state_dict(torch.load('%s/optimD_checkpoint.pth' % opt.outf))
# schedulerG = lr_scheduler.ReduceLROnPlateau(optimizerG, mode='max', verbose=True, min_lr=0.0000005,
# patience=8) # 1.5*10^5 iter
# schedulerD = lr_scheduler.ReduceLROnPlateau(optimizerD, mode='max', verbose=True, min_lr=0.0000005,
# patience=8) # 1.5*10^5 iter
# schedulerG = lr_scheduler.MultiStepLR(optimizerG, milestones=[60, 120], gamma=0.1) # 1.5*10^5 iter
# schedulerD = lr_scheduler.MultiStepLR(optimizerD, milestones=[60, 120], gamma=0.1)
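# WGAN-GP style penalty: score random interpolates between real and fake
# batches with D (conditioned on the sketch) and penalize the squared
# deviation of their gradient norm from 1, weighted by opt.gpW.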
def calc_gradient_penalty(netD, real_data, fake_data, sketch):
alpha = torch.rand(opt.batchSize, 1, 1, 1)
alpha = alpha.cuda() if opt.cuda else alpha
interpolates = alpha * real_data + ((1 - alpha) * fake_data)
if opt.cuda:
interpolates = interpolates.cuda()
interpolates = Variable(interpolates, requires_grad=True)
disc_interpolates = netD(interpolates, Variable(sketch))
gradients = grad(outputs=disc_interpolates, inputs=interpolates,
grad_outputs=torch.ones(disc_interpolates.size()).cuda() if opt.cuda else torch.ones(
disc_interpolates.size()),
create_graph=True, retain_graph=True, only_inputs=True)[0]
gradient_penalty = ((gradients.norm(2, dim=1) - 1) ** 2).mean() * opt.gpW
return gradient_penalty
flag = 1
lower, upper = 0, 1
mu, sigma = 1, 0.005
maskS = opt.imageSize // 4
X = stats.truncnorm(
(lower - mu) / sigma, (upper - mu) / sigma, loc=mu, scale=sigma)
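# X draws a keep-threshold from a truncated normal packed tightly around 1
# (mu=1, sigma=0.005, clipped to [0, 1]); rand(...).ge(threshold) is then
# mostly zeros, so the colour-hint masks fed to G stay very sparse.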
for epoch in range(opt.niter):
data_iter = iter(dataloader)
i = 0
while i < len(dataloader) - 4:
############################
# (1) Update D network
###########################
for p in netD.parameters(): # reset requires_grad
p.requires_grad = True # they are set to False below in netG update
for p in netG.parameters():
p.requires_grad = False # to avoid computation
# train the discriminator Diters times
Diters = opt.Diters
if gen_iterations < opt.baseGeni: # L2 stage
Diters = 0
j = 0
while j < Diters and i < len(dataloader) - 4:
j += 1
netD.zero_grad()
data = data_iter.next()
real_cim, real_vim, real_sim = data
i += 1
###############################
if opt.cuda:
real_cim, real_vim, real_sim = real_cim.cuda(), real_vim.cuda(), real_sim.cuda()
mask = torch.cat([torch.rand(1, 1, maskS, maskS).ge(X.rvs(1)[0]).float() for _ in range(opt.batchSize)],
0).cuda()
hint = torch.cat((real_vim * mask, mask), 1)
# train with fake
fake_cim = netG(Variable(real_sim, volatile=True), Variable(hint, volatile=True)).data
errD_fake = netD(Variable(fake_cim), Variable(real_sim)).mean(0).view(1)
            errD_fake.backward(one, retain_graph=True)  # backward on score on fake
errD_real = netD(Variable(real_cim), Variable(real_sim)).mean(0).view(1)
errD = errD_real - errD_fake
errD_realer = -1 * errD_real + errD_real.pow(2) * opt.drift
# additional penalty term to keep the scores from drifting too far from zero
errD_realer.backward(one, retain_graph=True) # backward on score on real
gradient_penalty = calc_gradient_penalty(netD, real_cim, fake_cim, real_sim)
gradient_penalty.backward()
#
# dist = L2_dist(Variable(real_cim).view(opt.batchSize, -1),
# Variable(fake_cim).view(opt.batchSize, -1)).mean()
# lip_est = (errD_real - errD_fake).abs() / (dist + 1e-8)
# lip_loss = opt.gpW * ((1.0 - lip_est) ** 2).mean(0).view(1)
# lip_loss.backward(one)
# gradient_penalty = lip_loss
# above is approximation
optimizerD.step()
############################
# (2) Update G network
############################
if i < len(dataloader) - 4:
if flag: # fix samples
data = zip(*[data_iter.next() for _ in range(4)])
real_cim, real_vim, real_sim = [torch.cat(dat, 0) for dat in data]
i += 1
if opt.cuda:
real_cim, real_vim, real_sim = real_cim.cuda(), real_vim.cuda(), real_sim.cuda()
mask = torch.cat(
[torch.rand(1, 1, maskS, maskS).ge(X.rvs(1)[0]).float() for _ in range(opt.batchSize * 4)],
0).cuda()
hint = torch.cat((real_vim * mask, mask), 1)
writer.add_image('target imgs', vutils.make_grid(real_cim.mul(0.5).add(0.5), nrow=4))
writer.add_image('sketch imgs', vutils.make_grid(real_sim.mul(0.5).add(0.5), nrow=4))
writer.add_image('hint', vutils.make_grid((real_vim * mask).mul(0.5).add(0.5), nrow=4))
vutils.save_image(real_cim.mul(0.5).add(0.5),
'%s/color_samples' % opt.outf + '.png')
vutils.save_image(real_sim.mul(0.5).add(0.5),
'%s/blur_samples' % opt.outf + '.png')
fixed_sketch.resize_as_(real_sim).copy_(real_sim)
fixed_hint.resize_as_(hint).copy_(hint)
flag -= 1
for p in netD.parameters():
p.requires_grad = False # to avoid computation
for p in netG.parameters():
p.requires_grad = True # to avoid computation
netG.zero_grad()
data = data_iter.next()
real_cim, real_vim, real_sim = data
i += 1
if opt.cuda:
real_cim, real_vim, real_sim = real_cim.cuda(), real_vim.cuda(), real_sim.cuda()
mask = torch.cat([torch.rand(1, 1, maskS, maskS).ge(X.rvs(1)[0]).float() for _ in range(opt.batchSize)],
0).cuda()
hint = torch.cat((real_vim * mask, mask), 1)
fake = netG(Variable(real_sim), Variable(hint))
if opt.MSE:
MSELoss = criterion_MSE(fake, Variable(real_cim))
errG = MSELoss
errG.backward()
contentLoss = MSELoss
elif opt.feat:
contentLoss = criterion_MSE(netF2((fake.mul(0.5) - Variable(saber)) / Variable(diver)),
netF2(Variable((real_cim.mul(0.5) - saber) / diver)))
MSELoss = criterion_MSE(netF((fake.mul(0.5) - Variable(saber)) / Variable(diver)),
netF(Variable((real_cim.mul(0.5) - saber) / diver)))
errG = (contentLoss + MSELoss) * 0.5
errG.backward()
elif gen_iterations < opt.baseGeni:
contentLoss = criterion_MSE(netF((fake.mul(0.5) - Variable(saber)) / Variable(diver)),
netF(Variable((real_cim.mul(0.5) - saber) / diver)))
MSELoss = criterion_MSE(fake, Variable(real_cim))
errG = contentLoss + MSELoss * opt.mseW
errG.backward()
else:
errG = netD(fake, Variable(real_sim)).mean(0).view(1) * opt.advW
errG.backward(mone, retain_graph=True)
contentLoss = criterion_MSE(netF((fake.mul(0.5) - Variable(saber)) / Variable(diver)),
netF(Variable((real_cim.mul(0.5) - saber) / diver)))
MSELoss = criterion_MSE(fake, Variable(real_cim))
errg = contentLoss + MSELoss * opt.mseW
errg.backward()
optimizerG.step()
############################
        # (3) Report & periodic image dump
############################
if gen_iterations < opt.baseGeni:
writer.add_scalar('VGG MSE Loss', contentLoss.data[0], gen_iterations)
writer.add_scalar('MSE Loss', MSELoss.data[0], gen_iterations)
print('[%d/%d][%d/%d][%d] content %f '
% (epoch, opt.niter, i, len(dataloader), gen_iterations, contentLoss.data[0]))
else:
writer.add_scalar('VGG MSE Loss', contentLoss.data[0], gen_iterations)
writer.add_scalar('MSE Loss', MSELoss.data[0], gen_iterations)
writer.add_scalar('wasserstein distance', errD.data[0], gen_iterations)
writer.add_scalar('errD_real', errD_real.data[0], gen_iterations)
writer.add_scalar('errD_fake', errD_fake.data[0], gen_iterations)
writer.add_scalar('Gnet loss toward real', errG.data[0], gen_iterations)
writer.add_scalar('gradient_penalty', gradient_penalty.data[0], gen_iterations)
print('[%d/%d][%d/%d][%d] errD: %f err_G: %f err_D_real: %f err_D_fake %f content loss %f'
% (epoch, opt.niter, i, len(dataloader), gen_iterations,
errD.data[0], errG.data[0], errD_real.data[0], errD_fake.data[0], contentLoss.data[0]))
if gen_iterations % 500 == 0:
fake = netG(Variable(fixed_sketch, volatile=True), Variable(fixed_hint, volatile=True))
writer.add_image('deblur imgs', vutils.make_grid(fake.data.mul(0.5).add(0.5), nrow=4),
gen_iterations)
# if gen_iterations % 2000 == 0:
# for name, param in netG.named_parameters():
# writer.add_histogram('netG ' + name, param.clone().cpu().data.numpy(), gen_iterations)
# for name, param in netD.named_parameters():
# writer.add_histogram('netD ' + name, param.clone().cpu().data.numpy(), gen_iterations)
# vutils.save_image(fake.data.mul(0.5).add(0.5),
# '%s/fake_samples_gen_iter_%08d.png' % (opt.outf, gen_iterations))
gen_iterations += 1
# do checkpointing
if opt.cut == 0:
torch.save(netG.state_dict(), '%s/netG_epoch_only.pth' % opt.outf)
torch.save(netD.state_dict(), '%s/netD_epoch_only.pth' % opt.outf)
elif epoch % opt.cut == 0:
torch.save(netG.state_dict(), '%s/netG_epoch_%d.pth' % (opt.outf, epoch))
torch.save(netD.state_dict(), '%s/netD_epoch_%d.pth' % (opt.outf, epoch))
torch.save(optimizerG.state_dict(), '%s/optimG_checkpoint.pth' % opt.outf)
torch.save(optimizerD.state_dict(), '%s/optimD_checkpoint.pth' % opt.outf)
| 44.8739
| 116
| 0.603647
|
import argparse
import random
import scipy.stats as stats
import torch.backends.cudnn as cudnn
import torch.optim as optim
import torchvision.utils as vutils
from tensorboardX import SummaryWriter
from torch.autograd import grad
from data.proData import CreateDataLoader
from models.pro_model import *
parser = argparse.ArgumentParser()
parser.add_argument('--datarootC', required=True, help='path to colored dataset')
parser.add_argument('--datarootS', required=True, help='path to sketch dataset')
parser.add_argument('--workers', type=int, help='number of data loading workers', default=4)
parser.add_argument('--batchSize', type=int, default=16, help='input batch size')
parser.add_argument('--imageSize', type=int, default=512, help='the height / width of the input image to network')
parser.add_argument('--cut', type=int, default=1, help='cut backup frequency')
parser.add_argument('--niter', type=int, default=700, help='number of epochs to train for')
parser.add_argument('--ngf', type=int, default=64)
parser.add_argument('--ndf', type=int, default=64)
parser.add_argument('--lrG', type=float, default=0.0001, help='learning rate, default=0.0001')
parser.add_argument('--lrD', type=float, default=0.0001, help='learning rate, default=0.0001')
parser.add_argument('--beta1', type=float, default=0.5, help='beta1 for adam. default=0.5')
parser.add_argument('--cuda', action='store_true', help='enables cuda')
parser.add_argument('--netG', default='', help="path to netG (to continue training)")
parser.add_argument('--netD', default='', help="path to netD (to continue training)")
parser.add_argument('--optim', action='store_true', help='load optimizer\'s checkpoint')
parser.add_argument('--outf', default='.', help='folder to output images and model checkpoints')
parser.add_argument('--Diters', type=int, default=1, help='number of D iters per each G iter')
parser.add_argument('--manualSeed', type=int, default=2345, help='random seed to use. Default=1234')
parser.add_argument('--baseGeni', type=int, default=2500, help='start base of pure pair L1 loss')
parser.add_argument('--geni', type=int, default=0, help='continue gen image num')
parser.add_argument('--epoi', type=int, default=0, help='continue epoch num')
parser.add_argument('--env', type=str, default=None, help='tensorboard env')
parser.add_argument('--advW', type=float, default=0.01, help='adversarial weight, default=0.01')
parser.add_argument('--gpW', type=float, default=10, help='gradient penalty weight')
parser.add_argument('--drift', type=float, default=0.001, help='wasserstein drift weight')
parser.add_argument('--mseW', type=float, default=0.01, help='MSE loss weight')
parser.add_argument('--MSE', action='store_true', help='enables pure MSE')
parser.add_argument('--feat', action='store_true', help='enables feat test')
opt = parser.parse_args()
print(opt)
####### regular set up
if torch.cuda.is_available() and not opt.cuda:
print("WARNING: You have a CUDA device, so you should probably run with --cuda")
gen_iterations = opt.geni
try:
os.makedirs(opt.outf)
except OSError:
pass
# random seed setup # !!!!!
print("Random Seed: ", opt.manualSeed)
random.seed(opt.manualSeed)
torch.manual_seed(opt.manualSeed)
if opt.cuda:
torch.cuda.manual_seed(opt.manualSeed)
cudnn.benchmark = True
####### regular set up end
writer = SummaryWriter(log_dir=opt.env, comment='this is great')
dataloader = CreateDataLoader(opt)
netG = def_netG(ngf=opt.ngf)
if opt.netG != '':
netG.load_state_dict(torch.load(opt.netG))
print(netG)
netD = def_netD(ndf=opt.ndf)
if opt.netD != '':
netD.load_state_dict(torch.load(opt.netD))
print(netD)
netF = def_netF()
print(netF)
criterion_L1 = nn.L1Loss()
criterion_MSE = nn.MSELoss()
L2_dist = nn.PairwiseDistance(2)
one = torch.FloatTensor([1])
mone = one * -1
fixed_sketch = torch.FloatTensor()
fixed_hint = torch.FloatTensor()
saber = torch.FloatTensor([0.485 - 0.5, 0.456 - 0.5, 0.406 - 0.5]).view(1, 3, 1, 1)
diver = torch.FloatTensor([0.229, 0.224, 0.225]).view(1, 3, 1, 1)
if opt.cuda:
netD.cuda()
netG.cuda()
netF.cuda()
fixed_sketch, fixed_hint = fixed_sketch.cuda(), fixed_hint.cuda()
saber, diver = saber.cuda(), diver.cuda()
criterion_L1.cuda()
criterion_MSE.cuda()
one, mone = one.cuda(), mone.cuda()
if opt.feat:
netF2 = def_netF2().cuda()
# setup optimizer
optimizerG = optim.Adam(netG.parameters(), lr=opt.lrG, betas=(opt.beta1, 0.9))
optimizerD = optim.Adam(netD.parameters(), lr=opt.lrD, betas=(opt.beta1, 0.9))
if opt.optim:
optimizerG.load_state_dict(torch.load('%s/optimG_checkpoint.pth' % opt.outf))
optimizerD.load_state_dict(torch.load('%s/optimD_checkpoint.pth' % opt.outf))
# schedulerG = lr_scheduler.ReduceLROnPlateau(optimizerG, mode='max', verbose=True, min_lr=0.0000005,
# patience=8) # 1.5*10^5 iter
# schedulerD = lr_scheduler.ReduceLROnPlateau(optimizerD, mode='max', verbose=True, min_lr=0.0000005,
# patience=8) # 1.5*10^5 iter
# schedulerG = lr_scheduler.MultiStepLR(optimizerG, milestones=[60, 120], gamma=0.1) # 1.5*10^5 iter
# schedulerD = lr_scheduler.MultiStepLR(optimizerD, milestones=[60, 120], gamma=0.1)
def calc_gradient_penalty(netD, real_data, fake_data, sketch):
alpha = torch.rand(opt.batchSize, 1, 1, 1)
alpha = alpha.cuda() if opt.cuda else alpha
interpolates = alpha * real_data + ((1 - alpha) * fake_data)
if opt.cuda:
interpolates = interpolates.cuda()
interpolates = Variable(interpolates, requires_grad=True)
disc_interpolates = netD(interpolates, Variable(sketch))
gradients = grad(outputs=disc_interpolates, inputs=interpolates,
grad_outputs=torch.ones(disc_interpolates.size()).cuda() if opt.cuda else torch.ones(
disc_interpolates.size()),
create_graph=True, retain_graph=True, only_inputs=True)[0]
gradient_penalty = ((gradients.norm(2, dim=1) - 1) ** 2).mean() * opt.gpW
return gradient_penalty
flag = 1
lower, upper = 0, 1
mu, sigma = 1, 0.005
maskS = opt.imageSize // 4
X = stats.truncnorm(
(lower - mu) / sigma, (upper - mu) / sigma, loc=mu, scale=sigma)
for epoch in range(opt.niter):
data_iter = iter(dataloader)
i = 0
while i < len(dataloader) - 4:
############################
# (1) Update D network
###########################
for p in netD.parameters(): # reset requires_grad
p.requires_grad = True # they are set to False below in netG update
for p in netG.parameters():
p.requires_grad = False # to avoid computation
# train the discriminator Diters times
Diters = opt.Diters
if gen_iterations < opt.baseGeni: # L2 stage
Diters = 0
j = 0
while j < Diters and i < len(dataloader) - 4:
j += 1
netD.zero_grad()
data = data_iter.next()
real_cim, real_vim, real_sim = data
i += 1
###############################
if opt.cuda:
real_cim, real_vim, real_sim = real_cim.cuda(), real_vim.cuda(), real_sim.cuda()
mask = torch.cat([torch.rand(1, 1, maskS, maskS).ge(X.rvs(1)[0]).float() for _ in range(opt.batchSize)],
0).cuda()
hint = torch.cat((real_vim * mask, mask), 1)
# train with fake
fake_cim = netG(Variable(real_sim, volatile=True), Variable(hint, volatile=True)).data
errD_fake = netD(Variable(fake_cim), Variable(real_sim)).mean(0).view(1)
            errD_fake.backward(one, retain_graph=True)  # backward on score on fake
errD_real = netD(Variable(real_cim), Variable(real_sim)).mean(0).view(1)
errD = errD_real - errD_fake
errD_realer = -1 * errD_real + errD_real.pow(2) * opt.drift
# additional penalty term to keep the scores from drifting too far from zero
errD_realer.backward(one, retain_graph=True) # backward on score on real
gradient_penalty = calc_gradient_penalty(netD, real_cim, fake_cim, real_sim)
gradient_penalty.backward()
#
# dist = L2_dist(Variable(real_cim).view(opt.batchSize, -1),
# Variable(fake_cim).view(opt.batchSize, -1)).mean()
# lip_est = (errD_real - errD_fake).abs() / (dist + 1e-8)
# lip_loss = opt.gpW * ((1.0 - lip_est) ** 2).mean(0).view(1)
# lip_loss.backward(one)
# gradient_penalty = lip_loss
# above is approximation
optimizerD.step()
############################
# (2) Update G network
############################
if i < len(dataloader) - 4:
if flag: # fix samples
data = zip(*[data_iter.next() for _ in range(4)])
real_cim, real_vim, real_sim = [torch.cat(dat, 0) for dat in data]
i += 1
if opt.cuda:
real_cim, real_vim, real_sim = real_cim.cuda(), real_vim.cuda(), real_sim.cuda()
mask = torch.cat(
[torch.rand(1, 1, maskS, maskS).ge(X.rvs(1)[0]).float() for _ in range(opt.batchSize * 4)],
0).cuda()
hint = torch.cat((real_vim * mask, mask), 1)
writer.add_image('target imgs', vutils.make_grid(real_cim.mul(0.5).add(0.5), nrow=4))
writer.add_image('sketch imgs', vutils.make_grid(real_sim.mul(0.5).add(0.5), nrow=4))
writer.add_image('hint', vutils.make_grid((real_vim * mask).mul(0.5).add(0.5), nrow=4))
vutils.save_image(real_cim.mul(0.5).add(0.5),
'%s/color_samples' % opt.outf + '.png')
vutils.save_image(real_sim.mul(0.5).add(0.5),
'%s/blur_samples' % opt.outf + '.png')
fixed_sketch.resize_as_(real_sim).copy_(real_sim)
fixed_hint.resize_as_(hint).copy_(hint)
flag -= 1
for p in netD.parameters():
p.requires_grad = False # to avoid computation
for p in netG.parameters():
p.requires_grad = True # to avoid computation
netG.zero_grad()
data = data_iter.next()
real_cim, real_vim, real_sim = data
i += 1
if opt.cuda:
real_cim, real_vim, real_sim = real_cim.cuda(), real_vim.cuda(), real_sim.cuda()
mask = torch.cat([torch.rand(1, 1, maskS, maskS).ge(X.rvs(1)[0]).float() for _ in range(opt.batchSize)],
0).cuda()
hint = torch.cat((real_vim * mask, mask), 1)
fake = netG(Variable(real_sim), Variable(hint))
if opt.MSE:
MSELoss = criterion_MSE(fake, Variable(real_cim))
errG = MSELoss
errG.backward()
contentLoss = MSELoss
elif opt.feat:
contentLoss = criterion_MSE(netF2((fake.mul(0.5) - Variable(saber)) / Variable(diver)),
netF2(Variable((real_cim.mul(0.5) - saber) / diver)))
MSELoss = criterion_MSE(netF((fake.mul(0.5) - Variable(saber)) / Variable(diver)),
netF(Variable((real_cim.mul(0.5) - saber) / diver)))
errG = (contentLoss + MSELoss) * 0.5
errG.backward()
elif gen_iterations < opt.baseGeni:
contentLoss = criterion_MSE(netF((fake.mul(0.5) - Variable(saber)) / Variable(diver)),
netF(Variable((real_cim.mul(0.5) - saber) / diver)))
MSELoss = criterion_MSE(fake, Variable(real_cim))
errG = contentLoss + MSELoss * opt.mseW
errG.backward()
else:
errG = netD(fake, Variable(real_sim)).mean(0).view(1) * opt.advW
errG.backward(mone, retain_graph=True)
contentLoss = criterion_MSE(netF((fake.mul(0.5) - Variable(saber)) / Variable(diver)),
netF(Variable((real_cim.mul(0.5) - saber) / diver)))
MSELoss = criterion_MSE(fake, Variable(real_cim))
errg = contentLoss + MSELoss * opt.mseW
errg.backward()
optimizerG.step()
############################
        # (3) Report & periodic image dump
############################
if gen_iterations < opt.baseGeni:
writer.add_scalar('VGG MSE Loss', contentLoss.data[0], gen_iterations)
writer.add_scalar('MSE Loss', MSELoss.data[0], gen_iterations)
print('[%d/%d][%d/%d][%d] content %f '
% (epoch, opt.niter, i, len(dataloader), gen_iterations, contentLoss.data[0]))
else:
writer.add_scalar('VGG MSE Loss', contentLoss.data[0], gen_iterations)
writer.add_scalar('MSE Loss', MSELoss.data[0], gen_iterations)
writer.add_scalar('wasserstein distance', errD.data[0], gen_iterations)
writer.add_scalar('errD_real', errD_real.data[0], gen_iterations)
writer.add_scalar('errD_fake', errD_fake.data[0], gen_iterations)
writer.add_scalar('Gnet loss toward real', errG.data[0], gen_iterations)
writer.add_scalar('gradient_penalty', gradient_penalty.data[0], gen_iterations)
print('[%d/%d][%d/%d][%d] errD: %f err_G: %f err_D_real: %f err_D_fake %f content loss %f'
% (epoch, opt.niter, i, len(dataloader), gen_iterations,
errD.data[0], errG.data[0], errD_real.data[0], errD_fake.data[0], contentLoss.data[0]))
if gen_iterations % 500 == 0:
fake = netG(Variable(fixed_sketch, volatile=True), Variable(fixed_hint, volatile=True))
writer.add_image('deblur imgs', vutils.make_grid(fake.data.mul(0.5).add(0.5), nrow=4),
gen_iterations)
# if gen_iterations % 2000 == 0:
# for name, param in netG.named_parameters():
# writer.add_histogram('netG ' + name, param.clone().cpu().data.numpy(), gen_iterations)
# for name, param in netD.named_parameters():
# writer.add_histogram('netD ' + name, param.clone().cpu().data.numpy(), gen_iterations)
# vutils.save_image(fake.data.mul(0.5).add(0.5),
# '%s/fake_samples_gen_iter_%08d.png' % (opt.outf, gen_iterations))
gen_iterations += 1
# do checkpointing
if opt.cut == 0:
torch.save(netG.state_dict(), '%s/netG_epoch_only.pth' % opt.outf)
torch.save(netD.state_dict(), '%s/netD_epoch_only.pth' % opt.outf)
elif epoch % opt.cut == 0:
torch.save(netG.state_dict(), '%s/netG_epoch_%d.pth' % (opt.outf, epoch))
torch.save(netD.state_dict(), '%s/netD_epoch_%d.pth' % (opt.outf, epoch))
torch.save(optimizerG.state_dict(), '%s/optimG_checkpoint.pth' % opt.outf)
torch.save(optimizerD.state_dict(), '%s/optimD_checkpoint.pth' % opt.outf)
| true
| true
|
1c489617d9fd207bbe9807beb2752951bfbeeb6d
| 6,096
|
py
|
Python
|
measure/c2c.py
|
lanshanxunmeng/rce
|
85cd8d2911a374df7f4185ff96c32155fc8f1644
|
[
"Apache-2.0"
] | 13
|
2018-03-19T02:31:43.000Z
|
2022-03-18T22:58:48.000Z
|
measure/c2c.py
|
lanshanxunmeng/rce
|
85cd8d2911a374df7f4185ff96c32155fc8f1644
|
[
"Apache-2.0"
] | 1
|
2019-02-19T03:58:10.000Z
|
2020-07-15T10:01:03.000Z
|
measure/c2c.py
|
lanshanxunmeng/rce
|
85cd8d2911a374df7f4185ff96c32155fc8f1644
|
[
"Apache-2.0"
] | 14
|
2018-07-30T12:06:57.000Z
|
2021-12-19T22:53:17.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# c2c.py
#
# This file is part of the RoboEarth Cloud Engine framework.
#
# This file was originally created for RoboEearth
# http://www.roboearth.org/
#
# The research leading to these results has received funding from
# the European Union Seventh Framework Programme FP7/2007-2013 under
# grant agreement no. 248942 RoboEarth.
#
# Copyright 2013 RoboEarth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# \author/s: Dominique Hunziker
#
#
# Python specific imports
import json
# twisted specific imports
from twisted.internet.defer import Deferred
# rce specific imports
from rce.client.connection import Connection
# local imports
from base import PASSES, SIZES, delay, RemoteTest
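# Chains one service-based and one topic-based string-echo test on a single
# Deferred, repeats them `runs` times across two containers, and writes the
# collected results to 'c2c.data' in _postProcess.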
class Measurement(object):
TAG = 'tester'
TYPES = ('service', 'topic')
def __init__(self, runs, conn, robot, reactor):
self._conn = conn
self._robot = robot
self._tests = [RemoteTest(conn, self.TAG, name) for name in self.TYPES]
self._deferred = d = Deferred()
d.addCallback(self._setup)
d.addCallback(delay, 20, reactor)
for _ in xrange(runs):
for test in self._tests:
d.addCallback(test.run)
d.addCallback(self._postProcess)
d.addCallback(delay, 2, reactor)
def stop(_):
reactor.stop()
print('\ndone')
d.addCallback(stop)
def run(self, _):
if not self._tests:
print('No tests to run.')
return
        if self._deferred.called:
            print('Can run the measurement only once.')
            return
        self._deferred.callback(None)
def _setup(self, _):
print('Setup environments...')
### Add containers
self._cTag = ('c1', 'c2')
for cTag in self._cTag:
self._conn.createContainer(cTag)
### Add the nodes
self._conn.addNode(self._cTag[0], 'strTester', 'Test',
'stringTester.py')
self._conn.addNode(self._cTag[1], 'strEcho', 'Test', 'stringEcho.py')
### Add interfaces and connections
# Connections Robot - StringTester
tag = self.TAG
cls = 'Test/StringTest'
self._conn.addInterface(self._cTag[0], tag, 'ServiceClientInterface',
cls, 'stringTest')
self._conn.addInterface(self._robot, tag, 'ServiceProviderConverter',
cls)
self._conn.addConnection('{0}/{1}'.format(self._cTag[0], tag),
'{0}/{1}'.format(self._robot, tag))
# Connections StringTester - StringEcho (service)
tag = 'testEchoSrv'
cls = 'Test/StringEcho'
srv = 'stringEchoService'
self._conn.addInterface(self._cTag[0], tag, 'ServiceProviderInterface',
cls, srv)
self._conn.addInterface(self._cTag[1], tag, 'ServiceClientInterface',
cls, srv)
self._conn.addConnection('{0}/{1}'.format(self._cTag[0], tag),
'{0}/{1}'.format(self._cTag[1], tag))
# Connections StringTester - StringEcho (topic)
tag = 'testEchoReq'
cls = 'std_msgs/String'
tpc = 'stringEchoReq'
self._conn.addInterface(self._cTag[0], tag, 'SubscriberInterface',
cls, tpc)
self._conn.addInterface(self._cTag[1], tag, 'PublisherInterface',
cls, tpc)
self._conn.addConnection('{0}/{1}'.format(self._cTag[0], tag),
'{0}/{1}'.format(self._cTag[1], tag))
tag = 'testEchoResp'
cls = 'std_msgs/String'
tpc = 'stringEchoResp'
self._conn.addInterface(self._cTag[0], tag, 'PublisherInterface',
cls, tpc)
self._conn.addInterface(self._cTag[1], tag, 'SubscriberInterface',
cls, tpc)
self._conn.addConnection('{0}/{1}'.format(self._cTag[0], tag),
'{0}/{1}'.format(self._cTag[1], tag))
def _postProcess(self, _):
with open('c2c.data', 'w') as f:
f.write(json.dumps(SIZES))
f.write('\n')
for test in self._tests:
f.write(str(test))
f.write('\n')
for cTag in self._cTag:
self._conn.destroyContainer(cTag)
def _get_argparse():
from argparse import ArgumentParser
parser = ArgumentParser(prog='c2c',
description='Run communication measurement for RCE '
'between two containers using '
'a string message.')
parser.add_argument('--passes', help='Number of passes to do.',
type=int, default=PASSES)
parser.add_argument('ipMaster', help='IP address of master process.',
type=str)
return parser
def main(reactor, passes, ip):
user = 'testUser'
robot = 'testRobot'
connection = Connection(user, robot, user, reactor)
measurement = Measurement(passes, connection, robot, reactor)
print('Connect...')
d = Deferred()
d.addCallback(measurement.run)
connection.connect('http://{0}:9000/'.format(ip), d)
reactor.run()
if __name__ == '__main__':
from twisted.internet import reactor
args = _get_argparse().parse_args()
main(reactor, args.passes, args.ipMaster)
| 32.253968
| 80
| 0.577264
|
import json
from twisted.internet.defer import Deferred
from rce.client.connection import Connection
from base import PASSES, SIZES, delay, RemoteTest
class Measurement(object):
TAG = 'tester'
TYPES = ('service', 'topic')
def __init__(self, runs, conn, robot, reactor):
self._conn = conn
self._robot = robot
self._tests = [RemoteTest(conn, self.TAG, name) for name in self.TYPES]
self._deferred = d = Deferred()
d.addCallback(self._setup)
d.addCallback(delay, 20, reactor)
for _ in xrange(runs):
for test in self._tests:
d.addCallback(test.run)
d.addCallback(self._postProcess)
d.addCallback(delay, 2, reactor)
def stop(_):
reactor.stop()
print('\ndone')
d.addCallback(stop)
def run(self, _):
if not self._tests:
print('No tests to run.')
return
        if self._deferred.called:
            print('Can run the measurement only once.')
            return
        self._deferred.callback(None)
def _setup(self, _):
print('Setup environments...')
        self._cTag = ('c1', 'c2')
        for cTag in self._cTag:
            self._conn.createContainer(cTag)
        self._conn.addNode(self._cTag[0], 'strTester', 'Test',
                           'stringTester.py')
        self._conn.addNode(self._cTag[1], 'strEcho', 'Test', 'stringEcho.py')
        tag = self.TAG
        cls = 'Test/StringTest'
self._conn.addInterface(self._cTag[0], tag, 'ServiceClientInterface',
cls, 'stringTest')
self._conn.addInterface(self._robot, tag, 'ServiceProviderConverter',
cls)
self._conn.addConnection('{0}/{1}'.format(self._cTag[0], tag),
'{0}/{1}'.format(self._robot, tag))
tag = 'testEchoSrv'
cls = 'Test/StringEcho'
srv = 'stringEchoService'
self._conn.addInterface(self._cTag[0], tag, 'ServiceProviderInterface',
cls, srv)
self._conn.addInterface(self._cTag[1], tag, 'ServiceClientInterface',
cls, srv)
self._conn.addConnection('{0}/{1}'.format(self._cTag[0], tag),
'{0}/{1}'.format(self._cTag[1], tag))
tag = 'testEchoReq'
cls = 'std_msgs/String'
tpc = 'stringEchoReq'
self._conn.addInterface(self._cTag[0], tag, 'SubscriberInterface',
cls, tpc)
self._conn.addInterface(self._cTag[1], tag, 'PublisherInterface',
cls, tpc)
self._conn.addConnection('{0}/{1}'.format(self._cTag[0], tag),
'{0}/{1}'.format(self._cTag[1], tag))
tag = 'testEchoResp'
cls = 'std_msgs/String'
tpc = 'stringEchoResp'
self._conn.addInterface(self._cTag[0], tag, 'PublisherInterface',
cls, tpc)
self._conn.addInterface(self._cTag[1], tag, 'SubscriberInterface',
cls, tpc)
self._conn.addConnection('{0}/{1}'.format(self._cTag[0], tag),
'{0}/{1}'.format(self._cTag[1], tag))
def _postProcess(self, _):
with open('c2c.data', 'w') as f:
f.write(json.dumps(SIZES))
f.write('\n')
for test in self._tests:
f.write(str(test))
f.write('\n')
for cTag in self._cTag:
self._conn.destroyContainer(cTag)
def _get_argparse():
from argparse import ArgumentParser
parser = ArgumentParser(prog='c2c',
description='Run communication measurement for RCE '
'between two containers using '
'a string message.')
parser.add_argument('--passes', help='Number of passes to do.',
type=int, default=PASSES)
parser.add_argument('ipMaster', help='IP address of master process.',
type=str)
return parser
def main(reactor, passes, ip):
user = 'testUser'
robot = 'testRobot'
connection = Connection(user, robot, user, reactor)
measurement = Measurement(passes, connection, robot, reactor)
print('Connect...')
d = Deferred()
d.addCallback(measurement.run)
connection.connect('http://{0}:9000/'.format(ip), d)
reactor.run()
if __name__ == '__main__':
from twisted.internet import reactor
args = _get_argparse().parse_args()
main(reactor, args.passes, args.ipMaster)
| true
| true
|
1c4896d7129120c99558c87cc228086d7992affa
| 343
|
py
|
Python
|
taobao-tianmao/top/api/rest/LogisticsAddressSearchRequest.py
|
ScottLeeF/python-example
|
0b230ba80fe5020d70329a9d73e058013f0ca111
|
[
"Apache-2.0"
] | null | null | null |
taobao-tianmao/top/api/rest/LogisticsAddressSearchRequest.py
|
ScottLeeF/python-example
|
0b230ba80fe5020d70329a9d73e058013f0ca111
|
[
"Apache-2.0"
] | 7
|
2021-03-19T02:12:42.000Z
|
2022-03-12T00:25:28.000Z
|
taobao-tianmao/top/api/rest/LogisticsAddressSearchRequest.py
|
ScottLeeF/python-example
|
0b230ba80fe5020d70329a9d73e058013f0ca111
|
[
"Apache-2.0"
] | null | null | null |
'''
Created by auto_sdk on 2018.07.26
'''
from top.api.base import RestApi
class LogisticsAddressSearchRequest(RestApi):
def __init__(self, domain='gw.api.taobao.com', port=80):
RestApi.__init__(self, domain, port)
self.rdef = None
def getapiname(self):
return 'taobao.logistics.address.search'
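# A minimal usage sketch (the surrounding TOP SDK supplies the execute flow;
# the rdef value below is an illustrative placeholder, not from this file):
if __name__ == '__main__':
    req = LogisticsAddressSearchRequest()
    req.rdef = 'example-query'
    print(req.getapiname())  # -> taobao.logistics.address.search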
| 24.5
| 61
| 0.664723
|
from top.api.base import RestApi
class LogisticsAddressSearchRequest(RestApi):
def __init__(self, domain='gw.api.taobao.com', port=80):
RestApi.__init__(self, domain, port)
self.rdef = None
def getapiname(self):
return 'taobao.logistics.address.search'
| true
| true
|
1c489710cf497c8388e0ab66f48d405937b4e014
| 3,316
|
py
|
Python
|
direct/interval/IntervalManager.py
|
theclashingfritz/Cog-Invasion-Online-Dump
|
2561abbacb3e2e288e06f3f04b935b5ed589c8f8
|
[
"Apache-2.0"
] | 1
|
2020-03-12T16:44:10.000Z
|
2020-03-12T16:44:10.000Z
|
direct/interval/IntervalManager.py
|
theclashingfritz/Cog-Invasion-Online-Dump
|
2561abbacb3e2e288e06f3f04b935b5ed589c8f8
|
[
"Apache-2.0"
] | null | null | null |
direct/interval/IntervalManager.py
|
theclashingfritz/Cog-Invasion-Online-Dump
|
2561abbacb3e2e288e06f3f04b935b5ed589c8f8
|
[
"Apache-2.0"
] | null | null | null |
# uncompyle6 version 3.2.4
# Python bytecode 2.7 (62211)
# Decompiled from: Python 2.7.15 (v2.7.15:ca079a3ea3, Apr 30 2018, 16:30:26) [MSC v.1500 64 bit (AMD64)]
# Embedded file name: direct.interval.IntervalManager
__all__ = [
'IntervalManager', 'ivalMgr']
from panda3d.core import *
from panda3d.direct import *
from direct.directnotify.DirectNotifyGlobal import *
from direct.showbase import EventManager
import fnmatch
class IntervalManager(CIntervalManager):
def __init__(self, globalPtr=0):
if globalPtr:
self.cObj = CIntervalManager.getGlobalPtr()
Dtool_BorrowThisReference(self, self.cObj)
self.dd = self
else:
CIntervalManager.__init__(self)
self.eventQueue = EventQueue()
self.MyEventmanager = EventManager.EventManager(self.eventQueue)
self.setEventQueue(self.eventQueue)
self.ivals = []
self.removedIvals = {}
def addInterval(self, interval):
index = self.addCInterval(interval, 1)
self.__storeInterval(interval, index)
def removeInterval(self, interval):
index = self.findCInterval(interval.getName())
if index >= 0:
self.removeCInterval(index)
if index < len(self.ivals):
self.ivals[index] = None
return 1
return 0
def getInterval(self, name):
index = self.findCInterval(name)
if index >= 0:
if index < len(self.ivals) and self.ivals[index]:
return self.ivals[index]
return self.getCInterval(index)
return
def getIntervalsMatching(self, pattern):
ivals = []
count = 0
maxIndex = self.getMaxIndex()
for index in range(maxIndex):
ival = self.getCInterval(index)
if ival and fnmatch.fnmatchcase(ival.getName(), pattern):
count += 1
if index < len(self.ivals) and self.ivals[index]:
ivals.append(self.ivals[index])
else:
ivals.append(ival)
return ivals
def finishIntervalsMatching(self, pattern):
ivals = self.getIntervalsMatching(pattern)
for ival in ivals:
ival.finish()
return len(ivals)
def pauseIntervalsMatching(self, pattern):
ivals = self.getIntervalsMatching(pattern)
for ival in ivals:
ival.pause()
return len(ivals)
def step(self):
CIntervalManager.step(self)
self.__doPythonCallbacks()
def interrupt(self):
CIntervalManager.interrupt(self)
self.__doPythonCallbacks()
def __doPythonCallbacks(self):
index = self.getNextRemoval()
while index >= 0:
ival = self.ivals[index]
self.ivals[index] = None
ival.privPostEvent()
index = self.getNextRemoval()
index = self.getNextEvent()
while index >= 0:
self.ivals[index].privPostEvent()
index = self.getNextEvent()
self.MyEventmanager.doEvents()
return
def __storeInterval(self, interval, index):
while index >= len(self.ivals):
self.ivals.append(None)
self.ivals[index] = interval
return
ivalMgr = IntervalManager(1)
| 30.145455
| 104
| 0.606152
|
__all__ = [
'IntervalManager', 'ivalMgr']
from panda3d.core import *
from panda3d.direct import *
from direct.directnotify.DirectNotifyGlobal import *
from direct.showbase import EventManager
import fnmatch
class IntervalManager(CIntervalManager):
def __init__(self, globalPtr=0):
if globalPtr:
self.cObj = CIntervalManager.getGlobalPtr()
Dtool_BorrowThisReference(self, self.cObj)
self.dd = self
else:
CIntervalManager.__init__(self)
self.eventQueue = EventQueue()
self.MyEventmanager = EventManager.EventManager(self.eventQueue)
self.setEventQueue(self.eventQueue)
self.ivals = []
self.removedIvals = {}
def addInterval(self, interval):
index = self.addCInterval(interval, 1)
self.__storeInterval(interval, index)
def removeInterval(self, interval):
index = self.findCInterval(interval.getName())
if index >= 0:
self.removeCInterval(index)
if index < len(self.ivals):
self.ivals[index] = None
return 1
return 0
def getInterval(self, name):
index = self.findCInterval(name)
if index >= 0:
if index < len(self.ivals) and self.ivals[index]:
return self.ivals[index]
return self.getCInterval(index)
return
def getIntervalsMatching(self, pattern):
ivals = []
count = 0
maxIndex = self.getMaxIndex()
for index in range(maxIndex):
ival = self.getCInterval(index)
if ival and fnmatch.fnmatchcase(ival.getName(), pattern):
count += 1
if index < len(self.ivals) and self.ivals[index]:
ivals.append(self.ivals[index])
else:
ivals.append(ival)
return ivals
def finishIntervalsMatching(self, pattern):
ivals = self.getIntervalsMatching(pattern)
for ival in ivals:
ival.finish()
return len(ivals)
def pauseIntervalsMatching(self, pattern):
ivals = self.getIntervalsMatching(pattern)
for ival in ivals:
ival.pause()
return len(ivals)
def step(self):
CIntervalManager.step(self)
self.__doPythonCallbacks()
def interrupt(self):
CIntervalManager.interrupt(self)
self.__doPythonCallbacks()
def __doPythonCallbacks(self):
index = self.getNextRemoval()
while index >= 0:
ival = self.ivals[index]
self.ivals[index] = None
ival.privPostEvent()
index = self.getNextRemoval()
index = self.getNextEvent()
while index >= 0:
self.ivals[index].privPostEvent()
index = self.getNextEvent()
self.MyEventmanager.doEvents()
return
def __storeInterval(self, interval, index):
while index >= len(self.ivals):
self.ivals.append(None)
self.ivals[index] = interval
return
ivalMgr = IntervalManager(1)
| true
| true
|
1c489753a042f89c1412e6a49974168a517d4eef
| 1,732
|
py
|
Python
|
rpython/jit/metainterp/jitdriver.py
|
nanjekyejoannah/pypy
|
e80079fe13c29eda7b2a6b4cd4557051f975a2d9
|
[
"Apache-2.0",
"OpenSSL"
] | 381
|
2018-08-18T03:37:22.000Z
|
2022-02-06T23:57:36.000Z
|
rpython/jit/metainterp/jitdriver.py
|
nanjekyejoannah/pypy
|
e80079fe13c29eda7b2a6b4cd4557051f975a2d9
|
[
"Apache-2.0",
"OpenSSL"
] | 16
|
2018-09-22T18:12:47.000Z
|
2022-02-22T20:03:59.000Z
|
rpython/jit/metainterp/jitdriver.py
|
nanjekyejoannah/pypy
|
e80079fe13c29eda7b2a6b4cd4557051f975a2d9
|
[
"Apache-2.0",
"OpenSSL"
] | 55
|
2015-08-16T02:41:30.000Z
|
2022-03-20T20:33:35.000Z
|
class JitDriverStaticData(object):
"""There is one instance of this class per JitDriver used in the program.
"""
# This is just a container with the following attributes (... set by):
# self.jitdriver ... rpython.jit.metainterp.warmspot
# self.portal_graph ... rpython.jit.metainterp.warmspot
# self.portal_runner_ptr ... rpython.jit.metainterp.warmspot
# self.portal_runner_adr ... rpython.jit.metainterp.warmspot
# self.portal_calldescr ... rpython.jit.metainterp.warmspot
# self.num_green_args ... rpython.jit.metainterp.warmspot
# self.num_red_args ... rpython.jit.metainterp.warmspot
# self.red_args_types ... rpython.jit.metainterp.warmspot
# self.result_type ... rpython.jit.metainterp.warmspot
# self.virtualizable_info... rpython.jit.metainterp.warmspot
# self.greenfield_info ... rpython.jit.metainterp.warmspot
# self.warmstate ... rpython.jit.metainterp.warmspot
# self.handle_jitexc_from_bh rpython.jit.metainterp.warmspot
# self.no_loop_header ... rpython.jit.metainterp.warmspot
# self.portal_finishtoken... rpython.jit.metainterp.pyjitpl
# self.propagate_exc_descr.. rpython.jit.metainterp.pyjitpl
# self.index ... rpython.jit.codewriter.call
# self.mainjitcode ... rpython.jit.codewriter.call
# These attributes are read by the backend in CALL_ASSEMBLER:
# self.assembler_helper_adr
# self.index_of_virtualizable
# self.vable_token_descr
# self.portal_calldescr
# warmspot sets extra attributes starting with '_' for its own use.
def _freeze_(self):
return True
| 48.111111
| 77
| 0.677829
|
class JitDriverStaticData(object):
def _freeze_(self):
return True
| true
| true
|
1c4897c09868496a98ec5952939a8321eede329e
| 6,342
|
py
|
Python
|
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py
|
tzhanl/azure-sdk-for-python
|
18cd03f4ab8fd76cc0498f03e80fbc99f217c96e
|
[
"MIT"
] | 1
|
2021-06-02T08:01:35.000Z
|
2021-06-02T08:01:35.000Z
|
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py
|
tzhanl/azure-sdk-for-python
|
18cd03f4ab8fd76cc0498f03e80fbc99f217c96e
|
[
"MIT"
] | 1
|
2020-03-06T05:57:16.000Z
|
2020-03-06T05:57:16.000Z
|
sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/common_data_service_for_apps_linked_service_py3.py
|
tzhanl/azure-sdk-for-python
|
18cd03f4ab8fd76cc0498f03e80fbc99f217c96e
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .linked_service_py3 import LinkedService
class CommonDataServiceForAppsLinkedService(LinkedService):
"""Common Data Service for Apps linked service.
All required parameters must be populated in order to send to Azure.
:param additional_properties: Unmatched properties from the message are
     deserialized to this collection
:type additional_properties: dict[str, object]
:param connect_via: The integration runtime reference.
:type connect_via:
~azure.mgmt.datafactory.models.IntegrationRuntimeReference
:param description: Linked service description.
:type description: str
:param parameters: Parameters for linked service.
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
linked service.
:type annotations: list[object]
:param type: Required. Constant filled by server.
:type type: str
:param deployment_type: Required. The deployment type of the Common Data
Service for Apps instance. 'Online' for Common Data Service for Apps
Online and 'OnPremisesWithIfd' for Common Data Service for Apps
on-premises with Ifd. Type: string (or Expression with resultType string).
Possible values include: 'Online', 'OnPremisesWithIfd'
:type deployment_type: str or
~azure.mgmt.datafactory.models.DynamicsDeploymentType
:param host_name: The host name of the on-premises Common Data Service for
Apps server. The property is required for on-prem and not allowed for
online. Type: string (or Expression with resultType string).
:type host_name: object
:param port: The port of on-premises Common Data Service for Apps server.
The property is required for on-prem and not allowed for online. Default
is 443. Type: integer (or Expression with resultType integer), minimum: 0.
:type port: object
:param service_uri: The URL to the Microsoft Common Data Service for Apps
server. The property is required for on-line and not allowed for on-prem.
Type: string (or Expression with resultType string).
:type service_uri: object
:param organization_name: The organization name of the Common Data Service
for Apps instance. The property is required for on-prem and required for
online when there are more than one Common Data Service for Apps instances
associated with the user. Type: string (or Expression with resultType
string).
:type organization_name: object
:param authentication_type: Required. The authentication type to connect
to Common Data Service for Apps server. 'Office365' for online scenario,
'Ifd' for on-premises with Ifd scenario. Type: string (or Expression with
resultType string). Possible values include: 'Office365', 'Ifd'
:type authentication_type: str or
~azure.mgmt.datafactory.models.DynamicsAuthenticationType
:param username: Required. User name to access the Common Data Service for
Apps instance. Type: string (or Expression with resultType string).
:type username: object
:param password: Password to access the Common Data Service for Apps
instance.
:type password: ~azure.mgmt.datafactory.models.SecretBase
:param encrypted_credential: The encrypted credential used for
authentication. Credentials are encrypted using the integration runtime
credential manager. Type: string (or Expression with resultType string).
:type encrypted_credential: object
"""
_validation = {
'type': {'required': True},
'deployment_type': {'required': True},
'authentication_type': {'required': True},
'username': {'required': True},
}
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
'type': {'key': 'type', 'type': 'str'},
'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'},
'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
'port': {'key': 'typeProperties.port', 'type': 'object'},
'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
'username': {'key': 'typeProperties.username', 'type': 'object'},
'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}
def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None:
super(CommonDataServiceForAppsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
self.deployment_type = deployment_type
self.host_name = host_name
self.port = port
self.service_uri = service_uri
self.organization_name = organization_name
self.authentication_type = authentication_type
self.username = username
self.password = password
self.encrypted_credential = encrypted_credential
self.type = 'CommonDataServiceForApps'
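# A minimal usage sketch for an online instance (endpoint and user are
# illustrative placeholders; 'Online' and 'Office365' are the documented
# enum values for this scenario):
if __name__ == '__main__':
    svc = CommonDataServiceForAppsLinkedService(
        deployment_type='Online',
        authentication_type='Office365',
        username='user@contoso.example',
        service_uri='https://org.api.crm.example')
    print(svc.type)  # -> CommonDataServiceForApps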
| 54.672414
| 307
| 0.699306
|
from .linked_service_py3 import LinkedService
class CommonDataServiceForAppsLinkedService(LinkedService):
_validation = {
'type': {'required': True},
'deployment_type': {'required': True},
'authentication_type': {'required': True},
'username': {'required': True},
}
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'},
'description': {'key': 'description', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
'type': {'key': 'type', 'type': 'str'},
'deployment_type': {'key': 'typeProperties.deploymentType', 'type': 'str'},
'host_name': {'key': 'typeProperties.hostName', 'type': 'object'},
'port': {'key': 'typeProperties.port', 'type': 'object'},
'service_uri': {'key': 'typeProperties.serviceUri', 'type': 'object'},
'organization_name': {'key': 'typeProperties.organizationName', 'type': 'object'},
'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'},
'username': {'key': 'typeProperties.username', 'type': 'object'},
'password': {'key': 'typeProperties.password', 'type': 'SecretBase'},
'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'},
}
def __init__(self, *, deployment_type, authentication_type, username, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, host_name=None, port=None, service_uri=None, organization_name=None, password=None, encrypted_credential=None, **kwargs) -> None:
super(CommonDataServiceForAppsLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs)
self.deployment_type = deployment_type
self.host_name = host_name
self.port = port
self.service_uri = service_uri
self.organization_name = organization_name
self.authentication_type = authentication_type
self.username = username
self.password = password
self.encrypted_credential = encrypted_credential
self.type = 'CommonDataServiceForApps'
| true
| true
|
1c489884eecaeb3cdc0d1b3241dbc37a737987c1
| 619
|
py
|
Python
|
sqrl/admin.py
|
JamesonNetworks/django-sqrl
|
b9a3e188267b393c8790e255d7ba7c0ab5b150af
|
[
"MIT"
] | 15
|
2015-05-26T13:26:33.000Z
|
2020-11-24T22:38:05.000Z
|
sqrl/admin.py
|
JamesonNetworks/django-sqrl
|
b9a3e188267b393c8790e255d7ba7c0ab5b150af
|
[
"MIT"
] | 8
|
2015-03-18T05:18:58.000Z
|
2021-03-05T11:27:58.000Z
|
sqrl/admin.py
|
JamesonNetworks/django-sqrl
|
b9a3e188267b393c8790e255d7ba7c0ab5b150af
|
[
"MIT"
] | 6
|
2018-03-05T21:11:54.000Z
|
2021-03-05T11:28:08.000Z
|
# -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals
from django.contrib import admin
from .models import SQRLIdentity, SQRLNut
class SQRLIdentityAdmin(admin.ModelAdmin):
model = SQRLIdentity
list_display = (
'user',
'is_enabled',
'is_only_sqrl',
)
raw_id_fields = (
'user',
)
class SQRLNutAdmin(admin.ModelAdmin):
model = SQRLNut
list_display = (
'nonce',
'is_transaction_complete',
'ip_address',
)
admin.site.register(SQRLNut, SQRLNutAdmin)
admin.site.register(SQRLIdentity, SQRLIdentityAdmin)
| 19.34375
| 55
| 0.659128
|
from __future__ import print_function, unicode_literals
from django.contrib import admin
from .models import SQRLIdentity, SQRLNut
class SQRLIdentityAdmin(admin.ModelAdmin):
model = SQRLIdentity
list_display = (
'user',
'is_enabled',
'is_only_sqrl',
)
raw_id_fields = (
'user',
)
class SQRLNutAdmin(admin.ModelAdmin):
model = SQRLNut
list_display = (
'nonce',
'is_transaction_complete',
'ip_address',
)
admin.site.register(SQRLNut, SQRLNutAdmin)
admin.site.register(SQRLIdentity, SQRLIdentityAdmin)
| true
| true
|
1c489893d6296039de8c94c076948082c5a5f64d
| 2,041
|
py
|
Python
|
vtools/datastore/station_info.py
|
CADWRDeltaModeling/vtools3
|
226bd2920c73f36dfc2f4eaedda8adccdfd1dfc3
|
[
"Apache-2.0"
] | 5
|
2019-11-01T00:52:38.000Z
|
2021-08-21T09:23:40.000Z
|
vtools/datastore/station_info.py
|
CADWRDeltaModeling/vtools3
|
226bd2920c73f36dfc2f4eaedda8adccdfd1dfc3
|
[
"Apache-2.0"
] | 24
|
2019-11-08T17:16:51.000Z
|
2022-03-15T22:55:23.000Z
|
vtools/datastore/station_info.py
|
CADWRDeltaModeling/vtools3
|
226bd2920c73f36dfc2f4eaedda8adccdfd1dfc3
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import pandas as pd
import argparse
from vtools.datastore import station_config
def station_info(search):
station_lookup = station_config.config_file("station_dbase")
if search == "config":
print(station_config.configuration())
return
#vlookup = station_config.config_file("variable_mappings")
slookup = pd.read_csv(station_lookup,sep=",",comment="#",header=0,usecols=["id","agency",
"agency_id","name",
"x","y"]).squeeze()
slookup["id"] = slookup.id.str.lower()
lsearch = search.lower()
match_id = slookup["id"].str.lower().str.contains(lsearch)
match_name = slookup.name.str.lower().str.contains(lsearch)
match_agency_id = slookup.agency_id.str.lower().str.contains(lsearch)
match_agency = slookup.agency.str.lower().str.contains(lsearch)
matches = match_id | match_name | match_agency_id | match_agency
print("Matches:")
    mlook = slookup.loc[matches,["id","agency","agency_id","name","x","y"]].sort_values(axis=0,by='id').set_index("id")
if mlook.shape[0] == 0:
print("None")
else:
print(mlook.to_string())
return mlook
def create_arg_parser():
parser = argparse.ArgumentParser("Lookup station metadata by partial string match on id or name")
parser.add_argument('--config',default=False,action ="store_true",help="Print configuration and location of lookup files")
parser.add_argument('searchphrase',nargs='?',default="",help = 'Search phrase which can be blank if using --config')
return parser
def main():
parser = create_arg_parser()
args = parser.parse_args()
searchphrase = args.searchphrase
if args.config:
searchphrase = "config"
if searchphrase is None and not args.config:
raise ValueError("searchphrase required")
station_info(searchphrase)
| 40.019608
| 126
| 0.636453
|
import sys
import pandas as pd
import argparse
from vtools.datastore import station_config
def station_info(search):
station_lookup = station_config.config_file("station_dbase")
if search == "config":
print(station_config.configuration())
return
slookup = pd.read_csv(station_lookup,sep=",",comment="#",header=0,usecols=["id","agency",
"agency_id","name",
"x","y"]).squeeze()
slookup["id"] = slookup.id.str.lower()
lsearch = search.lower()
match_id = slookup["id"].str.lower().str.contains(lsearch)
match_name = slookup.name.str.lower().str.contains(lsearch)
match_agency_id = slookup.agency_id.str.lower().str.contains(lsearch)
match_agency = slookup.agency.str.lower().str.contains(lsearch)
matches = match_id | match_name | match_agency_id | match_agency
print("Matches:")
mlook =slookup.loc[matches,["id","agency","agency_id","name","x","y"]].sort_values(axis=0,by='id').set_index("id")
if mlook.shape[0] == 0:
print("None")
else:
print(mlook.to_string())
return mlook
def create_arg_parser():
parser = argparse.ArgumentParser("Lookup station metadata by partial string match on id or name")
parser.add_argument('--config',default=False,action ="store_true",help="Print configuration and location of lookup files")
parser.add_argument('searchphrase',nargs='?',default="",help = 'Search phrase which can be blank if using --config')
return parser
def main():
parser = create_arg_parser()
args = parser.parse_args()
searchphrase = args.searchphrase
if args.config:
searchphrase = "config"
if searchphrase is None and not args.config:
raise ValueError("searchphrase required")
station_info(searchphrase)
| true
| true
|
1c4898cb2432e1e81866bb361fc93755e430fd8a
| 970
|
py
|
Python
|
twilio/rest/preview/studio/__init__.py
|
scotta/twilio-python
|
93cf463f914f55c4c4bd1c259b834953dd81609d
|
[
"MIT"
] | 30
|
2018-06-12T12:00:53.000Z
|
2021-05-02T01:27:16.000Z
|
twilio/rest/preview/studio/__init__.py
|
kkrlee/twilio-python
|
260de9df17c5a1440d9c037a971e2182da7f4ced
|
[
"MIT"
] | 10
|
2020-06-06T01:10:07.000Z
|
2022-03-12T00:12:22.000Z
|
twilio/rest/preview/studio/__init__.py
|
kkrlee/twilio-python
|
260de9df17c5a1440d9c037a971e2182da7f4ced
|
[
"MIT"
] | 4
|
2018-06-12T14:14:20.000Z
|
2018-06-19T16:01:49.000Z
|
# coding=utf-8
"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base.version import Version
from twilio.rest.preview.studio.flow import FlowList
class Studio(Version):
def __init__(self, domain):
"""
Initialize the Studio version of Preview
:returns: Studio version of Preview
:rtype: twilio.rest.preview.studio.Studio.Studio
"""
super(Studio, self).__init__(domain)
self.version = 'Studio'
self._flows = None
@property
def flows(self):
"""
:rtype: twilio.rest.preview.studio.flow.FlowList
"""
if self._flows is None:
self._flows = FlowList(self)
return self._flows
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Preview.Studio>'
| 22.55814
| 56
| 0.574227
|
from twilio.base.version import Version
from twilio.rest.preview.studio.flow import FlowList
class Studio(Version):
def __init__(self, domain):
super(Studio, self).__init__(domain)
self.version = 'Studio'
self._flows = None
@property
def flows(self):
if self._flows is None:
self._flows = FlowList(self)
return self._flows
def __repr__(self):
return '<Twilio.Preview.Studio>'
| true
| true
|
1c489ade360f7d303b3c54cdbfaad47137ac097e
| 1,879
|
py
|
Python
|
auth/views.py
|
raducc/micro-poc
|
a96f5107643910bed890470d85acf97a458aa3b3
|
[
"MIT"
] | null | null | null |
auth/views.py
|
raducc/micro-poc
|
a96f5107643910bed890470d85acf97a458aa3b3
|
[
"MIT"
] | null | null | null |
auth/views.py
|
raducc/micro-poc
|
a96f5107643910bed890470d85acf97a458aa3b3
|
[
"MIT"
] | null | null | null |
import datetime
import jwt
from flask import Blueprint, request, jsonify, make_response, current_app
from werkzeug.security import generate_password_hash, check_password_hash
from .models import db, Users
auth_app = Blueprint("auth", __name__)
@auth_app.route("/signup", methods=["POST"])
def signup():
data = request.get_json()
try:
username = data["username"]
password = data["password"]
    except (AttributeError, KeyError, TypeError):  # TypeError covers request.get_json() returning None
return "error"
if not (username and password):
return "error"
hashed_password = generate_password_hash(password, method="sha256")
new_user = Users(name=username, password=hashed_password)
db.session.add(new_user)
db.session.commit()
return jsonify({"message": "registered successfully"})
@auth_app.route("/signup", methods=["GET"])
def signup_info():
return 'Json content with {"username": "jondoe", "password": "pass"} required'
@auth_app.route("/login", methods=["POST"])
def login():
auth = request.authorization
if not (auth and auth.username and auth.password):
return make_response(
"could not verify",
401,
{"WWW.Authentication": 'Basic realm: "login required"'},
)
user = Users.query.filter_by(name=auth.username).first()
if user and check_password_hash(user.password, auth.password):
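        # Credentials verified: issue a JWT that expires after 30 minutes.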
token = jwt.encode(
{
"id": user.id,
"exp": datetime.datetime.utcnow() + datetime.timedelta(minutes=30),
},
current_app.config["SECRET_KEY"],
)
return jsonify({"header": "x-access-tokens", "token": token})
return make_response(
"could not verify", 401, {"WWW.Authentication": 'Basic realm: "login required"'}
)
@auth_app.route("/login", methods=["GET"])
def login_info():
return "Basic Auth"
| 28.044776
| 88
| 0.641299
|
import datetime
import jwt
from flask import Blueprint, request, jsonify, make_response, current_app
from werkzeug.security import generate_password_hash, check_password_hash
from .models import db, Users
auth_app = Blueprint("auth", __name__)
@auth_app.route("/signup", methods=["POST"])
def signup():
data = request.get_json()
try:
username = data["username"]
password = data["password"]
    except (AttributeError, KeyError, TypeError):
return "error"
if not (username and password):
return "error"
hashed_password = generate_password_hash(password, method="sha256")
new_user = Users(name=username, password=hashed_password)
db.session.add(new_user)
db.session.commit()
return jsonify({"message": "registered successfully"})
@auth_app.route("/signup", methods=["GET"])
def signup_info():
return 'Json content with {"username": "jondoe", "password": "pass"} required'
@auth_app.route("/login", methods=["POST"])
def login():
auth = request.authorization
if not (auth and auth.username and auth.password):
return make_response(
"could not verify",
401,
{"WWW.Authentication": 'Basic realm: "login required"'},
)
user = Users.query.filter_by(name=auth.username).first()
if user and check_password_hash(user.password, auth.password):
token = jwt.encode(
{
"id": user.id,
"exp": datetime.datetime.utcnow() + datetime.timedelta(minutes=30),
},
current_app.config["SECRET_KEY"],
)
return jsonify({"header": "x-access-tokens", "token": token})
return make_response(
"could not verify", 401, {"WWW.Authentication": 'Basic realm: "login required"'}
)
@auth_app.route("/login", methods=["GET"])
def login_info():
return "Basic Auth"
| true
| true
|
1c489c82219bd48494ee350e6a9089e747467533
| 372
|
py
|
Python
|
projeto.py
|
laurourbano/Projetos_Python
|
50e7f4a7ff34158385ea7b635bac95ec8a0363a1
|
[
"MIT"
] | 1
|
2021-12-28T02:51:34.000Z
|
2021-12-28T02:51:34.000Z
|
projeto.py
|
laurourbano/Projetos_Python
|
50e7f4a7ff34158385ea7b635bac95ec8a0363a1
|
[
"MIT"
] | null | null | null |
projeto.py
|
laurourbano/Projetos_Python
|
50e7f4a7ff34158385ea7b635bac95ec8a0363a1
|
[
"MIT"
] | null | null | null |
import datetime
print("++++++++++++++++++")
print("ESCALA DE TRABALHO")
print("++++++++++++++++++")
def informar_cnpj():
cnpj = input("Informe o CNPJ da Empresa: ")
print("O CNPJ digitado foi: ", cnpj)
def informar_cpf():
    cpf = input("Informe o CPF: ")
    print("O CPF digitado foi: ", cpf)
def informar_horário():
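    # Not yet implemented.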
pass
informar_cnpj()
| 20.666667
| 47
| 0.596774
|
import datetime
print("++++++++++++++++++")
print("ESCALA DE TRABALHO")
print("++++++++++++++++++")
def informar_cnpj():
cnpj = input("Informe o CNPJ da Empresa: ")
print("O CNPJ digitado foi: ", cnpj)
def informar_cpf():
    cpf = input("Informe o CPF: ")
    print("O CPF digitado foi: ", cpf)
def informar_horário():
pass
informar_cnpj()
| true
| true
|
1c489ce456622e5f90885b80f7adb80f022c877e
| 4,520
|
py
|
Python
|
redash/utils/parameterized_query.py
|
howyi/redash
|
b68051d3c50df9a727df4c6ca5c6f70750d387d3
|
[
"BSD-2-Clause"
] | 1
|
2019-03-24T03:38:32.000Z
|
2019-03-24T03:38:32.000Z
|
redash/utils/parameterized_query.py
|
howyi/redash
|
b68051d3c50df9a727df4c6ca5c6f70750d387d3
|
[
"BSD-2-Clause"
] | null | null | null |
redash/utils/parameterized_query.py
|
howyi/redash
|
b68051d3c50df9a727df4c6ca5c6f70750d387d3
|
[
"BSD-2-Clause"
] | 1
|
2019-03-20T09:22:43.000Z
|
2019-03-20T09:22:43.000Z
|
import pystache
from functools import partial
from flask_login import current_user
from numbers import Number
from redash.utils import mustache_render, json_loads
from redash.permissions import require_access, view_only
from funcy import distinct
from dateutil.parser import parse
def _pluck_name_and_value(default_column, row):
row = {k.lower(): v for k, v in row.items()}
name_column = "name" if "name" in row.keys() else default_column.lower()
value_column = "value" if "value" in row.keys() else default_column.lower()
return {"name": row[name_column], "value": row[value_column]}
def _load_result(query_id):
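    # Lazy imports (likely to avoid circular imports between redash modules).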
from redash.authentication.org_resolving import current_org
from redash import models
query = models.Query.get_by_id_and_org(query_id, current_org)
require_access(query.data_source.groups, current_user, view_only)
query_result = models.QueryResult.get_by_id_and_org(query.latest_query_data_id, current_org)
return json_loads(query_result.data)
def dropdown_values(query_id):
data = _load_result(query_id)
first_column = data["columns"][0]["name"]
pluck = partial(_pluck_name_and_value, first_column)
return map(pluck, data["rows"])
def _collect_key_names(nodes):
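    # Walk pystache's (private) parse tree, collecting key names from escape and section nodes.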
keys = []
for node in nodes._parse_tree:
if isinstance(node, pystache.parser._EscapeNode):
keys.append(node.key)
elif isinstance(node, pystache.parser._SectionNode):
keys.append(node.key)
keys.extend(_collect_key_names(node.parsed))
return distinct(keys)
def _collect_query_parameters(query):
nodes = pystache.parse(query)
keys = _collect_key_names(nodes)
return keys
def _parameter_names(parameter_values):
names = []
for key, value in parameter_values.iteritems():
if isinstance(value, dict):
for inner_key in value.keys():
names.append(u'{}.{}'.format(key, inner_key))
else:
names.append(key)
return names
def _is_date(string):
try:
parse(string)
return True
except ValueError:
return False
def _is_date_range(obj):
try:
return _is_date(obj["start"]) and _is_date(obj["end"])
except (KeyError, TypeError):
return False
class ParameterizedQuery(object):
def __init__(self, template, schema=None):
self.schema = schema or []
self.template = template
self.query = template
self.parameters = {}
def apply(self, parameters):
invalid_parameter_names = [key for (key, value) in parameters.iteritems() if not self._valid(key, value)]
if invalid_parameter_names:
raise InvalidParameterError(invalid_parameter_names)
else:
self.parameters.update(parameters)
self.query = mustache_render(self.template, self.parameters)
return self
def _valid(self, name, value):
if not self.schema:
return True
definition = next((definition for definition in self.schema if definition["name"] == name), None)
if not definition:
return False
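        # Each parameter type maps to a predicate that validates the supplied value.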
validators = {
"text": lambda value: isinstance(value, basestring),
"number": lambda value: isinstance(value, Number),
"enum": lambda value: value in definition["enumOptions"],
"query": lambda value: value in [v["value"] for v in dropdown_values(definition["queryId"])],
"date": _is_date,
"datetime-local": _is_date,
"datetime-with-seconds": _is_date,
"date-range": _is_date_range,
"datetime-range": _is_date_range,
"datetime-range-with-seconds": _is_date_range,
}
validate = validators.get(definition["type"], lambda x: False)
return validate(value)
@property
def is_safe(self):
text_parameters = filter(lambda p: p["type"] == "text", self.schema)
return not any(text_parameters)
@property
def missing_params(self):
query_parameters = set(_collect_query_parameters(self.template))
return set(query_parameters) - set(_parameter_names(self.parameters))
@property
def text(self):
return self.query
class InvalidParameterError(Exception):
def __init__(self, parameters):
message = u"The following parameter values are incompatible with their definitions: {}".format(", ".join(parameters))
super(InvalidParameterError, self).__init__(message)
| 31.388889
| 125
| 0.669469
|
import pystache
from functools import partial
from flask_login import current_user
from numbers import Number
from redash.utils import mustache_render, json_loads
from redash.permissions import require_access, view_only
from funcy import distinct
from dateutil.parser import parse
def _pluck_name_and_value(default_column, row):
row = {k.lower(): v for k, v in row.items()}
name_column = "name" if "name" in row.keys() else default_column.lower()
value_column = "value" if "value" in row.keys() else default_column.lower()
return {"name": row[name_column], "value": row[value_column]}
def _load_result(query_id):
from redash.authentication.org_resolving import current_org
from redash import models
query = models.Query.get_by_id_and_org(query_id, current_org)
require_access(query.data_source.groups, current_user, view_only)
query_result = models.QueryResult.get_by_id_and_org(query.latest_query_data_id, current_org)
return json_loads(query_result.data)
def dropdown_values(query_id):
data = _load_result(query_id)
first_column = data["columns"][0]["name"]
pluck = partial(_pluck_name_and_value, first_column)
return map(pluck, data["rows"])
def _collect_key_names(nodes):
keys = []
for node in nodes._parse_tree:
if isinstance(node, pystache.parser._EscapeNode):
keys.append(node.key)
elif isinstance(node, pystache.parser._SectionNode):
keys.append(node.key)
keys.extend(_collect_key_names(node.parsed))
return distinct(keys)
def _collect_query_parameters(query):
nodes = pystache.parse(query)
keys = _collect_key_names(nodes)
return keys
def _parameter_names(parameter_values):
names = []
for key, value in parameter_values.iteritems():
if isinstance(value, dict):
for inner_key in value.keys():
names.append(u'{}.{}'.format(key, inner_key))
else:
names.append(key)
return names
def _is_date(string):
try:
parse(string)
return True
except ValueError:
return False
def _is_date_range(obj):
try:
return _is_date(obj["start"]) and _is_date(obj["end"])
except (KeyError, TypeError):
return False
class ParameterizedQuery(object):
def __init__(self, template, schema=None):
self.schema = schema or []
self.template = template
self.query = template
self.parameters = {}
def apply(self, parameters):
invalid_parameter_names = [key for (key, value) in parameters.iteritems() if not self._valid(key, value)]
if invalid_parameter_names:
raise InvalidParameterError(invalid_parameter_names)
else:
self.parameters.update(parameters)
self.query = mustache_render(self.template, self.parameters)
return self
def _valid(self, name, value):
if not self.schema:
return True
definition = next((definition for definition in self.schema if definition["name"] == name), None)
if not definition:
return False
validators = {
"text": lambda value: isinstance(value, basestring),
"number": lambda value: isinstance(value, Number),
"enum": lambda value: value in definition["enumOptions"],
"query": lambda value: value in [v["value"] for v in dropdown_values(definition["queryId"])],
"date": _is_date,
"datetime-local": _is_date,
"datetime-with-seconds": _is_date,
"date-range": _is_date_range,
"datetime-range": _is_date_range,
"datetime-range-with-seconds": _is_date_range,
}
validate = validators.get(definition["type"], lambda x: False)
return validate(value)
@property
def is_safe(self):
text_parameters = filter(lambda p: p["type"] == "text", self.schema)
return not any(text_parameters)
@property
def missing_params(self):
query_parameters = set(_collect_query_parameters(self.template))
return set(query_parameters) - set(_parameter_names(self.parameters))
@property
def text(self):
return self.query
class InvalidParameterError(Exception):
def __init__(self, parameters):
message = u"The following parameter values are incompatible with their definitions: {}".format(", ".join(parameters))
super(InvalidParameterError, self).__init__(message)
| true
| true
|
1c489d5952b9993695d02695326626bda781ef10
| 1,604
|
py
|
Python
|
h2o-py/tests/testdir_algos/gbm/pyunit_offset_tweedieGBM.py
|
ChristosChristofidis/h2o-3
|
2a926c0950a98eff5a4c06aeaf0373e17176ecd8
|
[
"Apache-2.0"
] | null | null | null |
h2o-py/tests/testdir_algos/gbm/pyunit_offset_tweedieGBM.py
|
ChristosChristofidis/h2o-3
|
2a926c0950a98eff5a4c06aeaf0373e17176ecd8
|
[
"Apache-2.0"
] | null | null | null |
h2o-py/tests/testdir_algos/gbm/pyunit_offset_tweedieGBM.py
|
ChristosChristofidis/h2o-3
|
2a926c0950a98eff5a4c06aeaf0373e17176ecd8
|
[
"Apache-2.0"
] | 1
|
2020-12-18T19:20:02.000Z
|
2020-12-18T19:20:02.000Z
|
import sys
sys.path.insert(1, "../../../")
import h2o
def offset_tweedie(ip,port):
# Connect to a pre-existing cluster
h2o.init(ip,port)
insurance = h2o.import_frame(h2o.locate("smalldata/glm_test/insurance.csv"))
insurance["offset"] = insurance["Holders"].log()
gbm = h2o.gbm(x=insurance[0:3], y=insurance["Claims"], distribution="tweedie", ntrees=600, max_depth=1, min_rows=1,
learn_rate=.1, offset_column="offset", training_frame=insurance)
predictions = gbm.predict(insurance)
# Comparison result generated from harrysouthworth's gbm:
# fit2 = gbm(Claims ~ District + Group + Age+ offset(log(Holders)) , interaction.depth = 1,n.minobsinnode = 1,shrinkage = .1,bag.fraction = 1,train.fraction = 1,
# data = Insurance, distribution ="tweedie", n.trees = 600)
# pr = predict(fit2, Insurance)
# pr = exp(pr+log(Insurance$Holders))
assert abs(-1.869702 - gbm._model_json['output']['init_f']) < 1e-5, "expected init_f to be {0}, but got {1}".\
format(-1.869702, gbm._model_json['output']['init_f'])
assert abs(49.21591 - predictions.mean()) < 1e-4, "expected prediction mean to be {0}, but got {1}". \
format(49.21591, predictions.mean())
assert abs(1.0258 - predictions.min()) < 1e-4, "expected prediction min to be {0}, but got {1}". \
format(1.0258, predictions.min())
assert abs(392.4651 - predictions.max()) < 1e-2, "expected prediction max to be {0}, but got {1}". \
format(392.4651, predictions.max())
if __name__ == "__main__":
h2o.run_test(sys.argv, offset_tweedie)
| 47.176471
| 165
| 0.65212
|
import sys
sys.path.insert(1, "../../../")
import h2o
def offset_tweedie(ip,port):
h2o.init(ip,port)
insurance = h2o.import_frame(h2o.locate("smalldata/glm_test/insurance.csv"))
insurance["offset"] = insurance["Holders"].log()
gbm = h2o.gbm(x=insurance[0:3], y=insurance["Claims"], distribution="tweedie", ntrees=600, max_depth=1, min_rows=1,
learn_rate=.1, offset_column="offset", training_frame=insurance)
predictions = gbm.predict(insurance)
# fit2 = gbm(Claims ~ District + Group + Age+ offset(log(Holders)) , interaction.depth = 1,n.minobsinnode = 1,shrinkage = .1,bag.fraction = 1,train.fraction = 1,
# data = Insurance, distribution ="tweedie", n.trees = 600)
# pr = predict(fit2, Insurance)
# pr = exp(pr+log(Insurance$Holders))
assert abs(-1.869702 - gbm._model_json['output']['init_f']) < 1e-5, "expected init_f to be {0}, but got {1}".\
format(-1.869702, gbm._model_json['output']['init_f'])
assert abs(49.21591 - predictions.mean()) < 1e-4, "expected prediction mean to be {0}, but got {1}". \
format(49.21591, predictions.mean())
assert abs(1.0258 - predictions.min()) < 1e-4, "expected prediction min to be {0}, but got {1}". \
format(1.0258, predictions.min())
assert abs(392.4651 - predictions.max()) < 1e-2, "expected prediction max to be {0}, but got {1}". \
format(392.4651, predictions.max())
if __name__ == "__main__":
h2o.run_test(sys.argv, offset_tweedie)
| true
| true
|
1c489ee1ca98570d16d82d34d9ab7209e83f6bce
| 182
|
py
|
Python
|
ldscriptures/cache.py
|
gabbarreiro/ldscriptures
|
f1de3acd6ae41886f1960a92aa26354621410916
|
[
"Unlicense"
] | 7
|
2018-07-07T17:05:21.000Z
|
2020-09-06T20:27:47.000Z
|
ldscriptures/cache.py
|
TGSec/ldscriptures
|
f1de3acd6ae41886f1960a92aa26354621410916
|
[
"Unlicense"
] | null | null | null |
ldscriptures/cache.py
|
TGSec/ldscriptures
|
f1de3acd6ae41886f1960a92aa26354621410916
|
[
"Unlicense"
] | 1
|
2021-05-06T05:38:23.000Z
|
2021-05-06T05:38:23.000Z
|
'''
import cachetools
from . import lang
enabled = True
maxsize = 1000
scriptures = {}
for each in lang.available:
scriptures[each] = cachetools.LRUCache(maxsize=maxsize)
'''
| 14
| 59
| 0.714286
| true
| true
|
|
1c489f41d4e4c12cd6f6407813ef2eac519893b1
| 276
|
py
|
Python
|
mlpipeline/__init__.py
|
ahmed-shariff/mlpipeline
|
03a07da44eab14171305e41e6d162def6c32c6ac
|
[
"MIT"
] | 5
|
2019-09-04T06:37:33.000Z
|
2021-02-13T14:09:37.000Z
|
mlpipeline/__init__.py
|
ahmed-shariff/mlpipeline
|
03a07da44eab14171305e41e6d162def6c32c6ac
|
[
"MIT"
] | 1
|
2020-02-21T20:06:07.000Z
|
2020-02-21T20:06:17.000Z
|
mlpipeline/__init__.py
|
ahmed-shariff/mlpipeline
|
03a07da44eab14171305e41e6d162def6c32c6ac
|
[
"MIT"
] | null | null | null |
__version__ = "2.0.a.7.post.1"
from mlpipeline.utils._utils import (Versions,
log,
MetricContainer,
iterator)
__all__ = ["Versions", "log", "MetricContainer", "iterator"]  # __all__ entries must be name strings
| 30.666667
| 53
| 0.456522
|
__version__ = "2.0.a.7.post.1"
from mlpipeline.utils._utils import (Versions,
log,
MetricContainer,
iterator)
__all__ = ["Versions", "log", "MetricContainer", "iterator"]
| true
| true
|
1c48a02d0d934feb80680ef6f5e9dc4fa0948c10
| 1,636
|
py
|
Python
|
tests/test_util.py
|
prafullat/fava
|
27b1cc7368922696841b130b6efb419db691e95f
|
[
"MIT"
] | null | null | null |
tests/test_util.py
|
prafullat/fava
|
27b1cc7368922696841b130b6efb419db691e95f
|
[
"MIT"
] | null | null | null |
tests/test_util.py
|
prafullat/fava
|
27b1cc7368922696841b130b6efb419db691e95f
|
[
"MIT"
] | null | null | null |
# pylint: disable=missing-docstring
from werkzeug.test import Client
from werkzeug.wrappers import BaseResponse
from fava.util import simple_wsgi, slugify, pairwise, listify, send_file_inline
from .conftest import data_file
def test_listify():
@listify
def fun():
for i in [1, 2, 3]:
yield i
assert fun() == [1, 2, 3]
def test_pairwise():
assert list(pairwise([1, 2, 3])) == [(1, 2), (2, 3)]
assert list(pairwise([])) == []
def test_simple_wsgi():
client = Client(simple_wsgi, BaseResponse)
resp = client.get("/any_path")
assert resp.status_code == 200
assert resp.data == b""
def test_slugify():
assert slugify("Example Beancount File") == "example-beancount-file"
assert slugify(" Example Beancount File ") == "example-beancount-file"
assert slugify("test") == "test"
assert slugify("烫烫烫") == "烫烫烫"
assert slugify("nonun烫icode 烫烫") == "nonun烫icode-烫烫"
assert slugify("%✓") == ""
assert slugify("söße") == "söße"
assert slugify("ASDF") == "asdf"
assert slugify("ASDF test test") == "asdf-test-test"
def test_send_file_inline(app):
with app.test_request_context():
resp = send_file_inline(data_file("example-balances.csv"))
assert (
resp.headers["Content-Disposition"]
== "inline; filename*=UTF-8''example-balances.csv"
)
resp = send_file_inline(data_file("example-utf8-🦁.txt"))
# pylint: disable=line-too-long
assert (
resp.headers["Content-Disposition"]
== "inline; filename*=UTF-8''example-utf8-%F0%9F%A6%81.txt"
)
| 28.701754
| 79
| 0.625306
|
from werkzeug.test import Client
from werkzeug.wrappers import BaseResponse
from fava.util import simple_wsgi, slugify, pairwise, listify, send_file_inline
from .conftest import data_file
def test_listify():
@listify
def fun():
for i in [1, 2, 3]:
yield i
assert fun() == [1, 2, 3]
def test_pairwise():
assert list(pairwise([1, 2, 3])) == [(1, 2), (2, 3)]
assert list(pairwise([])) == []
def test_simple_wsgi():
client = Client(simple_wsgi, BaseResponse)
resp = client.get("/any_path")
assert resp.status_code == 200
assert resp.data == b""
def test_slugify():
assert slugify("Example Beancount File") == "example-beancount-file"
assert slugify(" Example Beancount File ") == "example-beancount-file"
assert slugify("test") == "test"
assert slugify("烫烫烫") == "烫烫烫"
assert slugify("nonun烫icode 烫烫") == "nonun烫icode-烫烫"
assert slugify("%✓") == ""
assert slugify("söße") == "söße"
assert slugify("ASDF") == "asdf"
assert slugify("ASDF test test") == "asdf-test-test"
def test_send_file_inline(app):
with app.test_request_context():
resp = send_file_inline(data_file("example-balances.csv"))
assert (
resp.headers["Content-Disposition"]
== "inline; filename*=UTF-8''example-balances.csv"
)
resp = send_file_inline(data_file("example-utf8-🦁.txt"))
assert (
resp.headers["Content-Disposition"]
== "inline; filename*=UTF-8''example-utf8-%F0%9F%A6%81.txt"
)
| true
| true
|
1c48a04f443135c1a234bcf0ad9484b86d1bdd54
| 82
|
py
|
Python
|
src/westpa/westext/adaptvoronoi/__init__.py
|
burntyellow/adelman_ci
|
cca251a51b34843faed0275cce01d7a307829993
|
[
"MIT"
] | 140
|
2015-01-07T23:30:36.000Z
|
2022-03-28T17:15:30.000Z
|
src/westext/adaptvoronoi/__init__.py
|
burntyellow/westpa
|
9dc62478fcef0001b9c038cd56a40b6be1b9d64a
|
[
"MIT"
] | 157
|
2015-01-03T03:38:36.000Z
|
2022-03-31T14:12:16.000Z
|
src/westext/adaptvoronoi/__init__.py
|
burntyellow/westpa
|
9dc62478fcef0001b9c038cd56a40b6be1b9d64a
|
[
"MIT"
] | 56
|
2015-01-02T21:21:40.000Z
|
2022-03-03T16:27:54.000Z
|
from . import adaptVor_driver
from .adaptVor_driver import AdaptiveVoronoiDriver
| 20.5
| 50
| 0.865854
|
from . import adaptVor_driver
from .adaptVor_driver import AdaptiveVoronoiDriver
| true
| true
|
1c48a2353bbf942e33e7ee6d51eedceae67ac6aa
| 850
|
py
|
Python
|
alerta/plugins/remote_ip.py
|
sepich/alerta
|
64d4e9f7bca6e41bda2d0553c846fbe68b494ab2
|
[
"Apache-2.0"
] | 1
|
2019-08-15T02:37:21.000Z
|
2019-08-15T02:37:21.000Z
|
alerta/plugins/remote_ip.py
|
sepich/alerta
|
64d4e9f7bca6e41bda2d0553c846fbe68b494ab2
|
[
"Apache-2.0"
] | null | null | null |
alerta/plugins/remote_ip.py
|
sepich/alerta
|
64d4e9f7bca6e41bda2d0553c846fbe68b494ab2
|
[
"Apache-2.0"
] | 1
|
2021-03-11T18:19:22.000Z
|
2021-03-11T18:19:22.000Z
|
import logging
from flask import request
from alerta.plugins import PluginBase
LOG = logging.getLogger('alerta.plugins')
class RemoteIpAddr(PluginBase):
"""
Add originating IP address of HTTP client as an alert attribute. This information
can be used for debugging, access control, or generating geolocation data.
"""
def pre_receive(self, alert, **kwargs):
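        # Behind a proxy, the first X-Forwarded-For entry is typically the originating client.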
if request.headers.getlist('X-Forwarded-For'):
alert.attributes.update(ip=request.headers.getlist('X-Forwarded-For')[0])
else:
alert.attributes.update(ip=request.remote_addr)
return alert
def post_receive(self, alert, **kwargs):
return
def status_change(self, alert, status, text, **kwargs):
return
def take_action(self, alert, action, text, **kwargs):
raise NotImplementedError
| 27.419355
| 85
| 0.683529
|
import logging
from flask import request
from alerta.plugins import PluginBase
LOG = logging.getLogger('alerta.plugins')
class RemoteIpAddr(PluginBase):
def pre_receive(self, alert, **kwargs):
if request.headers.getlist('X-Forwarded-For'):
alert.attributes.update(ip=request.headers.getlist('X-Forwarded-For')[0])
else:
alert.attributes.update(ip=request.remote_addr)
return alert
def post_receive(self, alert, **kwargs):
return
def status_change(self, alert, status, text, **kwargs):
return
def take_action(self, alert, action, text, **kwargs):
raise NotImplementedError
| true
| true
|
1c48a28d36981f4ff51423932bebd436d4b82cc8
| 6,491
|
py
|
Python
|
python/ray/tests/conftest.py
|
ruiminshen/ray
|
b3564cda3d214cd19e9ea3804c21a46d5e14a914
|
[
"Apache-2.0"
] | 3
|
2020-12-03T17:48:45.000Z
|
2022-01-22T08:09:46.000Z
|
python/ray/tests/conftest.py
|
ruiminshen/ray
|
b3564cda3d214cd19e9ea3804c21a46d5e14a914
|
[
"Apache-2.0"
] | 6
|
2022-03-18T14:06:24.000Z
|
2022-03-26T07:13:16.000Z
|
python/ray/tests/conftest.py
|
ruiminshen/ray
|
b3564cda3d214cd19e9ea3804c21a46d5e14a914
|
[
"Apache-2.0"
] | 1
|
2020-12-12T13:59:22.000Z
|
2020-12-12T13:59:22.000Z
|
"""
This file defines the common pytest fixtures used in current directory.
"""
import os
from contextlib import contextmanager
import pytest
import subprocess
import ray
from ray.cluster_utils import Cluster
from ray.test_utils import init_error_pubsub
@pytest.fixture
def shutdown_only():
yield None
# The code after the yield will run as teardown code.
ray.shutdown()
def get_default_fixture_system_config():
system_config = {
"object_timeout_milliseconds": 200,
"num_heartbeats_timeout": 10,
"object_store_full_max_retries": 3,
"object_store_full_initial_delay_ms": 100,
}
return system_config
def get_default_fixture_ray_kwargs():
    system_config = get_default_fixture_system_config()
ray_kwargs = {
"num_cpus": 1,
"object_store_memory": 150 * 1024 * 1024,
"_system_config": system_config,
}
return ray_kwargs
@contextmanager
def _ray_start(**kwargs):
init_kwargs = get_default_fixture_ray_kwargs()
init_kwargs.update(kwargs)
# Start the Ray processes.
address_info = ray.init(**init_kwargs)
yield address_info
# The code after the yield will run as teardown code.
ray.shutdown()
@pytest.fixture
def ray_start_with_dashboard(request):
param = getattr(request, "param", {})
with _ray_start(
num_cpus=1, include_dashboard=True, **param) as address_info:
yield address_info
# The following fixture will start ray with 0 cpu.
@pytest.fixture
def ray_start_no_cpu(request):
param = getattr(request, "param", {})
with _ray_start(num_cpus=0, **param) as res:
yield res
# The following fixture will start ray with 1 cpu.
@pytest.fixture
def ray_start_regular(request):
param = getattr(request, "param", {})
with _ray_start(**param) as res:
yield res
@pytest.fixture(scope="module")
def ray_start_regular_shared(request):
param = getattr(request, "param", {})
with _ray_start(**param) as res:
yield res
@pytest.fixture(
scope="module", params=[{
"local_mode": True
}, {
"local_mode": False
}])
def ray_start_shared_local_modes(request):
param = getattr(request, "param", {})
with _ray_start(**param) as res:
yield res
@pytest.fixture
def ray_start_2_cpus(request):
param = getattr(request, "param", {})
with _ray_start(num_cpus=2, **param) as res:
yield res
@pytest.fixture
def ray_start_10_cpus(request):
param = getattr(request, "param", {})
with _ray_start(num_cpus=10, **param) as res:
yield res
@contextmanager
def _ray_start_cluster(**kwargs):
init_kwargs = get_default_fixture_ray_kwargs()
num_nodes = 0
do_init = False
# num_nodes & do_init are not arguments for ray.init, so delete them.
if "num_nodes" in kwargs:
num_nodes = kwargs["num_nodes"]
del kwargs["num_nodes"]
if "do_init" in kwargs:
do_init = kwargs["do_init"]
del kwargs["do_init"]
elif num_nodes > 0:
do_init = True
init_kwargs.update(kwargs)
cluster = Cluster()
remote_nodes = []
for i in range(num_nodes):
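        # Only the head node should receive _system_config; drop it for the other nodes.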
if i > 0 and "_system_config" in init_kwargs:
del init_kwargs["_system_config"]
remote_nodes.append(cluster.add_node(**init_kwargs))
# We assume driver will connect to the head (first node),
# so ray init will be invoked if do_init is true
if len(remote_nodes) == 1 and do_init:
ray.init(address=cluster.address)
yield cluster
# The code after the yield will run as teardown code.
ray.shutdown()
cluster.shutdown()
# This fixture will start a cluster with empty nodes.
@pytest.fixture
def ray_start_cluster(request):
param = getattr(request, "param", {})
with _ray_start_cluster(**param) as res:
yield res
@pytest.fixture
def ray_start_cluster_head(request):
param = getattr(request, "param", {})
with _ray_start_cluster(do_init=True, num_nodes=1, **param) as res:
yield res
@pytest.fixture
def ray_start_cluster_2_nodes(request):
param = getattr(request, "param", {})
with _ray_start_cluster(do_init=True, num_nodes=2, **param) as res:
yield res
@pytest.fixture
def ray_start_object_store_memory(request):
# Start the Ray processes.
store_size = request.param
    system_config = get_default_fixture_system_config()
init_kwargs = {
"num_cpus": 1,
"_system_config": system_config,
"object_store_memory": store_size,
}
ray.init(**init_kwargs)
yield store_size
# The code after the yield will run as teardown code.
ray.shutdown()
@pytest.fixture
def call_ray_start(request):
parameter = getattr(
request, "param", "ray start --head --num-cpus=1 --min-worker-port=0 "
"--max-worker-port=0 --port 0")
command_args = parameter.split(" ")
out = ray.utils.decode(
subprocess.check_output(command_args, stderr=subprocess.STDOUT))
# Get the redis address from the output.
redis_substring_prefix = "--address='"
address_location = (
out.find(redis_substring_prefix) + len(redis_substring_prefix))
address = out[address_location:]
address = address.split("'")[0]
yield address
# Disconnect from the Ray cluster.
ray.shutdown()
# Kill the Ray cluster.
subprocess.check_call(["ray", "stop"])
@pytest.fixture
def call_ray_stop_only():
yield
subprocess.check_call(["ray", "stop"])
@pytest.fixture
def enable_pickle_debug():
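    # Enable verbose pickle debugging via the environment only for the duration of the test.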
os.environ["RAY_PICKLE_VERBOSE_DEBUG"] = "1"
yield
del os.environ["RAY_PICKLE_VERBOSE_DEBUG"]
@pytest.fixture()
def two_node_cluster():
system_config = {
"object_timeout_milliseconds": 200,
"num_heartbeats_timeout": 10,
}
cluster = ray.cluster_utils.Cluster(
head_node_args={"_system_config": system_config})
for _ in range(2):
remote_node = cluster.add_node(num_cpus=1)
ray.init(address=cluster.address)
yield cluster, remote_node
# The code after the yield will run as teardown code.
ray.shutdown()
cluster.shutdown()
@pytest.fixture()
def error_pubsub():
p = init_error_pubsub()
yield p
p.close()
@pytest.fixture()
def log_pubsub():
p = ray.worker.global_worker.redis_client.pubsub(
ignore_subscribe_messages=True)
log_channel = ray.gcs_utils.LOG_FILE_CHANNEL
p.psubscribe(log_channel)
yield p
p.close()
| 26.279352
| 78
| 0.67817
|
import os
from contextlib import contextmanager
import pytest
import subprocess
import ray
from ray.cluster_utils import Cluster
from ray.test_utils import init_error_pubsub
@pytest.fixture
def shutdown_only():
yield None
ray.shutdown()
def get_default_fixture_system_config():
system_config = {
"object_timeout_milliseconds": 200,
"num_heartbeats_timeout": 10,
"object_store_full_max_retries": 3,
"object_store_full_initial_delay_ms": 100,
}
return system_config
def get_default_fixture_ray_kwargs():
    system_config = get_default_fixture_system_config()
ray_kwargs = {
"num_cpus": 1,
"object_store_memory": 150 * 1024 * 1024,
"_system_config": system_config,
}
return ray_kwargs
@contextmanager
def _ray_start(**kwargs):
init_kwargs = get_default_fixture_ray_kwargs()
init_kwargs.update(kwargs)
address_info = ray.init(**init_kwargs)
yield address_info
ray.shutdown()
@pytest.fixture
def ray_start_with_dashboard(request):
param = getattr(request, "param", {})
with _ray_start(
num_cpus=1, include_dashboard=True, **param) as address_info:
yield address_info
@pytest.fixture
def ray_start_no_cpu(request):
param = getattr(request, "param", {})
with _ray_start(num_cpus=0, **param) as res:
yield res
@pytest.fixture
def ray_start_regular(request):
param = getattr(request, "param", {})
with _ray_start(**param) as res:
yield res
@pytest.fixture(scope="module")
def ray_start_regular_shared(request):
param = getattr(request, "param", {})
with _ray_start(**param) as res:
yield res
@pytest.fixture(
scope="module", params=[{
"local_mode": True
}, {
"local_mode": False
}])
def ray_start_shared_local_modes(request):
param = getattr(request, "param", {})
with _ray_start(**param) as res:
yield res
@pytest.fixture
def ray_start_2_cpus(request):
param = getattr(request, "param", {})
with _ray_start(num_cpus=2, **param) as res:
yield res
@pytest.fixture
def ray_start_10_cpus(request):
param = getattr(request, "param", {})
with _ray_start(num_cpus=10, **param) as res:
yield res
@contextmanager
def _ray_start_cluster(**kwargs):
init_kwargs = get_default_fixture_ray_kwargs()
num_nodes = 0
do_init = False
if "num_nodes" in kwargs:
num_nodes = kwargs["num_nodes"]
del kwargs["num_nodes"]
if "do_init" in kwargs:
do_init = kwargs["do_init"]
del kwargs["do_init"]
elif num_nodes > 0:
do_init = True
init_kwargs.update(kwargs)
cluster = Cluster()
remote_nodes = []
for i in range(num_nodes):
if i > 0 and "_system_config" in init_kwargs:
del init_kwargs["_system_config"]
remote_nodes.append(cluster.add_node(**init_kwargs))
if len(remote_nodes) == 1 and do_init:
ray.init(address=cluster.address)
yield cluster
ray.shutdown()
cluster.shutdown()
@pytest.fixture
def ray_start_cluster(request):
param = getattr(request, "param", {})
with _ray_start_cluster(**param) as res:
yield res
@pytest.fixture
def ray_start_cluster_head(request):
param = getattr(request, "param", {})
with _ray_start_cluster(do_init=True, num_nodes=1, **param) as res:
yield res
@pytest.fixture
def ray_start_cluster_2_nodes(request):
param = getattr(request, "param", {})
with _ray_start_cluster(do_init=True, num_nodes=2, **param) as res:
yield res
@pytest.fixture
def ray_start_object_store_memory(request):
store_size = request.param
    system_config = get_default_fixture_system_config()
init_kwargs = {
"num_cpus": 1,
"_system_config": system_config,
"object_store_memory": store_size,
}
ray.init(**init_kwargs)
yield store_size
ray.shutdown()
@pytest.fixture
def call_ray_start(request):
parameter = getattr(
request, "param", "ray start --head --num-cpus=1 --min-worker-port=0 "
"--max-worker-port=0 --port 0")
command_args = parameter.split(" ")
out = ray.utils.decode(
subprocess.check_output(command_args, stderr=subprocess.STDOUT))
redis_substring_prefix = "--address='"
address_location = (
out.find(redis_substring_prefix) + len(redis_substring_prefix))
address = out[address_location:]
address = address.split("'")[0]
yield address
ray.shutdown()
subprocess.check_call(["ray", "stop"])
@pytest.fixture
def call_ray_stop_only():
yield
subprocess.check_call(["ray", "stop"])
@pytest.fixture
def enable_pickle_debug():
os.environ["RAY_PICKLE_VERBOSE_DEBUG"] = "1"
yield
del os.environ["RAY_PICKLE_VERBOSE_DEBUG"]
@pytest.fixture()
def two_node_cluster():
system_config = {
"object_timeout_milliseconds": 200,
"num_heartbeats_timeout": 10,
}
cluster = ray.cluster_utils.Cluster(
head_node_args={"_system_config": system_config})
for _ in range(2):
remote_node = cluster.add_node(num_cpus=1)
ray.init(address=cluster.address)
yield cluster, remote_node
ray.shutdown()
cluster.shutdown()
@pytest.fixture()
def error_pubsub():
p = init_error_pubsub()
yield p
p.close()
@pytest.fixture()
def log_pubsub():
p = ray.worker.global_worker.redis_client.pubsub(
ignore_subscribe_messages=True)
log_channel = ray.gcs_utils.LOG_FILE_CHANNEL
p.psubscribe(log_channel)
yield p
p.close()
| true
| true
|
1c48a2bf8665c80a31e6b38f8e3c8439a55e959d
| 13,218
|
py
|
Python
|
bokeh/application/application.py
|
jensencl/bokeh_composite_formatter
|
fe497342779fe3d78a4710bd4dd6b48f2ffeb9a9
|
[
"BSD-3-Clause"
] | 1
|
2020-02-07T16:57:56.000Z
|
2020-02-07T16:57:56.000Z
|
bokeh/application/application.py
|
jakubwro/bokeh
|
950f133f033df349d6f63b9750a909d5e64de21d
|
[
"BSD-3-Clause"
] | null | null | null |
bokeh/application/application.py
|
jakubwro/bokeh
|
950f133f033df349d6f63b9750a909d5e64de21d
|
[
"BSD-3-Clause"
] | null | null | null |
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2020, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
''' Provide the ``Application`` class.
Application instances are factories for creating new Bokeh Documents.
When a Bokeh server session is initiated, the Bokeh server asks the Application
for a new Document to service the session. To do this, the Application first
creates a new empty Document, then it passes this new Document to the
``modify_document`` method of each of its handlers. When all handlers have
updated the Document, it is used to service the user session.
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
import logging # isort:skip
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
from abc import ABCMeta, abstractmethod
# Bokeh imports
from ..document import Document
from ..settings import settings
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
__all__ = (
'Application',
'ServerContext',
'SessionContext',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
class Application(object):
''' An Application is a factory for Document instances.
'''
# This is so that bokeh.io.show can check if a passed in object is an
# Application without having to import Application directly. This module
# depends on tornado and we have made a commitment that "basic" modules
# will function without bringing in tornado.
_is_a_bokeh_application_class = True
def __init__(self, *handlers, **kwargs):
''' Application factory.
Args:
handlers (seq[Handler]): List of handlers to call.
The URL is taken from the first one only.
Keyword Args:
metadata (dict): arbitrary user-supplied JSON data to make available
with the application.
The server will provide a URL ``http://applicationurl/metadata``
which returns a JSON blob of the form:
.. code-block:: json
{
"data": {
"hi": "hi",
"there": "there"
},
"url": "/myapp"
}
The user-supplied metadata is returned as-is under the
``"data"`` key in the blob.
'''
metadata = kwargs.pop('metadata', None)
if kwargs:
raise TypeError("Invalid keyword argument: %s" %
list(kwargs.keys())[0])
self._static_path = None
self._handlers = []
self._metadata = metadata
for h in handlers:
self.add(h)
# Properties --------------------------------------------------------------
@property
def handlers(self):
''' The ordered list of handlers this Application is configured with.
'''
return tuple(self._handlers)
@property
def metadata(self):
''' Arbitrary user-supplied metadata to associate with this application.
'''
return self._metadata
@property
def safe_to_fork(self):
        ''' Whether every handler in this Application is safe to fork.
        '''
return all(handler.safe_to_fork for handler in self._handlers)
@property
def static_path(self):
''' Path to any (optional) static resources specified by handlers.
'''
return self._static_path
# Public methods ----------------------------------------------------------
def add(self, handler):
''' Add a handler to the pipeline used to initialize new documents.
Args:
handler (Handler) : a handler for this Application to use to
process Documents
'''
self._handlers.append(handler)
# make sure there is at most one static path
static_paths = set(h.static_path() for h in self.handlers)
static_paths.discard(None)
if len(static_paths) > 1:
raise RuntimeError("More than one static path requested for app: %r" % list(static_paths))
elif len(static_paths) == 1:
self._static_path = static_paths.pop()
else:
self._static_path = None
def create_document(self):
''' Creates and initializes a document using the Application's handlers.
'''
doc = Document()
self.initialize_document(doc)
return doc
def initialize_document(self, doc):
''' Fills in a new document using the Application's handlers.
'''
for h in self._handlers:
# TODO (havocp) we need to check the 'failed' flag on each handler
# and build a composite error display. In develop mode, we want to
# somehow get these errors to the client.
h.modify_document(doc)
if h.failed:
log.error("Error running application handler %r: %s %s ", h, h.error, h.error_detail)
if settings.perform_document_validation():
doc.validate()
def on_server_loaded(self, server_context):
''' Invoked to execute code when a new session is created.
This method calls ``on_server_loaded`` on each handler, in order,
with the server context passed as the only argument.
'''
for h in self._handlers:
h.on_server_loaded(server_context)
def on_server_unloaded(self, server_context):
''' Invoked to execute code when the server cleanly exits. (Before
stopping the server's ``IOLoop``.)
This method calls ``on_server_unloaded`` on each handler, in order,
with the server context passed as the only argument.
.. warning::
In practice this code may not run, since servers are often killed
by a signal.
'''
for h in self._handlers:
h.on_server_unloaded(server_context)
async def on_session_created(self, session_context):
''' Invoked to execute code when a new session is created.
This method calls ``on_session_created`` on each handler, in order,
with the session context passed as the only argument.
May return a ``Future`` which will delay session creation until the
``Future`` completes.
'''
for h in self._handlers:
await h.on_session_created(session_context)
return None
async def on_session_destroyed(self, session_context):
''' Invoked to execute code when a session is destroyed.
This method calls ``on_session_destroyed`` on each handler, in order,
with the session context passed as the only argument.
Afterwards, ``session_context.destroyed`` will be ``True``.
'''
for h in self._handlers:
await h.on_session_destroyed(session_context)
return None
class ServerContext(metaclass=ABCMeta):
''' A harness for server-specific information and tasks related to
collections of Bokeh sessions.
*This base class is probably not of interest to general users.*
'''
# Properties --------------------------------------------------------------
@property
@abstractmethod
def sessions(self):
''' ``SessionContext`` instances belonging to this application.
*Subclasses must implement this method.*
'''
pass
# Public methods ----------------------------------------------------------
@abstractmethod
def add_next_tick_callback(self, callback):
''' Add a callback to be run on the next tick of the event loop.
*Subclasses must implement this method.*
Args:
callback (callable) : a callback to add
The callback will execute on the next tick of the event loop,
and should have the form ``def callback()`` (i.e. it should
not accept any arguments)
Returns:
an ID that can be used with ``remove_next_tick_callback``.
'''
pass
@abstractmethod
def add_periodic_callback(self, callback, period_milliseconds):
''' Add a callback to be run periodically until it is removed.
*Subclasses must implement this method.*
Args:
callback (callable) : a callback to add
The callback will execute periodically on the event loop
as specified, and should have the form ``def callback()``
(i.e. it should not accept any arguments)
period_milliseconds (int) : number of milliseconds to wait
between executing the callback.
Returns:
an ID that can be used with ``remove_periodic_callback``.
'''
pass
@abstractmethod
def add_timeout_callback(self, callback, timeout_milliseconds):
''' Add a callback to be run once after timeout_milliseconds.
*Subclasses must implement this method.*
Args:
callback (callable) : a callback to add
The callback will execute once on the event loop after the
timeout has passed, and should have the form ``def callback()``
(i.e. it should not accept any arguments)
timeout_milliseconds (int) : number of milliseconds to wait before
executing the callback.
Returns:
an ID that can be used with ``remove_timeout_callback``.
'''
pass
@abstractmethod
def remove_next_tick_callback(self, callback_id):
''' Remove a callback added with ``add_next_tick_callback``, before
it runs.
*Subclasses must implement this method.*
Args:
callback_id : the ID returned from ``add_next_tick_callback``
'''
pass
@abstractmethod
def remove_periodic_callback(self, callback_id):
''' Removes a callback added with ``add_periodic_callback``.
*Subclasses must implement this method.*
Args:
callback_id : the ID returned from ``add_periodic_callback``
'''
pass
@abstractmethod
def remove_timeout_callback(self, callback_id):
''' Remove a callback added with ``add_timeout_callback``, before it
runs.
*Subclasses must implement this method.*
Args:
callback_id : the ID returned from ``add_timeout_callback``
'''
pass
class SessionContext(metaclass=ABCMeta):
''' A harness for server-specific information and tasks related to
Bokeh sessions.
*This base class is probably not of interest to general users.*
'''
def __init__(self, server_context, session_id):
'''
'''
self._server_context = server_context
self._id = session_id
# Properties --------------------------------------------------------------
@property
@abstractmethod
def destroyed(self):
''' If ``True``, the session has been discarded and cannot be used.
A new session with the same ID could be created later but this instance
will not come back to life.
'''
pass
@property
def id(self):
''' The unique ID for the session associated with this context.
'''
return self._id
@property
def server_context(self):
''' The server context for this session context
'''
return self._server_context
# Public methods ----------------------------------------------------------
@abstractmethod
def with_locked_document(self, func):
''' Runs a function with the document lock held, passing the
document to the function.
*Subclasses must implement this method.*
Args:
func (callable): function that takes a single parameter (the Document)
and returns ``None`` or a ``Future``
Returns:
a ``Future`` containing the result of the function
'''
pass
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
| 31.396675
| 102
| 0.53987
|
import logging
log = logging.getLogger(__name__)
from abc import ABCMeta, abstractmethod
from ..document import Document
from ..settings import settings
__all__ = (
'Application',
'ServerContext',
'SessionContext',
)
class Application(object):
_is_a_bokeh_application_class = True
def __init__(self, *handlers, **kwargs):
metadata = kwargs.pop('metadata', None)
if kwargs:
raise TypeError("Invalid keyword argument: %s" %
list(kwargs.keys())[0])
self._static_path = None
self._handlers = []
self._metadata = metadata
for h in handlers:
self.add(h)
@property
def handlers(self):
return tuple(self._handlers)
@property
def metadata(self):
return self._metadata
@property
def safe_to_fork(self):
return all(handler.safe_to_fork for handler in self._handlers)
@property
def static_path(self):
return self._static_path
def add(self, handler):
self._handlers.append(handler)
static_paths = set(h.static_path() for h in self.handlers)
static_paths.discard(None)
if len(static_paths) > 1:
raise RuntimeError("More than one static path requested for app: %r" % list(static_paths))
elif len(static_paths) == 1:
self._static_path = static_paths.pop()
else:
self._static_path = None
def create_document(self):
doc = Document()
self.initialize_document(doc)
return doc
def initialize_document(self, doc):
for h in self._handlers:
h.modify_document(doc)
if h.failed:
log.error("Error running application handler %r: %s %s ", h, h.error, h.error_detail)
if settings.perform_document_validation():
doc.validate()
def on_server_loaded(self, server_context):
for h in self._handlers:
h.on_server_loaded(server_context)
def on_server_unloaded(self, server_context):
for h in self._handlers:
h.on_server_unloaded(server_context)
async def on_session_created(self, session_context):
for h in self._handlers:
await h.on_session_created(session_context)
return None
async def on_session_destroyed(self, session_context):
for h in self._handlers:
await h.on_session_destroyed(session_context)
return None
class ServerContext(metaclass=ABCMeta):
@property
@abstractmethod
def sessions(self):
pass
@abstractmethod
def add_next_tick_callback(self, callback):
pass
@abstractmethod
def add_periodic_callback(self, callback, period_milliseconds):
pass
@abstractmethod
def add_timeout_callback(self, callback, timeout_milliseconds):
pass
@abstractmethod
def remove_next_tick_callback(self, callback_id):
pass
@abstractmethod
def remove_periodic_callback(self, callback_id):
pass
@abstractmethod
def remove_timeout_callback(self, callback_id):
pass
class SessionContext(metaclass=ABCMeta):
def __init__(self, server_context, session_id):
self._server_context = server_context
self._id = session_id
@property
@abstractmethod
def destroyed(self):
pass
@property
def id(self):
return self._id
@property
def server_context(self):
return self._server_context
@abstractmethod
def with_locked_document(self, func):
pass
| true
| true
|
1c48a338a32c3c7e5b475c6feadb8c720cb47519
| 4,073
|
py
|
Python
|
stage/configuration/test_encrypt_and_decrypt_fields_processor.py
|
Sentienz/datacollector-tests
|
ca27988351dc3366488098b5db6c85a8be2f7b85
|
[
"Apache-2.0"
] | null | null | null |
stage/configuration/test_encrypt_and_decrypt_fields_processor.py
|
Sentienz/datacollector-tests
|
ca27988351dc3366488098b5db6c85a8be2f7b85
|
[
"Apache-2.0"
] | 1
|
2019-04-24T11:06:38.000Z
|
2019-04-24T11:06:38.000Z
|
stage/configuration/test_encrypt_and_decrypt_fields_processor.py
|
anubandhan/datacollector-tests
|
301c024c66d68353735256b262b681dd05ba16cc
|
[
"Apache-2.0"
] | 2
|
2019-05-24T06:34:37.000Z
|
2020-03-30T11:48:18.000Z
|
import pytest
from streamsets.testframework.decorators import stub
@stub
@pytest.mark.parametrize('stage_attributes', [{'master_key_provider': 'AWS_KMS'}])
def test_access_key_id(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'master_key_provider': 'USER'}])
def test_base64_encoded_key(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_key_caching': True, 'mode': 'ENCRYPT'}])
def test_cache_capacity(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'cipher': 'ALG_AES_128_GCM_IV12_TAG16_HKDF_SHA256'},
{'cipher': 'ALG_AES_128_GCM_IV12_TAG16_HKDF_SHA256_ECDSA_P256'},
{'cipher': 'ALG_AES_128_GCM_IV12_TAG16_NO_KDF'},
{'cipher': 'ALG_AES_192_GCM_IV12_TAG16_HKDF_SHA256'},
{'cipher': 'ALG_AES_192_GCM_IV12_TAG16_HKDF_SHA384_ECDSA_P384'},
{'cipher': 'ALG_AES_192_GCM_IV12_TAG16_NO_KDF'},
{'cipher': 'ALG_AES_256_GCM_IV12_TAG16_HKDF_SHA256'},
{'cipher': 'ALG_AES_256_GCM_IV12_TAG16_HKDF_SHA384_ECDSA_P384'},
{'cipher': 'ALG_AES_256_GCM_IV12_TAG16_NO_KDF'}])
def test_cipher(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_key_caching': False, 'mode': 'ENCRYPT'},
{'data_key_caching': True, 'mode': 'ENCRYPT'}])
def test_data_key_caching(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'mode': 'ENCRYPT'}])
def test_encryption_context_in_aad(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_fields(sdc_builder, sdc_executor):
pass
@stub
def test_frame_size(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'master_key_provider': 'USER'}])
def test_key_id_in_optional(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'master_key_provider': 'AWS_KMS'}])
def test_kms_key_arn(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'master_key_provider': 'AWS_KMS'}, {'master_key_provider': 'USER'}])
def test_master_key_provider(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_key_caching': True, 'mode': 'ENCRYPT'}])
def test_max_bytes_per_data_key(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_key_caching': True, 'mode': 'ENCRYPT'}])
def test_max_data_key_age(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_key_caching': True, 'mode': 'ENCRYPT'}])
def test_max_records_per_data_key(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'mode': 'DECRYPT'}, {'mode': 'ENCRYPT'}])
def test_mode(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'on_record_error': 'DISCARD'},
{'on_record_error': 'STOP_PIPELINE'},
{'on_record_error': 'TO_ERROR'}])
def test_on_record_error(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_preconditions(sdc_builder, sdc_executor):
pass
@stub
def test_required_fields(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'master_key_provider': 'AWS_KMS'}])
def test_secret_access_key(sdc_builder, sdc_executor, stage_attributes):
pass
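# A sketch of how the parametrization above expands (the collected test ids below
# illustrate pytest's defaults and are not taken from the suite): each dict in the
# list passed to @pytest.mark.parametrize('stage_attributes', [...]) yields one
# collected test with that dict bound to the `stage_attributes` argument, e.g.
# test_mode[stage_attributes0] runs with {'mode': 'DECRYPT'} and
# test_mode[stage_attributes1] runs with {'mode': 'ENCRYPT'}.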
| 32.325397
| 115
| 0.676897
|
import pytest
from streamsets.testframework.decorators import stub
@stub
@pytest.mark.parametrize('stage_attributes', [{'master_key_provider': 'AWS_KMS'}])
def test_access_key_id(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'master_key_provider': 'USER'}])
def test_base64_encoded_key(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_key_caching': True, 'mode': 'ENCRYPT'}])
def test_cache_capacity(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'cipher': 'ALG_AES_128_GCM_IV12_TAG16_HKDF_SHA256'},
{'cipher': 'ALG_AES_128_GCM_IV12_TAG16_HKDF_SHA256_ECDSA_P256'},
{'cipher': 'ALG_AES_128_GCM_IV12_TAG16_NO_KDF'},
{'cipher': 'ALG_AES_192_GCM_IV12_TAG16_HKDF_SHA256'},
{'cipher': 'ALG_AES_192_GCM_IV12_TAG16_HKDF_SHA384_ECDSA_P384'},
{'cipher': 'ALG_AES_192_GCM_IV12_TAG16_NO_KDF'},
{'cipher': 'ALG_AES_256_GCM_IV12_TAG16_HKDF_SHA256'},
{'cipher': 'ALG_AES_256_GCM_IV12_TAG16_HKDF_SHA384_ECDSA_P384'},
{'cipher': 'ALG_AES_256_GCM_IV12_TAG16_NO_KDF'}])
def test_cipher(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_key_caching': False, 'mode': 'ENCRYPT'},
{'data_key_caching': True, 'mode': 'ENCRYPT'}])
def test_data_key_caching(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'mode': 'ENCRYPT'}])
def test_encryption_context_in_aad(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_fields(sdc_builder, sdc_executor):
pass
@stub
def test_frame_size(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'master_key_provider': 'USER'}])
def test_key_id_in_optional(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'master_key_provider': 'AWS_KMS'}])
def test_kms_key_arn(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'master_key_provider': 'AWS_KMS'}, {'master_key_provider': 'USER'}])
def test_master_key_provider(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_key_caching': True, 'mode': 'ENCRYPT'}])
def test_max_bytes_per_data_key(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_key_caching': True, 'mode': 'ENCRYPT'}])
def test_max_data_key_age(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_key_caching': True, 'mode': 'ENCRYPT'}])
def test_max_records_per_data_key(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'mode': 'DECRYPT'}, {'mode': 'ENCRYPT'}])
def test_mode(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'on_record_error': 'DISCARD'},
{'on_record_error': 'STOP_PIPELINE'},
{'on_record_error': 'TO_ERROR'}])
def test_on_record_error(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_preconditions(sdc_builder, sdc_executor):
pass
@stub
def test_required_fields(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'master_key_provider': 'AWS_KMS'}])
def test_secret_access_key(sdc_builder, sdc_executor, stage_attributes):
pass
| true
| true
|
1c48a343d81bb32c43a561e24e4152b81e1da4b9
| 534
|
py
|
Python
|
day_3_p2.py
|
96arjan/AdventOfCode
|
12f7e9fc1af37ba540b06ff4769a772acacca3cb
|
[
"MIT"
] | null | null | null |
day_3_p2.py
|
96arjan/AdventOfCode
|
12f7e9fc1af37ba540b06ff4769a772acacca3cb
|
[
"MIT"
] | null | null | null |
day_3_p2.py
|
96arjan/AdventOfCode
|
12f7e9fc1af37ba540b06ff4769a772acacca3cb
|
[
"MIT"
] | null | null | null |
report = open(r"./data/day_3.txt", "r").read().split("\n")[:-1]
def do_recur(report, index, co2):
if len(report) == 1: return report[0]
ones, zeros, count = [], [], 0
for num in report:
if int(num[index]): count, ones = count + 1, ones + [num]
else: count, zeros = count - 1, zeros + [num]
if co2: count = - (count + 1)
if count >= 0: report = ones
else: report = zeros
return do_recur(report, index + 1, co2)
print(int(do_recur(report, 0, True), 2) * int(do_recur(report, 0, False), 2))
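# A small worked example (the sample diagnostic report from the puzzle statement,
# used here only for illustration): the oxygen rating keeps the most common bit at
# each index, the CO2 rating the least common, and the answer is their product.
sample = ['00100', '11110', '10110', '10111', '10101', '01111',
          '00111', '11100', '10000', '11001', '00010', '01010']
assert do_recur(sample, 0, False) == '10111'  # oxygen generator rating = 23
assert do_recur(sample, 0, True) == '01010'   # CO2 scrubber rating = 10
assert int('10111', 2) * int('01010', 2) == 230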
| 35.6
| 77
| 0.574906
|
report = open(r"./data/day_3.txt", "r").read().split("\n")[:-1]
def do_recur(report, index, co2):
if len(report) == 1: return report[0]
ones, zeros, count = [], [], 0
for num in report:
if int(num[index]): count, ones = count + 1, ones + [num]
else: count, zeros = count - 1, zeros + [num]
if co2: count = - (count + 1)
if count >= 0: report = ones
else: report = zeros
return do_recur(report, index + 1, co2)
print(int(do_recur(report, 0, True), 2) * int(do_recur(report, 0, False), 2))
| true
| true
|
1c48a4221dda8e843fd3d8133c8d23f341c15095
| 6,335
|
py
|
Python
|
data/external/repositories_2to3/139781/ndsb-master/caffe-dev/python/caffe/draw.py
|
Keesiu/meta-kaggle
|
87de739aba2399fd31072ee81b391f9b7a63f540
|
[
"MIT"
] | null | null | null |
data/external/repositories_2to3/139781/ndsb-master/caffe-dev/python/caffe/draw.py
|
Keesiu/meta-kaggle
|
87de739aba2399fd31072ee81b391f9b7a63f540
|
[
"MIT"
] | null | null | null |
data/external/repositories_2to3/139781/ndsb-master/caffe-dev/python/caffe/draw.py
|
Keesiu/meta-kaggle
|
87de739aba2399fd31072ee81b391f9b7a63f540
|
[
"MIT"
] | 1
|
2019-12-04T08:23:33.000Z
|
2019-12-04T08:23:33.000Z
|
"""
Caffe network visualization: draw the NetParameter protobuffer.
NOTE: this requires pydot>=1.0.2, which is not included in requirements.txt
since it requires graphviz and other prerequisites outside the scope of
Caffe.
"""
from caffe.proto import caffe_pb2
from google.protobuf import text_format
import pydot
# Internal layer and blob styles.
LAYER_STYLE_DEFAULT = {'shape': 'record', 'fillcolor': '#6495ED',
'style': 'filled'}
NEURON_LAYER_STYLE = {'shape': 'record', 'fillcolor': '#90EE90',
'style': 'filled'}
BLOB_STYLE = {'shape': 'octagon', 'fillcolor': '#E0E0E0',
'style': 'filled'}
def get_enum_name_by_value():
desc = caffe_pb2.LayerParameter.LayerType.DESCRIPTOR
d = {}
for k,v in list(desc.values_by_name.items()):
d[v.number] = k
return d
def get_pooling_types_dict():
"""Get dictionary mapping pooling type number to type name
"""
desc = caffe_pb2.PoolingParameter.PoolMethod.DESCRIPTOR
d = {}
for k,v in list(desc.values_by_name.items()):
d[v.number] = k
return d
def determine_edge_label_by_layertype(layer, layertype):
"""Define edge label based on layer type
"""
if layertype == 'DATA':
edge_label = 'Batch ' + str(layer.data_param.batch_size)
elif layertype == 'CONVOLUTION':
edge_label = str(layer.convolution_param.num_output)
elif layertype == 'INNER_PRODUCT':
edge_label = str(layer.inner_product_param.num_output)
else:
edge_label = '""'
return edge_label
def determine_node_label_by_layertype(layer, layertype, rankdir):
"""Define node label based on layer type
"""
if rankdir in ('TB', 'BT'):
# If graph orientation is vertical, horizontal space is free and
# vertical space is not; separate words with spaces
separator = ' '
else:
# If graph orientation is horizontal, vertical space is free and
# horizontal space is not; separate words with newlines
separator = '\n'
if layertype == 'CONVOLUTION':
# Outer double quotes needed or else colon characters don't parse
# properly
node_label = '"%s%s(%s)%skernel size: %d%sstride: %d%spad: %d"' %\
(layer.name,
separator,
layertype,
separator,
layer.convolution_param.kernel_size,
separator,
layer.convolution_param.stride,
separator,
layer.convolution_param.pad)
elif layertype == 'POOLING':
pooling_types_dict = get_pooling_types_dict()
node_label = '"%s%s(%s %s)%skernel size: %d%sstride: %d%spad: %d"' %\
(layer.name,
separator,
pooling_types_dict[layer.pooling_param.pool],
layertype,
separator,
layer.pooling_param.kernel_size,
separator,
layer.pooling_param.stride,
separator,
layer.pooling_param.pad)
else:
node_label = '"%s%s(%s)"' % (layer.name, separator, layertype)
return node_label
def choose_color_by_layertype(layertype):
"""Define colors for nodes based on the layer type
"""
color = '#6495ED' # Default
if layertype == 'CONVOLUTION':
color = '#FF5050'
elif layertype == 'POOLING':
color = '#FF9900'
elif layertype == 'INNER_PRODUCT':
color = '#CC33FF'
return color
def get_pydot_graph(caffe_net, rankdir, label_edges=True):
pydot_graph = pydot.Dot(caffe_net.name, graph_type='digraph', rankdir=rankdir)
pydot_nodes = {}
pydot_edges = []
d = get_enum_name_by_value()
for layer in caffe_net.layers:
name = layer.name
layertype = d[layer.type]
node_label = determine_node_label_by_layertype(layer, layertype, rankdir)
if (len(layer.bottom) == 1 and len(layer.top) == 1 and
layer.bottom[0] == layer.top[0]):
# We have an in-place neuron layer.
pydot_nodes[name + '_' + layertype] = pydot.Node(
node_label, **NEURON_LAYER_STYLE)
else:
layer_style = LAYER_STYLE_DEFAULT
layer_style['fillcolor'] = choose_color_by_layertype(layertype)
pydot_nodes[name + '_' + layertype] = pydot.Node(
node_label, **layer_style)
for bottom_blob in layer.bottom:
pydot_nodes[bottom_blob + '_blob'] = pydot.Node(
'%s' % (bottom_blob), **BLOB_STYLE)
edge_label = '""'
pydot_edges.append({'src': bottom_blob + '_blob',
'dst': name + '_' + layertype,
'label': edge_label})
for top_blob in layer.top:
pydot_nodes[top_blob + '_blob'] = pydot.Node(
'%s' % (top_blob))
if label_edges:
edge_label = determine_edge_label_by_layertype(layer, layertype)
else:
edge_label = '""'
pydot_edges.append({'src': name + '_' + layertype,
'dst': top_blob + '_blob',
'label': edge_label})
# Now, add the nodes and edges to the graph.
for node in list(pydot_nodes.values()):
pydot_graph.add_node(node)
for edge in pydot_edges:
pydot_graph.add_edge(
pydot.Edge(pydot_nodes[edge['src']], pydot_nodes[edge['dst']],
label=edge['label']))
return pydot_graph
def draw_net(caffe_net, rankdir, ext='png'):
"""Draws a caffe net and returns the image string encoded using the given
extension.
Input:
caffe_net: a caffe.proto.caffe_pb2.NetParameter protocol buffer.
ext: the image extension. Default 'png'.
"""
return get_pydot_graph(caffe_net, rankdir).create(format=ext)
def draw_net_to_file(caffe_net, filename, rankdir='LR'):
"""Draws a caffe net, and saves it to file using the format given as the
file extension. Use '.raw' to output raw text that you can manually feed
to graphviz to draw graphs.
"""
ext = filename[filename.rfind('.')+1:]
with open(filename, 'wb') as fid:
fid.write(draw_net(caffe_net, rankdir, ext))
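# A minimal usage sketch (the prototxt and output filenames are hypothetical):
# parse a network definition with the protobuf text format, then render it.
if __name__ == '__main__':
    example_net = caffe_pb2.NetParameter()
    with open('deploy.prototxt') as fid:
        text_format.Merge(fid.read(), example_net)
    draw_net_to_file(example_net, 'net.png', rankdir='LR')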
| 36.408046
| 81
| 0.599842
|
from caffe.proto import caffe_pb2
from google.protobuf import text_format
import pydot
LAYER_STYLE_DEFAULT = {'shape': 'record', 'fillcolor': '#6495ED',
'style': 'filled'}
NEURON_LAYER_STYLE = {'shape': 'record', 'fillcolor': '#90EE90',
'style': 'filled'}
BLOB_STYLE = {'shape': 'octagon', 'fillcolor': '#E0E0E0',
'style': 'filled'}
def get_enum_name_by_value():
desc = caffe_pb2.LayerParameter.LayerType.DESCRIPTOR
d = {}
for k,v in list(desc.values_by_name.items()):
d[v.number] = k
return d
def get_pooling_types_dict():
desc = caffe_pb2.PoolingParameter.PoolMethod.DESCRIPTOR
d = {}
for k,v in list(desc.values_by_name.items()):
d[v.number] = k
return d
def determine_edge_label_by_layertype(layer, layertype):
if layertype == 'DATA':
edge_label = 'Batch ' + str(layer.data_param.batch_size)
elif layertype == 'CONVOLUTION':
edge_label = str(layer.convolution_param.num_output)
elif layertype == 'INNER_PRODUCT':
edge_label = str(layer.inner_product_param.num_output)
else:
edge_label = '""'
return edge_label
def determine_node_label_by_layertype(layer, layertype, rankdir):
if rankdir in ('TB', 'BT'):
separator = ' '
else:
separator = '\n'
if layertype == 'CONVOLUTION':
        # Outer double quotes needed or else colon characters don't parse properly
node_label = '"%s%s(%s)%skernel size: %d%sstride: %d%spad: %d"' %\
(layer.name,
separator,
layertype,
separator,
layer.convolution_param.kernel_size,
separator,
layer.convolution_param.stride,
separator,
layer.convolution_param.pad)
elif layertype == 'POOLING':
pooling_types_dict = get_pooling_types_dict()
node_label = '"%s%s(%s %s)%skernel size: %d%sstride: %d%spad: %d"' %\
(layer.name,
separator,
pooling_types_dict[layer.pooling_param.pool],
layertype,
separator,
layer.pooling_param.kernel_size,
separator,
layer.pooling_param.stride,
separator,
layer.pooling_param.pad)
else:
node_label = '"%s%s(%s)"' % (layer.name, separator, layertype)
return node_label
def choose_color_by_layertype(layertype):
    color = '#6495ED'
    if layertype == 'CONVOLUTION':
        color = '#FF5050'
    elif layertype == 'POOLING':
        color = '#FF9900'
    elif layertype == 'INNER_PRODUCT':
        color = '#CC33FF'
return color
def get_pydot_graph(caffe_net, rankdir, label_edges=True):
pydot_graph = pydot.Dot(caffe_net.name, graph_type='digraph', rankdir=rankdir)
pydot_nodes = {}
pydot_edges = []
d = get_enum_name_by_value()
for layer in caffe_net.layers:
name = layer.name
layertype = d[layer.type]
node_label = determine_node_label_by_layertype(layer, layertype, rankdir)
if (len(layer.bottom) == 1 and len(layer.top) == 1 and
layer.bottom[0] == layer.top[0]):
# We have an in-place neuron layer.
pydot_nodes[name + '_' + layertype] = pydot.Node(
node_label, **NEURON_LAYER_STYLE)
else:
layer_style = LAYER_STYLE_DEFAULT
layer_style['fillcolor'] = choose_color_by_layertype(layertype)
pydot_nodes[name + '_' + layertype] = pydot.Node(
node_label, **layer_style)
for bottom_blob in layer.bottom:
pydot_nodes[bottom_blob + '_blob'] = pydot.Node(
'%s' % (bottom_blob), **BLOB_STYLE)
edge_label = '""'
pydot_edges.append({'src': bottom_blob + '_blob',
'dst': name + '_' + layertype,
'label': edge_label})
for top_blob in layer.top:
pydot_nodes[top_blob + '_blob'] = pydot.Node(
'%s' % (top_blob))
if label_edges:
edge_label = determine_edge_label_by_layertype(layer, layertype)
else:
edge_label = '""'
pydot_edges.append({'src': name + '_' + layertype,
'dst': top_blob + '_blob',
'label': edge_label})
# Now, add the nodes and edges to the graph.
for node in list(pydot_nodes.values()):
pydot_graph.add_node(node)
for edge in pydot_edges:
pydot_graph.add_edge(
pydot.Edge(pydot_nodes[edge['src']], pydot_nodes[edge['dst']],
label=edge['label']))
return pydot_graph
def draw_net(caffe_net, rankdir, ext='png'):
return get_pydot_graph(caffe_net, rankdir).create(format=ext)
def draw_net_to_file(caffe_net, filename, rankdir='LR'):
ext = filename[filename.rfind('.')+1:]
with open(filename, 'wb') as fid:
fid.write(draw_net(caffe_net, rankdir, ext))
| true
| true
|
1c48a5349351b9ed7a38ac04c2382b185a8a4863
| 164
|
py
|
Python
|
categorias/iniciante/python/1013.py
|
carlos3g/URI-solutions
|
dc7f9b896cdff88aedf67611917b178d3ad60ab3
|
[
"MIT"
] | 1
|
2022-01-26T23:38:17.000Z
|
2022-01-26T23:38:17.000Z
|
categorias/iniciante/python/1013.py
|
carlos3g/URI-solutions
|
dc7f9b896cdff88aedf67611917b178d3ad60ab3
|
[
"MIT"
] | 1
|
2020-07-12T00:49:35.000Z
|
2021-06-26T20:53:18.000Z
|
categorias/iniciante/python/1013.py
|
carlos3g/URI-solutions
|
dc7f9b896cdff88aedf67611917b178d3ad60ab3
|
[
"MIT"
] | 1
|
2020-07-04T03:27:04.000Z
|
2020-07-04T03:27:04.000Z
|
# -*- coding: utf-8 -*-
a, b, c = map(int, input().split())
maior = (a+b+abs(a-b))//2
resultado = (maior+c+abs(maior-c))//2
print('{} eh o maior'.format(resultado))
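# The branchless max above uses the identity max(x, y) = (x + y + |x - y|) / 2:
# when x >= y, |x - y| = x - y and the numerator collapses to 2x; otherwise to 2y.
# e.g. with a, b, c = 7, 3, 9: maior = (7 + 3 + 4) // 2 = 7 and
# resultado = (7 + 9 + 2) // 2 = 9.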
| 23.428571
| 40
| 0.579268
|
a, b, c = map(int, input().split())
maior = (a+b+abs(a-b))//2
resultado = (maior+c+abs(maior-c))//2
print('{} eh o maior'.format(resultado))
| true
| true
|
1c48a59a6c3513233dcdc72f1f349d057f1c6cb0
| 4,298
|
py
|
Python
|
dashlivesim/tests/test_startnr.py
|
Dash-Industry-Forum/dash-live-source-simulator
|
23cb15c35656a731d9f6d78a30f2713eff2ec20d
|
[
"BSD-3-Clause"
] | 133
|
2015-04-26T04:37:35.000Z
|
2022-02-27T16:37:40.000Z
|
dashlivesim/tests/test_startnr.py
|
Mani5GRockers/dash-live-source-simulator
|
747cb23ba130049fd01c6446533f8dc1d3e9f62a
|
[
"BSD-3-Clause"
] | 83
|
2015-05-18T07:56:07.000Z
|
2022-01-26T16:09:25.000Z
|
dashlivesim/tests/test_startnr.py
|
Mani5GRockers/dash-live-source-simulator
|
747cb23ba130049fd01c6446533f8dc1d3e9f62a
|
[
"BSD-3-Clause"
] | 49
|
2015-06-29T22:46:21.000Z
|
2022-01-07T14:32:34.000Z
|
# The copyright in this software is being made available under the BSD License,
# included below. This software may be subject to other third party and contributor
# rights, including patent rights, and no such rights are granted under this license.
#
# Copyright (c) 2015, Dash Industry Forum.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
# * Neither the name of Dash Industry Forum nor the names of its
# contributors may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import unittest
from os.path import join
from dashlivesim.tests.dash_test_util import OUT_DIR
from dashlivesim.dashlib import dash_proxy, mpd_proxy
from dashlivesim.tests.dash_test_util import VOD_CONFIG_DIR, CONTENT_ROOT
from dashlivesim.tests.dash_test_util import findAllIndexes
class TestMpdChange(unittest.TestCase):
"Test that MPD gets startNr changed in an appropriate way"
def testMpdWithNormalStartNr(self):
"Check that startNumber=0."
urlParts = ['pdash', 'testpic', 'Manifest.mpd']
dp = dash_proxy.DashProvider("streamtest.eu", urlParts, None, VOD_CONFIG_DIR, CONTENT_ROOT, now=0)
d = mpd_proxy.get_mpd(dp)
with open(join(OUT_DIR, 'tmp.mpd'), 'wb') as ofh:
ofh.write(d.encode('utf-8'))
self.assertEqual(len(findAllIndexes('startNumber="0"', d)), 2)
self.assertTrue(d.find('availabilityStartTime="1970-01-01T00:00:00Z"') > 0)
    def testMpdWithStartNrIs111(self):
"Check that startNumber=111."
urlParts = ['pdash', 'snr_111', 'testpic', 'Manifest.mpd']
dp = dash_proxy.DashProvider("streamtest.eu", urlParts, None, VOD_CONFIG_DIR, CONTENT_ROOT, now=0)
d = mpd_proxy.get_mpd(dp)
self.assertEqual(len(findAllIndexes('startNumber="111"', d)), 2)
self.assertTrue(d.find('availabilityStartTime="1970-01-01T00:00:00Z"') > 0)
def testMpdWithStartNrIs1(self):
"Check that startNumber=1."
urlParts = ['pdash', 'snr_1', 'testpic', 'Manifest.mpd']
dp = dash_proxy.DashProvider("streamtest.eu", urlParts, None, VOD_CONFIG_DIR, CONTENT_ROOT, now=0)
d = mpd_proxy.get_mpd(dp)
self.assertEqual(len(findAllIndexes('startNumber="1"', d)), 2)
self.assertTrue(d.find('availabilityStartTime="1970-01-01T00:00:00Z"') > 0)
def testMpdWithImplicitStartNr(self):
"Check that startNumber is not present in MPD."
urlParts = ['pdash', 'snr_-1', 'testpic', 'Manifest.mpd']
dp = dash_proxy.DashProvider("streamtest.eu", urlParts, None, VOD_CONFIG_DIR, CONTENT_ROOT, now=0)
d = mpd_proxy.get_mpd(dp)
self.assertTrue(d.find('startNumber=') < 0)
self.assertTrue(d.find('availabilityStartTime="1970-01-01T00:00:00Z"') > 0)
# Could add tests to check availability time of segments depending on startNr
# Add test to check if segmentNumber and tfdt are OK depending on startNr.
# Just running the reference player with different values seems to show that it is working properly, though.
| 54.405063
| 108
| 0.734528
|
import unittest
from os.path import join
from dashlivesim.tests.dash_test_util import OUT_DIR
from dashlivesim.dashlib import dash_proxy, mpd_proxy
from dashlivesim.tests.dash_test_util import VOD_CONFIG_DIR, CONTENT_ROOT
from dashlivesim.tests.dash_test_util import findAllIndexes
class TestMpdChange(unittest.TestCase):
def testMpdWithNormalStartNr(self):
urlParts = ['pdash', 'testpic', 'Manifest.mpd']
dp = dash_proxy.DashProvider("streamtest.eu", urlParts, None, VOD_CONFIG_DIR, CONTENT_ROOT, now=0)
d = mpd_proxy.get_mpd(dp)
with open(join(OUT_DIR, 'tmp.mpd'), 'wb') as ofh:
ofh.write(d.encode('utf-8'))
self.assertEqual(len(findAllIndexes('startNumber="0"', d)), 2)
self.assertTrue(d.find('availabilityStartTime="1970-01-01T00:00:00Z"') > 0)
    def testMpdWithStartNrIs111(self):
urlParts = ['pdash', 'snr_111', 'testpic', 'Manifest.mpd']
dp = dash_proxy.DashProvider("streamtest.eu", urlParts, None, VOD_CONFIG_DIR, CONTENT_ROOT, now=0)
d = mpd_proxy.get_mpd(dp)
self.assertEqual(len(findAllIndexes('startNumber="111"', d)), 2)
self.assertTrue(d.find('availabilityStartTime="1970-01-01T00:00:00Z"') > 0)
def testMpdWithStartNrIs1(self):
urlParts = ['pdash', 'snr_1', 'testpic', 'Manifest.mpd']
dp = dash_proxy.DashProvider("streamtest.eu", urlParts, None, VOD_CONFIG_DIR, CONTENT_ROOT, now=0)
d = mpd_proxy.get_mpd(dp)
self.assertEqual(len(findAllIndexes('startNumber="1"', d)), 2)
self.assertTrue(d.find('availabilityStartTime="1970-01-01T00:00:00Z"') > 0)
def testMpdWithImplicitStartNr(self):
urlParts = ['pdash', 'snr_-1', 'testpic', 'Manifest.mpd']
dp = dash_proxy.DashProvider("streamtest.eu", urlParts, None, VOD_CONFIG_DIR, CONTENT_ROOT, now=0)
d = mpd_proxy.get_mpd(dp)
self.assertTrue(d.find('startNumber=') < 0)
self.assertTrue(d.find('availabilityStartTime="1970-01-01T00:00:00Z"') > 0)
| true
| true
|
1c48a5b2e1b795384dc2e165d3eec19397fae69f
| 26,141
|
py
|
Python
|
pyia/data.py
|
adrn/gaia
|
dac05003f7952af88697b271295a90bb0df091ec
|
[
"MIT"
] | 15
|
2018-04-24T17:14:26.000Z
|
2021-05-14T19:28:10.000Z
|
pyia/data.py
|
adrn/gaia
|
dac05003f7952af88697b271295a90bb0df091ec
|
[
"MIT"
] | 4
|
2019-03-11T22:59:36.000Z
|
2021-10-17T14:37:24.000Z
|
pyia/data.py
|
adrn/pyia
|
dac05003f7952af88697b271295a90bb0df091ec
|
[
"MIT"
] | 7
|
2018-04-24T04:15:34.000Z
|
2021-10-15T21:14:59.000Z
|
# coding: utf-8
""" Data structures. """
# Standard library
import pathlib
# Third-party
import astropy.coordinates as coord
from astropy.table import Table, Column
from astropy.time import Time
import astropy.units as u
import numpy as np
from .extinction import get_ext
from .ruwetools import U0Interpolator
__all__ = ['GaiaData']
# This is from reading the data model
gaia_unit_map = {
'ra': u.degree,
'dec': u.degree,
'parallax': u.milliarcsecond,
'pmra': u.milliarcsecond / u.year,
'pmdec': u.milliarcsecond / u.year,
'radial_velocity': u.km / u.s,
'ra_error': u.milliarcsecond,
'dec_error': u.milliarcsecond,
'parallax_error': u.milliarcsecond,
'pmra_error': u.milliarcsecond / u.year,
'pmdec_error': u.milliarcsecond / u.year,
'radial_velocity_error': u.km / u.s,
'astrometric_excess_noise': u.mas,
'astrometric_weight_al': 1/u.mas**2,
'astrometric_pseudo_colour': 1/u.micrometer,
'astrometric_pseudo_colour_error': 1/u.micrometer,
'astrometric_sigma5d_max': u.mas,
'phot_g_mean_flux': u.photon/u.s,
'phot_g_mean_flux_error': u.photon/u.s,
'phot_g_mean_mag': u.mag,
'phot_bp_mean_flux': u.photon/u.s,
'phot_bp_mean_flux_error': u.photon/u.s,
'phot_bp_mean_mag': u.mag,
'phot_rp_mean_flux': u.photon/u.s,
'phot_rp_mean_flux_error': u.photon/u.s,
'phot_rp_mean_mag': u.mag,
'bp_rp': u.mag,
'bp_g': u.mag,
'g_rp': u.mag,
'rv_template_teff': u.K,
'l': u.degree,
'b': u.degree,
'ecl_lon': u.degree,
'ecl_lat': u.degree,
'teff_val': u.K,
'teff_percentile_lower': u.K,
'teff_percentile_upper': u.K,
'a_g_val': u.mag,
'a_g_percentile_lower': u.mag,
'a_g_percentile_upper': u.mag,
'e_bp_min_rp_val': u.mag,
'e_bp_min_rp_percentile_lower': u.mag,
'e_bp_min_rp_percentile_upper': u.mag,
'radius_val': u.Rsun,
'radius_percentile_lower': u.Rsun,
'radius_percentile_upper': u.Rsun,
'lum_val': u.Lsun,
'lum_percentile_lower': u.Lsun,
'lum_percentile_upper': u.Lsun,
'ref_epoch': u.year
}
REF_EPOCH = {
'DR2': Time(2015.5, format='jyear'),
'EDR3': Time(2016.0, format='jyear')
}
LATEST_RELEASE = 'EDR3'
class GaiaData:
"""Class for loading and interacting with data from the Gaia mission. This
    should work with data from any data release, e.g., DR1 gaia_source or TGAS,
or DR2 gaia_source, or EDR3 gaia_source.
Parameters
----------
data : `astropy.table.Table`, `pandas.DataFrame`, dict_like, str
This must be pre-loaded data as any of the types listed above, or a
string filename containing a table that is readable by
`astropy.table.Table.read`.
"""
def __init__(self, data, **kwargs):
if not isinstance(data, Table):
if isinstance(data, (str, pathlib.Path)):
data = Table.read(data, **kwargs)
else:
# the dict-like object might have Quantity's, so we want to
# preserve any units
data = Table(data, **kwargs)
# HACK: make sure table isn't masked, until astropy supports masked
# quantities
if data.masked:
cols = []
for c in data.colnames:
col = data[c]
col.mask = None
cols.append(Column(col))
data = Table(cols, copy=False)
# Create a copy of the default unit map
self.units = gaia_unit_map.copy()
# Store the source table
self.data = data
# Update the unit map with the table units
self._invalid_units = dict()
for c in data.colnames:
if data[c].unit is not None:
try:
self.units[c] = u.Unit(str(data[c].unit))
except ValueError:
self._invalid_units[c] = data[c].unit
# HACK: hard coded
self._has_rv = ('radial_velocity' in self.data.colnames or
'dr2_radial_velocity' in self.data.colnames)
# For caching later
self._cache = dict()
@classmethod
def from_query(cls, query_str, login_info=None, verbose=False):
"""
Run the specified query and return a `GaiaData` instance with the
returned data.
This is meant only to be used for quick queries to the main Gaia science
archive. For longer queries and more customized usage, use TAP access to
any of the Gaia mirrors with, e.g., astroquery or pyvo.
This requires ``astroquery`` to be installed.
Parameters
----------
query_str : str
The string ADQL query to execute.
login_info : dict, optional
Username and password for the Gaia science archive as keys "user"
and "password". If not specified, will use anonymous access, subject
to the query limits.
Returns
-------
gaiadata : `GaiaData`
An instance of this object.
"""
try:
from astroquery.gaia import Gaia
except ImportError:
raise ImportError('Failed to import astroquery. To use the '
'from_query() classmethod, you must first'
' install astroquery, e.g., with pip: '
'\n\tpip install astroquery')
if login_info is not None:
Gaia.login(**login_info)
job = Gaia.launch_job_async(query_str, verbose=verbose)
tbl = job.get_results()
return cls(tbl)
@classmethod
def from_source_id(cls, source_id, source_id_dr=None, data_dr=None,
**kwargs):
"""Retrieve data from a DR for a given Gaia source_id in a DR.
Useful if you have, e.g., a DR2 source_id and want EDR3 data.
Parameters
----------
source_id : int
The Gaia source_id
source_id_dr : str, optional
The data release slug (e.g., 'dr2' or 'edr3') for the input
source_id. Defaults to the latest data release.
data_dr : str, optional
The data release slug (e.g., 'dr2' or 'edr3') to retrieve data from.
Defaults to the latest data release.
**kwargs
Passed to ``from_query()``
Returns
-------
gaiadata : `GaiaData`
An instance of this object.
"""
join_tables = {
'dr1': {'dr2': "gaiadr2.dr1_neighbourhood"},
'dr2': {'edr3': "gaiaedr3.dr2_neighbourhood"},
}
source_id_prefixes = {
'dr1': 'dr1',
'dr2': 'dr2',
'edr3': 'dr3'
}
if source_id_dr is None:
source_id_dr = LATEST_RELEASE.lower()
if data_dr is None:
data_dr = LATEST_RELEASE.lower()
if source_id_dr == data_dr:
query_str = f"""
SELECT * FROM gaia{data_dr}.gaia_source AS gaia
WHERE gaia.source_id = {source_id}
"""
return cls.from_query(query_str, **kwargs)
dr1, dr2 = sorted([source_id_dr, data_dr])
try:
join_table = join_tables[dr1][dr2]
source_id_pref = source_id_prefixes[source_id_dr]
data_pref = source_id_prefixes[data_dr]
except KeyError:
raise KeyError(f"Failed to find join table for {source_id_dr} "
f"to {data_dr}")
query_str = f"""
SELECT * FROM gaia{data_dr}.gaia_source AS gaia
JOIN {join_table} AS old_gaia
ON gaia.source_id = old_gaia.{data_pref}_source_id
WHERE old_gaia.{source_id_pref}_source_id = {source_id}
"""
return cls.from_query(query_str, **kwargs)
##########################################################################
# Python internal
#
def __getattr__(self, name):
# to prevent recursion errors:
# nedbatchelder.com/blog/201010/surprising_getattr_recursion.html
if name in ['data', 'units']:
raise AttributeError()
lookup_name = name
if name.startswith('radial_velocity'):
# HACK: this should be more general...
if ('radial_velocity' not in self.data.colnames
and 'dr2_radial_velocity' in self.data.colnames):
lookup_name = f'dr2_{name}'
coldata = self.data[lookup_name]
if hasattr(coldata, 'mask') and coldata.mask is not None:
arr = coldata.filled(np.nan)
else:
arr = coldata
arr = np.asarray(arr)
if name in self.units:
return arr * self.units[name]
else:
return arr
def __setattr__(self, name, val):
if name in ['data', 'units']:
# needs to be here to catch the first time we enter this func.
super().__setattr__(name, val)
elif name in self.units:
if not hasattr(val, 'unit'):
raise ValueError('To set data for column "{0}", you must '
'provide a Quantity-like object (with units).'
.format(name))
self.data[name] = val
self.units[name] = val.unit
elif name in self.data.columns:
self.data[name] = val
else:
super().__setattr__(name, val)
def __dir__(self):
return super().__dir__() + [str(k) for k in self.data.columns]
def __getitem__(self, slc):
if isinstance(slc, int):
slc = slice(slc, slc+1)
elif isinstance(slc, str):
return self.__getattr__(slc)
return self.__class__(self.data[slc])
def __setitem__(self, name, val):
if hasattr(val, 'unit'):
self.data[name] = val.value
self.units[name] = val.unit
else:
self.data[name] = val
def __len__(self):
return len(self.data)
def __str__(self):
names = ['ra', 'dec', 'parallax', 'pmra', 'pmdec']
if self._has_rv:
names.append('radial_velocity')
return str(self.data[names])
def __repr__(self):
return "<GaiaData: {0:d} rows>".format(len(self))
##########################################################################
# Computed and convenience quantities
#
@property
def pm(self):
"""2D proper motion. Has shape `(nrows, 2)`"""
_u = self.pmra.unit
return np.vstack((self.pmra.value, self.pmdec.to(_u).value)).T * _u
@u.quantity_input(min_parallax=u.mas, equivalencies=u.parallax())
def get_distance(self, min_parallax=None, parallax_fill_value=np.nan,
allow_negative=False):
"""Compute distance from parallax (by inverting the parallax) using
`~astropy.coordinates.Distance`.
Parameters
----------
min_parallax : `~astropy.units.Quantity` (optional)
            If `min_parallax` is specified, the parallaxes are clipped to this
            value (and it is also used to replace NaNs).
allow_negative : bool (optional)
This is passed through to `~astropy.coordinates.Distance`.
Returns
-------
dist : `~astropy.coordinates.Distance`
A ``Distance`` object with the data.
"""
plx = self.parallax.copy()
if np.isnan(parallax_fill_value):
parallax_fill_value = parallax_fill_value * u.mas
if min_parallax is not None:
clipped = plx < min_parallax
clipped |= ~np.isfinite(plx)
plx[clipped] = parallax_fill_value
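        # inverting the parallax: e.g. a parallax of 10 mas corresponds to a
        # distance of 100 pc (d [pc] = 1000 / parallax [mas])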
return coord.Distance(parallax=plx, allow_negative=allow_negative)
@property
def distance(self):
"""Assumes 1/parallax. Has shape `(nrows,)`.
This attribute will raise an error when there are negative or zero
parallax values. For more flexible retrieval of distance values and
auto-filling bad values, use the .get_distance() method."""
return self.get_distance()
def get_radial_velocity(self, fill_value=None):
"""Return radial velocity but with invalid values filled with the
specified fill value.
Parameters
----------
fill_value : `~astropy.units.Quantity` (optional)
If not ``None``, fill any invalid values with the specified value.
"""
rv = self.radial_velocity.copy()
        if fill_value is not None:
            rv[~np.isfinite(rv)] = fill_value
return rv
@property
def distmod(self):
"""Distance modulus, m-M = 5 * log10(dist / (10 pc))"""
return self.distance.distmod
@property
def vtan(self):
"""
Tangential velocity computed using the proper motion and inverse
parallax as the distance. Has shape `(nrows, 2)`
"""
d = self.distance
vra = (self.pmra * d).to(u.km/u.s, u.dimensionless_angles()).value
vdec = (self.pmdec * d).to(u.km/u.s, u.dimensionless_angles()).value
return np.vstack((vra, vdec)).T * u.km/u.s
def get_cov(self, RAM_threshold=1*u.gigabyte, units=None):
"""
The Gaia data tables contain correlation coefficients and standard
deviations for (ra, dec, parallax, pm_ra, pm_dec), but for most analyses
we need covariance matrices. This converts the data provided by Gaia
into covariance matrices.
If a radial velocity exists, this also contains the radial velocity
variance. If radial velocity doesn't exist, that diagonal element is set
to inf.
The default units of the covariance matrix are [degree, degree, mas,
mas/yr, mas/yr, km/s], but this can be modified by passing in a
dictionary with new units. For example, to change just the default ra,
dec units for the covariance matrix, you can pass in::
units=dict(ra=u.radian, dec=u.radian)
Parameters
----------
RAM_threshold : `astropy.units.Quantity`
Raise an error if the expected covariance array is larger than the
specified threshold. Set to ``None`` to disable this checking.
"""
if 'cov' in self._cache:
if units == self._cache['cov_units']:
return self._cache['cov']
if RAM_threshold is not None:
# Raise error if the user is going to blow up their RAM
            estimated_RAM = 6 * 6 * len(self) * 8 * u.byte  # 8 bytes per float64 element
if estimated_RAM > RAM_threshold:
raise RuntimeError('Estimated RAM usage for generating '
'covariance matrices is larger than the '
'specified threshold. Use the argument: '
'`RAM_threshold=None` to disable this check')
if units is None:
units = dict()
units.setdefault('ra', u.deg)
units.setdefault('dec', u.deg)
units.setdefault('parallax', u.mas)
units.setdefault('pmra', u.mas/u.yr)
units.setdefault('pmdec', u.mas/u.yr)
units.setdefault('radial_velocity', u.km/u.s)
# The full returned matrix
C = np.zeros((len(self), 6, 6))
# We handle radial_velocity separately below - doesn't have correlation
# coefficients with the astrometric parameters
names = ['ra', 'dec', 'parallax', 'pmra', 'pmdec']
# pre-load the diagonal
for i, name in enumerate(names):
if name + "_error" in self.data.colnames:
err = getattr(self, name + "_error")
C[:, i, i] = err.to(units[name]).value ** 2
else:
C[:, i, i] = np.nan
if self._has_rv:
name = 'radial_velocity'
err = getattr(self, name + "_error")
C[:, 5, 5] = err.to(units[name]).value ** 2
else:
C[:, 5, 5] = np.inf
C[:, 5, 5][np.isnan(C[:, 5, 5])] = np.inf # missing values
for i, name1 in enumerate(names):
for j, name2 in enumerate(names):
if j <= i:
continue
if "{0}_{1}_corr".format(name1, name2) in self.data.colnames:
corr = getattr(self, "{0}_{1}_corr".format(name1, name2))
else:
corr = np.nan
# We don't need to worry about units here because the diagonal
# values have already been converted
C[:, i, j] = corr * np.sqrt(C[:, i, i] * C[:, j, j])
C[:, j, i] = C[:, i, j]
self._cache['cov'] = C
self._cache['cov_units'] = units
return self._cache['cov']
def get_ebv(self, dustmaps_cls=None):
"""Compute the E(B-V) reddening at this location
This requires the `dustmaps <http://dustmaps.readthedocs.io>`_ package
to run!
Parameters
----------
dustmaps_cls : ``dustmaps`` query class
By default, ``SFDQuery``.
"""
if dustmaps_cls is None:
from dustmaps.sfd import SFDQuery
dustmaps_cls = SFDQuery
c = self.get_skycoord(distance=False)
return dustmaps_cls().query(c)
def get_ext(self, ebv=None, dustmaps_cls=None):
"""Compute the E(B-V) reddening at this location
This requires the `dustmaps <http://dustmaps.readthedocs.io>`_ package
to run!
Parameters
----------
dustmaps_cls : ``dustmaps`` query class
By default, ``SFDQuery``.
Returns
-------
A_G
A_BP
A_RP
"""
if 'ebv' not in self._cache:
if ebv is None:
self._cache['ebv'] = self.get_ebv(dustmaps_cls=dustmaps_cls)
else:
self._cache['ebv'] = ebv
if 'A_G' not in self._cache:
A_G, A_B, A_R = get_ext(self.phot_g_mean_mag.value,
self.phot_bp_mean_mag.value,
self.phot_rp_mean_mag.value,
self._cache['ebv'])
self._cache['A_G'] = A_G * u.mag
self._cache['A_B'] = A_B * u.mag
self._cache['A_R'] = A_R * u.mag
return (self._cache['A_G'],
self._cache['A_B'],
self._cache['A_R'])
def get_G0(self, *args, **kwargs):
"""Return the extinction-corrected G-band magnitude. Any arguments are
passed to ``get_ext()``.
"""
A, _, _ = self.get_ext(*args, **kwargs)
return self.phot_g_mean_mag - A
def get_BP0(self, *args, **kwargs):
"""Return the extinction-corrected G_BP magnitude. Any arguments are
passed to ``get_ext()``."""
_, A, _ = self.get_ext(*args, **kwargs)
return self.phot_bp_mean_mag - A
def get_RP0(self, *args, **kwargs):
"""Return the extinction-corrected G_RP magnitude. Any arguments are
passed to ``get_ext()``."""
_, _, A = self.get_ext(*args, **kwargs)
return self.phot_rp_mean_mag - A
def get_uwe(self):
"""Compute and return the unit-weight error."""
return np.sqrt(self.astrometric_chi2_al /
(self.astrometric_n_good_obs_al-5))
def get_ruwe(self):
"""Compute and return the renormalized unit-weight error."""
interp = U0Interpolator()
bprp = self.phot_bp_mean_mag.value - self.phot_rp_mean_mag.value
u0 = interp.get_u0(self.phot_g_mean_mag.value, bprp)
return self.get_uwe() / u0
##########################################################################
# Astropy connections
#
@property
def skycoord(self):
"""
Return an `~astropy.coordinates.SkyCoord` object to represent
all coordinates. Note: this requires Astropy v3.0 or higher!
Use the ``get_skycoord()`` method for more flexible access.
"""
return self.get_skycoord()
def get_skycoord(self, distance=None, radial_velocity=None,
ref_epoch=REF_EPOCH[LATEST_RELEASE]):
"""
Return an `~astropy.coordinates.SkyCoord` object to represent
all coordinates. Note: this requires Astropy v3.0 or higher!
`ref_epoch` is used to set the `obstime` attribute on the coordinate
objects. This is often included in the data release tables, but
`ref_epoch` here is used if it's not.
Parameters
----------
        distance : `~astropy.coordinates.Distance`, `~astropy.units.Quantity`, ``False``, str (optional)
If ``None``, this inverts the parallax to get the distance from the
Gaia data. If ``False``, distance information is ignored. If an
astropy ``Quantity`` or ``Distance`` object, it sets the distance
values of the output ``SkyCoord`` to whatever is passed in.
radial_velocity : `~astropy.units.Quantity`, str (optional)
If ``None``, this uses radial velocity data from the input Gaia
table. If an astropy ``Quantity`` object, it sets the radial
velocity values of the output ``SkyCoord`` to whatever is passed in.
        ref_epoch : `~astropy.time.Time`, float (optional)
            The reference epoch of the data. If the input Gaia data table
            contains a ``ref_epoch`` column, that column is used instead;
            otherwise this value is used, defaulting to the epoch of the most
            recent data release, so **beware**!
Returns
-------
c : `~astropy.coordinates.SkyCoord`
The coordinate object constructed from the input Gaia data.
"""
_coord_opts = (distance, radial_velocity)
if 'coord' in self._cache:
try:
_check = self._cache['coord_opts'] == _coord_opts
except ValueError: # array passed for distance or radial_velocity
_check = False
if _check:
return self._cache['coord']
kw = dict()
if self._has_rv:
kw['radial_velocity'] = self.radial_velocity
# Reference epoch
if 'ref_epoch' in self.data.colnames:
obstime = Time(self.ref_epoch.value, format='jyear')
else:
obstime = Time(ref_epoch, format='jyear')
kw['obstime'] = obstime
if radial_velocity is not False and radial_velocity is not None:
if isinstance(radial_velocity, str):
kw['radial_velocity'] = self[radial_velocity]
else:
kw['radial_velocity'] = radial_velocity
elif radial_velocity is False and 'radial_velocity' in kw:
kw.pop('radial_velocity')
if distance is None:
kw['distance'] = self.distance
elif distance is not False and distance is not None:
if isinstance(distance, str):
kw['distance'] = self[distance]
else:
kw['distance'] = distance
self._cache['coord'] = coord.SkyCoord(ra=self.ra, dec=self.dec,
pm_ra_cosdec=self.pmra,
pm_dec=self.pmdec, **kw)
self._cache['coord_opts'] = _coord_opts
return self._cache['coord']
def get_error_samples(self, size=1, rnd=None):
"""Generate a sampling from the Gaia error distribution for each source.
This function constructs the astrometric covariance matrix for each
source and generates a specified number of random samples from the error
distribution for each source. This does not handle spatially-dependent
correlations. Samplings generated with this method can be used to, e.g.,
propagate the Gaia errors through coordinate transformations or
analyses.
Parameters
----------
size : int
            The number of random samples per source to generate.
rnd : ``numpy.random.RandomState``, optional
The random state.
Returns
-------
g_samples : `pyia.GaiaData`
The same data table, but now each Gaia coordinate entry contains
samples from the error distribution.
"""
if rnd is None:
rnd = np.random.RandomState()
C = self.get_cov().copy()
rv_mask = ~np.isfinite(C[:, 5, 5])
C[rv_mask, 5, 5] = 0.
arrs = []
for k, unit in self._cache['cov_units'].items():
arrs.append(getattr(self, k).to_value(unit))
y = np.stack(arrs).T
samples = np.array([rnd.multivariate_normal(y[i], C[i], size=size)
for i in range(len(y))])
d = self.data.copy()
for i, (k, unit) in enumerate(self._cache['cov_units'].items()):
d[k] = samples[..., i] * unit
return self.__class__(d)
def filter(self, **kwargs):
"""
Filter the data based on columns and data ranges.
Parameters
----------
**kwargs
Keys should be column names, values should be tuples representing
            ranges to select the column values within. For example, to select
parallaxes between 0.5 and 5, pass ``parallax=(0.5, 5)*u.mas``.
Pass `None` to skip a filter, for example ``parallax=(None,
5*u.mas)`` would select all parallax values < 5 mas.
Returns
-------
filtered_g : `pyia.GaiaData`
The same data table, but filtered.
"""
mask = np.ones(len(self), dtype=bool)
for k, (x1, x2) in kwargs.items():
if x1 is None and x2 is None:
raise ValueError(f"Both range values are None for key {k}!")
if x1 is None:
mask &= self[k] < x2
elif x2 is None:
mask &= self[k] >= x1
else:
mask &= (self[k] >= x1) & (self[k] < x2)
return self[mask]
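# A minimal usage sketch (all column values below are made up for illustration):
if __name__ == '__main__':
    _g = GaiaData({'ra': [180.] * u.deg, 'dec': [45.] * u.deg,
                   'parallax': [10.] * u.mas,
                   'pmra': [1.] * u.mas / u.yr, 'pmdec': [-2.] * u.mas / u.yr})
    print(_g.distance)  # ~100 pc, from inverting the 10 mas parallax
    print(_g.get_skycoord(distance=False))  # sky positions + proper motions only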
| 34.854667
| 103
| 0.564898
|
import pathlib
import astropy.coordinates as coord
from astropy.table import Table, Column
from astropy.time import Time
import astropy.units as u
import numpy as np
from .extinction import get_ext
from .ruwetools import U0Interpolator
__all__ = ['GaiaData']
gaia_unit_map = {
'ra': u.degree,
'dec': u.degree,
'parallax': u.milliarcsecond,
'pmra': u.milliarcsecond / u.year,
'pmdec': u.milliarcsecond / u.year,
'radial_velocity': u.km / u.s,
'ra_error': u.milliarcsecond,
'dec_error': u.milliarcsecond,
'parallax_error': u.milliarcsecond,
'pmra_error': u.milliarcsecond / u.year,
'pmdec_error': u.milliarcsecond / u.year,
'radial_velocity_error': u.km / u.s,
'astrometric_excess_noise': u.mas,
'astrometric_weight_al': 1/u.mas**2,
'astrometric_pseudo_colour': 1/u.micrometer,
'astrometric_pseudo_colour_error': 1/u.micrometer,
'astrometric_sigma5d_max': u.mas,
'phot_g_mean_flux': u.photon/u.s,
'phot_g_mean_flux_error': u.photon/u.s,
'phot_g_mean_mag': u.mag,
'phot_bp_mean_flux': u.photon/u.s,
'phot_bp_mean_flux_error': u.photon/u.s,
'phot_bp_mean_mag': u.mag,
'phot_rp_mean_flux': u.photon/u.s,
'phot_rp_mean_flux_error': u.photon/u.s,
'phot_rp_mean_mag': u.mag,
'bp_rp': u.mag,
'bp_g': u.mag,
'g_rp': u.mag,
'rv_template_teff': u.K,
'l': u.degree,
'b': u.degree,
'ecl_lon': u.degree,
'ecl_lat': u.degree,
'teff_val': u.K,
'teff_percentile_lower': u.K,
'teff_percentile_upper': u.K,
'a_g_val': u.mag,
'a_g_percentile_lower': u.mag,
'a_g_percentile_upper': u.mag,
'e_bp_min_rp_val': u.mag,
'e_bp_min_rp_percentile_lower': u.mag,
'e_bp_min_rp_percentile_upper': u.mag,
'radius_val': u.Rsun,
'radius_percentile_lower': u.Rsun,
'radius_percentile_upper': u.Rsun,
'lum_val': u.Lsun,
'lum_percentile_lower': u.Lsun,
'lum_percentile_upper': u.Lsun,
'ref_epoch': u.year
}
REF_EPOCH = {
'DR2': Time(2015.5, format='jyear'),
'EDR3': Time(2016.0, format='jyear')
}
LATEST_RELEASE = 'EDR3'
class GaiaData:
def __init__(self, data, **kwargs):
if not isinstance(data, Table):
if isinstance(data, (str, pathlib.Path)):
data = Table.read(data, **kwargs)
else:
                # the dict-like object might have Quantity's, so we want to preserve any units
data = Table(data, **kwargs)
        # HACK: make sure table isn't masked, until astropy supports masked quantities
if data.masked:
cols = []
for c in data.colnames:
col = data[c]
col.mask = None
cols.append(Column(col))
data = Table(cols, copy=False)
self.units = gaia_unit_map.copy()
self.data = data
self._invalid_units = dict()
for c in data.colnames:
if data[c].unit is not None:
try:
self.units[c] = u.Unit(str(data[c].unit))
except ValueError:
self._invalid_units[c] = data[c].unit
self._has_rv = ('radial_velocity' in self.data.colnames or
'dr2_radial_velocity' in self.data.colnames)
self._cache = dict()
@classmethod
def from_query(cls, query_str, login_info=None, verbose=False):
try:
from astroquery.gaia import Gaia
except ImportError:
raise ImportError('Failed to import astroquery. To use the '
'from_query() classmethod, you must first'
' install astroquery, e.g., with pip: '
'\n\tpip install astroquery')
if login_info is not None:
Gaia.login(**login_info)
job = Gaia.launch_job_async(query_str, verbose=verbose)
tbl = job.get_results()
return cls(tbl)
@classmethod
def from_source_id(cls, source_id, source_id_dr=None, data_dr=None,
**kwargs):
join_tables = {
'dr1': {'dr2': "gaiadr2.dr1_neighbourhood"},
'dr2': {'edr3': "gaiaedr3.dr2_neighbourhood"},
}
source_id_prefixes = {
'dr1': 'dr1',
'dr2': 'dr2',
'edr3': 'dr3'
}
if source_id_dr is None:
source_id_dr = LATEST_RELEASE.lower()
if data_dr is None:
data_dr = LATEST_RELEASE.lower()
if source_id_dr == data_dr:
query_str = f"""
SELECT * FROM gaia{data_dr}.gaia_source AS gaia
WHERE gaia.source_id = {source_id}
"""
return cls.from_query(query_str, **kwargs)
dr1, dr2 = sorted([source_id_dr, data_dr])
try:
join_table = join_tables[dr1][dr2]
source_id_pref = source_id_prefixes[source_id_dr]
data_pref = source_id_prefixes[data_dr]
except KeyError:
raise KeyError(f"Failed to find join table for {source_id_dr} "
f"to {data_dr}")
query_str = f"""
SELECT * FROM gaia{data_dr}.gaia_source AS gaia
JOIN {join_table} AS old_gaia
ON gaia.source_id = old_gaia.{data_pref}_source_id
WHERE old_gaia.{source_id_pref}_source_id = {source_id}
"""
return cls.from_query(query_str, **kwargs)
                if "{0}_{1}_corr".format(name1, name2) in self.data.colnames:
corr = getattr(self, "{0}_{1}_corr".format(name1, name2))
else:
corr = np.nan
                # We don't need to worry about units here because the diagonal values have already been converted
C[:, i, j] = corr * np.sqrt(C[:, i, i] * C[:, j, j])
C[:, j, i] = C[:, i, j]
self._cache['cov'] = C
self._cache['cov_units'] = units
return self._cache['cov']
def get_ebv(self, dustmaps_cls=None):
if dustmaps_cls is None:
from dustmaps.sfd import SFDQuery
dustmaps_cls = SFDQuery
c = self.get_skycoord(distance=False)
return dustmaps_cls().query(c)
def get_ext(self, ebv=None, dustmaps_cls=None):
if 'ebv' not in self._cache:
if ebv is None:
self._cache['ebv'] = self.get_ebv(dustmaps_cls=dustmaps_cls)
else:
self._cache['ebv'] = ebv
if 'A_G' not in self._cache:
A_G, A_B, A_R = get_ext(self.phot_g_mean_mag.value,
self.phot_bp_mean_mag.value,
self.phot_rp_mean_mag.value,
self._cache['ebv'])
self._cache['A_G'] = A_G * u.mag
self._cache['A_B'] = A_B * u.mag
self._cache['A_R'] = A_R * u.mag
return (self._cache['A_G'],
self._cache['A_B'],
self._cache['A_R'])
def get_G0(self, *args, **kwargs):
A, _, _ = self.get_ext(*args, **kwargs)
return self.phot_g_mean_mag - A
def get_BP0(self, *args, **kwargs):
_, A, _ = self.get_ext(*args, **kwargs)
return self.phot_bp_mean_mag - A
def get_RP0(self, *args, **kwargs):
_, _, A = self.get_ext(*args, **kwargs)
return self.phot_rp_mean_mag - A
def get_uwe(self):
return np.sqrt(self.astrometric_chi2_al /
(self.astrometric_n_good_obs_al-5))
def get_ruwe(self):
interp = U0Interpolator()
bprp = self.phot_bp_mean_mag.value - self.phot_rp_mean_mag.value
u0 = interp.get_u0(self.phot_g_mean_mag.value, bprp)
return self.get_uwe() / u0
                raise ValueError(f"Both range values are None for key {k}!")
if x1 is None:
mask &= self[k] < x2
elif x2 is None:
mask &= self[k] >= x1
else:
mask &= (self[k] >= x1) & (self[k] < x2)
return self[mask]
| true
| true
|
1c48a63aeb2b3796ce42ca504a0584918ac96541
| 8,704
|
py
|
Python
|
VisualGimp/Util.py
|
duangsuse/VisualGimp
|
79776fded12595ab3c56855b5ae56e2242780b2e
|
[
"MIT"
] | 2
|
2019-05-07T12:09:11.000Z
|
2019-05-08T09:31:44.000Z
|
VisualGimp/Util.py
|
duangsuse-valid-projects/VisualGimp
|
79776fded12595ab3c56855b5ae56e2242780b2e
|
[
"MIT"
] | null | null | null |
VisualGimp/Util.py
|
duangsuse-valid-projects/VisualGimp
|
79776fded12595ab3c56855b5ae56e2242780b2e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python2
# -*- encoding: utf-8 -*-
# Simple coding helper
# author: duangsuse
# date: Thu May 02 2019 CST
def infseq(x):
''' Infinity generator of x '''
while True: yield x
def nseq(x, n):
''' N length generator of x '''
while n > 0:
yield x
n -= 1
def may_slice(ary, ind):
  ''' Return a clipped ary[ind] when the slice ind overruns the list bounds (None when no clipping applies) '''
if type(ind) is not slice: return
if type(ary) is not list: return
# start stop /step
start = ind.start
stop = ind.stop
len_list = len(ary)
if start < 0:
# negative: list[-1] = list[len(list) - 1]
if start <= -len_list:
return ary[0:stop]
else:
startovf = start >= len_list
endovf = stop >= len_list
if startovf or endovf:
if startovf and endovf:
return []
elif startovf:
        return ary[start-len_list:stop]
elif endovf:
return ary[start:-1]
#from multiprocessing.pool import Pool
def ap(f, *args, **kwargs):
  ''' Apply an applicative object with given arguments '''
if len(args) == 0: return f(**kwargs)
fn = f; argc = len(args)
argp = 0
while callable(fn) and argp < argc:
if fn.func_code.co_argcount == 1:
fn = fn.__call__(args[argp], **kwargs)
else:
fn = fn.__call__(*args, **kwargs)
argp += 1
return fn
def flip(fn): return lambda x: lambda y: fn(y, x)
def flipL(fn): return lambda x: lambda y: fn(y)(x)
def curry2(fn):
''' curry for binary functions '''
return lambda x: lambda y: fn(x, y)
def curry2L(fn):
''' curry for binary lambdas '''
return lambda x: lambda y: fn(x)(y)
def compose(f, g): return lambda x: f(g(x))
def concat_stream(gen):
strn = str()
try:
while True: strn += gen.next()
except StopIteration:
pass
return strn
def stream_join(xs, ys):
'''
Given xs, ys, yields { xs.next, ys.next }...
'''
xss = iter(xs)
yss = iter(ys)
while True:
yield xss.next() # StopIteration?
yield yss.next() # StopIteration?
def _globalq(name):
''' Get a (maybe MODULE-wise) global by name or None '''
_globals = globals()
if name in _globals: return _globals[name]
else: return None
def uh(obj, do = lambda x: x):
''' if obj is not None, run uh, else return None '''
if obj is not None: return do(obj)
else: return None
def coerce(x, t): return x.__coerce__(t)
def unreachable(): raise RuntimeError('Corrupted state : Impossible code path reached !!!')
def foldl(op):
''' Fold left definition like the one with same name in Haskell '''
def foldl_init(val):
def foldl_ls(ls):
# foldl f v (x:xs) = foldl f (f v x) xs
#if ls == []: return val
def foldl_ac(head = 0):
if head >= len(ls): return val
x = ls[head]#; xs = [] if head+1 >= len(ls) else ls[head+1:len(ls)]
return foldl_init(op(val, x))(ls)(head+1)
return foldl_ac
return foldl_ls
return foldl_init
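# e.g. foldl(lambda acc, x: acc + x)(0)([1, 2, 3])() == 6
# (note the trailing call: foldl(op)(init)(xs) returns the accumulator thunk)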
class Maybe:
''' Nullable value '''
def __init__(self, obj):
''' Make a (maybe None) value wrapper '''
self.obj = obj
self.is_null = obj is None
def is_none(self):
''' Return True if wrapped object is None '''
return self.is_null
def is_any(self):
''' Return True if wrapped object is not None '''
return not self.null
null = property(is_none)
any = property(is_any)
def must_get(self):
''' Get value or raise AssertionError (if none) '''
if self.null:
raise AssertionError('Object is None')
else: return self.obj
def get(self):
''' Get value or return Nothing Maybe '''
if self.null: return Nothing
else: return self.obj
def get_or(self, fn_x_obj):
''' Get value or else... (callable or alternative value) '''
if self.null:
if callable(fn_x_obj): return fn_x_obj()
else: return fn_x_obj
else: return self.obj
def flat_map(self, fn):
''' If obj is null, return obj, else fn(obj) '''
result = fn(self.obj) if not self.null else None
return result
def map(self, fn):
''' If obj is null, return Nothing, else compose(Maybe, fn)(obj) '''
return Maybe(self.flat_map(fn))
def __str__(self):
tt = type(self.obj) if self.any else object
typenam = '%s?' % tt.__name__
return '{}({})'.format(typenam, self.obj)
def __or__(self, other):
''' this any or other any '''
if self.any: return self
else: return other
def __coerce__(self, newtype):
if self.any:
return self.get().__coerce__(newtype)
else:
raise TypeError('Cannot convert Nothing nullable to type %s' %newtype.__name__)
Nothing = Maybe(None) # there is no such thing as a type by itself; only values have types...
Just = Maybe
def maybe(default, fn, may):
'''
takes a default value, a function, and a Maybe value. If the Maybe value is Nothing, the function returns the default value.
Otherwise, it applies the function to the value inside the Just and returns the result.
See: Maybe.flat_map
Equivalent: flat_map with default value
'''
if may is Nothing: return default
  else: return fn(may.get())
identity = lambda x:x
def identitystar(*args): return args
typeof = type
def sizeof(o): return o.__sizeof__()
#def mk_func(): return identity.__class__(identity.func_code, {})
def doall(*fns):
''' return a lambda that launches all the fns '''
def do(fns):
#o = mk_func()
filtered = [f for f in fns if callable(f)]
def _ocall():
for f in filtered: f.__call__()
#o.func_code = _ocall.__code__
return _ocall
return do(fns)
def fst(tup): return tup[0]
def snd(tup): return tup[1]
def lastindex(xs): return len(xs) -1
def head(xs, off=0): return xs[off]
def tail(xs, off=0): return xs[off+1:lastindex(xs)]
def first_just(ls):
''' Find first Just in a maybe list '''
if type(ls) is list:
return foldl(Maybe.__or__)(Nothing)(ls)()
else:
res = filter(Maybe.is_any, ls)
    if len(res) == 0: return Nothing
else: return iter(res).next()
class Either:
''' Either value a or b '''
def __init__(self, a_v = None, b_v = None):
self.a = a_v; self.b = b_v
def is_left(self):
''' Is Left exists? '''
return self.a is not None
def is_right(self):
''' Is Right exists? '''
return self.b is not None
left = property(is_left)
right = property(is_right)
def get_left(self):
''' Get Left or None '''
return self.a
def get_right(self):
''' Get Right or None '''
return self.b
l = property(get_left)
r = property(get_right)
def get_left_or(self, fn_x_v):
''' Get Left or run procedure / return default value '''
if self.left:
return self.l
else:
if callable(fn_x_v): return fn_x_v()
else: return fn_x_v
def get_right_or(self, fn_x_v):
''' Get Right or run procedure / return default value '''
if self.right:
return self.r
else:
if callable(fn_x_v): return fn_x_v()
else: return fn_x_v
def _fail_assert(self, name):
def __(): raise AssertionError('Failed to get {} for {}'.format(name, self))
return __
def must_get_left(self):
''' Get Left or throw assertion error '''
return self.get_left_or(self._fail_assert('left'))
def must_get_right(self):
''' Get Right or throw assertion error '''
return self.get_right_or(self._fail_assert('right'))
def either(self, fl, fr):
''' Map left using fl, map right using fr '''
if self.right: return fr(self.r)
if self.left: return fl(self.l)
def swap(self):
''' Swap left and right '''
return self.either(Right, Left)
def map_l(self, fl):
''' Map left side of this Either to Either '''
if self.left: return Either(fl(self.l), self.r)
else: return self
def map_r(self, fr):
''' Map right side of this Either to Either '''
if self.right: return Either(self.l, b_v=fr(self.r))
else: return self
def flat_map_l(self, fn):
''' Map left side of this Either '''
if self.left: return fn(self.l)
else: return self
def flat_map_r(self, fn):
''' Map right side of this Either '''
if self.right: return fn(self.r)
else: return self
def __str__(self):
return 'Either<{}, {}>({}|{})'.format(typeof(self.l).__name__, typeof(self.r).__name__, self.l, self.r)
def map(self, fl): return self.map_l(fl)
def flat_map(self, fl): return self.flat_map_l(fl)
def Left(a): return Either(a, None)
def Right(b): return Either(None, b)
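# Illustrative usage (editor's sketch):
#   Left('boom').either(len, identity)                ->  4
#   Right(7).map_r(lambda x: x * 2).must_get_right()  ->  14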
def _trimMarks(m, bracel = '<', bracer = '>'):
    ''' strip SGML-style markup, keeping only the text outside the brackets '''
output = str()
waitClose = False
if len(m) == 0: return output
for i in range(0, len(m)):
char = m[i]
if char == bracel: waitClose = True
        if waitClose:
            if char == bracer: waitClose = False
        else: output += char
return output
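# Illustrative usage (editor's sketch):
#   _trimMarks('<b>bold</b> text')  ->  'bold text'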
def _hexdigit(n): return hex(n)[2:]
| 26.864198
| 126
| 0.633502
|
def infseq(x):
while True: yield x
def nseq(x, n):
while n > 0:
yield x
n -= 1
def may_slice(ary, ind):
    if type(ind) is not slice: return
    if type(ary) is not list: return
    start = ind.start
    stop = ind.stop
    len_list = len(ary)
    if start < 0:
        if start <= -len_list:
            return ary[0:stop]
    else:
        startovf = start >= len_list
        endovf = stop >= len_list
        if startovf or endovf:
            if startovf:
                return []
            return ary[start:len_list]
def ap(f, *args, **kwargs):
if len(args) == 0: return f(**kwargs)
fn = f; argc = len(args)
argp = 0
while callable(fn) and argp < argc:
if fn.func_code.co_argcount == 1:
fn = fn.__call__(args[argp], **kwargs)
else:
fn = fn.__call__(*args, **kwargs)
argp += 1
return fn
def flip(fn): return lambda x: lambda y: fn(y, x)
def flipL(fn): return lambda x: lambda y: fn(y)(x)
def curry2(fn):
return lambda x: lambda y: fn(x, y)
def curry2L(fn):
return lambda x: lambda y: fn(x)(y)
def compose(f, g): return lambda x: f(g(x))
def concat_stream(gen):
strn = str()
try:
while True: strn += gen.next()
except StopIteration:
pass
return strn
def stream_join(xs, ys):
xss = iter(xs)
yss = iter(ys)
while True:
yield xss.next()
yield yss.next()
def _globalq(name):
_globals = globals()
if name in _globals: return _globals[name]
else: return None
def uh(obj, do = lambda x: x):
if obj is not None: return do(obj)
else: return None
def coerce(x, t): return x.__coerce__(t)
def unreachable(): raise RuntimeError('Corrupted state : Impossible code path reached !!!')
def foldl(op):
def foldl_init(val):
def foldl_ls(ls):
def foldl_ac(head = 0):
if head >= len(ls): return val
x = ls[head]
return foldl_init(op(val, x))(ls)(head+1)
return foldl_ac
return foldl_ls
return foldl_init
class Maybe:
def __init__(self, obj):
self.obj = obj
self.is_null = obj is None
def is_none(self):
return self.is_null
def is_any(self):
return not self.null
null = property(is_none)
any = property(is_any)
def must_get(self):
if self.null:
raise AssertionError('Object is None')
else: return self.obj
def get(self):
if self.null: return Nothing
else: return self.obj
def get_or(self, fn_x_obj):
if self.null:
if callable(fn_x_obj): return fn_x_obj()
else: return fn_x_obj
else: return self.obj
def flat_map(self, fn):
result = fn(self.obj) if not self.null else None
return result
def map(self, fn):
return Maybe(self.flat_map(fn))
def __str__(self):
tt = type(self.obj) if self.any else object
typenam = '%s?' % tt.__name__
return '{}({})'.format(typenam, self.obj)
def __or__(self, other):
if self.any: return self
else: return other
def __coerce__(self, newtype):
if self.any:
return self.get().__coerce__(newtype)
else:
raise TypeError('Cannot convert Nothing nullable to type %s' %newtype.__name__)
Nothing = Maybe(None)
Just = Maybe
def maybe(default, fn, may):
if may is Nothing: return default
    else: return fn(may.get())
identity = lambda x:x
def identitystar(*args): return args
typeof = type
def sizeof(o): return o.__sizeof__()
def doall(*fns):
def do(fns):
filtered = [f for f in fns if callable(f)]
def _ocall():
for f in filtered: f.__call__()
return _ocall
return do(fns)
def fst(tup): return tup[0]
def snd(tup): return tup[1]
def lastindex(xs): return len(xs) -1
def head(xs, off=0): return xs[off]
def tail(xs, off=0): return xs[off+1:]
def first_just(ls):
if type(ls) is list:
return foldl(Maybe.__or__)(Nothing)(ls)()
else:
res = filter(Maybe.is_any, ls)
        if len(res) == 0: return Nothing
else: return iter(res).next()
class Either:
def __init__(self, a_v = None, b_v = None):
self.a = a_v; self.b = b_v
def is_left(self):
return self.a is not None
def is_right(self):
return self.b is not None
left = property(is_left)
right = property(is_right)
def get_left(self):
return self.a
def get_right(self):
return self.b
l = property(get_left)
r = property(get_right)
def get_left_or(self, fn_x_v):
if self.left:
return self.l
else:
if callable(fn_x_v): return fn_x_v()
else: return fn_x_v
def get_right_or(self, fn_x_v):
if self.right:
return self.r
else:
if callable(fn_x_v): return fn_x_v()
else: return fn_x_v
def _fail_assert(self, name):
def __(): raise AssertionError('Failed to get {} for {}'.format(name, self))
return __
def must_get_left(self):
return self.get_left_or(self._fail_assert('left'))
def must_get_right(self):
return self.get_right_or(self._fail_assert('right'))
def either(self, fl, fr):
if self.right: return fr(self.r)
if self.left: return fl(self.l)
def swap(self):
return self.either(Right, Left)
def map_l(self, fl):
if self.left: return Either(fl(self.l), self.r)
else: return self
def map_r(self, fr):
if self.right: return Either(self.l, b_v=fr(self.r))
else: return self
def flat_map_l(self, fn):
if self.left: return fn(self.l)
else: return self
def flat_map_r(self, fn):
if self.right: return fn(self.r)
else: return self
def __str__(self):
return 'Either<{}, {}>({}|{})'.format(typeof(self.l).__name__, typeof(self.r).__name__, self.l, self.r)
def map(self, fl): return self.map_l(fl)
def flat_map(self, fl): return self.flat_map_l(fl)
def Left(a): return Either(a, None)
def Right(b): return Either(None, b)
def _trimMarks(m, bracel = '<', bracer = '>'):
output = str()
waitClose = False
if len(m) == 0: return output
for i in range(0, len(m)):
char = m[i]
if char == bracel: waitClose = True
        if waitClose:
            if char == bracer: waitClose = False
        else: output += char
return output
def _hexdigit(n): return hex(n)[2:]
| true
| true
|
1c48a657eee6323282e43dcd654c59dc1603a45b
| 5,624
|
py
|
Python
|
api/destination/base.py
|
ziegeer/autocert
|
285df181508573918e280948e51cdd7c65743281
|
[
"MIT"
] | null | null | null |
api/destination/base.py
|
ziegeer/autocert
|
285df181508573918e280948e51cdd7c65743281
|
[
"MIT"
] | null | null | null |
api/destination/base.py
|
ziegeer/autocert
|
285df181508573918e280948e51cdd7c65743281
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
destination.base
'''
import itertools
from pprint import pformat
from attrdict import AttrDict
from utils.format import fmt, pfmt
from utils.exceptions import AutocertError
from app import app
class DestinationConnectivityError(AutocertError):
def __init__(self, dest_ex_pairs):
msg = ''
for dest, ex in dest_ex_pairs:
error = repr(ex)
msg += fmt('{error} when attempting destination {dest}')
super(DestinationConnectivityError, self).__init__(msg)
class DestinationPathError(AutocertError):
def __init__(self, path_or_paths):
message = fmt('error with DestinationBase param path(s) = {path_or_paths}')
super(DestinationPathError, self).__init__(message)
class DestinationDestError(AutocertError):
def __init__(self, dest_or_dests):
message = fmt('error with DestinationBase param dest(s) = {dest_or_dests}')
super(DestinationDestError, self).__init__(message)
class JsonsDontMatchPathsError(AutocertError):
def __init__(self, jsons, paths):
len_jsons = len(jsons) if isinstance(jsons, list) else None
len_paths = len(paths) if isinstance(paths, list) else None
message = fmt('len(jsons) -> {len_jsons} != len(paths) -> {len_paths}; jsons={jsons}, paths={paths}')
super(JsonsDontMatchPathsError, self).__init__(message)
class DestsDontMatchPathsError(AutocertError):
def __init__(self, dests, paths):
len_dests = len(dests) if isinstance(dests, list) else None
len_paths = len(paths) if isinstance(paths, list) else None
message = fmt('len(dests) -> {len_dests} != len(paths) -> {len_paths}; dests={dests}, paths={paths}')
super(DestsDontMatchPathsError, self).__init__(message)
class DestinationBase(object):
def __init__(self, ar, cfg, verbosity):
self.ar = ar
self.cfg = AttrDict(cfg)
self.verbosity = verbosity
def keywords(self, path=None, dest=None, **kw):
if path is None:
raise DestinationPathError(path)
if not dest:
raise DestinationDestError(dest)
cfg = AttrDict(self.cfg[dest])
kw['url'] = str(cfg.baseurl / path)
kw['auth'] = kw.get('auth', cfg.auth)
kw['headers'] = kw.get('headers', {
'Content-Type': 'application/json',
'User-Agent': 'autocert',
})
return kw
def request(self, method, **kw):
        return self.ar.request(method, **self.keywords(**kw))
def get(self, path=None, dest=None, **kw):
return self.request('GET', path=path, dest=dest, **kw)
def put(self, path=None, dest=None, **kw):
return self.request('PUT', path=path, dest=dest, **kw)
def post(self, path=None, dest=None, **kw):
return self.request('POST', path=path, dest=dest, **kw)
def delete(self, path=None, dest=None, **kw):
return self.request('DELETE', path=path, dest=dest, **kw)
def requests(self, method, paths=None, dests=None, jsons=None, product=True, **kw):
if not paths or not hasattr(paths, '__iter__'):
raise DestinationPathError(paths)
if not dests or not hasattr(dests, '__iter__'):
raise DestinationDestError(dests)
if jsons:
if len(jsons) != len(paths):
raise JsonsDontMatchPathsError(jsons, paths)
if product:
kws = [self.keywords(path=path, dest=dest, json=json, **kw) for (path, json), dest in itertools.product(zip(paths, jsons), dests)]
else:
if len(dests) != len(paths):
raise DestsDontMatchPathsError(dests, paths)
kws = [self.keywords(path=path, dest=dest, json=json, **kw) for (path, json), dest in zip(zip(paths, jsons), dests)]
else:
if product:
kws = [self.keywords(path=path, dest=dest, **kw) for path, dest in itertools.product(paths, dests)]
else:
if len(dests) != len(paths):
raise DestsDontMatchPathsError(dests, paths)
kws = [self.keywords(path=path, dest=dest, **kw) for path, dest in zip(paths, dests)]
app.logger.debug('requests kws =')
app.logger.debug(pformat(kws))
return self.ar.requests(method, *kws)
def gets(self, paths=None, dests=None, jsons=None, **kw):
return self.requests('GET', paths=paths, dests=dests, jsons=jsons, **kw)
def puts(self, paths=None, dests=None, jsons=None, **kw):
return self.requests('PUT', paths=paths, dests=dests, jsons=jsons, **kw)
def posts(self, paths=None, dests=None, jsons=None, **kw):
return self.requests('POST', paths=paths, dests=dests, jsons=jsons, **kw)
def deletes(self, paths=None, dests=None, jsons=None, **kw):
return self.requests('DELETE', paths=paths, dests=dests, jsons=jsons, **kw)
def has_connectivity(self, timeout, *dests):
raise NotImplementedError
def add_destinations(self, cert, *dests, **items):
'''
does this belong here?
'''
cert['destinations'] = cert.get('destinations', {})
for dest in dests:
cert['destinations'][dest] = items
return cert
def fetch_certificates(self, certs, *dests):
raise NotImplementedError
def install_certificates(self, note, certs, *dests):
raise NotImplementedError
def update_certificates(self, certs, *dests):
raise NotImplementedError
def remove_certificates(self, certs, *dests):
raise NotImplementedError
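# Editor's note: a minimal, self-contained sketch of the pairing semantics in
# DestinationBase.requests() above. With product=True every path is combined
# with every dest; with product=False paths and dests are paired one-to-one.
# The path and dest strings below are illustrative only.
if __name__ == '__main__':
    _paths = ['certs/a', 'certs/b']
    _dests = ['zeus', 'apollo']
    assert list(itertools.product(_paths, _dests)) == [
        ('certs/a', 'zeus'), ('certs/a', 'apollo'),
        ('certs/b', 'zeus'), ('certs/b', 'apollo')]
    assert list(zip(_paths, _dests)) == [('certs/a', 'zeus'), ('certs/b', 'apollo')]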
| 39.605634
| 146
| 0.630156
|
import itertools
from pprint import pformat
from attrdict import AttrDict
from utils.format import fmt, pfmt
from utils.exceptions import AutocertError
from app import app
class DestinationConnectivityError(AutocertError):
def __init__(self, dest_ex_pairs):
msg = ''
for dest, ex in dest_ex_pairs:
error = repr(ex)
msg += fmt('{error} when attempting destination {dest}')
super(DestinationConnectivityError, self).__init__(msg)
class DestinationPathError(AutocertError):
def __init__(self, path_or_paths):
message = fmt('error with DestinationBase param path(s) = {path_or_paths}')
super(DestinationPathError, self).__init__(message)
class DestinationDestError(AutocertError):
def __init__(self, dest_or_dests):
message = fmt('error with DestinationBase param dest(s) = {dest_or_dests}')
super(DestinationDestError, self).__init__(message)
class JsonsDontMatchPathsError(AutocertError):
def __init__(self, jsons, paths):
len_jsons = len(jsons) if isinstance(jsons, list) else None
len_paths = len(paths) if isinstance(paths, list) else None
message = fmt('len(jsons) -> {len_jsons} != len(paths) -> {len_paths}; jsons={jsons}, paths={paths}')
super(JsonsDontMatchPathsError, self).__init__(message)
class DestsDontMatchPathsError(AutocertError):
def __init__(self, dests, paths):
len_dests = len(dests) if isinstance(dests, list) else None
len_paths = len(paths) if isinstance(paths, list) else None
message = fmt('len(dests) -> {len_dests} != len(paths) -> {len_paths}; dests={dests}, paths={paths}')
super(DestsDontMatchPathsError, self).__init__(message)
class DestinationBase(object):
def __init__(self, ar, cfg, verbosity):
self.ar = ar
self.cfg = AttrDict(cfg)
self.verbosity = verbosity
def keywords(self, path=None, dest=None, **kw):
if path is None:
raise DestinationPathError(path)
if not dest:
raise DestinationDestError(dest)
cfg = AttrDict(self.cfg[dest])
kw['url'] = str(cfg.baseurl / path)
kw['auth'] = kw.get('auth', cfg.auth)
kw['headers'] = kw.get('headers', {
'Content-Type': 'application/json',
'User-Agent': 'autocert',
})
return kw
def request(self, method, **kw):
        return self.ar.request(method, **self.keywords(**kw))
def get(self, path=None, dest=None, **kw):
return self.request('GET', path=path, dest=dest, **kw)
def put(self, path=None, dest=None, **kw):
return self.request('PUT', path=path, dest=dest, **kw)
def post(self, path=None, dest=None, **kw):
return self.request('POST', path=path, dest=dest, **kw)
def delete(self, path=None, dest=None, **kw):
return self.request('DELETE', path=path, dest=dest, **kw)
def requests(self, method, paths=None, dests=None, jsons=None, product=True, **kw):
if not paths or not hasattr(paths, '__iter__'):
raise DestinationPathError(paths)
if not dests or not hasattr(dests, '__iter__'):
raise DestinationDestError(dests)
if jsons:
if len(jsons) != len(paths):
raise JsonsDontMatchPathsError(jsons, paths)
if product:
kws = [self.keywords(path=path, dest=dest, json=json, **kw) for (path, json), dest in itertools.product(zip(paths, jsons), dests)]
else:
if len(dests) != len(paths):
raise DestsDontMatchPathsError(dests, paths)
kws = [self.keywords(path=path, dest=dest, json=json, **kw) for (path, json), dest in zip(zip(paths, jsons), dests)]
else:
if product:
kws = [self.keywords(path=path, dest=dest, **kw) for path, dest in itertools.product(paths, dests)]
else:
if len(dests) != len(paths):
raise DestsDontMatchPathsError(dests, paths)
kws = [self.keywords(path=path, dest=dest, **kw) for path, dest in zip(paths, dests)]
app.logger.debug('requests kws =')
app.logger.debug(pformat(kws))
return self.ar.requests(method, *kws)
def gets(self, paths=None, dests=None, jsons=None, **kw):
return self.requests('GET', paths=paths, dests=dests, jsons=jsons, **kw)
def puts(self, paths=None, dests=None, jsons=None, **kw):
return self.requests('PUT', paths=paths, dests=dests, jsons=jsons, **kw)
def posts(self, paths=None, dests=None, jsons=None, **kw):
return self.requests('POST', paths=paths, dests=dests, jsons=jsons, **kw)
def deletes(self, paths=None, dests=None, jsons=None, **kw):
return self.requests('DELETE', paths=paths, dests=dests, jsons=jsons, **kw)
def has_connectivity(self, timeout, *dests):
raise NotImplementedError
def add_destinations(self, cert, *dests, **items):
cert['destinations'] = cert.get('destinations', {})
for dest in dests:
cert['destinations'][dest] = items
return cert
def fetch_certificates(self, certs, *dests):
raise NotImplementedError
def install_certificates(self, note, certs, *dests):
raise NotImplementedError
def update_certificates(self, certs, *dests):
raise NotImplementedError
def remove_certificates(self, certs, *dests):
raise NotImplementedError
| true
| true
|
1c48a6e236fc1adb1d930fd0e269173b1e610f96
| 3,078
|
py
|
Python
|
tango_with_django_project/tango_with_django_project/settings.py
|
2027205T/tangowithdjango
|
b7f0e55ee2807ba6531ee736e94098a4e30a96ed
|
[
"MIT"
] | null | null | null |
tango_with_django_project/tango_with_django_project/settings.py
|
2027205T/tangowithdjango
|
b7f0e55ee2807ba6531ee736e94098a4e30a96ed
|
[
"MIT"
] | null | null | null |
tango_with_django_project/tango_with_django_project/settings.py
|
2027205T/tangowithdjango
|
b7f0e55ee2807ba6531ee736e94098a4e30a96ed
|
[
"MIT"
] | null | null | null |
"""
Django settings for tango_with_django_project project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'k&en=nkp4c(bz*_h2h15a!c!jf&uiua-fca3_)9l4y7^!(o&fl'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
TEMPLATE_DEBUG = True
TEMPLATE_PATH = os.path.join(BASE_DIR, 'templates')
TEMPLATE_DIRS = [ TEMPLATE_PATH, ]
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rango',
'registration',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'tango_with_django_project.urls'
WSGI_APPLICATION = 'tango_with_django_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_PATH = os.path.join(BASE_DIR,'static')
STATIC_URL = '/static/'
STATICFILES_DIRS = (
STATIC_PATH,
)
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media') # Absolute path to the media directory
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.PBKDF2PasswordHasher',
'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
)
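# Editor's note: Django uses the first hasher in this list when setting new
# passwords; the remaining entries are consulted only to verify existing hashes.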
# LOGIN_URL = '/rango/login/'
REGISTRATION_OPEN = True # If True, users can register
ACCOUNT_ACTIVATION_DAYS = 7 # One-week activation window; you may, of course, use a different value.
REGISTRATION_AUTO_LOGIN = True # If True, the user will be automatically logged in.
LOGIN_REDIRECT_URL = '/rango/' # The page you want users to arrive at after they successfully log in
LOGIN_URL = '/accounts/login/' # The page users are directed to if they are not logged in and try to access pages requiring authentication
| 28.238532
| 147
| 0.74399
|
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
SECRET_KEY = 'k&en=nkp4c(bz*_h2h15a!c!jf&uiua-fca3_)9l4y7^!(o&fl'
DEBUG = False
TEMPLATE_DEBUG = True
TEMPLATE_PATH = os.path.join(BASE_DIR, 'templates')
TEMPLATE_DIRS = [ TEMPLATE_PATH, ]
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rango',
'registration',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'tango_with_django_project.urls'
WSGI_APPLICATION = 'tango_with_django_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_PATH = os.path.join(BASE_DIR,'static')
STATIC_URL = '/static/'
STATICFILES_DIRS = (
STATIC_PATH,
)
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media') # Absolute path to the media directory
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.PBKDF2PasswordHasher',
'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
)
# LOGIN_URL = '/rango/login/'
REGISTRATION_OPEN = True # If True, users can register
ACCOUNT_ACTIVATION_DAYS = 7 # One-week activation window; you may, of course, use a different value.
REGISTRATION_AUTO_LOGIN = True # If True, the user will be automatically logged in.
LOGIN_REDIRECT_URL = '/rango/' # The page you want users to arrive at after they successfully log in
LOGIN_URL = '/accounts/login/' # The page users are directed to if they are not logged in and try to access pages requiring authentication
| true
| true
|
1c48a7e46d335ddc978171fea67312868be6e7b2
| 9,721
|
py
|
Python
|
tools/azure-sdk-tools/devtools_testutils/proxy_testcase.py
|
xolve/azure-sdk-for-python
|
9f5baa19c392f77f811d936ee43450e4ea524002
|
[
"MIT"
] | null | null | null |
tools/azure-sdk-tools/devtools_testutils/proxy_testcase.py
|
xolve/azure-sdk-for-python
|
9f5baa19c392f77f811d936ee43450e4ea524002
|
[
"MIT"
] | null | null | null |
tools/azure-sdk-tools/devtools_testutils/proxy_testcase.py
|
xolve/azure-sdk-for-python
|
9f5baa19c392f77f811d936ee43450e4ea524002
|
[
"MIT"
] | null | null | null |
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import os
import logging
import requests
import six
import sys
from typing import TYPE_CHECKING
try:
# py3
import urllib.parse as url_parse
except:
# py2
import urlparse as url_parse
import pytest
import subprocess
from azure.core.exceptions import HttpResponseError, ResourceNotFoundError
from azure.core.pipeline.policies import ContentDecodePolicy
# the functions we patch
from azure.core.pipeline.transport import RequestsTransport
# the trimming function to clean up incoming arguments to the test function we are wrapping
from azure_devtools.scenario_tests.utilities import trim_kwargs_from_test_function
from .helpers import is_live, is_live_and_not_recording
from .config import PROXY_URL
if TYPE_CHECKING:
from typing import Tuple
# To learn about how to migrate SDK tests to the test proxy, please refer to the migration guide at
# https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/test_proxy_migration_guide.md
# defaults
RECORDING_START_URL = "{}/record/start".format(PROXY_URL)
RECORDING_STOP_URL = "{}/record/stop".format(PROXY_URL)
PLAYBACK_START_URL = "{}/playback/start".format(PROXY_URL)
PLAYBACK_STOP_URL = "{}/playback/stop".format(PROXY_URL)
# we store recording IDs in a module-level variable so that sanitizers can access them
# we map test IDs to recording IDs, rather than storing only the current test's recording ID, for parallelization
this = sys.modules[__name__]
this.recording_ids = {}
def get_recording_id():
test_id = get_test_id()
return this.recording_ids.get(test_id)
def get_test_id():
# type: () -> str
# pytest sets the current running test in an environment variable
# the path to the test can depend on the environment, so we can't assume this is the path from the repo root
setting_value = os.getenv("PYTEST_CURRENT_TEST")
path_to_test = os.path.normpath(setting_value.split(" ")[0])
full_path_to_test = os.path.abspath(path_to_test)
# walk up to the repo root by looking for "sdk" directory or root of file system
path_components = []
head, tail = os.path.split(full_path_to_test)
while tail != "sdk" and tail != "":
path_components.append(tail)
head, tail = os.path.split(head)
# reverse path_components to construct components of path from repo root: [sdk, ..., tests, {test}]
path_components.append("sdk")
path_components.reverse()
for idx, val in enumerate(path_components):
if val.startswith("test"):
path_components.insert(idx + 1, "recordings")
break
return os.sep.join(path_components).replace("::", "").replace("\\", "/")
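# Editor's note: a self-contained sketch of the path rewrite performed by
# get_test_id() above; "recordings" is inserted after the first path component
# that starts with "test". The helper and its input path are hypothetical.
def _demo_test_id(path):
    parts = path.split("/")
    for idx, val in enumerate(parts):
        if val.startswith("test"):
            parts.insert(idx + 1, "recordings")
            break
    return "/".join(parts)
# _demo_test_id("sdk/tables/azure-data-tables/tests/test_retry.py")
# -> "sdk/tables/azure-data-tables/tests/recordings/test_retry.py"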
def start_record_or_playback(test_id):
    # type: (str) -> Tuple[str, dict]
"""Sends a request to begin recording or playing back the provided test.
This returns a tuple, (a, b), where a is the recording ID of the test and b is the `variables` dictionary that maps
test variables to values. If no variable dictionary was stored when the test was recorded, b is an empty dictionary.
"""
head_commit = subprocess.check_output(["git", "rev-parse", "HEAD"])
current_sha = head_commit.decode("utf-8").strip()
variables = {} # this stores a dictionary of test variable values that could have been stored with a recording
if is_live():
result = requests.post(
RECORDING_START_URL,
json={"x-recording-file": test_id},
)
if result.status_code != 200:
message = six.ensure_str(result._content)
raise HttpResponseError(message=message)
recording_id = result.headers["x-recording-id"]
else:
result = requests.post(
PLAYBACK_START_URL,
json={"x-recording-file": test_id},
)
if result.status_code != 200:
message = six.ensure_str(result._content)
raise HttpResponseError(message=message)
try:
recording_id = result.headers["x-recording-id"]
except KeyError as ex:
six.raise_from(ValueError("No recording file found for {}".format(test_id)), ex)
if result.text:
try:
variables = result.json()
except ValueError as ex: # would be a JSONDecodeError on Python 3, which subclasses ValueError
six.raise_from(
ValueError("The response body returned from starting playback did not contain valid JSON"), ex
)
# set recording ID in a module-level variable so that sanitizers can access it
this.recording_ids[test_id] = recording_id
return (recording_id, variables)
def stop_record_or_playback(test_id, recording_id, test_output):
# type: (str, str, dict) -> None
if is_live():
requests.post(
RECORDING_STOP_URL,
headers={
"x-recording-file": test_id,
"x-recording-id": recording_id,
"x-recording-save": "true",
"Content-Type": "application/json"
},
json=test_output or {} # tests don't record successfully unless test_output is a dictionary
)
else:
requests.post(
PLAYBACK_STOP_URL,
headers={"x-recording-id": recording_id},
)
def get_proxy_netloc():
parsed_result = url_parse.urlparse(PROXY_URL)
return {"scheme": parsed_result.scheme, "netloc": parsed_result.netloc}
def transform_request(request, recording_id):
"""Redirect the request to the test proxy, and store the original request URI in a header"""
headers = request.headers
parsed_result = url_parse.urlparse(request.url)
updated_target = parsed_result._replace(**get_proxy_netloc()).geturl()
if headers.get("x-recording-upstream-base-uri", None) is None:
headers["x-recording-upstream-base-uri"] = "{}://{}".format(parsed_result.scheme, parsed_result.netloc)
headers["x-recording-id"] = recording_id
headers["x-recording-mode"] = "record" if is_live() else "playback"
request.url = updated_target
def recorded_by_proxy(test_func):
"""Decorator that redirects network requests to target the azure-sdk-tools test proxy. Use with recorded tests.
For more details and usage examples, refer to
https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/test_proxy_migration_guide.md
"""
def record_wrap(*args, **kwargs):
if sys.version_info.major == 2 and not is_live():
pytest.skip("Playback testing is incompatible with the azure-sdk-tools test proxy on Python 2")
def transform_args(*args, **kwargs):
copied_positional_args = list(args)
request = copied_positional_args[1]
transform_request(request, recording_id)
return tuple(copied_positional_args), kwargs
trimmed_kwargs = {k: v for k, v in kwargs.items()}
trim_kwargs_from_test_function(test_func, trimmed_kwargs)
if is_live_and_not_recording():
return test_func(*args, **trimmed_kwargs)
test_id = get_test_id()
recording_id, variables = start_record_or_playback(test_id)
original_transport_func = RequestsTransport.send
def combined_call(*args, **kwargs):
adjusted_args, adjusted_kwargs = transform_args(*args, **kwargs)
result = original_transport_func(*adjusted_args, **adjusted_kwargs)
# make the x-recording-upstream-base-uri the URL of the request
# this makes the request look like it was made to the original endpoint instead of to the proxy
# without this, things like LROPollers can get broken by polling the wrong endpoint
parsed_result = url_parse.urlparse(result.request.url)
upstream_uri = url_parse.urlparse(result.request.headers["x-recording-upstream-base-uri"])
upstream_uri_dict = {"scheme": upstream_uri.scheme, "netloc": upstream_uri.netloc}
original_target = parsed_result._replace(**upstream_uri_dict).geturl()
result.request.url = original_target
return result
RequestsTransport.send = combined_call
# call the modified function
# we define test_output before invoking the test so the variable is defined in case of an exception
test_output = None
try:
try:
test_output = test_func(*args, variables=variables, **trimmed_kwargs)
except TypeError:
logger = logging.getLogger()
logger.info(
"This test can't accept variables as input. The test method should accept `**kwargs` and/or a "
"`variables` parameter to make use of recorded test variables."
)
test_output = test_func(*args, **trimmed_kwargs)
except ResourceNotFoundError as error:
error_body = ContentDecodePolicy.deserialize_from_http_generics(error.response)
message = error_body.get("message") or error_body.get("Message")
error_with_message = ResourceNotFoundError(message=message, response=error.response)
six.raise_from(error_with_message, error)
finally:
RequestsTransport.send = original_transport_func
stop_record_or_playback(test_id, recording_id, test_output)
return test_output
return record_wrap
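# Editor's sketch of intended usage (the test class and method names below are
# hypothetical; real tests should follow the migration guide linked above):
#   class TestTables(AzureRecordedTestCase):
#       @recorded_by_proxy
#       def test_create_table(self, **kwargs):
#           variables = kwargs.pop("variables", {})
#           ...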
| 40.67364
| 120
| 0.668861
|
import os
import logging
import requests
import six
import sys
from typing import TYPE_CHECKING
try:
import urllib.parse as url_parse
except:
import urlparse as url_parse
import pytest
import subprocess
from azure.core.exceptions import HttpResponseError, ResourceNotFoundError
from azure.core.pipeline.policies import ContentDecodePolicy
from azure.core.pipeline.transport import RequestsTransport
from azure_devtools.scenario_tests.utilities import trim_kwargs_from_test_function
from .helpers import is_live, is_live_and_not_recording
from .config import PROXY_URL
if TYPE_CHECKING:
from typing import Tuple
RECORDING_START_URL = "{}/record/start".format(PROXY_URL)
RECORDING_STOP_URL = "{}/record/stop".format(PROXY_URL)
PLAYBACK_START_URL = "{}/playback/start".format(PROXY_URL)
PLAYBACK_STOP_URL = "{}/playback/stop".format(PROXY_URL)
this = sys.modules[__name__]
this.recording_ids = {}
def get_recording_id():
test_id = get_test_id()
return this.recording_ids.get(test_id)
def get_test_id():
# type: () -> str
# pytest sets the current running test in an environment variable
# the path to the test can depend on the environment, so we can't assume this is the path from the repo root
setting_value = os.getenv("PYTEST_CURRENT_TEST")
path_to_test = os.path.normpath(setting_value.split(" ")[0])
full_path_to_test = os.path.abspath(path_to_test)
path_components = []
head, tail = os.path.split(full_path_to_test)
while tail != "sdk" and tail != "":
path_components.append(tail)
head, tail = os.path.split(head)
path_components.append("sdk")
path_components.reverse()
for idx, val in enumerate(path_components):
if val.startswith("test"):
path_components.insert(idx + 1, "recordings")
break
return os.sep.join(path_components).replace("::", "").replace("\\", "/")
def start_record_or_playback(test_id):
head_commit = subprocess.check_output(["git", "rev-parse", "HEAD"])
current_sha = head_commit.decode("utf-8").strip()
variables = {}
if is_live():
result = requests.post(
RECORDING_START_URL,
json={"x-recording-file": test_id},
)
if result.status_code != 200:
message = six.ensure_str(result._content)
raise HttpResponseError(message=message)
recording_id = result.headers["x-recording-id"]
else:
result = requests.post(
PLAYBACK_START_URL,
json={"x-recording-file": test_id},
)
if result.status_code != 200:
message = six.ensure_str(result._content)
raise HttpResponseError(message=message)
try:
recording_id = result.headers["x-recording-id"]
except KeyError as ex:
six.raise_from(ValueError("No recording file found for {}".format(test_id)), ex)
if result.text:
try:
variables = result.json()
except ValueError as ex:
six.raise_from(
ValueError("The response body returned from starting playback did not contain valid JSON"), ex
)
this.recording_ids[test_id] = recording_id
return (recording_id, variables)
def stop_record_or_playback(test_id, recording_id, test_output):
if is_live():
requests.post(
RECORDING_STOP_URL,
headers={
"x-recording-file": test_id,
"x-recording-id": recording_id,
"x-recording-save": "true",
"Content-Type": "application/json"
},
json=test_output or {}
)
else:
requests.post(
PLAYBACK_STOP_URL,
headers={"x-recording-id": recording_id},
)
def get_proxy_netloc():
parsed_result = url_parse.urlparse(PROXY_URL)
return {"scheme": parsed_result.scheme, "netloc": parsed_result.netloc}
def transform_request(request, recording_id):
headers = request.headers
parsed_result = url_parse.urlparse(request.url)
updated_target = parsed_result._replace(**get_proxy_netloc()).geturl()
if headers.get("x-recording-upstream-base-uri", None) is None:
headers["x-recording-upstream-base-uri"] = "{}://{}".format(parsed_result.scheme, parsed_result.netloc)
headers["x-recording-id"] = recording_id
headers["x-recording-mode"] = "record" if is_live() else "playback"
request.url = updated_target
def recorded_by_proxy(test_func):
def record_wrap(*args, **kwargs):
if sys.version_info.major == 2 and not is_live():
pytest.skip("Playback testing is incompatible with the azure-sdk-tools test proxy on Python 2")
def transform_args(*args, **kwargs):
copied_positional_args = list(args)
request = copied_positional_args[1]
transform_request(request, recording_id)
return tuple(copied_positional_args), kwargs
trimmed_kwargs = {k: v for k, v in kwargs.items()}
trim_kwargs_from_test_function(test_func, trimmed_kwargs)
if is_live_and_not_recording():
return test_func(*args, **trimmed_kwargs)
test_id = get_test_id()
recording_id, variables = start_record_or_playback(test_id)
original_transport_func = RequestsTransport.send
def combined_call(*args, **kwargs):
adjusted_args, adjusted_kwargs = transform_args(*args, **kwargs)
result = original_transport_func(*adjusted_args, **adjusted_kwargs)
# make the x-recording-upstream-base-uri the URL of the request
# this makes the request look like it was made to the original endpoint instead of to the proxy
# without this, things like LROPollers can get broken by polling the wrong endpoint
parsed_result = url_parse.urlparse(result.request.url)
upstream_uri = url_parse.urlparse(result.request.headers["x-recording-upstream-base-uri"])
upstream_uri_dict = {"scheme": upstream_uri.scheme, "netloc": upstream_uri.netloc}
original_target = parsed_result._replace(**upstream_uri_dict).geturl()
result.request.url = original_target
return result
RequestsTransport.send = combined_call
# call the modified function
# we define test_output before invoking the test so the variable is defined in case of an exception
test_output = None
try:
try:
test_output = test_func(*args, variables=variables, **trimmed_kwargs)
except TypeError:
logger = logging.getLogger()
logger.info(
"This test can't accept variables as input. The test method should accept `**kwargs` and/or a "
"`variables` parameter to make use of recorded test variables."
)
test_output = test_func(*args, **trimmed_kwargs)
except ResourceNotFoundError as error:
error_body = ContentDecodePolicy.deserialize_from_http_generics(error.response)
message = error_body.get("message") or error_body.get("Message")
error_with_message = ResourceNotFoundError(message=message, response=error.response)
six.raise_from(error_with_message, error)
finally:
RequestsTransport.send = original_transport_func
stop_record_or_playback(test_id, recording_id, test_output)
return test_output
return record_wrap
| true
| true
|
1c48a7ec9e8bcce337e61ad31b2fdf124e12ba99
| 2,489
|
py
|
Python
|
logger.py
|
lvwuyunlifan/crop
|
7392d007a8271ff384c5c66ed5717afbc4172b4d
|
[
"Apache-2.0"
] | null | null | null |
logger.py
|
lvwuyunlifan/crop
|
7392d007a8271ff384c5c66ed5717afbc4172b4d
|
[
"Apache-2.0"
] | null | null | null |
logger.py
|
lvwuyunlifan/crop
|
7392d007a8271ff384c5c66ed5717afbc4172b4d
|
[
"Apache-2.0"
] | null | null | null |
# Code referenced from https://gist.github.com/gyglim/1f8dfb1b5c82627ae3efcfbbadb9f514
import tensorflow as tf
import numpy as np
import scipy.misc
try:
from StringIO import StringIO # Python 2.7
except ImportError:
from io import BytesIO # Python 3.x
class Logger(object):
def __init__(self, log_dir):
"""Create a summary writer logging to log_dir."""
self.writer = tf.summary.FileWriter(log_dir)
def scalar_summary(self, tag, value, step):
"""Log a scalar variable."""
summary = tf.Summary(value=[tf.Summary.Value(tag=tag, simple_value=value)])
self.writer.add_summary(summary, step)
def image_summary(self, tag, images, step):
"""Log a list of images."""
img_summaries = []
for i, img in enumerate(images):
# Write the image to a string
try:
s = StringIO()
except:
s = BytesIO()
scipy.misc.toimage(img).save(s, format="png")
# Create an Image object
img_sum = tf.Summary.Image(encoded_image_string=s.getvalue(),
height=img.shape[0],
width=img.shape[1])
# Create a Summary value
img_summaries.append(tf.Summary.Value(tag='%s/%d' % (tag, i), image=img_sum))
# Create and write Summary
summary = tf.Summary(value=img_summaries)
self.writer.add_summary(summary, step)
def histo_summary(self, tag, values, step, bins=1000):
"""Log a histogram of the tensor of values."""
# Create a histogram using numpy
counts, bin_edges = np.histogram(values, bins=bins)
# Fill the fields of the histogram proto
hist = tf.HistogramProto()
hist.min = float(np.min(values))
hist.max = float(np.max(values))
hist.num = int(np.prod(values.shape))
hist.sum = float(np.sum(values))
hist.sum_squares = float(np.sum(values ** 2))
# Drop the start of the first bin
bin_edges = bin_edges[1:]
# Add bin edges and counts
for edge in bin_edges:
hist.bucket_limit.append(edge)
for c in counts:
hist.bucket.append(c)
# Create and write Summary
summary = tf.Summary(value=[tf.Summary.Value(tag=tag, histo=hist)])
self.writer.add_summary(summary, step)
self.writer.flush()
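# Editor's note: a self-contained numpy sketch of the bucket bookkeeping in
# histo_summary() above. HistogramProto expects one bucket_limit (right edge)
# per count, while np.histogram returns len(counts) + 1 edges, so the leftmost
# edge is dropped. The sample values are illustrative only.
if __name__ == '__main__':
    _counts, _edges = np.histogram([0.0, 0.5, 1.0, 1.5, 2.0], bins=4)
    assert len(_edges) == len(_counts) + 1
    assert len(_edges[1:]) == len(_counts)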
| 22.223214
| 89
| 0.584974
|
import tensorflow as tf
import numpy as np
import scipy.misc
try:
from StringIO import StringIO
except ImportError:
from io import BytesIO
class Logger(object):
def __init__(self, log_dir):
self.writer = tf.summary.FileWriter(log_dir)
def scalar_summary(self, tag, value, step):
summary = tf.Summary(value=[tf.Summary.Value(tag=tag, simple_value=value)])
self.writer.add_summary(summary, step)
def image_summary(self, tag, images, step):
img_summaries = []
for i, img in enumerate(images):
try:
s = StringIO()
except:
s = BytesIO()
scipy.misc.toimage(img).save(s, format="png")
img_sum = tf.Summary.Image(encoded_image_string=s.getvalue(),
height=img.shape[0],
width=img.shape[1])
img_summaries.append(tf.Summary.Value(tag='%s/%d' % (tag, i), image=img_sum))
summary = tf.Summary(value=img_summaries)
self.writer.add_summary(summary, step)
def histo_summary(self, tag, values, step, bins=1000):
counts, bin_edges = np.histogram(values, bins=bins)
hist = tf.HistogramProto()
hist.min = float(np.min(values))
hist.max = float(np.max(values))
hist.num = int(np.prod(values.shape))
hist.sum = float(np.sum(values))
hist.sum_squares = float(np.sum(values ** 2))
bin_edges = bin_edges[1:]
for edge in bin_edges:
hist.bucket_limit.append(edge)
for c in counts:
hist.bucket.append(c)
summary = tf.Summary(value=[tf.Summary.Value(tag=tag, histo=hist)])
self.writer.add_summary(summary, step)
self.writer.flush()
| true
| true
|
1c48aac01a148b38c7ba99f01d288cdce7084c4d
| 8,262
|
py
|
Python
|
lib/Crypto/SelfTest/Cipher/test_OpenPGP.py
|
niulinlnc/pycryptodome
|
d902c929fcfdd0ece28e0c2e164a2aae78e3f4a2
|
[
"Unlicense"
] | null | null | null |
lib/Crypto/SelfTest/Cipher/test_OpenPGP.py
|
niulinlnc/pycryptodome
|
d902c929fcfdd0ece28e0c2e164a2aae78e3f4a2
|
[
"Unlicense"
] | null | null | null |
lib/Crypto/SelfTest/Cipher/test_OpenPGP.py
|
niulinlnc/pycryptodome
|
d902c929fcfdd0ece28e0c2e164a2aae78e3f4a2
|
[
"Unlicense"
] | null | null | null |
# ===================================================================
#
# Copyright (c) 2015, Legrandin <helderijs@gmail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ===================================================================
import unittest
from binascii import unhexlify
from Crypto.SelfTest.st_common import list_test_cases
from Crypto.Util.py3compat import tobytes
from Crypto.Cipher import AES, DES3, DES
from Crypto.Hash import SHAKE128
def get_tag_random(tag, length):
return SHAKE128.new(data=tobytes(tag)).read(length)
from Crypto.SelfTest.Cipher.test_CBC import BlockChainingTests
class OpenPGPTests(BlockChainingTests):
aes_mode = AES.MODE_OPENPGP
des3_mode = DES3.MODE_OPENPGP
# Redefine test_unaligned_data_128/64
key_128 = get_tag_random("key_128", 16)
key_192 = get_tag_random("key_192", 24)
iv_128 = get_tag_random("iv_128", 16)
iv_64 = get_tag_random("iv_64", 8)
data_128 = get_tag_random("data_128", 16)
def test_loopback_128(self):
cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128)
pt = get_tag_random("plaintext", 16 * 100)
ct = cipher.encrypt(pt)
eiv, ct = ct[:18], ct[18:]
cipher = AES.new(self.key_128, AES.MODE_OPENPGP, eiv)
pt2 = cipher.decrypt(ct)
self.assertEqual(pt, pt2)
def test_loopback_64(self):
cipher = DES3.new(self.key_192, DES3.MODE_OPENPGP, self.iv_64)
pt = get_tag_random("plaintext", 8 * 100)
ct = cipher.encrypt(pt)
eiv, ct = ct[:10], ct[10:]
cipher = DES3.new(self.key_192, DES3.MODE_OPENPGP, eiv)
pt2 = cipher.decrypt(ct)
self.assertEqual(pt, pt2)
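    # Editor's note: in OpenPGP mode the returned ciphertext is prefixed with an
    # encrypted IV of block_size + 2 bytes (RFC 4880 repeats the last two IV
    # octets as a quick check), hence the 18-byte slice for AES above and the
    # 10-byte slice for DES3.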
def test_IV_iv_attributes(self):
cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128)
eiv = cipher.encrypt(b"")
self.assertEqual(cipher.iv, self.iv_128)
cipher = AES.new(self.key_128, AES.MODE_OPENPGP, eiv)
self.assertEqual(cipher.iv, self.iv_128)
def test_null_encryption_decryption(self):
cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128)
eiv = cipher.encrypt(b"")
cipher = AES.new(self.key_128, AES.MODE_OPENPGP, eiv)
self.assertEqual(cipher.decrypt(b""), b"")
def test_either_encrypt_or_decrypt(self):
cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128)
eiv = cipher.encrypt(b"")
self.assertRaises(TypeError, cipher.decrypt, b"")
cipher = AES.new(self.key_128, AES.MODE_OPENPGP, eiv)
cipher.decrypt(b"")
self.assertRaises(TypeError, cipher.encrypt, b"")
def test_unaligned_data_128(self):
plaintexts = [ b"7777777" ] * 100
cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128)
ciphertexts = [ cipher.encrypt(x) for x in plaintexts ]
cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128)
self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts)))
def test_unaligned_data_64(self):
plaintexts = [ b"7777777" ] * 100
cipher = DES3.new(self.key_192, DES3.MODE_OPENPGP, self.iv_64)
ciphertexts = [ cipher.encrypt(x) for x in plaintexts ]
cipher = DES3.new(self.key_192, DES3.MODE_OPENPGP, self.iv_64)
self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts)))
class TestVectors(unittest.TestCase):
def test_aes(self):
# The following test vectors have been generated with gpg v1.4.0.
# The command line used was:
#
# gpg -c -z 0 --cipher-algo AES --passphrase secret_passphrase \
# --disable-mdc --s2k-mode 0 --output ct pt
#
# As result, the content of the file 'pt' is encrypted with a key derived
# from 'secret_passphrase' and written to file 'ct'.
# Test vectors must be extracted from 'ct', which is a collection of
# TLVs (see RFC4880 for all details):
# - the encrypted data (with the encrypted IV as prefix) is the payload
# of the TLV with tag 9 (Symmetrical Encrypted Data Packet).
# This is the ciphertext in the test vector.
# - inside the encrypted part, there is a further layer of TLVs. One must
# look for tag 11 (Literal Data Packet); in its payload, after a short
# but time dependent header, there is the content of file 'pt'.
# In the test vector, the plaintext is the complete set of TLVs that gets
# encrypted. It is not just the content of 'pt'.
# - the key is the leftmost 16 bytes of the SHA1 digest of the password.
# The test vector contains such shortened digest.
#
# Note that encryption uses a clear IV, and decryption an encrypted IV
plaintext = 'ac18620270744fb4f647426c61636b4361745768697465436174'
ciphertext = 'dc6b9e1f095de609765c59983db5956ae4f63aea7405389d2ebb'
key = '5baa61e4c9b93f3f0682250b6cf8331b'
iv = '3d7d3e62282add7eb203eeba5c800733'
encrypted_iv='fd934601ef49cb58b6d9aebca6056bdb96ef'
plaintext = unhexlify(plaintext)
ciphertext = unhexlify(ciphertext)
key = unhexlify(key)
iv = unhexlify(iv)
encrypted_iv = unhexlify(encrypted_iv)
cipher = AES.new(key, AES.MODE_OPENPGP, iv)
ct = cipher.encrypt(plaintext)
self.assertEqual(ct[:18], encrypted_iv)
self.assertEqual(ct[18:], ciphertext)
cipher = AES.new(key, AES.MODE_OPENPGP, encrypted_iv)
pt = cipher.decrypt(ciphertext)
self.assertEqual(pt, plaintext)
def test_des3(self):
# The following test vectors have been generated with gpg v1.4.0.
# The command line used was:
# gpg -c -z 0 --cipher-algo 3DES --passphrase secret_passphrase \
# --disable-mdc --s2k-mode 0 --output ct pt
# For an explanation, see test_AES.py .
plaintext = 'ac1762037074324fb53ba3596f73656d69746556616c6c6579'
ciphertext = '9979238528357b90e2e0be549cb0b2d5999b9a4a447e5c5c7d'
key = '7ade65b460f5ea9be35f9e14aa883a2048e3824aa616c0b2'
iv='cd47e2afb8b7e4b0'
encrypted_iv='6a7eef0b58050e8b904a'
plaintext = unhexlify(plaintext)
ciphertext = unhexlify(ciphertext)
key = unhexlify(key)
iv = unhexlify(iv)
encrypted_iv = unhexlify(encrypted_iv)
cipher = DES3.new(key, DES3.MODE_OPENPGP, iv)
ct = cipher.encrypt(plaintext)
self.assertEqual(ct[:10], encrypted_iv)
self.assertEqual(ct[10:], ciphertext)
cipher = DES3.new(key, DES3.MODE_OPENPGP, encrypted_iv)
pt = cipher.decrypt(ciphertext)
self.assertEqual(pt, plaintext)
def get_tests(config={}):
tests = []
tests += list_test_cases(OpenPGPTests)
tests += list_test_cases(TestVectors)
return tests
if __name__ == '__main__':
suite = lambda: unittest.TestSuite(get_tests())
unittest.main(defaultTest='suite')
| 39.913043
| 85
| 0.671508
|
import unittest
from binascii import unhexlify
from Crypto.SelfTest.st_common import list_test_cases
from Crypto.Util.py3compat import tobytes
from Crypto.Cipher import AES, DES3, DES
from Crypto.Hash import SHAKE128
def get_tag_random(tag, length):
return SHAKE128.new(data=tobytes(tag)).read(length)
from Crypto.SelfTest.Cipher.test_CBC import BlockChainingTests
class OpenPGPTests(BlockChainingTests):
aes_mode = AES.MODE_OPENPGP
des3_mode = DES3.MODE_OPENPGP
key_128 = get_tag_random("key_128", 16)
key_192 = get_tag_random("key_192", 24)
iv_128 = get_tag_random("iv_128", 16)
iv_64 = get_tag_random("iv_64", 8)
data_128 = get_tag_random("data_128", 16)
def test_loopback_128(self):
cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128)
pt = get_tag_random("plaintext", 16 * 100)
ct = cipher.encrypt(pt)
eiv, ct = ct[:18], ct[18:]
cipher = AES.new(self.key_128, AES.MODE_OPENPGP, eiv)
pt2 = cipher.decrypt(ct)
self.assertEqual(pt, pt2)
def test_loopback_64(self):
cipher = DES3.new(self.key_192, DES3.MODE_OPENPGP, self.iv_64)
pt = get_tag_random("plaintext", 8 * 100)
ct = cipher.encrypt(pt)
eiv, ct = ct[:10], ct[10:]
cipher = DES3.new(self.key_192, DES3.MODE_OPENPGP, eiv)
pt2 = cipher.decrypt(ct)
self.assertEqual(pt, pt2)
def test_IV_iv_attributes(self):
cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128)
eiv = cipher.encrypt(b"")
self.assertEqual(cipher.iv, self.iv_128)
cipher = AES.new(self.key_128, AES.MODE_OPENPGP, eiv)
self.assertEqual(cipher.iv, self.iv_128)
def test_null_encryption_decryption(self):
cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128)
eiv = cipher.encrypt(b"")
cipher = AES.new(self.key_128, AES.MODE_OPENPGP, eiv)
self.assertEqual(cipher.decrypt(b""), b"")
def test_either_encrypt_or_decrypt(self):
cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128)
eiv = cipher.encrypt(b"")
self.assertRaises(TypeError, cipher.decrypt, b"")
cipher = AES.new(self.key_128, AES.MODE_OPENPGP, eiv)
cipher.decrypt(b"")
self.assertRaises(TypeError, cipher.encrypt, b"")
def test_unaligned_data_128(self):
plaintexts = [ b"7777777" ] * 100
cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128)
ciphertexts = [ cipher.encrypt(x) for x in plaintexts ]
cipher = AES.new(self.key_128, AES.MODE_OPENPGP, self.iv_128)
self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts)))
def test_unaligned_data_64(self):
plaintexts = [ b"7777777" ] * 100
cipher = DES3.new(self.key_192, DES3.MODE_OPENPGP, self.iv_64)
ciphertexts = [ cipher.encrypt(x) for x in plaintexts ]
cipher = DES3.new(self.key_192, DES3.MODE_OPENPGP, self.iv_64)
self.assertEqual(b"".join(ciphertexts), cipher.encrypt(b"".join(plaintexts)))
class TestVectors(unittest.TestCase):
def test_aes(self):
plaintext = 'ac18620270744fb4f647426c61636b4361745768697465436174'
ciphertext = 'dc6b9e1f095de609765c59983db5956ae4f63aea7405389d2ebb'
key = '5baa61e4c9b93f3f0682250b6cf8331b'
iv = '3d7d3e62282add7eb203eeba5c800733'
encrypted_iv='fd934601ef49cb58b6d9aebca6056bdb96ef'
plaintext = unhexlify(plaintext)
ciphertext = unhexlify(ciphertext)
key = unhexlify(key)
iv = unhexlify(iv)
encrypted_iv = unhexlify(encrypted_iv)
cipher = AES.new(key, AES.MODE_OPENPGP, iv)
ct = cipher.encrypt(plaintext)
self.assertEqual(ct[:18], encrypted_iv)
self.assertEqual(ct[18:], ciphertext)
cipher = AES.new(key, AES.MODE_OPENPGP, encrypted_iv)
pt = cipher.decrypt(ciphertext)
self.assertEqual(pt, plaintext)
def test_des3(self):
plaintext = 'ac1762037074324fb53ba3596f73656d69746556616c6c6579'
ciphertext = '9979238528357b90e2e0be549cb0b2d5999b9a4a447e5c5c7d'
key = '7ade65b460f5ea9be35f9e14aa883a2048e3824aa616c0b2'
iv='cd47e2afb8b7e4b0'
encrypted_iv='6a7eef0b58050e8b904a'
plaintext = unhexlify(plaintext)
ciphertext = unhexlify(ciphertext)
key = unhexlify(key)
iv = unhexlify(iv)
encrypted_iv = unhexlify(encrypted_iv)
cipher = DES3.new(key, DES3.MODE_OPENPGP, iv)
ct = cipher.encrypt(plaintext)
self.assertEqual(ct[:10], encrypted_iv)
self.assertEqual(ct[10:], ciphertext)
cipher = DES3.new(key, DES3.MODE_OPENPGP, encrypted_iv)
pt = cipher.decrypt(ciphertext)
self.assertEqual(pt, plaintext)
def get_tests(config={}):
tests = []
tests += list_test_cases(OpenPGPTests)
tests += list_test_cases(TestVectors)
return tests
if __name__ == '__main__':
suite = lambda: unittest.TestSuite(get_tests())
unittest.main(defaultTest='suite')
| true
| true
|
1c48ac07fa865efa223f340c554937bd46c9c70e
| 7,872
|
py
|
Python
|
server/openslides/agenda/config_variables.py
|
jonathankeuser/OpenSlides
|
7ea9a9a6a1fe1e9f4c6d25c27f79e5a60858e2f6
|
[
"MIT"
] | null | null | null |
server/openslides/agenda/config_variables.py
|
jonathankeuser/OpenSlides
|
7ea9a9a6a1fe1e9f4c6d25c27f79e5a60858e2f6
|
[
"MIT"
] | null | null | null |
server/openslides/agenda/config_variables.py
|
jonathankeuser/OpenSlides
|
7ea9a9a6a1fe1e9f4c6d25c27f79e5a60858e2f6
|
[
"MIT"
] | null | null | null |
from django.core.validators import MaxLengthValidator, MinValueValidator
from openslides.core.config import ConfigVariable
def get_config_variables():
"""
Generator which yields all config variables of this app.
It has to be evaluated during app loading (see apps.py).
"""
# General
yield ConfigVariable(
name="agenda_start_event_date_time",
default_value=None,
input_type="datetimepicker",
label="Begin of event",
help_text="Input format: DD.MM.YYYY HH:MM",
weight=200,
group="Agenda",
)
yield ConfigVariable(
name="agenda_show_subtitle",
default_value=False,
input_type="boolean",
label="Show subtitles in the agenda",
weight=201,
group="Agenda",
)
# Numbering
yield ConfigVariable(
name="agenda_enable_numbering",
label="Enable numbering for agenda items",
input_type="boolean",
default_value=True,
weight=205,
group="Agenda",
subgroup="Numbering",
)
yield ConfigVariable(
name="agenda_number_prefix",
default_value="",
label="Numbering prefix for agenda items",
help_text="This prefix will be set if you run the automatic agenda numbering.",
weight=206,
group="Agenda",
subgroup="Numbering",
validators=(MaxLengthValidator(20),),
)
yield ConfigVariable(
name="agenda_numeral_system",
default_value="arabic",
input_type="choice",
label="Numeral system for agenda items",
choices=(
{"value": "arabic", "display_name": "Arabic"},
{"value": "roman", "display_name": "Roman"},
),
weight=207,
group="Agenda",
subgroup="Numbering",
)
# Visibility
yield ConfigVariable(
name="agenda_item_creation",
label="Add to agenda",
default_value="default_yes",
input_type="choice",
choices=(
{"value": "always", "display_name": "Always"},
{"value": "never", "display_name": "Never"},
{"value": "default_yes", "display_name": "Ask, default yes"},
{"value": "default_no", "display_name": "Ask, default no"},
),
weight=210,
group="Agenda",
subgroup="Visibility",
)
yield ConfigVariable(
name="agenda_new_items_default_visibility",
default_value="2",
input_type="choice",
choices=(
{"value": "1", "display_name": "Public item"},
{"value": "2", "display_name": "Internal item"},
{"value": "3", "display_name": "Hidden item"},
),
label="Default visibility for new agenda items (except topics)",
weight=211,
group="Agenda",
subgroup="Visibility",
)
yield ConfigVariable(
name="agenda_hide_internal_items_on_projector",
default_value=True,
input_type="boolean",
label="Hide internal items when projecting subitems",
weight=212,
group="Agenda",
subgroup="Visibility",
)
# List of speakers
yield ConfigVariable(
name="agenda_show_last_speakers",
default_value=0,
input_type="integer",
label="Number of last speakers to be shown on the projector",
weight=220,
group="Agenda",
subgroup="List of speakers",
validators=(MinValueValidator(0),),
)
yield ConfigVariable(
name="agenda_show_next_speakers",
default_value=-1,
input_type="integer",
label="Number of the next speakers to be shown on the projector",
help_text="Enter number of the next shown speakers. Choose -1 to show all next speakers.",
weight=222,
group="Agenda",
subgroup="List of speakers",
validators=(MinValueValidator(-1),),
)
yield ConfigVariable(
name="agenda_countdown_warning_time",
default_value=0,
input_type="integer",
label="Show orange countdown in the last x seconds of speaking time",
help_text="Enter duration in seconds. Choose 0 to disable warning color.",
weight=224,
group="Agenda",
subgroup="List of speakers",
validators=(MinValueValidator(0),),
)
yield ConfigVariable(
name="projector_default_countdown",
default_value=60,
input_type="integer",
label="Predefined seconds of new countdowns",
weight=226,
group="Agenda",
subgroup="List of speakers",
)
yield ConfigVariable(
name="agenda_enable_global_list_of_speakers",
default_value=False,
input_type="boolean",
label="Enable global list of speakers",
weight=227,
group="Agenda",
subgroup="List of speakers",
)
yield ConfigVariable(
name="agenda_global_list_of_speakers",
default_value=1,
input_type="integer",
label="ID of globally used list of speakers",
weight=227,
group="Agenda",
subgroup="List of speakers",
)
yield ConfigVariable(
name="agenda_couple_countdown_and_speakers",
default_value=True,
input_type="boolean",
label="Couple countdown with the list of speakers",
help_text="[Begin speech] starts the countdown, [End speech] stops the countdown.",
weight=228,
group="Agenda",
subgroup="List of speakers",
)
yield ConfigVariable(
name="agenda_enable_point_of_order_speakers",
default_value=False,
input_type="boolean",
label="Enable points of order",
weight=229,
group="Agenda",
subgroup="List of speakers",
)
yield ConfigVariable(
name="agenda_point_of_order_groups",
default_value=[],
input_type="groups",
label="Default groups with rights to request points of order",
weight=229,
group="Agenda",
subgroup="List of speakers",
)
yield ConfigVariable(
name="agenda_point_of_orders_project",
default_value=False,
input_type="boolean",
label="Project point of order as message to projector",
weight=229,
group="Agenda",
subgroup="List of speakers",
)
yield ConfigVariable(
name="agenda_point_of_orders",
default_value='["Sofortige Abstimmung","Begrenzung der Redezeit","..."]',
label="Point of order requests in the format of a JSON-Array",
weight=229,
group="Agenda",
subgroup="List of speakers",
)
yield ConfigVariable(
name="agenda_point_of_order_projectors",
default_value="[1]",
label="Projectors to project point of orders to in the format of a JSON-Array",
weight=229,
group="Agenda",
subgroup="List of speakers",
)
yield ConfigVariable(
name="agenda_hide_amount_of_speakers",
default_value=False,
input_type="boolean",
label="Hide the amount of speakers in subtitle of list of speakers slide",
weight=230,
group="Agenda",
subgroup="List of speakers",
)
yield ConfigVariable(
name="agenda_present_speakers_only",
default_value=False,
input_type="boolean",
label="Only present participants can be added to the list of speakers",
weight=232,
group="Agenda",
subgroup="List of speakers",
)
yield ConfigVariable(
name="agenda_show_first_contribution",
default_value=False,
input_type="boolean",
label="Show hint »first speech« in the list of speakers management view",
weight=234,
group="Agenda",
subgroup="List of speakers",
)
| 29.263941
| 98
| 0.603786
|
from django.core.validators import MaxLengthValidator, MinValueValidator
from openslides.core.config import ConfigVariable
def get_config_variables():
yield ConfigVariable(
name="agenda_start_event_date_time",
default_value=None,
input_type="datetimepicker",
label="Begin of event",
help_text="Input format: DD.MM.YYYY HH:MM",
weight=200,
group="Agenda",
)
yield ConfigVariable(
name="agenda_show_subtitle",
default_value=False,
input_type="boolean",
label="Show subtitles in the agenda",
weight=201,
group="Agenda",
)
yield ConfigVariable(
name="agenda_enable_numbering",
label="Enable numbering for agenda items",
input_type="boolean",
default_value=True,
weight=205,
group="Agenda",
subgroup="Numbering",
)
yield ConfigVariable(
name="agenda_number_prefix",
default_value="",
label="Numbering prefix for agenda items",
help_text="This prefix will be set if you run the automatic agenda numbering.",
weight=206,
group="Agenda",
subgroup="Numbering",
validators=(MaxLengthValidator(20),),
)
yield ConfigVariable(
name="agenda_numeral_system",
default_value="arabic",
input_type="choice",
label="Numeral system for agenda items",
choices=(
{"value": "arabic", "display_name": "Arabic"},
{"value": "roman", "display_name": "Roman"},
),
weight=207,
group="Agenda",
subgroup="Numbering",
)
yield ConfigVariable(
name="agenda_item_creation",
label="Add to agenda",
default_value="default_yes",
input_type="choice",
choices=(
{"value": "always", "display_name": "Always"},
{"value": "never", "display_name": "Never"},
{"value": "default_yes", "display_name": "Ask, default yes"},
{"value": "default_no", "display_name": "Ask, default no"},
),
weight=210,
group="Agenda",
subgroup="Visibility",
)
yield ConfigVariable(
name="agenda_new_items_default_visibility",
default_value="2",
input_type="choice",
choices=(
{"value": "1", "display_name": "Public item"},
{"value": "2", "display_name": "Internal item"},
{"value": "3", "display_name": "Hidden item"},
),
label="Default visibility for new agenda items (except topics)",
weight=211,
group="Agenda",
subgroup="Visibility",
)
yield ConfigVariable(
name="agenda_hide_internal_items_on_projector",
default_value=True,
input_type="boolean",
label="Hide internal items when projecting subitems",
weight=212,
group="Agenda",
subgroup="Visibility",
)
yield ConfigVariable(
name="agenda_show_last_speakers",
default_value=0,
input_type="integer",
label="Number of last speakers to be shown on the projector",
weight=220,
group="Agenda",
subgroup="List of speakers",
validators=(MinValueValidator(0),),
)
yield ConfigVariable(
name="agenda_show_next_speakers",
default_value=-1,
input_type="integer",
label="Number of the next speakers to be shown on the projector",
help_text="Enter number of the next shown speakers. Choose -1 to show all next speakers.",
weight=222,
group="Agenda",
subgroup="List of speakers",
validators=(MinValueValidator(-1),),
)
yield ConfigVariable(
name="agenda_countdown_warning_time",
default_value=0,
input_type="integer",
label="Show orange countdown in the last x seconds of speaking time",
help_text="Enter duration in seconds. Choose 0 to disable warning color.",
weight=224,
group="Agenda",
subgroup="List of speakers",
validators=(MinValueValidator(0),),
)
yield ConfigVariable(
name="projector_default_countdown",
default_value=60,
input_type="integer",
label="Predefined seconds of new countdowns",
weight=226,
group="Agenda",
subgroup="List of speakers",
)
yield ConfigVariable(
name="agenda_enable_global_list_of_speakers",
default_value=False,
input_type="boolean",
label="Enable global list of speakers",
weight=227,
group="Agenda",
subgroup="List of speakers",
)
yield ConfigVariable(
name="agenda_global_list_of_speakers",
default_value=1,
input_type="integer",
label="ID of globally used list of speakers",
weight=227,
group="Agenda",
subgroup="List of speakers",
)
yield ConfigVariable(
name="agenda_couple_countdown_and_speakers",
default_value=True,
input_type="boolean",
label="Couple countdown with the list of speakers",
help_text="[Begin speech] starts the countdown, [End speech] stops the countdown.",
weight=228,
group="Agenda",
subgroup="List of speakers",
)
yield ConfigVariable(
name="agenda_enable_point_of_order_speakers",
default_value=False,
input_type="boolean",
label="Enable points of order",
weight=229,
group="Agenda",
subgroup="List of speakers",
)
yield ConfigVariable(
name="agenda_point_of_order_groups",
default_value=[],
input_type="groups",
label="Default groups with rights to request points of order",
weight=229,
group="Agenda",
subgroup="List of speakers",
)
yield ConfigVariable(
name="agenda_point_of_orders_project",
default_value=False,
input_type="boolean",
label="Project point of order as message to projector",
weight=229,
group="Agenda",
subgroup="List of speakers",
)
yield ConfigVariable(
name="agenda_point_of_orders",
default_value='["Sofortige Abstimmung","Begrenzung der Redezeit","..."]',
label="Point of order requests in the format of a JSON-Array",
weight=229,
group="Agenda",
subgroup="List of speakers",
)
yield ConfigVariable(
name="agenda_point_of_order_projectors",
default_value="[1]",
label="Projectors to project point of orders to in the format of a JSON-Array",
weight=229,
group="Agenda",
subgroup="List of speakers",
)
yield ConfigVariable(
name="agenda_hide_amount_of_speakers",
default_value=False,
input_type="boolean",
label="Hide the amount of speakers in subtitle of list of speakers slide",
weight=230,
group="Agenda",
subgroup="List of speakers",
)
yield ConfigVariable(
name="agenda_present_speakers_only",
default_value=False,
input_type="boolean",
label="Only present participants can be added to the list of speakers",
weight=232,
group="Agenda",
subgroup="List of speakers",
)
yield ConfigVariable(
name="agenda_show_first_contribution",
default_value=False,
input_type="boolean",
label="Show hint »first speech« in the list of speakers management view",
weight=234,
group="Agenda",
subgroup="List of speakers",
)
| true
| true
|
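The generator in the record above is evaluated once during app loading. The sketch below shows one way such consumption could look; it is illustrative only. The import path matches the record's file, and ConfigVariable is assumed to keep its constructor arguments (name, default_value, ...) available as attributes.

from openslides.agenda.config_variables import get_config_variables

# Index every agenda config variable by name, as an app-loading step might.
config_by_name = {variable.name: variable for variable in get_config_variables()}

# Hypothetical lookup against the defaults defined above.
numbering = config_by_name["agenda_enable_numbering"]
print(numbering.default_value)  # True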
1c48ae2800ba9d5636b79fa22e0a6f1b12416057
| 13,480
|
py
|
Python
|
docassemble_webapp/docassemble/webapp/file_access.py
|
silexsistemas/docassemble
|
ffde6eef64e09bfbdab599f06f8dd6499ec44f09
|
[
"MIT"
] | 1
|
2019-12-29T22:33:48.000Z
|
2019-12-29T22:33:48.000Z
|
docassemble_webapp/docassemble/webapp/file_access.py
|
silexsistemas/docassemble
|
ffde6eef64e09bfbdab599f06f8dd6499ec44f09
|
[
"MIT"
] | 20
|
2020-03-24T18:20:54.000Z
|
2022-03-12T00:19:20.000Z
|
docassemble_webapp/docassemble/webapp/file_access.py
|
silexsistemas/docassemble
|
ffde6eef64e09bfbdab599f06f8dd6499ec44f09
|
[
"MIT"
] | null | null | null |
from docassemble.base.logger import logmessage
import re
import os
import PyPDF2
import tempfile
import importlib
try:
from urllib.request import urlopen, Request
except ImportError:
from urllib2 import urlopen, Request
import mimetypes
from PIL import Image
import xml.etree.ElementTree as ET
import docassemble.base.functions
from docassemble.webapp.core.models import Uploads
from docassemble.webapp.files import SavedFile, get_ext_and_mimetype
from flask_login import current_user
from sqlalchemy import or_, and_
import docassemble.base.config
from io import open
from six import text_type
import sys
from docassemble.base.generate_key import random_lower_string
import docassemble.webapp.cloud
cloud = docassemble.webapp.cloud.get_cloud()
def url_if_exists(file_reference, **kwargs):
parts = file_reference.split(":")
from flask import url_for
base_url = url_for('rootindex', _external=kwargs.get('_external', False)).rstrip('/')
if len(parts) == 2:
if cloud and docassemble.base.config.daconfig.get('use cloud urls', False):
m = re.search(r'^docassemble.playground([0-9]+)$', parts[0])
if m:
user_id = m.group(1)
if re.search(r'^data/sources/', parts[1]):
section = 'playgroundsources'
filename = re.sub(r'^data/sources/', '', parts[1])
filename = re.sub(r'[^A-Za-z0-9\-\_\. ]', '', filename)
key = str(section) + '/' + str(user_id) + '/' + filename
cloud_key = cloud.get_key(key)
if cloud_key.does_exist:
if not kwargs.get('inline', False):
return cloud_key.generate_url(3600, display_filename=filename)
else:
return cloud_key.generate_url(3600)
return None
section = 'playgroundstatic'
filename = re.sub(r'^data/static/', '', parts[1])
version_parameter = get_version_parameter(parts[0])
return base_url + '/packagestatic/' + parts[0] + '/' + re.sub(r'^data/static/', '', parts[1]) + version_parameter
the_path = docassemble.base.functions.static_filename_path(file_reference)
if the_path is None or not os.path.isfile(the_path):
return None
version_parameter = get_version_parameter(parts[0])
return base_url + '/packagestatic/' + parts[0] + '/' + re.sub(r'^data/static/', '', parts[1]) + version_parameter
return None
def get_version_parameter(package):
try:
return '?v=' + text_type(importlib.import_module(package).__version__)
except:
if package.startswith('docassemble.playground'):
return '?v=' + random_lower_string(6)
return ''
def reference_exists(file_reference):
if cloud:
parts = file_reference.split(":")
if len(parts) == 2:
m = re.search(r'^docassemble.playground([0-9]+)$', parts[0])
if m:
user_id = m.group(1)
if re.search(r'^data/sources/', parts[1]):
section = 'playgroundsources'
filename = re.sub(r'^data/sources/', '', parts[1])
else:
section = 'playgroundstatic'
filename = re.sub(r'^data/static/', '', parts[1])
filename = re.sub(r'[^A-Za-z0-9\-\_\. ]', '', filename)
key = str(section) + '/' + str(user_id) + '/' + filename
cloud_key = cloud.get_key(key)
if cloud_key.does_exist:
return True
return False
the_path = docassemble.base.functions.static_filename_path(file_reference)
if the_path is None or not os.path.isfile(the_path):
#logmessage("Returning false")
return False
#logmessage("Returning true because path is " + str(the_path))
return True
def get_info_from_file_reference(file_reference, **kwargs):
#sys.stderr.write('file reference is ' + str(file_reference) + "\n")
#logmessage('file reference is ' + str(file_reference))
if 'convert' in kwargs:
convert = kwargs['convert']
else:
convert = None
if 'privileged' in kwargs:
privileged = kwargs['privileged']
else:
privileged = None
has_info = False
if re.search(r'^[0-9]+$', str(file_reference)):
if 'uids' in kwargs:
uids = kwargs['uids']
else:
uids = None
if uids is None or len(uids) == 0:
new_uid = docassemble.base.functions.get_uid()
if new_uid is not None:
uids = [new_uid]
else:
uids = []
if 'filename' in kwargs:
result = get_info_from_file_number(int(file_reference), privileged=privileged, filename=kwargs['filename'], uids=uids)
else:
result = get_info_from_file_number(int(file_reference), privileged=privileged, uids=uids)
if 'fullpath' not in result:
result['fullpath'] = None
has_info = True
elif re.search(r'^https?://', str(file_reference)):
#logmessage("get_info_from_file_reference: " + str(file_reference) + " is a URL")
possible_filename = re.sub(r'.*/', '', file_reference)
if possible_filename == '':
possible_filename = 'index.html'
if re.search(r'\.', possible_filename):
(possible_ext, possible_mimetype) = get_ext_and_mimetype(possible_filename)
possible_ext = re.sub(r'[^A-Za-z0-9\.].*', '', possible_ext)
#logmessage("get_info_from_file_reference: starting with " + str(possible_ext) + " and " + str(possible_mimetype))
else:
possible_ext = 'txt'
possible_mimetype = 'text/plain'
result = dict()
temp_file = tempfile.NamedTemporaryFile(prefix="datemp", suffix='.' + possible_ext, delete=False)
req = Request(file_reference, headers={'User-Agent' : docassemble.base.config.daconfig.get('user agent', 'curl/7.64.0')})
response = urlopen(req)
temp_file.write(response.read())
#(local_filename, headers) = urllib.urlretrieve(file_reference)
result['fullpath'] = temp_file.name
try:
#result['mimetype'] = headers.gettype()
result['mimetype'] = response.headers['Content-Type']
#logmessage("get_info_from_file_reference: mimetype is " + str(result['mimetype']))
except Exception as errmess:
logmessage("get_info_from_file_reference: could not get mimetype from headers")
result['mimetype'] = possible_mimetype
result['extension'] = possible_ext
if 'extension' not in result:
#logmessage("get_info_from_file_reference: extension not in result")
result['extension'] = re.sub(r'^\.', '', mimetypes.guess_extension(result['mimetype']))
#logmessage("get_info_from_file_reference: extension is " + str(result['extension']))
if re.search(r'\.', possible_filename):
result['filename'] = possible_filename
else:
result['filename'] = possible_filename + '.' + result['extension']
path_parts = os.path.splitext(result['fullpath'])
result['path'] = path_parts[0]
has_info = True
#logmessage("get_info_from_file_reference: downloaded to " + str(result['fullpath']))
else:
#logmessage(str(file_reference) + " is not a URL")
result = dict()
question = kwargs.get('question', None)
folder = kwargs.get('folder', None)
the_package = None
parts = file_reference.split(':')
if len(parts) == 1:
the_package = None
if question is not None:
the_package = question.from_source.package
if the_package is None:
the_package = docassemble.base.functions.get_current_package()
if folder is None:
m = re.search(r'^data/(templates|sources|static)/(.*)', file_reference)
if m:
folder = m.group(1)
file_reference = m.group(2)
if folder is not None and not re.search(r'/', file_reference):
file_reference = 'data/' + str(folder) + '/' + file_reference
if the_package is not None:
#logmessage("package is " + str(the_package))
file_reference = the_package + ':' + file_reference
else:
#logmessage("package was null")
file_reference = 'docassemble.base:' + file_reference
if the_package is not None:
result['package'] = the_package
elif len(parts) == 2:
result['package'] = parts[0]
result['fullpath'] = docassemble.base.functions.static_filename_path(file_reference)
#logmessage("path is " + str(result['fullpath']))
if result['fullpath'] is not None: #os.path.isfile(result['fullpath'])
if not has_info:
result['filename'] = os.path.basename(result['fullpath'])
ext_type, result['mimetype'] = get_ext_and_mimetype(result['fullpath'])
path_parts = os.path.splitext(result['fullpath'])
result['path'] = path_parts[0]
result['extension'] = path_parts[1].lower()
result['extension'] = re.sub(r'\.', '', result['extension'])
#logmessage("Extension is " + result['extension'])
if convert is not None and result['extension'] in convert:
#logmessage("Converting...")
if os.path.isfile(result['path'] + '.' + convert[result['extension']]):
#logmessage("Found conversion file ")
result['extension'] = convert[result['extension']]
result['fullpath'] = result['path'] + '.' + result['extension']
ext_type, result['mimetype'] = get_ext_and_mimetype(result['fullpath'])
else:
logmessage("Did not find file " + result['path'] + '.' + convert[result['extension']])
return dict()
#logmessage("Full path is " + result['fullpath'])
if os.path.isfile(result['fullpath']) and not has_info:
add_info_about_file(result['fullpath'], result['path'], result)
else:
logmessage("File reference " + str(file_reference) + " DID NOT EXIST.")
return(result)
def add_info_about_file(filename, basename, result):
if result['extension'] == 'pdf':
try:
reader = PyPDF2.PdfFileReader(open(filename, 'rb'))
result['encrypted'] = reader.isEncrypted
result['pages'] = reader.getNumPages()
except:
result['pages'] = 1
elif os.path.isfile(basename + '.pdf'):
try:
reader = PyPDF2.PdfFileReader(open(basename + '.pdf', 'rb'))
result['encrypted'] = reader.isEncrypted
result['pages'] = reader.getNumPages()
except:
result['pages'] = 1
elif result['extension'] in ['png', 'jpg', 'gif']:
im = Image.open(filename)
result['width'], result['height'] = im.size
elif result['extension'] == 'svg':
try:
tree = ET.parse(filename)
root = tree.getroot()
viewBox = root.attrib.get('viewBox', None)
if viewBox is not None:
dimen = viewBox.split(' ')
if len(dimen) == 4:
result['width'] = float(dimen[2]) - float(dimen[0])
result['height'] = float(dimen[3]) - float(dimen[1])
except:
raise Exception("problem reading " + str(filename))
logmessage('add_info_about_file: could not read ' + str(filename))
return
def get_info_from_file_number(file_number, privileged=False, filename=None, uids=None):
if current_user and current_user.is_authenticated and current_user.has_role('admin', 'developer', 'advocate', 'trainer'):
privileged = True
elif uids is None or len(uids) == 0:
new_uid = docassemble.base.functions.get_uid()
if new_uid is not None:
uids = [new_uid]
else:
uids = []
result = dict()
upload = Uploads.query.filter_by(indexno=file_number).first()
if not privileged and upload is not None and upload.private and upload.key not in uids:
upload = None
if upload:
if filename is None:
result['filename'] = upload.filename
else:
result['filename'] = filename
result['extension'], result['mimetype'] = get_ext_and_mimetype(result['filename'])
sf = SavedFile(file_number, extension=result['extension'], fix=True)
result['path'] = sf.path
result['fullpath'] = result['path'] + '.' + result['extension']
result['private'] = upload.private
result['persistent'] = upload.persistent
#logmessage("fullpath is " + str(result['fullpath']))
if 'path' not in result:
logmessage("get_info_from_file_number: path is not in result for " + str(file_number))
return result
final_filename = result['path'] + '.' + result['extension']
if os.path.isfile(final_filename):
add_info_about_file(final_filename, result['path'], result)
# else:
# logmessage("Filename " + final_filename + "did not exist.")
return(result)
| 46.643599
| 130
| 0.594139
|
from docassemble.base.logger import logmessage
import re
import os
import PyPDF2
import tempfile
import importlib
try:
from urllib.request import urlopen, Request
except ImportError:
from urllib2 import urlopen, Request
import mimetypes
from PIL import Image
import xml.etree.ElementTree as ET
import docassemble.base.functions
from docassemble.webapp.core.models import Uploads
from docassemble.webapp.files import SavedFile, get_ext_and_mimetype
from flask_login import current_user
from sqlalchemy import or_, and_
import docassemble.base.config
from io import open
from six import text_type
import sys
from docassemble.base.generate_key import random_lower_string
import docassemble.webapp.cloud
cloud = docassemble.webapp.cloud.get_cloud()
def url_if_exists(file_reference, **kwargs):
parts = file_reference.split(":")
from flask import url_for
base_url = url_for('rootindex', _external=kwargs.get('_external', False)).rstrip('/')
if len(parts) == 2:
if cloud and docassemble.base.config.daconfig.get('use cloud urls', False):
m = re.search(r'^docassemble.playground([0-9]+)$', parts[0])
if m:
user_id = m.group(1)
if re.search(r'^data/sources/', parts[1]):
section = 'playgroundsources'
filename = re.sub(r'^data/sources/', '', parts[1])
filename = re.sub(r'[^A-Za-z0-9\-\_\. ]', '', filename)
key = str(section) + '/' + str(user_id) + '/' + filename
cloud_key = cloud.get_key(key)
if cloud_key.does_exist:
if not kwargs.get('inline', False):
return cloud_key.generate_url(3600, display_filename=filename)
else:
return cloud_key.generate_url(3600)
return None
section = 'playgroundstatic'
filename = re.sub(r'^data/static/', '', parts[1])
version_parameter = get_version_parameter(parts[0])
return base_url + '/packagestatic/' + parts[0] + '/' + re.sub(r'^data/static/', '', parts[1]) + version_parameter
the_path = docassemble.base.functions.static_filename_path(file_reference)
if the_path is None or not os.path.isfile(the_path):
return None
version_parameter = get_version_parameter(parts[0])
return base_url + '/packagestatic/' + parts[0] + '/' + re.sub(r'^data/static/', '', parts[1]) + version_parameter
return None
def get_version_parameter(package):
try:
return '?v=' + text_type(importlib.import_module(package).__version__)
except:
if package.startswith('docassemble.playground'):
return '?v=' + random_lower_string(6)
return ''
def reference_exists(file_reference):
if cloud:
parts = file_reference.split(":")
if len(parts) == 2:
m = re.search(r'^docassemble.playground([0-9]+)$', parts[0])
if m:
user_id = m.group(1)
if re.search(r'^data/sources/', parts[1]):
section = 'playgroundsources'
filename = re.sub(r'^data/sources/', '', parts[1])
else:
section = 'playgroundstatic'
filename = re.sub(r'^data/static/', '', parts[1])
filename = re.sub(r'[^A-Za-z0-9\-\_\. ]', '', filename)
key = str(section) + '/' + str(user_id) + '/' + filename
cloud_key = cloud.get_key(key)
if cloud_key.does_exist:
return True
return False
the_path = docassemble.base.functions.static_filename_path(file_reference)
if the_path is None or not os.path.isfile(the_path):
return False
return True
def get_info_from_file_reference(file_reference, **kwargs):
if 'convert' in kwargs:
convert = kwargs['convert']
else:
convert = None
if 'privileged' in kwargs:
privileged = kwargs['privileged']
else:
privileged = None
has_info = False
if re.search(r'^[0-9]+$', str(file_reference)):
if 'uids' in kwargs:
uids = kwargs['uids']
else:
uids = None
if uids is None or len(uids) == 0:
new_uid = docassemble.base.functions.get_uid()
if new_uid is not None:
uids = [new_uid]
else:
uids = []
if 'filename' in kwargs:
result = get_info_from_file_number(int(file_reference), privileged=privileged, filename=kwargs['filename'], uids=uids)
else:
result = get_info_from_file_number(int(file_reference), privileged=privileged, uids=uids)
if 'fullpath' not in result:
result['fullpath'] = None
has_info = True
elif re.search(r'^https?://', str(file_reference)):
possible_filename = re.sub(r'.*/', '', file_reference)
if possible_filename == '':
possible_filename = 'index.html'
if re.search(r'\.', possible_filename):
(possible_ext, possible_mimetype) = get_ext_and_mimetype(possible_filename)
possible_ext = re.sub(r'[^A-Za-z0-9\.].*', '', possible_ext)
else:
possible_ext = 'txt'
possible_mimetype = 'text/plain'
result = dict()
temp_file = tempfile.NamedTemporaryFile(prefix="datemp", suffix='.' + possible_ext, delete=False)
req = Request(file_reference, headers={'User-Agent' : docassemble.base.config.daconfig.get('user agent', 'curl/7.64.0')})
response = urlopen(req)
temp_file.write(response.read())
result['fullpath'] = temp_file.name
try:
result['mimetype'] = response.headers['Content-Type']
except Exception as errmess:
logmessage("get_info_from_file_reference: could not get mimetype from headers")
result['mimetype'] = possible_mimetype
result['extension'] = possible_ext
if 'extension' not in result:
result['extension'] = re.sub(r'^\.', '', mimetypes.guess_extension(result['mimetype']))
if re.search(r'\.', possible_filename):
result['filename'] = possible_filename
else:
result['filename'] = possible_filename + '.' + result['extension']
path_parts = os.path.splitext(result['fullpath'])
result['path'] = path_parts[0]
has_info = True
else:
result = dict()
question = kwargs.get('question', None)
folder = kwargs.get('folder', None)
the_package = None
parts = file_reference.split(':')
if len(parts) == 1:
the_package = None
if question is not None:
the_package = question.from_source.package
if the_package is None:
the_package = docassemble.base.functions.get_current_package()
if folder is None:
m = re.search(r'^data/(templates|sources|static)/(.*)', file_reference)
if m:
folder = m.group(1)
file_reference = m.group(2)
if folder is not None and not re.search(r'/', file_reference):
file_reference = 'data/' + str(folder) + '/' + file_reference
if the_package is not None:
file_reference = the_package + ':' + file_reference
else:
file_reference = 'docassemble.base:' + file_reference
if the_package is not None:
result['package'] = the_package
elif len(parts) == 2:
result['package'] = parts[0]
result['fullpath'] = docassemble.base.functions.static_filename_path(file_reference)
if result['fullpath'] is not None:
if not has_info:
result['filename'] = os.path.basename(result['fullpath'])
ext_type, result['mimetype'] = get_ext_and_mimetype(result['fullpath'])
path_parts = os.path.splitext(result['fullpath'])
result['path'] = path_parts[0]
result['extension'] = path_parts[1].lower()
result['extension'] = re.sub(r'\.', '', result['extension'])
if convert is not None and result['extension'] in convert:
if os.path.isfile(result['path'] + '.' + convert[result['extension']]):
result['extension'] = convert[result['extension']]
result['fullpath'] = result['path'] + '.' + result['extension']
ext_type, result['mimetype'] = get_ext_and_mimetype(result['fullpath'])
else:
logmessage("Did not find file " + result['path'] + '.' + convert[result['extension']])
return dict()
if os.path.isfile(result['fullpath']) and not has_info:
add_info_about_file(result['fullpath'], result['path'], result)
else:
logmessage("File reference " + str(file_reference) + " DID NOT EXIST.")
return(result)
def add_info_about_file(filename, basename, result):
if result['extension'] == 'pdf':
try:
reader = PyPDF2.PdfFileReader(open(filename, 'rb'))
result['encrypted'] = reader.isEncrypted
result['pages'] = reader.getNumPages()
except:
result['pages'] = 1
elif os.path.isfile(basename + '.pdf'):
try:
reader = PyPDF2.PdfFileReader(open(basename + '.pdf', 'rb'))
result['encrypted'] = reader.isEncrypted
result['pages'] = reader.getNumPages()
except:
result['pages'] = 1
elif result['extension'] in ['png', 'jpg', 'gif']:
im = Image.open(filename)
result['width'], result['height'] = im.size
elif result['extension'] == 'svg':
try:
tree = ET.parse(filename)
root = tree.getroot()
viewBox = root.attrib.get('viewBox', None)
if viewBox is not None:
dimen = viewBox.split(' ')
if len(dimen) == 4:
result['width'] = float(dimen[2]) - float(dimen[0])
result['height'] = float(dimen[3]) - float(dimen[1])
except:
raise Exception("problem reading " + str(filename))
logmessage('add_info_about_file: could not read ' + str(filename))
return
def get_info_from_file_number(file_number, privileged=False, filename=None, uids=None):
if current_user and current_user.is_authenticated and current_user.has_role('admin', 'developer', 'advocate', 'trainer'):
privileged = True
elif uids is None or len(uids) == 0:
new_uid = docassemble.base.functions.get_uid()
if new_uid is not None:
uids = [new_uid]
else:
uids = []
result = dict()
upload = Uploads.query.filter_by(indexno=file_number).first()
if not privileged and upload is not None and upload.private and upload.key not in uids:
upload = None
if upload:
if filename is None:
result['filename'] = upload.filename
else:
result['filename'] = filename
result['extension'], result['mimetype'] = get_ext_and_mimetype(result['filename'])
sf = SavedFile(file_number, extension=result['extension'], fix=True)
result['path'] = sf.path
result['fullpath'] = result['path'] + '.' + result['extension']
result['private'] = upload.private
result['persistent'] = upload.persistent
if 'path' not in result:
logmessage("get_info_from_file_number: path is not in result for " + str(file_number))
return result
final_filename = result['path'] + '.' + result['extension']
if os.path.isfile(final_filename):
add_info_about_file(final_filename, result['path'], result)
return(result)
| true
| true
|
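get_info_from_file_reference in the record above dispatches on three reference shapes: a bare numeric file ID, an http(s) URL, and a package-relative path. A self-contained sketch of just that dispatch, mirroring its regexes with no docassemble imports:

import re

def classify_file_reference(file_reference):
    # Mirrors the branch conditions in get_info_from_file_reference above.
    if re.search(r'^[0-9]+$', str(file_reference)):
        return 'file number'
    if re.search(r'^https?://', str(file_reference)):
        return 'url'
    return 'package path'

assert classify_file_reference(42) == 'file number'
assert classify_file_reference('https://example.com/cat.png') == 'url'
assert classify_file_reference('docassemble.base:data/static/logo.png') == 'package path'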
1c48aff799d42ca79db06796ca34d6954eca5df8
| 4,691
|
py
|
Python
|
tutorials/frontend/from_keras.py
|
Exhorder6/tvm
|
7e3f068373937c0ae08d58f67b84030a027db1c9
|
[
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | 90
|
2019-01-26T00:38:49.000Z
|
2022-03-11T23:12:34.000Z
|
tutorials/frontend/from_keras.py
|
Exhorder6/tvm
|
7e3f068373937c0ae08d58f67b84030a027db1c9
|
[
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | 91
|
2019-02-27T00:17:01.000Z
|
2022-02-21T18:08:21.000Z
|
tutorials/frontend/from_keras.py
|
Exhorder6/tvm
|
7e3f068373937c0ae08d58f67b84030a027db1c9
|
[
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | 41
|
2019-01-28T14:37:03.000Z
|
2022-03-31T03:58:57.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Compile Keras Models
=====================
**Author**: `Yuwei Hu <https://Huyuwei.github.io/>`_
This article is an introductory tutorial to deploy keras models with Relay.
To begin with, keras must be installed.
Tensorflow is also required since it's used as the default backend of keras.
A quick solution is to install via pip
.. code-block:: bash
pip install -U keras --user
pip install -U tensorflow --user
or refer to the official installation guide:
https://keras.io/#installation
"""
import tvm
from tvm import te
import tvm.relay as relay
from tvm.contrib.download import download_testdata
import keras
import numpy as np
######################################################################
# Load pretrained keras model
# ----------------------------
# We load a pretrained resnet-50 classification model provided by keras.
if tuple(int(p) for p in keras.__version__.split(".")[:2]) < (2, 4):  # compare versions numerically, not lexically
weights_url = "".join(
[
"https://github.com/fchollet/deep-learning-models/releases/",
"download/v0.2/resnet50_weights_tf_dim_ordering_tf_kernels.h5",
]
)
weights_file = "resnet50_keras_old.h5"
else:
weights_url = "".join(
[
" https://storage.googleapis.com/tensorflow/keras-applications/",
"resnet/resnet50_weights_tf_dim_ordering_tf_kernels.h5",
]
)
weights_file = "resnet50_keras_new.h5"
weights_path = download_testdata(weights_url, weights_file, module="keras")
keras_resnet50 = keras.applications.resnet50.ResNet50(
include_top=True, weights=None, input_shape=(224, 224, 3), classes=1000
)
keras_resnet50.load_weights(weights_path)
######################################################################
# Load a test image
# ------------------
# A single cat dominates the examples!
from PIL import Image
from matplotlib import pyplot as plt
from keras.applications.resnet50 import preprocess_input
img_url = "https://github.com/dmlc/mxnet.js/blob/main/data/cat.png?raw=true"
img_path = download_testdata(img_url, "cat.png", module="data")
img = Image.open(img_path).resize((224, 224))
plt.imshow(img)
plt.show()
# input preprocess
data = np.array(img)[np.newaxis, :].astype("float32")
data = preprocess_input(data).transpose([0, 3, 1, 2])
print("input_1", data.shape)
######################################################################
# Compile the model with Relay
# ----------------------------
# convert the keras model (NHWC layout) to Relay format (NCHW layout).
shape_dict = {"input_1": data.shape}
mod, params = relay.frontend.from_keras(keras_resnet50, shape_dict)
# compile the model
target = "cuda"
dev = tvm.cuda(0)
with tvm.transform.PassContext(opt_level=3):
executor = relay.build_module.create_executor("graph", mod, dev, target)
######################################################################
# Execute on TVM
# ---------------
dtype = "float32"
tvm_out = executor.evaluate()(tvm.nd.array(data.astype(dtype)), **params)
top1_tvm = np.argmax(tvm_out.numpy()[0])
#####################################################################
# Look up synset name
# -------------------
# Look up prediction top 1 index in 1000 class synset.
synset_url = "".join(
[
"https://gist.githubusercontent.com/zhreshold/",
"4d0b62f3d01426887599d4f7ede23ee5/raw/",
"596b27d23537e5a1b5751d2b0481ef172f58b539/",
"imagenet1000_clsid_to_human.txt",
]
)
synset_name = "imagenet1000_clsid_to_human.txt"
synset_path = download_testdata(synset_url, synset_name, module="data")
with open(synset_path) as f:
synset = eval(f.read())
print("Relay top-1 id: {}, class name: {}".format(top1_tvm, synset[top1_tvm]))
# confirm correctness with keras output
keras_out = keras_resnet50.predict(data.transpose([0, 2, 3, 1]))
top1_keras = np.argmax(keras_out)
print("Keras top-1 id: {}, class name: {}".format(top1_keras, synset[top1_keras]))
| 35.80916
| 82
| 0.655297
|
import tvm
from tvm import te
import tvm.relay as relay
from tvm.contrib.download import download_testdata
import keras
import numpy as np
| true
| true
|
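The tutorial above hardcodes target = "cuda". A minimal variation for machines without a CUDA GPU, assuming mod, params and data were produced exactly as in the tutorial:

target = "llvm"  # LLVM CPU backend instead of CUDA
dev = tvm.cpu(0)
with tvm.transform.PassContext(opt_level=3):
    executor = relay.build_module.create_executor("graph", mod, dev, target)
tvm_out = executor.evaluate()(tvm.nd.array(data.astype("float32")), **params)
print("top-1 id:", np.argmax(tvm_out.numpy()[0]))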
1c48b01cc0ac5761301f37029c367398c30b48c0
| 1,460
|
py
|
Python
|
Unit_C/slam_06_c_multiply_distribution_question.py
|
tuongngoc/slam-tutorial-code
|
14238852d51df9059ce55ffed92d95b286aec805
|
[
"MIT"
] | 7
|
2017-06-03T15:40:20.000Z
|
2021-05-17T16:05:58.000Z
|
Unit_C/slam_06_c_multiply_distribution_question.py
|
tuongngoc/slam-tutorial-code
|
14238852d51df9059ce55ffed92d95b286aec805
|
[
"MIT"
] | null | null | null |
Unit_C/slam_06_c_multiply_distribution_question.py
|
tuongngoc/slam-tutorial-code
|
14238852d51df9059ce55ffed92d95b286aec805
|
[
"MIT"
] | 7
|
2017-06-03T15:40:26.000Z
|
2020-08-12T12:11:41.000Z
|
# Multiply a distribution by another distribution.
# 06_c_multiply_distribution
# Claus Brenner, 26 NOV 2012
from pylab import plot, show, grid
from distribution import *
def multiply(a, b):
"""Multiply two distributions and return the resulting distribution."""
new_values = [a.value(b.offset+i)*b_val for i, b_val in enumerate(b.values)]
d = Distribution(b.offset, new_values)
d.normalize()
return d
if __name__ == '__main__':
arena = (0,1000)
# Here is our assumed position. Plotted in blue.
position_value = 400
position_error = 100
position = Distribution.triangle(position_value, position_error)
plot(position.plotlists(*arena)[0], position.plotlists(*arena)[1],
color='b', drawstyle='steps')
# Here is our measurement. Plotted in green.
# That is what we read from the instrument.
measured_value = 400
measurement_error = 200
measurement = Distribution.triangle(measured_value, measurement_error)
plot(measurement.plotlists(*arena)[0], measurement.plotlists(*arena)[1],
color='g', drawstyle='steps')
# Now, we integrate our sensor measurement. Result is plotted in red.
position_after_measurement = multiply(position, measurement)
plot(position_after_measurement.plotlists(*arena)[0],
position_after_measurement.plotlists(*arena)[1],
color='r', drawstyle='steps')
grid(True)
show()
| 35.609756
| 81
| 0.686986
|
from pylab import plot, show, grid
from distribution import *
def multiply(a, b):
new_values = [a.value(b.offset+i)*b_val for i, b_val in enumerate(b.values)]
d = Distribution(b.offset, new_values)
d.normalize()
return d
if __name__ == '__main__':
arena = (0,1000)
position_value = 400
position_error = 100
position = Distribution.triangle(position_value, position_error)
plot(position.plotlists(*arena)[0], position.plotlists(*arena)[1],
color='b', drawstyle='steps')
measured_value = 400
measurement_error = 200
measurement = Distribution.triangle(measured_value, measurement_error)
plot(measurement.plotlists(*arena)[0], measurement.plotlists(*arena)[1],
color='g', drawstyle='steps')
position_after_measurement = multiply(position, measurement)
plot(position_after_measurement.plotlists(*arena)[0],
position_after_measurement.plotlists(*arena)[1],
color='r', drawstyle='steps')
grid(True)
show()
| true
| true
|
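The multiply above is a pointwise product of the two densities followed by normalization (the Bayes measurement update). A self-contained sketch on plain lists, without the tutorial's Distribution class, assuming both lists cover the same aligned support:

def multiply_aligned(prior, likelihood):
    # Pointwise product, then renormalize so the result sums to 1.
    product = [p * l for p, l in zip(prior, likelihood)]
    total = sum(product)
    return [v / total for v in product]

posterior = multiply_aligned([0.1, 0.2, 0.4, 0.2, 0.1],
                             [0.0, 0.1, 0.3, 0.4, 0.2])
print(posterior)  # mass shifts toward where both distributions agree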
1c48b136bbc070205a438fbc04db53f177dfacd2
| 2,578
|
py
|
Python
|
scripts/study_case/ID_22/My_pytorch1.py
|
kzbnb/numerical_bugs
|
bc22e72bcc06df6ce7889a25e0aeed027bde910b
|
[
"Apache-2.0"
] | 8
|
2021-06-30T06:55:14.000Z
|
2022-03-18T01:57:14.000Z
|
scripts/study_case/ID_22/My_pytorch1.py
|
kzbnb/numerical_bugs
|
bc22e72bcc06df6ce7889a25e0aeed027bde910b
|
[
"Apache-2.0"
] | 1
|
2021-06-30T03:08:15.000Z
|
2021-06-30T03:08:15.000Z
|
scripts/study_case/ID_22/My_pytorch1.py
|
kzbnb/numerical_bugs
|
bc22e72bcc06df6ce7889a25e0aeed027bde910b
|
[
"Apache-2.0"
] | 2
|
2021-11-17T11:19:48.000Z
|
2021-11-18T03:05:58.000Z
|
import torch
import torch.nn as nn
import torch.utils.data
from torch.autograd import Variable
from torchvision import datasets, transforms
import torch.optim as optim
import torch.nn.functional as F
import numpy as np
from torchvision.utils import make_grid , save_image
import sys
sys.path.append("/data")
batch_size = 64
train_loader = torch.utils.data.DataLoader(
datasets.MNIST('My_RBM', train=True, download=True,
transform=transforms.Compose([
transforms.ToTensor()])), batch_size=batch_size)
test_loader = torch.utils.data.DataLoader(
datasets.MNIST('My_RBM', train=False, transform=transforms.Compose([
transforms.ToTensor()])), batch_size=batch_size)
class RBM(nn.Module):
def __init__(self, n_vis, n_hid, k):
super(RBM, self).__init__()
self.W = nn.Parameter(torch.randn(n_hid, n_vis) * 1e-2)
self.v_bias = nn.Parameter(torch.zeros(n_vis))
self.h_bias = nn.Parameter(torch.zeros(n_hid))
self.k = k
def vis_hid(self, v):
p_h = F.sigmoid(F.linear(v, self.W, self.h_bias))
sample_h = F.relu(torch.sign(p_h - Variable(torch.rand(p_h.size()))))
return p_h, sample_h
def hid_vis(self, h):
p_v = F.sigmoid(F.linear(h, self.W.t(), self.v_bias))
sample_v = F.relu(torch.sign(p_v - Variable(torch.rand(p_v.size()))))
return p_v, sample_v
def forward(self, v):
h0, h_ = self.vis_hid(v)
for _ in range(self.k):
v0_, v_ = self.hid_vis(h_)
h0_, h_ = self.vis_hid(v_)
return v, v_
def free_energy(self, v):
wx_b = F.linear(v, self.W, self.h_bias)
vbias_term = v.mv(self.v_bias)
hidden_term = wx_b.exp().add(1).log().sum(1)
return (-hidden_term - vbias_term).mean()
rbm = RBM(n_vis=784, n_hid=500, k=1)
train_op = optim.SGD(rbm.parameters(), 0.1)
'''inserted code'''
from scripts.utils.torch_utils import TorchScheduler
scheduler = TorchScheduler(name="git1_rbm")
'''inserted code'''
while True:
loss_ = []
for _, (data, target) in enumerate(train_loader):
sample_data = Variable(data.view(-1, 784)).bernoulli()
v, v1 = rbm(sample_data)
loss = rbm.free_energy(v) - rbm.free_energy(v1)
'''inserted code'''
scheduler.loss_checker(loss)
'''inserted code'''
loss_.append(loss.item())
train_op.zero_grad()
loss.backward()
train_op.step()
'''inserted code'''
scheduler.check_time()
'''inserted code'''
| 31.439024
| 77
| 0.628394
|
import torch
import torch.nn as nn
import torch.utils.data
from torch.autograd import Variable
from torchvision import datasets, transforms
import torch.optim as optim
import torch.nn.functional as F
import numpy as np
from torchvision.utils import make_grid , save_image
import sys
sys.path.append("/data")
batch_size = 64
train_loader = torch.utils.data.DataLoader(
datasets.MNIST('My_RBM', train=True, download=True,
transform=transforms.Compose([
transforms.ToTensor()])), batch_size=batch_size)
test_loader = torch.utils.data.DataLoader(
datasets.MNIST('My_RBM', train=False, transform=transforms.Compose([
transforms.ToTensor()])), batch_size=batch_size)
class RBM(nn.Module):
def __init__(self, n_vis, n_hid, k):
super(RBM, self).__init__()
self.W = nn.Parameter(torch.randn(n_hid, n_vis) * 1e-2)
self.v_bias = nn.Parameter(torch.zeros(n_vis))
self.h_bias = nn.Parameter(torch.zeros(n_hid))
self.k = k
def vis_hid(self, v):
p_h = F.sigmoid(F.linear(v, self.W, self.h_bias))
sample_h = F.relu(torch.sign(p_h - Variable(torch.rand(p_h.size()))))
return p_h, sample_h
def hid_vis(self, h):
p_v = F.sigmoid(F.linear(h, self.W.t(), self.v_bias))
sample_v = F.relu(torch.sign(p_v - Variable(torch.rand(p_v.size()))))
return p_v, sample_v
def forward(self, v):
h0, h_ = self.vis_hid(v)
for _ in range(self.k):
v0_, v_ = self.hid_vis(h_)
h0_, h_ = self.vis_hid(v_)
return v, v_
def free_energy(self, v):
wx_b = F.linear(v, self.W, self.h_bias)
vbias_term = v.mv(self.v_bias)
hidden_term = wx_b.exp().add(1).log().sum(1)
return (-hidden_term - vbias_term).mean()
rbm = RBM(n_vis=784, n_hid=500, k=1)
train_op = optim.SGD(rbm.parameters(), 0.1)
from scripts.utils.torch_utils import TorchScheduler
scheduler = TorchScheduler(name="git1_rbm")
while True:
loss_ = []
for _, (data, target) in enumerate(train_loader):
sample_data = Variable(data.view(-1, 784)).bernoulli()
v, v1 = rbm(sample_data)
loss = rbm.free_energy(v) - rbm.free_energy(v1)
scheduler.loss_checker(loss)
loss_.append(loss.item())
train_op.zero_grad()
loss.backward()
train_op.step()
scheduler.check_time()
| true
| true
|
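free_energy in the record above computes F(v) = -sum_j log(1 + exp(W_j . v + c_j)) - b . v, averaged over the batch. A tiny standalone check of that expression follows. Note that wx_b.exp().add(1).log() can overflow for large pre-activations; torch.nn.functional.softplus(wx_b) is the numerically stable equivalent, which is relevant given this repository's focus on numerical bugs.

import torch
import torch.nn.functional as F

v = torch.tensor([[1.0, 0.0, 1.0]])       # one visible configuration
W = torch.randn(2, 3) * 1e-2              # 2 hidden units, 3 visible units
v_bias = torch.zeros(3)
h_bias = torch.zeros(2)

wx_b = F.linear(v, W, h_bias)             # shape (1, 2)
hidden_term = F.softplus(wx_b).sum(1)     # stable log(1 + exp(.))
vbias_term = v.mv(v_bias)                 # b . v per sample
print((-hidden_term - vbias_term).mean()) # scalar free energy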
1c48b1f7e5f9baf42a7e5d6111b47e524a3faf0a
| 5,056
|
py
|
Python
|
phising/data_type_valid/data_type_valid_train.py
|
sethusaim/Phising-Classification-Azure
|
98f597444be18737f315686ce693f93edaa2b103
|
[
"MIT"
] | null | null | null |
phising/data_type_valid/data_type_valid_train.py
|
sethusaim/Phising-Classification-Azure
|
98f597444be18737f315686ce693f93edaa2b103
|
[
"MIT"
] | null | null | null |
phising/data_type_valid/data_type_valid_train.py
|
sethusaim/Phising-Classification-Azure
|
98f597444be18737f315686ce693f93edaa2b103
|
[
"MIT"
] | null | null | null |
from phising.blob_storage_operations.blob_operations import Blob_Operation
from phising.mongo_db_operations.mongo_operations import MongoDB_Operation
from utils.logger import App_Logger
from utils.read_params import read_params
class DB_Operation_Train:
"""
Description : This class shall be used for handling all the db operations
Version : 1.2
Revisions : moved setup to cloud
"""
def __init__(self):
self.config = read_params()
self.class_name = self.__class__.__name__
self.db_name = self.config["db_log"]["train"]
self.train_data_container = self.config["container"]["phising_train_data"]
self.train_export_csv_file = self.config["export_csv_file"]["train"]
self.good_data_train_dir = self.config["data"]["train"]["good_data_dir"]
self.input_files_container = self.config["container"]["input_files"]
self.train_db_insert_log = self.config["train_db_log"]["db_insert"]
self.train_export_csv_log = self.config["train_db_log"]["export_csv"]
self.blob = Blob_Operation()
self.db_op = MongoDB_Operation()
self.log_writer = App_Logger()
def insert_good_data_as_record(self, good_data_db_name, good_data_collection_name):
"""
Method Name : insert_good_data_as_record
Description : This method inserts the good data in MongoDB as a collection
Version : 1.2
Revisions : moved setup to cloud
"""
method_name = self.insert_good_data_as_record.__name__
self.log_writer.start_log(
key="start",
class_name=self.class_name,
method_name=method_name,
db_name=self.db_name,
collection_name=self.train_db_insert_log,
)
try:
lst = self.blob.read_csv_from_folder(
folder_name=self.good_data_train_dir,
container_name=self.train_data_container,
db_name=self.db_name,
collection_name=self.train_db_insert_log,
)
for f in lst:
df = f[0]
file = f[1]
if file.endswith(".csv"):
self.db_op.insert_dataframe_as_record(
data_frame=df,
db_name=good_data_db_name,
collection_name=good_data_collection_name,
)
else:
pass
self.log_writer.log(
db_name=self.db_name,
collection_name=self.train_db_insert_log,
log_info="Inserted dataframe as collection record in mongodb",
)
self.log_writer.start_log(
key="exit",
class_name=self.class_name,
method_name=method_name,
db_name=self.db_name,
collection_name=self.train_db_insert_log,
)
except Exception as e:
self.log_writer.exception_log(
error=e,
class_name=self.class_name,
method_name=method_name,
db_name=self.db_name,
collection_name=self.train_db_insert_log,
)
def export_collection_to_csv(self, good_data_db_name, good_data_collection_name):
"""
Method Name : export_collection_to_csv
Description : This method extracts the inserted data to a csv file, which will be used for training
Version : 1.2
Revisions : moved setup to cloud
"""
method_name = self.export_collection_to_csv.__name__
self.log_writer.start_log(
key="start",
class_name=self.class_name,
method_name=method_name,
db_name=self.db_name,
collection_name=self.train_export_csv_log,
)
try:
df = self.db_op.get_collection_as_dataframe(
db_name=good_data_db_name,
collection_name=good_data_collection_name,
)
self.blob.upload_df_as_csv(
dataframe=df,
local_file_name=self.train_export_csv_file,
container_file_name=self.train_export_csv_file,
container_name=self.input_files_container,
db_name=self.db_name,
collection_name=self.train_export_csv_log,
)
self.log_writer.start_log(
key="exit",
class_name=self.class_name,
method_name=method_name,
db_name=self.db_name,
collection_name=self.train_export_csv_log,
)
except Exception as e:
self.log_writer.exception_log(
error=e,
class_name=self.class_name,
method_name=method_name,
db_name=self.db_name,
collection_name=self.train_export_csv_log,
)
| 33.045752
| 107
| 0.585245
|
from phising.blob_storage_operations.blob_operations import Blob_Operation
from phising.mongo_db_operations.mongo_operations import MongoDB_Operation
from utils.logger import App_Logger
from utils.read_params import read_params
class DB_Operation_Train:
def __init__(self):
self.config = read_params()
self.class_name = self.__class__.__name__
self.db_name = self.config["db_log"]["train"]
self.train_data_container = self.config["container"]["phising_train_data"]
self.train_export_csv_file = self.config["export_csv_file"]["train"]
self.good_data_train_dir = self.config["data"]["train"]["good_data_dir"]
self.input_files_container = self.config["container"]["input_files"]
self.train_db_insert_log = self.config["train_db_log"]["db_insert"]
self.train_export_csv_log = self.config["train_db_log"]["export_csv"]
self.blob = Blob_Operation()
self.db_op = MongoDB_Operation()
self.log_writer = App_Logger()
def insert_good_data_as_record(self, good_data_db_name, good_data_collection_name):
method_name = self.insert_good_data_as_record.__name__
self.log_writer.start_log(
key="start",
class_name=self.class_name,
method_name=method_name,
db_name=self.db_name,
collection_name=self.train_db_insert_log,
)
try:
lst = self.blob.read_csv_from_folder(
folder_name=self.good_data_train_dir,
container_name=self.train_data_container,
db_name=self.db_name,
collection_name=self.train_db_insert_log,
)
for f in lst:
df = f[0]
file = f[1]
if file.endswith(".csv"):
self.db_op.insert_dataframe_as_record(
data_frame=df,
db_name=good_data_db_name,
collection_name=good_data_collection_name,
)
else:
pass
self.log_writer.log(
db_name=self.db_name,
collection_name=self.train_db_insert_log,
log_info="Inserted dataframe as collection record in mongodb",
)
self.log_writer.start_log(
key="exit",
class_name=self.class_name,
method_name=method_name,
db_name=self.db_name,
collection_name=self.train_db_insert_log,
)
except Exception as e:
self.log_writer.exception_log(
error=e,
class_name=self.class_name,
method_name=method_name,
db_name=self.db_name,
collection_name=self.train_db_insert_log,
)
def export_collection_to_csv(self, good_data_db_name, good_data_collection_name):
method_name = self.export_collection_to_csv.__name__
self.log_writer.start_log(
key="start",
class_name=self.class_name,
method_name=method_name,
db_name=self.db_name,
collection_name=self.train_export_csv_log,
)
try:
df = self.db_op.get_collection_as_dataframe(
db_name=good_data_db_name,
collection_name=good_data_collection_name,
)
self.blob.upload_df_as_csv(
dataframe=df,
local_file_name=self.train_export_csv_file,
container_file_name=self.train_export_csv_file,
container_name=self.input_files_container,
db_name=self.db_name,
collection_name=self.train_export_csv_log,
)
self.log_writer.start_log(
key="exit",
class_name=self.class_name,
method_name=method_name,
db_name=self.db_name,
collection_name=self.train_export_csv_log,
)
except Exception as e:
self.log_writer.exception_log(
error=e,
class_name=self.class_name,
method_name=method_name,
db_name=self.db_name,
collection_name=self.train_export_csv_log,
)
| true
| true
|
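Both methods above repeat the same start-log / exit-log / exception-log frame around their bodies. A hypothetical decorator sketch that factors that frame out; the names here are invented for illustration, and only the log_writer calls used above are assumed:

import functools

def logged_method(collection_attr):
    """Wrap a DB_Operation_Train-style method with start/exit/exception logs."""
    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(self, *args, **kwargs):
            collection = getattr(self, collection_attr)
            common = dict(class_name=self.class_name, method_name=fn.__name__,
                          db_name=self.db_name, collection_name=collection)
            self.log_writer.start_log(key="start", **common)
            try:
                result = fn(self, *args, **kwargs)
                self.log_writer.start_log(key="exit", **common)
                return result
            except Exception as e:
                # Mirrors the original methods: log and swallow the exception.
                self.log_writer.exception_log(error=e, **common)
        return wrapper
    return decorator

# Usage sketch:
# @logged_method("train_db_insert_log")
# def insert_good_data_as_record(self, good_data_db_name, good_data_collection_name): ...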
1c48b46ca3eccad0bc0a519bddb6c7a4511e1488
| 3,026
|
py
|
Python
|
PyLB/IO.py
|
pastewka/LBWithPython
|
a913683afa55b77395189b4c5d95f836599a91cb
|
[
"MIT"
] | 1
|
2022-03-19T13:48:28.000Z
|
2022-03-19T13:48:28.000Z
|
PyLB/IO.py
|
IMTEK-Simulation/LBWithPython
|
a913683afa55b77395189b4c5d95f836599a91cb
|
[
"MIT"
] | 1
|
2021-12-08T09:24:42.000Z
|
2021-12-08T09:24:42.000Z
|
PyLB/IO.py
|
IMTEK-Simulation/LBWithPython
|
a913683afa55b77395189b4c5d95f836599a91cb
|
[
"MIT"
] | 2
|
2021-06-07T14:24:51.000Z
|
2021-08-15T15:12:52.000Z
|
# Copyright 2017-2018 Lars Pastewka, Andreas Greiner
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import numpy as np
from mpi4py import MPI
###
def save_mpiio(comm, fn, g_kl):
"""
Write a global two-dimensional array to a single file in the npy format
using MPI I/O: https://docs.scipy.org/doc/numpy/neps/npy-format.html
Arrays written with this function can be read with numpy.load.
Parameters
----------
comm
Cartesian MPI communicator (``comm.Sub`` is called on it).
fn : str
File name.
g_kl : array
Portion of the array on this MPI process.
"""
from numpy.lib.format import dtype_to_descr, magic
magic_str = magic(1, 0)
local_nx, local_ny = g_kl.shape
nx = np.empty_like(local_nx)
ny = np.empty_like(local_ny)
commx = comm.Sub((True, False))
commy = comm.Sub((False, True))
commx.Allreduce(np.asarray(local_nx), nx)
commy.Allreduce(np.asarray(local_ny), ny)
arr_dict_str = str({'descr': dtype_to_descr(g_kl.dtype),
'fortran_order': False,
'shape': (int(nx), int(ny))})  # int() replaces the removed np.asscalar
while (len(arr_dict_str) + len(magic_str) + 2) % 16 != 15:
arr_dict_str += ' '
arr_dict_str += '\n'
header_len = len(arr_dict_str) + len(magic_str) + 2
offsetx = np.zeros_like(local_nx)
commx.Exscan(np.asarray(ny*local_nx), offsetx)
offsety = np.zeros_like(local_ny)
commy.Exscan(np.asarray(local_ny), offsety)
file = MPI.File.Open(comm, fn, MPI.MODE_CREATE | MPI.MODE_WRONLY)
if MPI.COMM_WORLD.Get_rank() == 0:
file.Write(magic_str)
file.Write(np.int16(len(arr_dict_str)))
file.Write(arr_dict_str.encode('latin-1'))
mpitype = MPI._typedict[g_kl.dtype.char]
filetype = mpitype.Create_vector(g_kl.shape[0], g_kl.shape[1], ny)
filetype.Commit()
file.Set_view(header_len + (offsety+offsetx)*mpitype.Get_size(),
filetype=filetype)
file.Write_all(g_kl.copy())
filetype.Free()
file.Close()
| 37.358025
| 80
| 0.689028
|
import numpy as np
from mpi4py import MPI
def save_mpiio(comm, fn, g_kl):
from numpy.lib.format import dtype_to_descr, magic
magic_str = magic(1, 0)
local_nx, local_ny = g_kl.shape
nx = np.empty_like(local_nx)
ny = np.empty_like(local_ny)
commx = comm.Sub((True, False))
commy = comm.Sub((False, True))
commx.Allreduce(np.asarray(local_nx), nx)
commy.Allreduce(np.asarray(local_ny), ny)
arr_dict_str = str({'descr': dtype_to_descr(g_kl.dtype),
'fortran_order': False,
'shape': (int(nx), int(ny))})
while (len(arr_dict_str) + len(magic_str) + 2) % 16 != 15:
arr_dict_str += ' '
arr_dict_str += '\n'
header_len = len(arr_dict_str) + len(magic_str) + 2
offsetx = np.zeros_like(local_nx)
commx.Exscan(np.asarray(ny*local_nx), offsetx)
offsety = np.zeros_like(local_ny)
commy.Exscan(np.asarray(local_ny), offsety)
file = MPI.File.Open(comm, fn, MPI.MODE_CREATE | MPI.MODE_WRONLY)
if MPI.COMM_WORLD.Get_rank() == 0:
file.Write(magic_str)
file.Write(np.int16(len(arr_dict_str)))
file.Write(arr_dict_str.encode('latin-1'))
mpitype = MPI._typedict[g_kl.dtype.char]
filetype = mpitype.Create_vector(g_kl.shape[0], g_kl.shape[1], ny)
filetype.Commit()
file.Set_view(header_len + (offsety+offsetx)*mpitype.Get_size(),
filetype=filetype)
file.Write_all(g_kl.copy())
filetype.Free()
file.Close()
| true
| true
|
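save_mpiio above calls comm.Sub, so it expects a two-dimensional Cartesian communicator. A minimal usage sketch, to be run under MPI (e.g. mpirun -n 4 python script.py); the 2x2 grid and 3x4 tile shape are illustrative only:

import numpy as np
from mpi4py import MPI

# Arrange 4 ranks in a 2x2 Cartesian grid; each rank owns a 3x4 tile.
cart = MPI.COMM_WORLD.Create_cart(dims=[2, 2], periods=[False, False])
tile = np.full((3, 4), cart.Get_rank(), dtype=np.float64)
save_mpiio(cart, 'grid.npy', tile)
# Afterwards, np.load('grid.npy') returns the assembled 6x8 global array.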
1c48b4a673eba893ad4a41d0434a1f394309e428
| 659
|
py
|
Python
|
pkg/pkg/utils/toy.py
|
neurodata/bilateral-connectome
|
2335bd444040ff647a4cd3304bddf7f533e490a7
|
[
"MIT"
] | 2
|
2021-09-24T20:21:18.000Z
|
2022-02-08T18:31:29.000Z
|
pkg/pkg/utils/toy.py
|
neurodata/bgm
|
b04162f84820f81cf719e8a5ddd4dae34d8f5f41
|
[
"MIT"
] | 9
|
2021-09-29T17:23:41.000Z
|
2022-03-16T20:22:04.000Z
|
pkg/pkg/utils/toy.py
|
neurodata/bgm
|
b04162f84820f81cf719e8a5ddd4dae34d8f5f41
|
[
"MIT"
] | 2
|
2021-11-16T16:17:53.000Z
|
2022-03-26T01:25:10.000Z
|
import numpy as np
import pandas as pd
import seaborn as sns
from graspologic.simulations import sbm
def sample_toy_networks(seed=888888, ns=None, B=None):
np.random.seed(seed)
if ns is None:
ns = [5, 6, 7]
if B is None:
B = np.array([[0.8, 0.2, 0.05], [0.05, 0.9, 0.2], [0.05, 0.05, 0.7]])
A1, labels = sbm(ns, B, directed=True, loops=False, return_labels=True)
A2 = sbm(ns, B, directed=True, loops=False)
node_data = pd.DataFrame(index=np.arange(A1.shape[0]))
node_data["labels"] = labels + 1
return A1, A2, node_data
def get_toy_palette():
return dict(zip([1, 2, 3], sns.color_palette("Set2")[3:]))
| 28.652174
| 77
| 0.632777
|
import numpy as np
import pandas as pd
import seaborn as sns
from graspologic.simulations import sbm
def sample_toy_networks(seed=888888, ns=None, B=None):
np.random.seed(seed)
if ns is None:
ns = [5, 6, 7]
if B is None:
B = np.array([[0.8, 0.2, 0.05], [0.05, 0.9, 0.2], [0.05, 0.05, 0.7]])
A1, labels = sbm(ns, B, directed=True, loops=False, return_labels=True)
A2 = sbm(ns, B, directed=True, loops=False)
node_data = pd.DataFrame(index=np.arange(A1.shape[0]))
node_data["labels"] = labels + 1
return A1, A2, node_data
def get_toy_palette():
return dict(zip([1, 2, 3], sns.color_palette("Set2")[3:]))
| true
| true
|
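A short usage sketch for the two helpers above; matplotlib is assumed only for display:

import matplotlib.pyplot as plt

A1, A2, node_data = sample_toy_networks()
palette = get_toy_palette()                # maps block labels 1..3 to colors
print(node_data["labels"].value_counts())  # block sizes 5, 6, 7

plt.imshow(A1, cmap="Greys")               # the SBM block structure is visible
plt.show()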
1c48b4ff77f16456c4d2ccbf2ebf5ccef234b70c
| 9,572
|
py
|
Python
|
disent/frameworks/vae/_unsupervised__infovae.py
|
neonkitchen/disent
|
0f45fefea03473690dfdbf48ef83f6e17ca9b8b3
|
[
"MIT"
] | null | null | null |
disent/frameworks/vae/_unsupervised__infovae.py
|
neonkitchen/disent
|
0f45fefea03473690dfdbf48ef83f6e17ca9b8b3
|
[
"MIT"
] | null | null | null |
disent/frameworks/vae/_unsupervised__infovae.py
|
neonkitchen/disent
|
0f45fefea03473690dfdbf48ef83f6e17ca9b8b3
|
[
"MIT"
] | 1
|
2022-01-18T06:43:33.000Z
|
2022-01-18T06:43:33.000Z
|
# ~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~
# MIT License
#
# Copyright (c) 2021 Nathan Juraj Michlo
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~
from dataclasses import dataclass
from typing import Sequence
import numpy as np
import torch
from torch import Tensor
from torch.distributions import Normal
from disent.frameworks.vae._unsupervised__vae import Vae
# ========================================================================= #
# InfoVae #
# ========================================================================= #
class InfoVae(Vae):
"""
InfoVAE: Balancing Learning and Inference in Variational Autoencoders
https://arxiv.org/pdf/1706.02262.pdf
TODO: this is not verified
Reference implementation is from: https://github.com/AntixK/PyTorch-VAE
Changes:
1. kernels are computed weirdly in this implementation
2. uses unbiased MMD estimates from https://arxiv.org/pdf/1505.03906.pdf
3. computes means, not sums
"""
REQUIRED_OBS = 1
@dataclass
class cfg(Vae.cfg):
info_alpha: float = -0.5
info_lambda: float = 5.0
info_kernel: str = 'rbf'
# what is this? I don't think this should be configurable
z_var: float = 2.
# this is optional
maintain_reg_ratio: bool = True
def __init__(self, make_optimizer_fn, make_model_fn, batch_augment=None, cfg: cfg = None):
super().__init__(make_optimizer_fn, make_model_fn, batch_augment=batch_augment, cfg=cfg)
# checks
assert self.cfg.info_alpha <= 0, f'cfg.info_alpha must be <= zero, current value is: {self.cfg.info_alpha}'
assert self.cfg.loss_reduction == 'mean', 'InfoVAE only supports cfg.loss_reduction == "mean"'
# --------------------------------------------------------------------- #
# Overrides #
# --------------------------------------------------------------------- #
def compute_ave_reg_loss(self, ds_posterior: Sequence[Normal], ds_prior: Sequence[Normal], zs_sampled):
"""
TODO: This could be wrong?
"""
# only supports one input observation at the moment
(d_posterior,), (d_prior,), (z_sampled,) = ds_posterior, ds_prior, zs_sampled
# compute kl divergence
# compute maximum-mean discrepancy
kl_loss = self.latents_handler.compute_ave_kl_loss(ds_posterior, ds_prior, zs_sampled)
mmd_loss = self._compute_mmd(z_posterior_samples=z_sampled, z_prior_samples=d_prior.rsample())
# -~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- #
# original loss Sums Everything, we use the mean and scale everything to keep the ratios the same
# OLD: (C*W*H) * recon_mean + (Z) * kl_mean + (Z) * mmd_mean
# NEW: recon_mean + (Z)/(C*W*H) * kl_mean + (Z)/(C*W*H) * mmd_mean
# compute the weight
# TODO: maybe this should be standardised to something like Z=9, W=64, H=64, C=3
# TODO: this could be moved into other models
reg_weight = (self._model.z_size / np.prod(self._model.x_shape)) if self.cfg.maintain_reg_ratio else 1.0
# -~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- #
# weight the loss terms
kl_reg_loss = reg_weight * (1 - self.cfg.info_alpha) * kl_loss
mmd_reg_loss = reg_weight * (self.cfg.info_alpha + self.cfg.info_lambda - 1) * mmd_loss
# compute combined loss
combined_loss = kl_reg_loss + mmd_reg_loss
# return logs
return combined_loss, {
'kl_loss': kl_loss,
'kl_reg_loss': kl_reg_loss,
'mmd_loss': mmd_loss,
'mmd_reg_loss': mmd_reg_loss,
}
def _compute_mmd(self, z_posterior_samples: Tensor, z_prior_samples: Tensor) -> Tensor:
"""
(✓) visual inspection against:
https://en.wikipedia.org/wiki/Kernel_embedding_of_distributions#Kernel_two-sample_test
"""
# check sizes - these conditions can be relaxed in practice, just for debugging
assert z_posterior_samples.ndim == 2
assert z_posterior_samples.shape == z_prior_samples.shape
# compute kernels: (B, Z) -> (,)
        mean_pz_pz = self._compute_unbiased_mean(self._compute_kernel(z_prior_samples, z_prior_samples), unbiased=True)
        mean_pz_qz = self._compute_unbiased_mean(self._compute_kernel(z_prior_samples, z_posterior_samples), unbiased=False)
        mean_qz_qz = self._compute_unbiased_mean(self._compute_kernel(z_posterior_samples, z_posterior_samples), unbiased=True)
# maximum-mean discrepancy
mmd = mean_pz_pz - 2*mean_pz_qz + mean_qz_qz
return mmd
    def _compute_unbiased_mean(self, kernel: Tensor, unbiased: bool) -> Tensor:
"""
(✓) visual inspection against equation (8) of
Training generative neural networks via Maximum Mean Discrepancy optimization
https://arxiv.org/pdf/1505.03906.pdf
"""
# (B, B) == (N, M) ie. N=B and M=B
N, M = kernel.shape
assert N == M
# compute mean along first and second dims
        if unbiased:
# diagonal stacks values along last dimension ie. (B, B, Z) -> (Z, B) or (B, B) -> (B,)
sum_kernel = kernel.sum(dim=(0, 1)) - torch.diagonal(kernel, dim1=0, dim2=1).sum(dim=-1) # (B, B,) -> (,)
# compute unbiased mean
mean_kernel = sum_kernel / (N*(N-1))
else:
mean_kernel = kernel.mean(dim=(0, 1)) # (B, B,) -> (,)
# check size again
assert mean_kernel.ndim == 0
return mean_kernel
def _compute_kernel(self, z0: Tensor, z1: Tensor) -> Tensor:
"""
(✓) visual inspection against:
https://en.wikipedia.org/wiki/Kernel_embedding_of_distributions#Kernel_two-sample_test
"""
batch_size, z_size = z0.shape
# convert tensors
z0 = z0.unsqueeze(-2) # convert to column tensor # [B, Z] -> [B, 1, Z]
z1 = z1.unsqueeze(-3) # convert to row tensor # [B, Z] -> [1, B, Z]
# in our case this is not required, however it is useful
# if z0 and z1 have different sizes along the 0th dimension.
z0 = z0.expand(batch_size, batch_size, z_size) # [B, 1, Z] -> [B, B, Z]
z1 = z1.expand(batch_size, batch_size, z_size) # [1, B, Z] -> [B, B, Z]
# compute correct kernel
if self.cfg.info_kernel == 'rbf':
kernel = self._kernel_rbf(z0, z1)
# elif self.cfg.info_kernel == 'imq':
# kernel = self._kernel_imq(z0, z1)
else: # pragma: no cover
raise KeyError(f'invalid cfg.info_kernel: {self.cfg.info_kernel}')
# check result size
assert kernel.shape == (batch_size, batch_size)
return kernel
def _kernel_rbf(self, x: Tensor, y: Tensor) -> Tensor:
"""
Radial Basis Function (RBF) Kernel a.k.a. Gaussian Kernel
k(x, y) = exp(- ||x - y||^2 / (2*sigma^2))
(✓) visual inspection against:
https://en.wikipedia.org/wiki/Reproducing_kernel_Hilbert_space#Radial_basis_function_kernels
TODO: how do we arrive at the value for sigma?
- multiplying sigma by z_size is that same as computing .mean(dim=-1)
instead of the current sum
TODO: do we treat each latent variable separately? or as vectors like now due to the .sum?
"""
z_size = x.shape[-1]
sigma = 2 * self.cfg.z_var * z_size
kernel = torch.exp(-((x - y).pow(2).sum(dim=-1) / sigma))
return kernel
# def _kernel_imq(self, x: Tensor, y: Tensor, eps: float = 1e-7) -> Tensor:
# """
# Inverse Multi-Quadratics Kernel
# k(x, y) = (c^2 + ||x - y||^2)^b
# c ∈ R
# b < 0 but better if b ∈ (0, 1)
#
# TODO: This could be wrong?
# # TODO: how do we arrive at the value for c
# """
# z_size = x.shape[-1]
# c = 2 * self.cfg.z_var * z_size
# kernel = c / (eps + c + (x - y).pow(2).sum(-1))
# return kernel
# ========================================================================= #
# END #
# ========================================================================= #
| 45.364929
| 127
| 0.573234
|
from dataclasses import dataclass
from typing import Sequence
import numpy as np
import torch
from torch import Tensor
from torch.distributions import Normal
from disent.frameworks.vae._unsupervised__vae import Vae
class InfoVae(Vae):
REQUIRED_OBS = 1
@dataclass
class cfg(Vae.cfg):
info_alpha: float = -0.5
info_lambda: float = 5.0
info_kernel: str = 'rbf'
z_var: float = 2.
# this is optional
maintain_reg_ratio: bool = True
def __init__(self, make_optimizer_fn, make_model_fn, batch_augment=None, cfg: cfg = None):
super().__init__(make_optimizer_fn, make_model_fn, batch_augment=batch_augment, cfg=cfg)
# checks
assert self.cfg.info_alpha <= 0, f'cfg.info_alpha must be <= zero, current value is: {self.cfg.info_alpha}'
assert self.cfg.loss_reduction == 'mean', 'InfoVAE only supports cfg.loss_reduction == "mean"'
# --------------------------------------------------------------------- #
# Overrides #
# --------------------------------------------------------------------- #
def compute_ave_reg_loss(self, ds_posterior: Sequence[Normal], ds_prior: Sequence[Normal], zs_sampled):
# only supports one input observation at the moment
(d_posterior,), (d_prior,), (z_sampled,) = ds_posterior, ds_prior, zs_sampled
# compute kl divergence
# compute maximum-mean discrepancy
kl_loss = self.latents_handler.compute_ave_kl_loss(ds_posterior, ds_prior, zs_sampled)
mmd_loss = self._compute_mmd(z_posterior_samples=z_sampled, z_prior_samples=d_prior.rsample())
# -~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- #
# original loss Sums Everything, we use the mean and scale everything to keep the ratios the same
# OLD: (C*W*H) * recon_mean + (Z) * kl_mean + (Z) * mmd_mean
# NEW: recon_mean + (Z)/(C*W*H) * kl_mean + (Z)/(C*W*H) * mmd_mean
# compute the weight
# TODO: maybe this should be standardised to something like Z=9, W=64, H=64, C=3
# TODO: this could be moved into other models
reg_weight = (self._model.z_size / np.prod(self._model.x_shape)) if self.cfg.maintain_reg_ratio else 1.0
# -~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~- #
# weight the loss terms
kl_reg_loss = reg_weight * (1 - self.cfg.info_alpha) * kl_loss
mmd_reg_loss = reg_weight * (self.cfg.info_alpha + self.cfg.info_lambda - 1) * mmd_loss
# compute combined loss
combined_loss = kl_reg_loss + mmd_reg_loss
# return logs
return combined_loss, {
'kl_loss': kl_loss,
'kl_reg_loss': kl_reg_loss,
'mmd_loss': mmd_loss,
'mmd_reg_loss': mmd_reg_loss,
}
def _compute_mmd(self, z_posterior_samples: Tensor, z_prior_samples: Tensor) -> Tensor:
# check sizes - these conditions can be relaxed in practice, just for debugging
assert z_posterior_samples.ndim == 2
assert z_posterior_samples.shape == z_prior_samples.shape
# compute kernels: (B, Z) -> (,)
        mean_pz_pz = self._compute_unbiased_mean(self._compute_kernel(z_prior_samples, z_prior_samples), unbiased=True)
        mean_pz_qz = self._compute_unbiased_mean(self._compute_kernel(z_prior_samples, z_posterior_samples), unbiased=False)
        mean_qz_qz = self._compute_unbiased_mean(self._compute_kernel(z_posterior_samples, z_posterior_samples), unbiased=True)
# maximum-mean discrepancy
mmd = mean_pz_pz - 2*mean_pz_qz + mean_qz_qz
return mmd
    def _compute_unbiased_mean(self, kernel: Tensor, unbiased: bool) -> Tensor:
# (B, B) == (N, M) ie. N=B and M=B
N, M = kernel.shape
assert N == M
# compute mean along first and second dims
        if unbiased:
# diagonal stacks values along last dimension ie. (B, B, Z) -> (Z, B) or (B, B) -> (B,)
sum_kernel = kernel.sum(dim=(0, 1)) - torch.diagonal(kernel, dim1=0, dim2=1).sum(dim=-1) # (B, B,) -> (,)
# compute unbiased mean
mean_kernel = sum_kernel / (N*(N-1))
else:
mean_kernel = kernel.mean(dim=(0, 1)) # (B, B,) -> (,)
# check size again
assert mean_kernel.ndim == 0
return mean_kernel
def _compute_kernel(self, z0: Tensor, z1: Tensor) -> Tensor:
batch_size, z_size = z0.shape
# convert tensors
z0 = z0.unsqueeze(-2) # convert to column tensor # [B, Z] -> [B, 1, Z]
z1 = z1.unsqueeze(-3) # convert to row tensor # [B, Z] -> [1, B, Z]
# in our case this is not required, however it is useful
# if z0 and z1 have different sizes along the 0th dimension.
z0 = z0.expand(batch_size, batch_size, z_size) # [B, 1, Z] -> [B, B, Z]
z1 = z1.expand(batch_size, batch_size, z_size) # [1, B, Z] -> [B, B, Z]
# compute correct kernel
if self.cfg.info_kernel == 'rbf':
kernel = self._kernel_rbf(z0, z1)
# elif self.cfg.info_kernel == 'imq':
# kernel = self._kernel_imq(z0, z1)
else: # pragma: no cover
raise KeyError(f'invalid cfg.info_kernel: {self.cfg.info_kernel}')
# check result size
assert kernel.shape == (batch_size, batch_size)
return kernel
def _kernel_rbf(self, x: Tensor, y: Tensor) -> Tensor:
z_size = x.shape[-1]
sigma = 2 * self.cfg.z_var * z_size
kernel = torch.exp(-((x - y).pow(2).sum(dim=-1) / sigma))
return kernel
# def _kernel_imq(self, x: Tensor, y: Tensor, eps: float = 1e-7) -> Tensor:
# """
# Inverse Multi-Quadratics Kernel
# k(x, y) = (c^2 + ||x - y||^2)^b
# c ∈ R
# b < 0 but better if b ∈ (0, 1)
#
# TODO: This could be wrong?
# # TODO: how do we arrive at the value for c
# """
# z_size = x.shape[-1]
# c = 2 * self.cfg.z_var * z_size
# kernel = c / (eps + c + (x - y).pow(2).sum(-1))
# return kernel
# ========================================================================= #
# END #
# ========================================================================= #
| true
| true
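A self-contained sketch of the estimator the class above relies on (plain torch, not the disent API): the unbiased MMD of equation (8) in the cited paper, with the same RBF kernel and diagonal-dropping trick as _compute_unbiased_mean.

import torch

def rbf_kernel(x, y, sigma):
    # x, y: (B, Z) -> (B, B) kernel matrix
    return torch.exp(-(x.unsqueeze(1) - y.unsqueeze(0)).pow(2).sum(-1) / sigma)

def mmd_unbiased(p, q, sigma):
    n = p.shape[0]
    k_pp, k_qq, k_pq = rbf_kernel(p, p, sigma), rbf_kernel(q, q, sigma), rbf_kernel(p, q, sigma)
    # drop the diagonal of the within-sample terms for the unbiased estimate
    e_pp = (k_pp.sum() - k_pp.diagonal().sum()) / (n * (n - 1))
    e_qq = (k_qq.sum() - k_qq.diagonal().sum()) / (n * (n - 1))
    return e_pp - 2 * k_pq.mean() + e_qq

sigma = 2 * 2.0 * 9                 # 2 * z_var * z_size, as in _kernel_rbf
print(mmd_unbiased(torch.randn(128, 9), torch.randn(128, 9), sigma))  # near zero: same distribution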
|
1c48b546031f0533e11cbc81352d342ede099412
| 492
|
py
|
Python
|
src/ch16-mongodb-version/starter/pypi_nosql/pypi/controllers/home_controller.py
|
possnfiffer/data-driven-web-apps-with-pyramid-and-sqlalchemy
|
880dc4bc2a6d5dd5fcc91452725d5a1c7c9022c5
|
[
"MIT"
] | 83
|
2018-07-04T22:19:20.000Z
|
2022-03-09T22:44:48.000Z
|
src/ch16-mongodb-version/starter/pypi_nosql/pypi/controllers/home_controller.py
|
possnfiffer/data-driven-web-apps-with-pyramid-and-sqlalchemy
|
880dc4bc2a6d5dd5fcc91452725d5a1c7c9022c5
|
[
"MIT"
] | 9
|
2018-08-02T00:17:33.000Z
|
2021-11-17T17:18:05.000Z
|
src/ch16-mongodb-version/starter/pypi_nosql/pypi/controllers/home_controller.py
|
possnfiffer/data-driven-web-apps-with-pyramid-and-sqlalchemy
|
880dc4bc2a6d5dd5fcc91452725d5a1c7c9022c5
|
[
"MIT"
] | 78
|
2018-07-28T14:07:07.000Z
|
2022-01-15T07:06:42.000Z
|
from pyramid.view import view_config
from pypi.viewmodels.home.home_index_viewmodel import HomeIndexViewModel
from pypi.viewmodels.shared.viewmodel_base import ViewModelBase
@view_config(route_name='home', renderer='pypi:templates/home/index.pt')
def home_index(request):
vm = HomeIndexViewModel(request)
return vm.to_dict()
@view_config(route_name='about', renderer='pypi:templates/home/about.pt')
def home_about(request):
vm = ViewModelBase(request)
return vm.to_dict()
| 30.75
| 73
| 0.79065
|
from pyramid.view import view_config
from pypi.viewmodels.home.home_index_viewmodel import HomeIndexViewModel
from pypi.viewmodels.shared.viewmodel_base import ViewModelBase
@view_config(route_name='home', renderer='pypi:templates/home/index.pt')
def home_index(request):
vm = HomeIndexViewModel(request)
return vm.to_dict()
@view_config(route_name='about', renderer='pypi:templates/home/about.pt')
def home_about(request):
vm = ViewModelBase(request)
return vm.to_dict()
| true
| true
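A minimal sketch of the app wiring these handlers expect (the route patterns and port are assumptions; only the route names 'home' and 'about' come from the record):

from wsgiref.simple_server import make_server
from pyramid.config import Configurator

def make_app():
    with Configurator() as config:
        config.include('pyramid_chameleon')   # renderer for the .pt templates
        config.add_route('home', '/')         # assumed pattern
        config.add_route('about', '/about')   # assumed pattern
        config.scan('pypi.controllers')       # registers the @view_config views
        return config.make_wsgi_app()

if __name__ == '__main__':
    make_server('0.0.0.0', 6543, make_app()).serve_forever()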
|
1c48b5e6ea648f2f13d47f5e4544e1b00c814fca
| 3,071
|
py
|
Python
|
tf_dot_general/tf_dot_general_test.py
|
DarrenZhang01/Neural_Tangents_TensorFlow
|
2fd360c8b1b8c9106044034f6a8b5c2734db9c3d
|
[
"Apache-2.0"
] | 4
|
2020-12-25T17:37:13.000Z
|
2022-01-03T17:00:23.000Z
|
tf_dot_general/tf_dot_general_test.py
|
DarrenZhang01/TensorFlow_GSoC
|
2fd360c8b1b8c9106044034f6a8b5c2734db9c3d
|
[
"Apache-2.0"
] | 33
|
2020-07-18T18:57:54.000Z
|
2020-08-17T13:58:46.000Z
|
tf_dot_general/tf_dot_general_test.py
|
DarrenZhang01/Neural_Tangents_TensorFlow
|
2fd360c8b1b8c9106044034f6a8b5c2734db9c3d
|
[
"Apache-2.0"
] | 1
|
2021-08-16T19:00:06.000Z
|
2021-08-16T19:00:06.000Z
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
Tests for the general dot operation for TensorFlow.
Zhibo Zhang, 2020.06.30
"""
import tensorflow as tf
from tensorflow.python.platform import test
import numpy as np
import jax.numpy as jnp
from jax import lax
from tf_dot_general import *
from absl.testing import parameterized
class TFDotGeneralTest(test.TestCase, parameterized.TestCase):
@parameterized.parameters(
{"lhs": ['i', 'j'], "rhs": ['j', 'k'], "dims": (((1,), (0,)), ((), ())),
"result": "ik"},
{"lhs": ['a', 'i', 'j'], "rhs": ['a', 'j', 'k'], "dims": \
(((2,), (1,)), ((0,), (0,))), "result": "aik"},
{"lhs": ['a', 'b', 'i', 'j'], "rhs": ['a', 'b', 'j', 'k'], "dims": \
(((3,), (2,)), ((0, 1,), (0, 1,))), "result": "abik"},
)
def test_compose_output_rep(self, lhs, rhs, dims, result):
contraction, batch = dims
lhs_contraction, rhs_contraction = contraction
lhs_batch, rhs_batch = batch
output_rep = compose_output_rep(lhs, rhs, lhs_contraction, rhs_contraction,
lhs_batch, rhs_batch)
self.assertEqual(output_rep, result)
@parameterized.parameters(
{"lhs_np": np.ones((5, 3)), "rhs_np": np.ones((3, 2)),
"dims": (((1,), (0,)), ((), ()))},
{"lhs_np": np.ones((5, 3)), "rhs_np": np.ones((5, 3)),
"dims": (((0, 1), (0, 1)), ((), ()))},
{"lhs_np": np.ones((5, 3, 2)), "rhs_np": np.ones((2, 3, 2)),
"dims": (((1, 2), (1, 0)), ((), ()))},
{"lhs_np": np.ones((6, 5, 3)), "rhs_np": np.ones((6, 3, 2)),
"dims": (((2,), (1,)), ((0,), (0,)))},
{"lhs_np": np.ones((6, 3, 5)), "rhs_np": np.ones((6, 3, 2)),
"dims": (((1,), (1,)), ((0,), (0,)))},
{"lhs_np": np.ones((5, 3, 2, 2)), "rhs_np": np.ones((5, 2, 2, 6)),
"dims": (((2, 3), (1, 2)), ((0,), (0,)))},
{"lhs_np": np.ones((2, 2, 5, 3)), "rhs_np": np.ones((2, 2, 3, 2)),
"dims": (((3,), (2,)), ((0, 1), (0, 1)))},
{"lhs_np": np.ones((2, 2, 5, 2)), "rhs_np": np.ones((2, 2, 3, 2)),
"dims": (((3,), (1,)), ((0,), (0,)))},
{"lhs_np": np.ones((2, 2, 5, 3, 3)), "rhs_np": np.ones((2, 3, 2, 3, 2)),
"dims": (((4,), (1,)), ((0,), (0,)))},
)
def test_tf_dot_general(self, lhs_np, rhs_np, dims):
ans = lax.dot_general(lhs_np, rhs_np, dims)
result = tf_dot_general(lhs_np, rhs_np, dims)
self.assertAllClose(result, np.array(ans))
if __name__ == "__main__":
test.main()
| 38.873418
| 80
| 0.535656
|
import tensorflow as tf
from tensorflow.python.platform import test
import numpy as np
import jax.numpy as jnp
from jax import lax
from tf_dot_general import *
from absl.testing import parameterized
class TFDotGeneralTest(test.TestCase, parameterized.TestCase):
@parameterized.parameters(
{"lhs": ['i', 'j'], "rhs": ['j', 'k'], "dims": (((1,), (0,)), ((), ())),
"result": "ik"},
{"lhs": ['a', 'i', 'j'], "rhs": ['a', 'j', 'k'], "dims": \
(((2,), (1,)), ((0,), (0,))), "result": "aik"},
{"lhs": ['a', 'b', 'i', 'j'], "rhs": ['a', 'b', 'j', 'k'], "dims": \
(((3,), (2,)), ((0, 1,), (0, 1,))), "result": "abik"},
)
def test_compose_output_rep(self, lhs, rhs, dims, result):
contraction, batch = dims
lhs_contraction, rhs_contraction = contraction
lhs_batch, rhs_batch = batch
output_rep = compose_output_rep(lhs, rhs, lhs_contraction, rhs_contraction,
lhs_batch, rhs_batch)
self.assertEqual(output_rep, result)
@parameterized.parameters(
{"lhs_np": np.ones((5, 3)), "rhs_np": np.ones((3, 2)),
"dims": (((1,), (0,)), ((), ()))},
{"lhs_np": np.ones((5, 3)), "rhs_np": np.ones((5, 3)),
"dims": (((0, 1), (0, 1)), ((), ()))},
{"lhs_np": np.ones((5, 3, 2)), "rhs_np": np.ones((2, 3, 2)),
"dims": (((1, 2), (1, 0)), ((), ()))},
{"lhs_np": np.ones((6, 5, 3)), "rhs_np": np.ones((6, 3, 2)),
"dims": (((2,), (1,)), ((0,), (0,)))},
{"lhs_np": np.ones((6, 3, 5)), "rhs_np": np.ones((6, 3, 2)),
"dims": (((1,), (1,)), ((0,), (0,)))},
{"lhs_np": np.ones((5, 3, 2, 2)), "rhs_np": np.ones((5, 2, 2, 6)),
"dims": (((2, 3), (1, 2)), ((0,), (0,)))},
{"lhs_np": np.ones((2, 2, 5, 3)), "rhs_np": np.ones((2, 2, 3, 2)),
"dims": (((3,), (2,)), ((0, 1), (0, 1)))},
{"lhs_np": np.ones((2, 2, 5, 2)), "rhs_np": np.ones((2, 2, 3, 2)),
"dims": (((3,), (1,)), ((0,), (0,)))},
{"lhs_np": np.ones((2, 2, 5, 3, 3)), "rhs_np": np.ones((2, 3, 2, 3, 2)),
"dims": (((4,), (1,)), ((0,), (0,)))},
)
def test_tf_dot_general(self, lhs_np, rhs_np, dims):
ans = lax.dot_general(lhs_np, rhs_np, dims)
result = tf_dot_general(lhs_np, rhs_np, dims)
self.assertAllClose(result, np.array(ans))
if __name__ == "__main__":
test.main()
| true
| true
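For readers decoding the dims tuples in the tests above, a small sketch (illustration only) of lax.dot_general's dimension_numbers, ((lhs_contracting, rhs_contracting), (lhs_batch, rhs_batch)), against the equivalent einsum:

import numpy as np
from jax import lax

lhs = np.random.rand(6, 5, 3)                # (batch, i, j)
rhs = np.random.rand(6, 3, 2)                # (batch, j, k)

dims = (((2,), (1,)), ((0,), (0,)))          # contract j, batch over axis 0
out = lax.dot_general(lhs, rhs, dims)
print(np.allclose(out, np.einsum('bij,bjk->bik', lhs, rhs)))  # True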
|
1c48b6acf5248277954d3f56227f28df99280d78
| 8,521
|
py
|
Python
|
20200602_homework_week8/xue/assignment2/skin_dect_v1.py
|
DTRademaker/DL_tutorials_2020
|
aaae2833c084547e027e51ebeb0abf6bcd336897
|
[
"Apache-2.0"
] | null | null | null |
20200602_homework_week8/xue/assignment2/skin_dect_v1.py
|
DTRademaker/DL_tutorials_2020
|
aaae2833c084547e027e51ebeb0abf6bcd336897
|
[
"Apache-2.0"
] | null | null | null |
20200602_homework_week8/xue/assignment2/skin_dect_v1.py
|
DTRademaker/DL_tutorials_2020
|
aaae2833c084547e027e51ebeb0abf6bcd336897
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# Li Xue
# 3-Jul-2020 16:01
import os
import sys
import re
from os import listdir
import shutil
import glob
from PIL import Image
import imageio
import torch
import torch.nn as nn
import torch.nn.functional as F
from torchsummary import summary
import torch.utils.data as data_utils
from matplotlib import pyplot as plt
import numpy as np
import pandas as pd
import subprocess
import pdb
def resize_imgs(img_dir):
# create img_dir/ori
img_dir_ori = f"{img_dir}/ori"
if not os.path.isdir(img_dir_ori):
os.mkdir(img_dir_ori)
# resize img and move the original to img_dir/ori
imgFLs = [f for f in glob.glob(f"{img_dir}/*") if re.search('(jpg|png|jpeg)',f, flags=re.IGNORECASE) ]
i = 0
for imgFL in imgFLs:
i = i+1
img = Image.open(imgFL)
img_new = img.resize((150,150))
try:
shutil.move(imgFL, f"{img_dir_ori}")
except:
            # the original imgFL is already saved in img_dir_ori
os.remove(imgFL)
newFL = os.path.splitext(imgFL)[0] + '.jpg'
try:
img_new.save(newFL)
except:
print(f"Error: cannot save {imgFL}")
print(f"Resized imgs under: {img_dir}/xx.jpg")
resize_imgs('Dataset/train/nonSkinPhoto')
resize_imgs('Dataset/train/SkinPhoto')
resize_imgs('Dataset/validation/neg')
resize_imgs('Dataset/validation/pos')
resize_imgs('Dataset/test/neg')
resize_imgs('Dataset/test/pos')
class Net(nn.Module):
def __init__(self):
super(Net, self).__init__()
self.bn0 = nn.BatchNorm2d(3)
self.conv1 = nn.Conv2d(3, 32, (1,1), stride = 1)
self.bn1 = nn.BatchNorm2d(32)
self.conv2 = nn.Conv2d(32,1, (1,1), stride = 1)
self.sigmoid = nn.Sigmoid()
def forward(self,x):
x = self.conv1(x)
x = F.elu(x)
#x = self.bn1(x) #used to make relu stable
#x = F.relu(x) # relu without bn1 does not work well: tends to generate too many black (0,0,0) pixels
x = self.conv2(x)
y = self.sigmoid(x)
return y
net = Net()
summary(net, input_size=(3, 150, 150))
def get_Flnames(img_dir_pos, img_dir_neg):
# return
# 1. a list of image files (path + names)
# 2. labels
imgFLs_pos = [f"{img_dir_pos}/{f}" for f in os.listdir(img_dir_pos) if re.search('.+.jpg',f)]
imgFLs_neg = [f"{img_dir_neg}/{f}" for f in os.listdir(img_dir_neg) if re.search('.+.jpg',f)]
imgFLs = imgFLs_pos + imgFLs_neg
num_pos = len(imgFLs_pos)
num_neg = len(imgFLs_neg)
labels = [1] * num_pos + [0] * num_neg
print(f"\n{num_pos} positive imgs read from {img_dir_pos}")
print(f"{num_neg} negtive imgs read from {img_dir_neg}\n")
return imgFLs, labels
class myDataset(data_utils.Dataset):
def __init__(self, imgFLs, labels):
self.imgFLs = imgFLs
self.labels = labels
def __len__(self):
return len(self.imgFLs)
def __getitem__(self, idx):
img_name = self.imgFLs[idx]
label = self.labels[idx]
#print(f"Reading --> idx: {idx}, img_name: {img_name}, label: {label}")
image = imageio.imread(img_name)
image = torch.Tensor(image).permute([2,0,1])
image = image/255
return image, label
def train_one_epo(net, data_loader):
    net.train()
    losses = []
    for x, targets in data_loader:
        x, targets = x.to(device), targets.to(device)  # keep batches on the same device as net
        optimizer.zero_grad()
        y = net(x)
        criterion = nn.BCELoss()
        loss = criterion(torch.mean(y,dim=[1,2,3]), targets.to(torch.float))
        loss.backward()
        losses.append(loss.item())  # store floats so computation graphs are not kept alive
        optimizer.step()
    loss_ave = sum(losses)/len(losses)
    return net, loss_ave
def evaluate(net, data_loader):
    net.eval()
    losses = []
    with torch.no_grad():
        for x, targets in data_loader:
            x, targets = x.to(device), targets.to(device)
            y = net(x)
            criterion = nn.BCELoss()
            loss = criterion(torch.mean(y,dim=[1,2,3]), targets.to(torch.float))
            losses.append(loss.item())
    loss_ave = sum(losses)/len(losses)
    return loss_ave
def prepare_dataset(img_dir_pos, img_dir_neg, batch_size):
imgFLs, labels = get_Flnames(img_dir_pos, img_dir_neg)
train_dataset = myDataset(imgFLs , labels )
index = list(range(train_dataset.__len__()))
data_loader = data_utils.DataLoader(train_dataset, batch_size=batch_size,
shuffle=True, num_workers=2)
return data_loader
def train(net, epochs, data_loader_train, data_loader_eval):
    losses_train = torch.Tensor(epochs)
    losses_eval = torch.Tensor(epochs)
    for epoch in range(epochs):
mini_batch = 0
net, loss_train = train_one_epo(net, data_loader_train)
loss_eval = evaluate(net, data_loader_eval)
print(f"epoch = {epoch}, loss_train = {loss_train:.4f}, loss_eval = {loss_eval:.4f}")
losses_train[epoch] = loss_train
losses_eval[epoch] = loss_eval
#-- save model
if not os.path.isdir('networks'): os.mkdir('networks')
modelFL = f"networks/model_epo{epoch}.pt"
torch.save(net.state_dict(), modelFL)
print(f"--> One epoch finished. Modeled saved: {modelFL}")
if epoch % 1 == 0:
if not os.path.isdir("pred_imgs"): os.mkdir("pred_imgs")
outputFL = f"pred_imgs/eval_{epoch}.png"
visual_check(modelFL, 'Dataset/validation/pos/' , outputFL)
outputFL = f"pred_imgs/train_{epoch}.png"
visual_check(modelFL, 'Dataset/train/SkinPhoto/' , outputFL)
return net, losses_train, losses_eval
def plot_loss(losses_train, losses_eval):
# save as torch file
torch.save(losses_train, "losses_train.pt")
torch.save(losses_eval, "losses_eval.pt")
# save as tsv file
losses = torch.stack( (losses_train, losses_eval), dim = 1).detach().numpy()
losses = pd.DataFrame(losses, columns = ['train', 'eval'])
losses.to_csv('losses.tsv', sep = '\t', index = False)
print(f"losses.tsv generated.")
# generate plots
subprocess.check_call(['Rscript', 'plot_losses.R', 'losses.tsv'])
def black_white(im_new, cutoff = 0.5):
#- convert image (original range 0-1) to black and white
idx1 = im_new < cutoff
idx2 = im_new >= cutoff
im_new[idx1] = 0
im_new[idx2] = 1
return im_new
def visual_check(networkFL, img_dir, outputFL):
# save predicted images into png files
net = Net()
net.load_state_dict(torch.load(networkFL))
# visually check the valiation set
net.eval()
imgFLs = [f"{img_dir}/{f}" for f in os.listdir(img_dir) if os.path.isfile(f"{img_dir}/{f}")]
n_imgs = len(imgFLs)
print(f"There are {n_imgs} images under {img_dir}")
with torch.no_grad():
images = torch.Tensor().to(torch.uint8)
for i in range(len(imgFLs)):
im = imageio.imread(imgFLs[i])
im_new = net(torch.Tensor(im).unsqueeze(0).permute(0,3,1,2))
im_new = (im_new*255).squeeze().to(torch.uint8)
#--
im_new = torch.stack((im_new, im_new, im_new), dim = 2) #im_new.shape = [150, 150] -> im_new.shape = [150, 150, 3]
im = torch.Tensor(im).to(torch.uint8)
this_im = torch.cat((im, im_new), dim = 1) # put the original image and pred image side-by-side
images = torch.cat((images, this_im), dim = 0)
# save into file
imageio.imsave(outputFL,images.to(torch.uint8).numpy())
print(f"{outputFL} generated.")
#-- train and evaluate
data_loader_train = prepare_dataset(img_dir_pos = 'Dataset/train/SkinPhoto',
img_dir_neg = 'Dataset/train/nonSkinPhoto', batch_size = 5)
data_loader_eval = prepare_dataset(img_dir_pos = 'Dataset/validation/pos',
img_dir_neg = 'Dataset/validation/neg', batch_size = 20)
net = Net()
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print(f"device: {device}")
net.to(device)
#net.load_state_dict(torch.load('networks/old/model_epo299.pt'))
optimizer = torch.optim.Adam(net.parameters())
epochs = 50
net, losses_train, losses_eval = train(net, epochs, data_loader_train, data_loader_eval)
plot_loss(losses_train, losses_eval)
#- the model with lowest eval loss (the loss does not make much sense here ...)
loss_min , idx = torch.min(losses_eval, 0)
networkFL = f'networks/model_epo{idx}.pt'
print(f"Model with the lowest eval loss: {networkFL}, epoch = {idx}, and loss_eval = {loss_min:4f}")
sys.exit()
# networkFL = 'networks/model_epo11.pt'
# outputFL = f"pred_imgs/test_{11}.png"
# visual_check(networkFL, 'Dataset/test/pos/' , outputFL)
| 33.415686
| 126
| 0.645816
|
import os
import sys
import re
from os import listdir
import shutil
import glob
from PIL import Image
import imageio
import torch
import torch.nn as nn
import torch.nn.functional as F
from torchsummary import summary
import torch.utils.data as data_utils
from matplotlib import pyplot as plt
import numpy as np
import pandas as pd
import subprocess
import pdb
def resize_imgs(img_dir):
img_dir_ori = f"{img_dir}/ori"
if not os.path.isdir(img_dir_ori):
os.mkdir(img_dir_ori)
    imgFLs = [f for f in glob.glob(f"{img_dir}/*") if re.search(r'\.(jpg|png|jpeg)$', f, flags=re.IGNORECASE)]
i = 0
for imgFL in imgFLs:
i = i+1
img = Image.open(imgFL)
img_new = img.resize((150,150))
try:
shutil.move(imgFL, f"{img_dir_ori}")
except:
os.remove(imgFL)
newFL = os.path.splitext(imgFL)[0] + '.jpg'
try:
img_new.save(newFL)
except:
print(f"Error: cannot save {imgFL}")
print(f"Resized imgs under: {img_dir}/xx.jpg")
resize_imgs('Dataset/train/nonSkinPhoto')
resize_imgs('Dataset/train/SkinPhoto')
resize_imgs('Dataset/validation/neg')
resize_imgs('Dataset/validation/pos')
resize_imgs('Dataset/test/neg')
resize_imgs('Dataset/test/pos')
class Net(nn.Module):
def __init__(self):
super(Net, self).__init__()
self.bn0 = nn.BatchNorm2d(3)
self.conv1 = nn.Conv2d(3, 32, (1,1), stride = 1)
self.bn1 = nn.BatchNorm2d(32)
self.conv2 = nn.Conv2d(32,1, (1,1), stride = 1)
self.sigmoid = nn.Sigmoid()
def forward(self,x):
x = self.conv1(x)
x = F.elu(x)
        x = self.conv2(x)
        y = self.sigmoid(x)
        return y
net = Net()
summary(net, input_size=(3, 150, 150))
def get_Flnames(img_dir_pos, img_dir_neg):
imgFLs_pos = [f"{img_dir_pos}/{f}" for f in os.listdir(img_dir_pos) if re.search('.+.jpg',f)]
imgFLs_neg = [f"{img_dir_neg}/{f}" for f in os.listdir(img_dir_neg) if re.search('.+.jpg',f)]
imgFLs = imgFLs_pos + imgFLs_neg
num_pos = len(imgFLs_pos)
num_neg = len(imgFLs_neg)
labels = [1] * num_pos + [0] * num_neg
print(f"\n{num_pos} positive imgs read from {img_dir_pos}")
print(f"{num_neg} negtive imgs read from {img_dir_neg}\n")
return imgFLs, labels
class myDataset(data_utils.Dataset):
def __init__(self, imgFLs, labels):
self.imgFLs = imgFLs
self.labels = labels
def __len__(self):
return len(self.imgFLs)
def __getitem__(self, idx):
img_name = self.imgFLs[idx]
label = self.labels[idx]
image = imageio.imread(img_name)
image = torch.Tensor(image).permute([2,0,1])
image = image/255
return image, label
def train_one_epo(net, data_loader):
    net.train()
    losses = []
    for x, targets in data_loader:
        x, targets = x.to(device), targets.to(device)
        optimizer.zero_grad()
        y = net(x)
        criterion = nn.BCELoss()
        loss = criterion(torch.mean(y,dim=[1,2,3]), targets.to(torch.float))
        loss.backward()
        losses.append(loss.item())
        optimizer.step()
    loss_ave = sum(losses)/len(losses)
    return net, loss_ave
def evaluate(net, data_loader):
    net.eval()
    losses = []
    with torch.no_grad():
        for x, targets in data_loader:
            x, targets = x.to(device), targets.to(device)
            y = net(x)
            criterion = nn.BCELoss()
            loss = criterion(torch.mean(y,dim=[1,2,3]), targets.to(torch.float))
            losses.append(loss.item())
    loss_ave = sum(losses)/len(losses)
    return loss_ave
def prepare_dataset(img_dir_pos, img_dir_neg, batch_size):
imgFLs, labels = get_Flnames(img_dir_pos, img_dir_neg)
train_dataset = myDataset(imgFLs , labels )
index = list(range(train_dataset.__len__()))
data_loader = data_utils.DataLoader(train_dataset, batch_size=batch_size,
shuffle=True, num_workers=2)
return data_loader
def train(net, epochs, data_loader_train, data_loader_eval):
    losses_train = torch.Tensor(epochs)
    losses_eval = torch.Tensor(epochs)
    for epoch in range(epochs):
mini_batch = 0
net, loss_train = train_one_epo(net, data_loader_train)
loss_eval = evaluate(net, data_loader_eval)
print(f"epoch = {epoch}, loss_train = {loss_train:.4f}, loss_eval = {loss_eval:.4f}")
losses_train[epoch] = loss_train
losses_eval[epoch] = loss_eval
if not os.path.isdir('networks'): os.mkdir('networks')
modelFL = f"networks/model_epo{epoch}.pt"
torch.save(net.state_dict(), modelFL)
print(f"--> One epoch finished. Modeled saved: {modelFL}")
if epoch % 1 == 0:
if not os.path.isdir("pred_imgs"): os.mkdir("pred_imgs")
outputFL = f"pred_imgs/eval_{epoch}.png"
visual_check(modelFL, 'Dataset/validation/pos/' , outputFL)
outputFL = f"pred_imgs/train_{epoch}.png"
visual_check(modelFL, 'Dataset/train/SkinPhoto/' , outputFL)
return net, losses_train, losses_eval
def plot_loss(losses_train, losses_eval):
torch.save(losses_train, "losses_train.pt")
torch.save(losses_eval, "losses_eval.pt")
losses = torch.stack( (losses_train, losses_eval), dim = 1).detach().numpy()
losses = pd.DataFrame(losses, columns = ['train', 'eval'])
losses.to_csv('losses.tsv', sep = '\t', index = False)
print(f"losses.tsv generated.")
subprocess.check_call(['Rscript', 'plot_losses.R', 'losses.tsv'])
def black_white(im_new, cutoff = 0.5):
idx1 = im_new < cutoff
idx2 = im_new >= cutoff
im_new[idx1] = 0
im_new[idx2] = 1
return im_new
def visual_check(networkFL, img_dir, outputFL):
net = Net()
net.load_state_dict(torch.load(networkFL))
net.eval()
imgFLs = [f"{img_dir}/{f}" for f in os.listdir(img_dir) if os.path.isfile(f"{img_dir}/{f}")]
n_imgs = len(imgFLs)
print(f"There are {n_imgs} images under {img_dir}")
with torch.no_grad():
images = torch.Tensor().to(torch.uint8)
for i in range(len(imgFLs)):
im = imageio.imread(imgFLs[i])
im_new = net(torch.Tensor(im).unsqueeze(0).permute(0,3,1,2))
im_new = (im_new*255).squeeze().to(torch.uint8)
im_new = torch.stack((im_new, im_new, im_new), dim = 2)
im = torch.Tensor(im).to(torch.uint8)
this_im = torch.cat((im, im_new), dim = 1)
images = torch.cat((images, this_im), dim = 0)
imageio.imsave(outputFL,images.to(torch.uint8).numpy())
print(f"{outputFL} generated.")
data_loader_train = prepare_dataset(img_dir_pos = 'Dataset/train/SkinPhoto',
img_dir_neg = 'Dataset/train/nonSkinPhoto', batch_size = 5)
data_loader_eval = prepare_dataset(img_dir_pos = 'Dataset/validation/pos',
img_dir_neg = 'Dataset/validation/neg', batch_size = 20)
net = Net()
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print(f"device: {device}")
net.to(device)
optimizer = torch.optim.Adam(net.parameters())
epochs = 50
net, losses_train, losses_eval = train(net, epochs, data_loader_train, data_loader_eval)
plot_loss(losses_train, losses_eval)
loss_min , idx = torch.min(losses_eval, 0)
networkFL = f'networks/model_epo{idx}.pt'
print(f"Model with the lowest eval loss: {networkFL}, epoch = {idx}, and loss_eval = {loss_min:4f}")
sys.exit()
| true
| true
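Because Net uses only 1x1 convolutions, it is a per-pixel skin classifier that works at any resolution; a quick shape check (assuming the Net class above is in scope):

import torch

net = Net()
with torch.no_grad():
    probs = net(torch.rand(2, 3, 150, 150))   # (B, 3, H, W) image batch
print(probs.shape)                            # torch.Size([2, 1, 150, 150])
print(float(probs.min()) >= 0.0, float(probs.max()) <= 1.0)   # sigmoid output: True True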
|
1c48b8b8acba07ae3f975d62e14e3107708605ad
| 2,779
|
py
|
Python
|
densmap/densmap.py
|
hhcho/densvis
|
d65bb3133a5072356f45d2d6f4f0d16ad33032fd
|
[
"MIT"
] | 102
|
2020-05-15T14:17:17.000Z
|
2022-03-23T08:38:24.000Z
|
densmap/densmap.py
|
hhcho/densvis
|
d65bb3133a5072356f45d2d6f4f0d16ad33032fd
|
[
"MIT"
] | 11
|
2020-05-16T12:02:19.000Z
|
2022-03-22T16:41:43.000Z
|
densmap/densmap.py
|
hhcho/densvis
|
d65bb3133a5072356f45d2d6f4f0d16ad33032fd
|
[
"MIT"
] | 11
|
2020-05-15T22:06:57.000Z
|
2021-08-18T14:13:15.000Z
|
import sys
import numpy as np
import argparse
import pickle
import densmap
from sklearn.datasets import load_digits
def parse_args():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('-i','--input', help='Input .txt or .pkl', default='data.txt')
parser.add_argument('-o','--outname', help='Output prefix for saving _emb.txt, _dens.txt',
default='out')
parser.add_argument('-f','--dens_frac', type=float, default=0.3)
parser.add_argument('-l','--dens_lambda', type=float, default=2.0)
parser.add_argument('-s','--var_shift', type=float, default=0.1)
parser.add_argument('-d','--ndim', type=int, default=2, help='Embedding dimension (default: %(default)s)')
parser.add_argument('-n','--n-epochs', type=int, help='Number of epochs', default=750)
parser.add_argument('-k','--n-nei', type=int, default=30, help='Number of neighbors (default: %(default)s)')
parser.add_argument('--final_dens', action='store_true', default=True)
parser.add_argument('--no_final_dens', dest='final_dens', action='store_false')
parser.add_argument('--outtype', choices=('pkl','txt'), default='txt', help='Output format type (default: %(default)s)')
return parser
def main(args):
if args.input.endswith('.txt'):
data = np.loadtxt(args.input)
elif args.input.endswith('.pkl'):
data = pickle.load(open(args.input,'rb'))
else:
raise RuntimeError(f'File format for {args.input} not supported')
if data.shape[0] < data.shape[1]:
data = data.T
emb = densmap.densMAP(verbose=True,
n_components=args.ndim,
n_neighbors=args.n_nei,
n_epochs=args.n_epochs,
dens_frac=args.dens_frac,
dens_lambda=args.dens_lambda,
logdist_shift=0,
var_shift=args.var_shift,
final_dens=args.final_dens).fit_transform(data)
outname = args.outname
if args.final_dens:
(emb, ro, re) = emb
rero = np.stack((ro,re)).transpose()
if args.outtype=='txt':
np.savetxt(outname+'_dens.txt',rero, fmt='%e')
elif args.outtype=='pkl':
with open(outname + '_dens.pkl','wb') as f:
pickle.dump(rero, f)
else:
raise RuntimeError
if args.outtype == 'txt':
np.savetxt(outname+'_emb.txt',emb, fmt='%e')
elif args.outtype == 'pkl':
with open(outname + '_emb.pkl','wb') as f:
pickle.dump(emb, f)
else: # should not reach here
raise RuntimeError
print("Done")
if __name__ == '__main__':
main(parse_args().parse_args())
| 38.597222
| 124
| 0.594818
|
import sys
import numpy as np
import argparse
import pickle
import densmap
from sklearn.datasets import load_digits
def parse_args():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('-i','--input', help='Input .txt or .pkl', default='data.txt')
parser.add_argument('-o','--outname', help='Output prefix for saving _emb.txt, _dens.txt',
default='out')
parser.add_argument('-f','--dens_frac', type=float, default=0.3)
parser.add_argument('-l','--dens_lambda', type=float, default=2.0)
parser.add_argument('-s','--var_shift', type=float, default=0.1)
parser.add_argument('-d','--ndim', type=int, default=2, help='Embedding dimension (default: %(default)s)')
parser.add_argument('-n','--n-epochs', type=int, help='Number of epochs', default=750)
parser.add_argument('-k','--n-nei', type=int, default=30, help='Number of neighbors (default: %(default)s)')
parser.add_argument('--final_dens', action='store_true', default=True)
parser.add_argument('--no_final_dens', dest='final_dens', action='store_false')
parser.add_argument('--outtype', choices=('pkl','txt'), default='txt', help='Output format type (default: %(default)s)')
return parser
def main(args):
if args.input.endswith('.txt'):
data = np.loadtxt(args.input)
elif args.input.endswith('.pkl'):
data = pickle.load(open(args.input,'rb'))
else:
raise RuntimeError(f'File format for {args.input} not supported')
if data.shape[0] < data.shape[1]:
data = data.T
emb = densmap.densMAP(verbose=True,
n_components=args.ndim,
n_neighbors=args.n_nei,
n_epochs=args.n_epochs,
dens_frac=args.dens_frac,
dens_lambda=args.dens_lambda,
logdist_shift=0,
var_shift=args.var_shift,
final_dens=args.final_dens).fit_transform(data)
outname = args.outname
if args.final_dens:
(emb, ro, re) = emb
rero = np.stack((ro,re)).transpose()
if args.outtype=='txt':
np.savetxt(outname+'_dens.txt',rero, fmt='%e')
elif args.outtype=='pkl':
with open(outname + '_dens.pkl','wb') as f:
pickle.dump(rero, f)
else:
raise RuntimeError
if args.outtype == 'txt':
np.savetxt(outname+'_emb.txt',emb, fmt='%e')
elif args.outtype == 'pkl':
with open(outname + '_emb.pkl','wb') as f:
pickle.dump(emb, f)
else:
raise RuntimeError
print("Done")
if __name__ == '__main__':
main(parse_args().parse_args())
| true
| true
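An assumed invocation sketch for the script above, driving its own entry point programmatically (equivalent to running "python densmap.py -i data.txt -o out -n 50"; requires the densmap package that the script imports):

import numpy as np

np.savetxt('data.txt', np.random.rand(200, 10))    # toy input: 200 samples, 10 features
args = parse_args().parse_args(['-i', 'data.txt', '-o', 'out', '-n', '50'])
main(args)    # writes out_emb.txt and, since final_dens defaults to True, out_dens.txt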
|
1c48b9b45a802cd383a85fca3e4fa2436b12d4f7
| 24,710
|
py
|
Python
|
extern_tools/mmd_tools_local/core/morph.py
|
block2333/CATSzh-CNLang
|
b47b68c7fc944c30bcf666773308927ca81967e5
|
[
"MIT"
] | 3
|
2021-11-03T15:14:35.000Z
|
2021-12-10T05:35:38.000Z
|
extern_tools/mmd_tools_local/core/morph.py
|
Vapey/cats-blender-plugin
|
bbc081c504e4fbb28b89e4d0d3b8a716b00bc334
|
[
"MIT"
] | null | null | null |
extern_tools/mmd_tools_local/core/morph.py
|
Vapey/cats-blender-plugin
|
bbc081c504e4fbb28b89e4d0d3b8a716b00bc334
|
[
"MIT"
] | 1
|
2021-01-28T12:19:40.000Z
|
2021-01-28T12:19:40.000Z
|
# -*- coding: utf-8 -*-
import re
import bpy
from mmd_tools_local import bpyutils
from mmd_tools_local.bpyutils import SceneOp
from mmd_tools_local.bpyutils import ObjectOp
from mmd_tools_local.bpyutils import TransformConstraintOp
class FnMorph(object):
def __init__(self, morph, model):
self.__morph = morph
self.__rig = model
@classmethod
def storeShapeKeyOrder(cls, obj, shape_key_names):
if len(shape_key_names) < 1:
return
assert(SceneOp(bpy.context).active_object == obj)
if obj.data.shape_keys is None:
bpy.ops.object.shape_key_add()
if bpy.app.version < (2, 73, 0):
def __move_to_bottom(key_blocks, name):
obj.active_shape_key_index = key_blocks.find(name)
for move in range(len(key_blocks)-1-obj.active_shape_key_index):
bpy.ops.object.shape_key_move(type='DOWN')
else:
def __move_to_bottom(key_blocks, name):
obj.active_shape_key_index = key_blocks.find(name)
bpy.ops.object.shape_key_move(type='BOTTOM')
key_blocks = obj.data.shape_keys.key_blocks
for name in shape_key_names:
if name not in key_blocks:
obj.shape_key_add(name=name)
elif len(key_blocks) > 1:
__move_to_bottom(key_blocks, name)
@classmethod
def fixShapeKeyOrder(cls, obj, shape_key_names):
if len(shape_key_names) < 1:
return
assert(SceneOp(bpy.context).active_object == obj)
key_blocks = getattr(obj.data.shape_keys, 'key_blocks', None)
if key_blocks is None:
return
if bpy.app.version < (2, 73, 0):
len_key_blocks = len(key_blocks)
for ii, name in enumerate(x for x in reversed(shape_key_names) if x in key_blocks):
obj.active_shape_key_index = idx = key_blocks.find(name)
offset = (len_key_blocks - 1 - idx) - ii
move_type = 'UP' if offset < 0 else 'DOWN'
for move in range(abs(offset)):
bpy.ops.object.shape_key_move(type=move_type)
else:
for name in shape_key_names:
idx = key_blocks.find(name)
if idx < 0:
continue
obj.active_shape_key_index = idx
bpy.ops.object.shape_key_move(type='BOTTOM')
@staticmethod
def get_morph_slider(rig):
return _MorphSlider(rig)
@staticmethod
def category_guess(morph):
name_lower = morph.name.lower()
if 'mouth' in name_lower:
morph.category = 'MOUTH'
elif 'eye' in name_lower:
if 'brow' in name_lower:
morph.category = 'EYEBROW'
else:
morph.category = 'EYE'
@classmethod
def load_morphs(cls, rig):
mmd_root = rig.rootObject().mmd_root
vertex_morphs = mmd_root.vertex_morphs
for obj in rig.meshes():
for kb in getattr(obj.data.shape_keys, 'key_blocks', ())[1:]:
if not kb.name.startswith('mmd_') and kb.name not in vertex_morphs:
item = vertex_morphs.add()
item.name = kb.name
item.name_e = kb.name
cls.category_guess(item)
@staticmethod
def remove_shape_key(obj, key_name):
key_blocks = getattr(obj.data.shape_keys, 'key_blocks', None)
if key_blocks and key_name in key_blocks:
ObjectOp(obj).shape_key_remove(key_blocks[key_name])
@staticmethod
def copy_shape_key(obj, src_name, dest_name):
key_blocks = getattr(obj.data.shape_keys, 'key_blocks', None)
if key_blocks and src_name in key_blocks:
if dest_name in key_blocks:
ObjectOp(obj).shape_key_remove(key_blocks[dest_name])
obj.active_shape_key_index = key_blocks.find(src_name)
obj.show_only_shape_key, last = True, obj.show_only_shape_key
obj.shape_key_add(name=dest_name, from_mix=True)
obj.show_only_shape_key = last
obj.active_shape_key_index = key_blocks.find(dest_name)
@staticmethod
def get_uv_morph_vertex_groups(obj, morph_name=None, offset_axes='XYZW'):
pattern = 'UV_%s[+-][%s]$'%(morph_name or '.{1,}', offset_axes or 'XYZW')
# yield (vertex_group, morph_name, axis),...
return ((g, g.name[3:-2], g.name[-2:]) for g in obj.vertex_groups if re.match(pattern, g.name))
@staticmethod
def copy_uv_morph_vertex_groups(obj, src_name, dest_name):
for vg, n, x in FnMorph.get_uv_morph_vertex_groups(obj, dest_name):
obj.vertex_groups.remove(vg)
for vg_name in tuple(i[0].name for i in FnMorph.get_uv_morph_vertex_groups(obj, src_name)):
obj.vertex_groups.active = obj.vertex_groups[vg_name]
override = {'object':obj, 'window':bpy.context.window, 'region':bpy.context.region}
bpy.ops.object.vertex_group_copy(override)
obj.vertex_groups.active.name = vg_name.replace(src_name, dest_name)
@staticmethod
def clean_uv_morph_vertex_groups(obj):
# remove empty vertex groups of uv morphs
vg_indices = {g.index for g, n, x in FnMorph.get_uv_morph_vertex_groups(obj)}
vertex_groups = obj.vertex_groups
for v in obj.data.vertices:
for x in v.groups:
if x.group in vg_indices and x.weight > 0:
vg_indices.remove(x.group)
for i in sorted(vg_indices, reverse=True):
vg = vertex_groups[i]
m = obj.modifiers.get('mmd_bind%s'%hash(vg.name), None)
if m:
obj.modifiers.remove(m)
vertex_groups.remove(vg)
@staticmethod
def get_uv_morph_offset_map(obj, morph):
offset_map = {} # offset_map[vertex_index] = offset_xyzw
if morph.data_type == 'VERTEX_GROUP':
scale = morph.vertex_group_scale
axis_map = {g.index:x for g, n, x in FnMorph.get_uv_morph_vertex_groups(obj, morph.name)}
for v in obj.data.vertices:
i = v.index
for x in v.groups:
if x.group in axis_map and x.weight > 0:
axis, weight = axis_map[x.group], x.weight
d = offset_map.setdefault(i, [0, 0, 0, 0])
d['XYZW'.index(axis[1])] += -weight*scale if axis[0] == '-' else weight*scale
else:
for val in morph.data:
i = val.index
if i in offset_map:
offset_map[i] = [a+b for a, b in zip(offset_map[i], val.offset)]
else:
offset_map[i] = val.offset
return offset_map
@staticmethod
def store_uv_morph_data(obj, morph, offsets=None, offset_axes='XYZW'):
vertex_groups = obj.vertex_groups
morph_name = getattr(morph, 'name', None)
if offset_axes:
for vg, n, x in FnMorph.get_uv_morph_vertex_groups(obj, morph_name, offset_axes):
vertex_groups.remove(vg)
if not morph_name or not offsets:
return
axis_indices = tuple('XYZW'.index(x) for x in offset_axes) or tuple(range(4))
offset_map = FnMorph.get_uv_morph_offset_map(obj, morph) if offset_axes else {}
for data in offsets:
idx, offset = data.index, data.offset
for i in axis_indices:
offset_map.setdefault(idx, [0, 0, 0, 0])[i] += round(offset[i], 5)
max_value = max(max(abs(x) for x in v) for v in offset_map.values() or ([0],))
scale = morph.vertex_group_scale = max(abs(morph.vertex_group_scale), max_value)
for idx, offset in offset_map.items():
for val, axis in zip(offset, 'XYZW'):
if abs(val) > 1e-4:
vg_name = 'UV_{0}{1}{2}'.format(morph_name, '-' if val < 0 else '+', axis)
vg = vertex_groups.get(vg_name, None) or vertex_groups.new(name=vg_name)
vg.add(index=[idx], weight=abs(val)/scale, type='REPLACE')
def update_mat_related_mesh(self, new_mesh=None):
for offset in self.__morph.data:
# Use the new_mesh if provided
meshObj = new_mesh
if new_mesh is None:
# Try to find the mesh by material name
meshObj = self.__rig.findMesh(offset.material)
if meshObj is None:
                # At this point, we need to loop through all the meshes
for mesh in self.__rig.meshes():
if mesh.data.materials.find(offset.material) >= 0:
meshObj = mesh
break
# Finally update the reference
if meshObj is not None:
offset.related_mesh = meshObj.data.name
class _MorphSlider:
def __init__(self, model):
self.__rig = model
def placeholder(self, create=False, binded=False):
rig = self.__rig
root = rig.rootObject()
obj = next((x for x in root.children if x.mmd_type == 'PLACEHOLDER' and x.type == 'MESH'), None)
if create and obj is None:
obj = bpy.data.objects.new(name='.placeholder', object_data=bpy.data.meshes.new('.placeholder'))
obj.mmd_type = 'PLACEHOLDER'
obj.parent = root
SceneOp(bpy.context).link_object(obj)
if obj and obj.data.shape_keys is None:
key = obj.shape_key_add(name='--- morph sliders ---')
key.mute = True
if binded and obj and obj.data.shape_keys.key_blocks[0].mute:
return None
return obj
@property
def dummy_armature(self):
obj = self.placeholder()
return self.__dummy_armature(obj) if obj else None
def __dummy_armature(self, obj, create=False):
arm = next((x for x in obj.children if x.mmd_type == 'PLACEHOLDER' and x.type == 'ARMATURE'), None)
if create and arm is None:
arm = bpy.data.objects.new(name='.dummy_armature', object_data=bpy.data.armatures.new(name='.dummy_armature'))
arm.mmd_type = 'PLACEHOLDER'
arm.parent = obj
SceneOp(bpy.context).link_object(arm)
arm.data.draw_type = 'STICK'
return arm
def get(self, morph_name):
obj = self.placeholder()
if obj is None:
return None
key_blocks = obj.data.shape_keys.key_blocks
if key_blocks[0].mute:
return None
return key_blocks.get(morph_name, None)
def create(self):
self.__rig.loadMorphs()
obj = self.placeholder(create=True)
self.__load(obj, self.__rig.rootObject().mmd_root)
return obj
def __load(self, obj, mmd_root):
attr_list = ('group', 'vertex', 'bone', 'uv', 'material')
morph_key_blocks = obj.data.shape_keys.key_blocks
for m in (x for attr in attr_list for x in getattr(mmd_root, attr+'_morphs', ())):
name = m.name
#if name[-1] == '\\': # fix driver's bug???
# m.name = name = name + ' '
if name and name not in morph_key_blocks:
obj.shape_key_add(name=name)
@staticmethod
def __driver_variables(id_data, path, index=-1):
d = id_data.driver_add(path, index)
variables = d.driver.variables
for x in variables:
variables.remove(x)
return d.driver, variables
@staticmethod
def __add_single_prop(variables, id_obj, data_path, prefix):
var = variables.new()
var.name = prefix + str(len(variables))
var.type = 'SINGLE_PROP'
target = var.targets[0]
target.id_type = 'OBJECT'
target.id = id_obj
target.data_path = data_path
return var
def __cleanup(self, names_in_use=None):
names_in_use = names_in_use or {}
rig = self.__rig
for mesh in rig.meshes():
for kb in getattr(mesh.data.shape_keys, 'key_blocks', ()):
if kb.name.startswith('mmd_bind') and kb.name not in names_in_use:
kb.driver_remove('value')
kb.relative_key.mute = False
ObjectOp(mesh).shape_key_remove(kb)
for m in mesh.modifiers: # uv morph
if m.name.startswith('mmd_bind') and m.name not in names_in_use:
mesh.modifiers.remove(m)
from mmd_tools_local.core.shader import _MaterialMorph
for m in rig.materials():
if m and m.node_tree:
for n in sorted((x for x in m.node_tree.nodes if x.name.startswith('mmd_bind')), key=lambda x: -x.location[0]):
_MaterialMorph.reset_morph_links(n)
m.node_tree.nodes.remove(n)
attributes = set(TransformConstraintOp.min_max_attributes('LOCATION', 'to'))
attributes |= set(TransformConstraintOp.min_max_attributes('ROTATION', 'to'))
for b in rig.armature().pose.bones:
for c in b.constraints:
if c.name.startswith('mmd_bind') and c.name[:-4] not in names_in_use:
for attr in attributes:
c.driver_remove(attr)
b.constraints.remove(c)
def unbind(self):
mmd_root = self.__rig.rootObject().mmd_root
for m in mmd_root.bone_morphs:
for d in m.data:
d.name = ''
for m in mmd_root.material_morphs:
for d in m.data:
d.name = ''
obj = self.placeholder()
if obj:
obj.data.shape_keys.key_blocks[0].mute = True
arm = self.__dummy_armature(obj)
if arm:
for b in arm.pose.bones:
if b.name.startswith('mmd_bind'):
b.driver_remove('location')
b.driver_remove('rotation_quaternion')
self.__cleanup()
def bind(self):
rig = self.__rig
root = rig.rootObject()
armObj = rig.armature()
mmd_root = root.mmd_root
obj = self.create()
arm = self.__dummy_armature(obj, create=True)
morph_key_blocks = obj.data.shape_keys.key_blocks
# data gathering
group_map = {}
shape_key_map = {}
uv_morph_map = {}
for mesh in rig.meshes():
mesh.show_only_shape_key = False
key_blocks = getattr(mesh.data.shape_keys, 'key_blocks', ())
for kb in key_blocks:
kb_name = kb.name
if kb_name not in morph_key_blocks:
continue
name_bind = 'mmd_bind%s'%hash(morph_key_blocks[kb_name])
if name_bind not in key_blocks:
mesh.shape_key_add(name=name_bind)
kb_bind = key_blocks[name_bind]
kb_bind.relative_key = kb
kb_bind.slider_min = -10
kb_bind.slider_max = 10
data_path = 'data.shape_keys.key_blocks["%s"].value'%kb_name.replace('"', '\\"')
groups = []
shape_key_map.setdefault(name_bind, []).append((kb_bind, data_path, groups))
group_map.setdefault(('vertex_morphs', kb_name), []).append(groups)
uv_layers = [l.name for l in mesh.data.uv_layers if not l.name.startswith('_')]
uv_layers += ['']*(5-len(uv_layers))
for vg, morph_name, axis in FnMorph.get_uv_morph_vertex_groups(mesh):
morph = mmd_root.uv_morphs.get(morph_name, None)
if morph is None or morph.data_type != 'VERTEX_GROUP':
continue
uv_layer = '_'+uv_layers[morph.uv_index] if axis[1] in 'ZW' else uv_layers[morph.uv_index]
if uv_layer not in mesh.data.uv_layers:
continue
name_bind = 'mmd_bind%s'%hash(vg.name)
uv_morph_map.setdefault(name_bind, ())
mod = mesh.modifiers.get(name_bind, None) or mesh.modifiers.new(name=name_bind, type='UV_WARP')
mod.show_expanded = False
mod.vertex_group = vg.name
mod.axis_u, mod.axis_v = ('Y', 'X') if axis[1] in 'YW' else ('X', 'Y')
mod.uv_layer = uv_layer
name_bind = 'mmd_bind%s'%hash(morph_name)
mod.object_from = mod.object_to = arm
if axis[0] == '-':
mod.bone_from, mod.bone_to = 'mmd_bind_ctrl_base', name_bind
else:
mod.bone_from, mod.bone_to = name_bind, 'mmd_bind_ctrl_base'
bone_offset_map = {}
with bpyutils.edit_object(arm) as data:
edit_bones = data.edit_bones
def __get_bone(name, layer, parent):
b = edit_bones.get(name, None) or edit_bones.new(name=name)
b.layers = [x == layer for x in range(len(b.layers))]
b.head = (0, 0, 0)
b.tail = (0, 0, 1)
b.use_deform = False
b.parent = parent
return b
for m in mmd_root.bone_morphs:
data_path = 'data.shape_keys.key_blocks["%s"].value'%m.name.replace('"', '\\"')
for d in m.data:
if not d.bone:
d.name = ''
continue
d.name = name_bind = 'mmd_bind%s'%hash(d)
b = __get_bone(name_bind, 10, None)
groups = []
bone_offset_map[name_bind] = (m.name, d, b.name, data_path, groups)
group_map.setdefault(('bone_morphs', m.name), []).append(groups)
ctrl_base = __get_bone('mmd_bind_ctrl_base', 11, None)
for m in mmd_root.uv_morphs:
morph_name = m.name.replace('"', '\\"')
data_path = 'data.shape_keys.key_blocks["%s"].value'%morph_name
scale_path = 'mmd_root.uv_morphs["%s"].vertex_group_scale'%morph_name
name_bind = 'mmd_bind%s'%hash(m.name)
b = __get_bone(name_bind, 11, ctrl_base)
groups = []
uv_morph_map.setdefault(name_bind, []).append((b.name, data_path, scale_path, groups))
group_map.setdefault(('uv_morphs', m.name), []).append(groups)
used_bone_names = bone_offset_map.keys()|uv_morph_map.keys()
used_bone_names.add(ctrl_base.name)
for b in edit_bones: # cleanup
if b.name.startswith('mmd_bind') and b.name not in used_bone_names:
edit_bones.remove(b)
material_offset_map = {}
for m in mmd_root.material_morphs:
morph_name = m.name.replace('"', '\\"')
data_path = 'data.shape_keys.key_blocks["%s"].value'%morph_name
groups = []
group_map.setdefault(('material_morphs', m.name), []).append(groups)
material_offset_map.setdefault('group_dict', {})[m.name] = (data_path, groups)
for d in m.data:
d.name = name_bind = 'mmd_bind%s'%hash(d)
table = material_offset_map.setdefault(d.material_id, ([], []))
table[1 if d.offset_type == 'ADD' else 0].append((m.name, d, name_bind))
for m in mmd_root.group_morphs:
if len(m.data) != len(set(m.data.keys())):
print(' * Found duplicated morph data in Group Morph "%s"'%m.name)
morph_name = m.name.replace('"', '\\"')
morph_path = 'data.shape_keys.key_blocks["%s"].value'%morph_name
for d in m.data:
param = (morph_name, d.name.replace('"', '\\"'))
factor_path = 'mmd_root.group_morphs["%s"].data["%s"].factor'%param
for groups in group_map.get((d.morph_type, d.name), ()):
groups.append((m.name, morph_path, factor_path))
self.__cleanup(shape_key_map.keys()|bone_offset_map.keys()|uv_morph_map.keys())
def __config_groups(variables, expression, groups):
for g_name, morph_path, factor_path in groups:
var = self.__add_single_prop(variables, obj, morph_path, 'g')
fvar = self.__add_single_prop(variables, root, factor_path, 'w')
expression = '%s+%s*%s'%(expression, var.name, fvar.name)
return expression
# vertex morphs
for kb_bind, morph_data_path, groups in (i for l in shape_key_map.values() for i in l):
driver, variables = self.__driver_variables(kb_bind, 'value')
var = self.__add_single_prop(variables, obj, morph_data_path, 'v')
driver.expression = '-(%s)'%__config_groups(variables, var.name, groups)
            kb_bind.relative_key.mute = True # mute the source key; the driven kb_bind reproduces its effect
kb_bind.mute = False
# bone morphs
def __config_bone_morph(constraints, map_type, attributes, val, val_str):
c_name = 'mmd_bind%s.%s'%(hash(data), map_type[:3])
c = TransformConstraintOp.create(constraints, c_name, map_type)
TransformConstraintOp.update_min_max(c, val, None)
c.show_expanded = False
c.target = arm
c.subtarget = bname
for attr in attributes:
driver, variables = self.__driver_variables(armObj, c.path_from_id(attr))
var = self.__add_single_prop(variables, obj, morph_data_path, 'b')
expression = __config_groups(variables, var.name, groups)
sign = '-' if attr.startswith('to_min') else ''
driver.expression = '%s%s*(%s)'%(sign, val_str, expression)
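        # TRANSFORM constraints copy each helper bone's pose onto the real
        # bone; the sliders drive the constraint ranges, scaled by pi for
        # rotations and 100 for locations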
from math import pi
attributes_rot = TransformConstraintOp.min_max_attributes('ROTATION', 'to')
attributes_loc = TransformConstraintOp.min_max_attributes('LOCATION', 'to')
for morph_name, data, bname, morph_data_path, groups in bone_offset_map.values():
b = arm.pose.bones[bname]
b.location = data.location
b.rotation_quaternion = data.rotation.__class__(*data.rotation.to_axis_angle()) # Fix for consistency
b.is_mmd_shadow_bone = True
b.mmd_shadow_bone_type = 'BIND'
pb = armObj.pose.bones[data.bone]
__config_bone_morph(pb.constraints, 'ROTATION', attributes_rot, pi, 'pi')
__config_bone_morph(pb.constraints, 'LOCATION', attributes_loc, 100, '100')
# uv morphs
b = arm.pose.bones['mmd_bind_ctrl_base']
b.is_mmd_shadow_bone = True
b.mmd_shadow_bone_type = 'BIND'
for bname, data_path, scale_path, groups in (i for l in uv_morph_map.values() for i in l):
b = arm.pose.bones[bname]
b.is_mmd_shadow_bone = True
b.mmd_shadow_bone_type = 'BIND'
driver, variables = self.__driver_variables(b, 'location', index=0)
var = self.__add_single_prop(variables, obj, data_path, 'u')
fvar = self.__add_single_prop(variables, root, scale_path, 's')
driver.expression = '(%s)*%s'%(__config_groups(variables, var.name, groups), fvar.name)
# material morphs
from mmd_tools_local.core.shader import _MaterialMorph
group_dict = material_offset_map.get('group_dict', {})
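        # helper: build the morph mix nodes on the material and drive each
        # node's first input from the corresponding morph slider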
def __config_material_morph(mat, morph_list):
nodes = _MaterialMorph.setup_morph_nodes(mat, tuple(x[1] for x in morph_list))
for (morph_name, data, name_bind), node in zip(morph_list, nodes):
node.label, node.name = morph_name, name_bind
data_path, groups = group_dict[morph_name]
driver, variables = self.__driver_variables(mat.node_tree, node.inputs[0].path_from_id('default_value'))
var = self.__add_single_prop(variables, obj, data_path, 'm')
driver.expression = '%s'%__config_groups(variables, var.name, groups)
for mat in (m for m in rig.materials() if m and m.use_nodes and not m.name.startswith('mmd_')):
mat_id = mat.mmd_material.material_id
mul_all, add_all = material_offset_map.get(-1, ([], []))
mul_list, add_list = material_offset_map.get('' if mat_id < 0 else mat_id, ([], []))
morph_list = tuple(mul_all+mul_list+add_all+add_list)
__config_material_morph(mat, morph_list)
mat_edge = bpy.data.materials.get('mmd_edge.'+mat.name, None)
if mat_edge:
__config_material_morph(mat_edge, morph_list)
morph_key_blocks[0].mute = False
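
# --- usage sketch (hypothetical, for illustration only) ---
# Assuming `model` is the mmd_tools_local model wrapper used elsewhere in this
# module, the slider machinery above would typically be driven like this:
#
#   slider = FnMorph.get_morph_slider(model)  # returns a _MorphSlider
#   slider.create()   # build the '--- morph sliders ---' placeholder keys
#   slider.bind()     # generate the mmd_bind* drivers and constraints
#   slider.unbind()   # mute the sliders and strip the generated helpers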