# pyopencl/__init__.py
# -*- coding: utf-8 -*-
from __future__ import division, absolute_import, print_function
__copyright__ = "Copyright (C) 2009-15 Andreas Kloeckner"
__license__ = """
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import six
from six.moves import input, intern
from pyopencl.version import VERSION, VERSION_STATUS, VERSION_TEXT # noqa
import logging
logger = logging.getLogger(__name__)
import os
os.environ["PYOPENCL_HOME"] = os.path.dirname(os.path.abspath(__file__))
try:
import pyopencl._cl as _cl
except ImportError:
import os
from os.path import dirname, join, realpath
if realpath(join(os.getcwd(), "pyopencl")) == realpath(dirname(__file__)):
from warnings import warn
warn("It looks like you are importing PyOpenCL from "
"its source directory. This likely won't work.")
raise
import numpy as np
import sys
_PYPY = '__pypy__' in sys.builtin_module_names
_CPY2 = not _PYPY and sys.version_info < (3,)
from pyopencl._cl import ( # noqa
get_cl_header_version,
program_kind,
status_code,
platform_info,
device_type,
device_info,
device_fp_config,
device_mem_cache_type,
device_local_mem_type,
device_exec_capabilities,
device_svm_capabilities,
command_queue_properties,
context_info,
gl_context_info,
context_properties,
command_queue_info,
queue_properties,
mem_flags,
svm_mem_flags,
channel_order,
channel_type,
mem_object_type,
mem_info,
image_info,
addressing_mode,
filter_mode,
sampler_info,
map_flags,
program_info,
program_build_info,
program_binary_type,
kernel_info,
kernel_arg_info,
kernel_arg_address_qualifier,
kernel_arg_access_qualifier,
kernel_arg_type_qualifier,
kernel_work_group_info,
event_info,
command_type,
command_execution_status,
profiling_info,
mem_migration_flags,
device_partition_property,
device_affinity_domain,
Error, MemoryError, LogicError, RuntimeError,
Platform,
get_platforms,
Device,
Context,
CommandQueue,
LocalMemory,
MemoryObjectHolder,
MemoryObject,
MemoryMap,
Buffer,
_Program,
Kernel,
Event,
wait_for_events,
NannyEvent,
enqueue_nd_range_kernel,
_enqueue_marker,
_enqueue_read_buffer,
_enqueue_write_buffer,
_enqueue_copy_buffer,
_enqueue_read_buffer_rect,
_enqueue_write_buffer_rect,
_enqueue_copy_buffer_rect,
_enqueue_read_image,
_enqueue_copy_image,
_enqueue_write_image,
_enqueue_copy_image_to_buffer,
_enqueue_copy_buffer_to_image,
have_gl,
ImageFormat,
get_supported_image_formats,
Image,
Sampler,
DeviceTopologyAmd,
)
if not _PYPY:
# FIXME: Add back to default set when pypy support catches up
from pyopencl._cl import ( # noqa
enqueue_map_buffer,
enqueue_map_image,
)
if get_cl_header_version() >= (1, 1):
from pyopencl._cl import ( # noqa
UserEvent,
)
if get_cl_header_version() >= (1, 2):
from pyopencl._cl import ( # noqa
_enqueue_marker_with_wait_list,
_enqueue_barrier_with_wait_list,
unload_platform_compiler,
enqueue_migrate_mem_objects,
_enqueue_fill_buffer,
enqueue_fill_image,
ImageDescriptor,
)
if get_cl_header_version() >= (2, 0):
from pyopencl._cl import ( # noqa
SVMAllocation,
SVM,
# FIXME
#enqueue_svm_migratemem,
)
if _cl.have_gl():
from pyopencl._cl import ( # noqa
gl_object_type,
gl_texture_info,
GLBuffer,
GLRenderBuffer,
GLTexture,
)
try:
from pyopencl._cl import get_apple_cgl_share_group # noqa
except ImportError:
pass
try:
from pyopencl._cl import ( # noqa
enqueue_acquire_gl_objects,
enqueue_release_gl_objects,
)
except ImportError:
pass
import inspect as _inspect
CONSTANT_CLASSES = tuple(
getattr(_cl, name) for name in dir(_cl)
if _inspect.isclass(getattr(_cl, name))
and name[0].islower() and name not in ["zip", "map", "range"])
# {{{ diagnostics
class CompilerWarning(UserWarning):
pass
def compiler_output(text):
import os
from warnings import warn
if int(os.environ.get("PYOPENCL_COMPILER_OUTPUT", "0")):
warn(text, CompilerWarning)
else:
warn("Non-empty compiler output encountered. Set the "
"environment variable PYOPENCL_COMPILER_OUTPUT=1 "
"to see more.", CompilerWarning)
# }}}
# {{{ find pyopencl shipped source code
def _find_pyopencl_include_path():
from pkg_resources import Requirement, resource_filename, DistributionNotFound
try:
# Try to find the resource with pkg_resources (the recommended
# setuptools approach)
include_path = resource_filename(
Requirement.parse("pyopencl"), "pyopencl/cl")
except DistributionNotFound:
# If pkg_resources can't find it (e.g. if the module is part of a
# frozen application), try to find the include path in the same
# directory as this file
from os.path import join, abspath, dirname, exists
include_path = join(abspath(dirname(__file__)), "cl")
# If that doesn't exist, just re-raise the exception caught from
# resource_filename.
if not exists(include_path):
raise
# Quote the path if it contains a space and is not quoted already.
# See https://github.com/inducer/pyopencl/issues/250 for discussion.
if ' ' in include_path and not include_path.startswith('"'):
return '"' + include_path + '"'
else:
return include_path
# }}}
# {{{ build option munging
def _split_options_if_necessary(options):
if isinstance(options, six.string_types):
import shlex
if six.PY2:
# shlex.split takes bytes (py2 str) on py2
if isinstance(options, six.text_type):
options = options.encode("utf-8")
else:
# shlex.split takes unicode (py3 str) on py3
if isinstance(options, six.binary_type):
options = options.decode("utf-8")
options = shlex.split(options)
return options
def _find_include_path(options):
def unquote(path):
if path.startswith('"') and path.endswith('"'):
return path[1:-1]
else:
return path
include_path = ["."]
option_idx = 0
while option_idx < len(options):
option = options[option_idx].strip()
if option.startswith("-I") or option.startswith("/I"):
if len(option) == 2:
if option_idx+1 < len(options):
include_path.append(unquote(options[option_idx+1]))
option_idx += 2
else:
include_path.append(unquote(option[2:].lstrip()))
option_idx += 1
else:
option_idx += 1
return include_path
def _options_to_bytestring(options):
def encode_if_necessary(s):
if isinstance(s, six.text_type):
return s.encode("utf-8")
else:
return s
return b" ".join(encode_if_necessary(s) for s in options)
# }}}
# {{{ Program (wrapper around _Program, adds caching support)
_DEFAULT_BUILD_OPTIONS = []
_DEFAULT_INCLUDE_OPTIONS = ["-I", _find_pyopencl_include_path()]
# map of platform.name to build options list
_PLAT_BUILD_OPTIONS = {
"Oclgrind": ["-D", "PYOPENCL_USING_OCLGRIND"],
}
def enable_debugging(platform_or_context):
"""Enables debugging for all code subsequently compiled by
PyOpenCL on the passed *platform*. Alternatively, a context
may be passed.
"""
if isinstance(platform_or_context, Context):
platform = platform_or_context.devices[0].platform
else:
platform = platform_or_context
if "AMD Accelerated" in platform.name:
_PLAT_BUILD_OPTIONS.setdefault(platform.name, []).extend(
["-g", "-O0"])
import os
os.environ["CPU_MAX_COMPUTE_UNITS"] = "1"
else:
from warnings import warn
warn("do not know how to enable debugging on '%s'"
% platform.name)
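# Illustrative usage (a hedged sketch; `ctx` is a placeholder, not defined
# here):
#
#   import pyopencl as cl
#   ctx = cl.create_some_context()
#   cl.enable_debugging(ctx)  # takes effect for subsequently built programs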
class Program(object):
def __init__(self, arg1, arg2=None, arg3=None):
if arg2 is None:
# 1-argument form: program
self._prg = arg1
elif arg3 is None:
# 2-argument form: context, source
context, source = arg1, arg2
from pyopencl.tools import is_spirv
if is_spirv(source):
# FIXME no caching in SPIR-V case
self._context = context
self._prg = _cl._create_program_with_il(context, source)
return
import sys
if isinstance(source, six.text_type) and sys.version_info < (3,):
from warnings import warn
warn("Received OpenCL source code in Unicode, "
"should be ASCII string. Attempting conversion.",
stacklevel=2)
source = source.encode()
self._context = context
self._source = source
self._prg = None
else:
context, device, binaries = arg1, arg2, arg3
self._context = context
self._prg = _cl._Program(context, device, binaries)
self._build_duration_info = None
def _get_prg(self):
if self._prg is not None:
return self._prg
else:
# "no program" can only happen in from-source case.
from warnings import warn
warn("Pre-build attribute access defeats compiler caching.",
stacklevel=3)
self._prg = _cl._Program(self._context, self._source)
del self._context
return self._prg
def get_info(self, arg):
return self._get_prg().get_info(arg)
def get_build_info(self, *args, **kwargs):
return self._get_prg().get_build_info(*args, **kwargs)
def all_kernels(self):
result = self._get_prg().all_kernels()
for knl in result:
knl._setup(self)
return result
def int_ptr(self):
return self._get_prg().int_ptr
int_ptr = property(int_ptr, doc=_cl._Program.int_ptr.__doc__)
def from_int_ptr(int_ptr_value, retain=True):
return Program(_cl._Program.from_int_ptr(int_ptr_value, retain))
from_int_ptr.__doc__ = _cl._Program.from_int_ptr.__doc__
from_int_ptr = staticmethod(from_int_ptr)
def __getattr__(self, attr):
try:
knl = Kernel(self, attr)
# Nvidia does not raise errors even for invalid names,
# but this will give an error if the kernel is invalid.
knl.num_args
knl._source = getattr(self, "_source", None)
if self._build_duration_info is not None:
build_descr, was_cached, duration = self._build_duration_info
if duration > 0.2:
logger.info("build program: kernel '%s' was part of a "
"lengthy %s (%.2f s)" % (attr, build_descr, duration))
return knl
except LogicError:
raise AttributeError("'%s' was not found as a program "
"info attribute or as a kernel name" % attr)
# {{{ build
@classmethod
def _process_build_options(cls, context, options, _add_include_path=False):
options = _split_options_if_necessary(options)
options = (options
+ _DEFAULT_BUILD_OPTIONS
+ _DEFAULT_INCLUDE_OPTIONS
+ _PLAT_BUILD_OPTIONS.get(
context.devices[0].platform.name, []))
import os
forced_options = os.environ.get("PYOPENCL_BUILD_OPTIONS")
if forced_options:
options = options + forced_options.split()
return (
_options_to_bytestring(options),
_find_include_path(options))
def build(self, options=[], devices=None, cache_dir=None):
options_bytes, include_path = self._process_build_options(
self._context, options)
if cache_dir is None:
cache_dir = getattr(self._context, 'cache_dir', None)
import os
build_descr = None
if os.environ.get("PYOPENCL_NO_CACHE") and self._prg is None:
build_descr = "uncached source build (cache disabled by user)"
self._prg = _cl._Program(self._context, self._source)
from time import time
start_time = time()
was_cached = False
if self._prg is not None:
# uncached
if build_descr is None:
build_descr = "uncached source build"
self._build_and_catch_errors(
lambda: self._prg.build(options_bytes, devices),
options_bytes=options_bytes)
else:
# cached
from pyopencl.cache import create_built_program_from_source_cached
self._prg, was_cached = self._build_and_catch_errors(
lambda: create_built_program_from_source_cached(
self._context, self._source, options_bytes, devices,
cache_dir=cache_dir, include_path=include_path),
options_bytes=options_bytes, source=self._source)
if was_cached:
build_descr = "cache retrieval"
else:
build_descr = "source build resulting from a binary cache miss"
del self._context
end_time = time()
self._build_duration_info = (build_descr, was_cached, end_time-start_time)
return self
def _build_and_catch_errors(self, build_func, options_bytes, source=None):
try:
return build_func()
except _cl.RuntimeError as e:
msg = str(e)
if options_bytes:
msg = msg + "\n(options: %s)" % options_bytes.decode("utf-8")
if source is not None:
from tempfile import NamedTemporaryFile
srcfile = NamedTemporaryFile(mode="wt", delete=False, suffix=".cl")
try:
srcfile.write(source)
finally:
srcfile.close()
msg = msg + "\n(source saved as %s)" % srcfile.name
code = e.code
routine = e.routine
err = _cl.RuntimeError(
_cl._ErrorRecord(
msg=msg,
code=code,
routine=routine))
# Python 3.2 outputs the whole list of currently active exceptions
# This serves to remove one (redundant) level from that nesting.
raise err
# }}}
def compile(self, options=[], devices=None, headers=[]):
options_bytes, _ = self._process_build_options(self._context, options)
self._get_prg().compile(options_bytes, devices, headers)
return self
def __eq__(self, other):
return self._get_prg() == other._get_prg()
def __ne__(self, other):
return self._get_prg() != other._get_prg()
def __hash__(self):
return hash(self._get_prg())
def create_program_with_built_in_kernels(context, devices, kernel_names):
if not isinstance(kernel_names, str):
kernel_names = ":".join(kernel_names)
return Program(_Program.create_with_built_in_kernels(
context, devices, kernel_names))
def link_program(context, programs, options=None, devices=None):
if options is None:
options = []
options_bytes = _options_to_bytestring(_split_options_if_necessary(options))
programs = [prg._get_prg() for prg in programs]
raw_prg = _Program.link(context, programs, options_bytes, devices)
return Program(raw_prg)
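# A minimal sketch of the separate compile/link flow enabled by the two
# functions above (requires CL 1.2; `ctx` and the kernel source are
# illustrative placeholders):
#
#   src = "__kernel void f(__global float *a) { a[0] = 1.0f; }"
#   obj = cl.Program(ctx, src).compile([])
#   prg = cl.link_program(ctx, [obj])
#   knl = prg.f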
# }}}
# {{{ monkeypatch C++ wrappers to add functionality
def _add_functionality():
def generic_get_cl_version(self):
import re
version_string = self.version
match = re.match(r"^OpenCL ([0-9]+)\.([0-9]+) .*$", version_string)
if match is None:
raise RuntimeError("%s %s returned non-conformant "
"platform version string '%s'" %
(type(self).__name__, self, version_string))
return int(match.group(1)), int(match.group(2))
# {{{ Platform
def platform_repr(self):
return "<pyopencl.Platform '%s' at 0x%x>" % (self.name, self.int_ptr)
Platform.__repr__ = platform_repr
Platform._get_cl_version = generic_get_cl_version
# }}}
# {{{ Device
def device_repr(self):
return "<pyopencl.Device '%s' on '%s' at 0x%x>" % (
self.name.strip(), self.platform.name.strip(), self.int_ptr)
def device_persistent_unique_id(self):
return (self.vendor, self.vendor_id, self.name, self.version)
Device.__repr__ = device_repr
# undocumented for now:
Device._get_cl_version = generic_get_cl_version
Device.persistent_unique_id = property(device_persistent_unique_id)
# }}}
# {{{ Context
context_old_init = Context.__init__
def context_init(self, devices, properties, dev_type, cache_dir=None):
if cache_dir is not None:
from warnings import warn
warn("The 'cache_dir' argument to the Context constructor "
"is deprecated and no longer has an effect. "
"It was removed because it only applied to the wrapper "
"object and not the context itself, leading to inconsistencies.",
DeprecationWarning, stacklevel=2)
context_old_init(self, devices, properties, dev_type)
def context_repr(self):
return "<pyopencl.Context at 0x%x on %s>" % (self.int_ptr,
", ".join(repr(dev) for dev in self.devices))
def context_get_cl_version(self):
return self.devices[0].platform._get_cl_version()
Context.__repr__ = context_repr
from pytools import memoize_method
Context._get_cl_version = memoize_method(context_get_cl_version)
# }}}
# {{{ CommandQueue
def command_queue_enter(self):
return self
def command_queue_exit(self, exc_type, exc_val, exc_tb):
self.finish()
def command_queue_get_cl_version(self):
return self.context._get_cl_version()
CommandQueue.__enter__ = command_queue_enter
CommandQueue.__exit__ = command_queue_exit
CommandQueue._get_cl_version = memoize_method(command_queue_get_cl_version)
# }}}
# {{{ _Program (the internal, non-caching version)
def program_get_build_logs(self):
build_logs = []
for dev in self.get_info(_cl.program_info.DEVICES):
try:
log = self.get_build_info(dev, program_build_info.LOG)
except Exception:
log = "<error retrieving log>"
build_logs.append((dev, log))
return build_logs
def program_build(self, options_bytes, devices=None):
err = None
try:
self._build(options=options_bytes, devices=devices)
except Error as e:
msg = str(e) + "\n\n" + (75*"="+"\n").join(
"Build on %s:\n\n%s" % (dev, log)
for dev, log in self._get_build_logs())
code = e.code
routine = e.routine
err = _cl.RuntimeError(
_cl._ErrorRecord(
msg=msg,
code=code,
routine=routine))
if err is not None:
# Python 3.2 outputs the whole list of currently active exceptions
# This serves to remove one (redundant) level from that nesting.
raise err
message = (75*"="+"\n").join(
"Build on %s succeeded, but said:\n\n%s" % (dev, log)
for dev, log in self._get_build_logs()
if log is not None and log.strip())
if message:
if self.kind() == program_kind.SOURCE:
build_type = "From-source build"
elif self.kind() == program_kind.BINARY:
build_type = "From-binary build"
elif self.kind() == program_kind.IL:
build_type = "From-IL build"
else:
build_type = "Build"
compiler_output("%s succeeded, but resulted in non-empty logs:\n%s"
% (build_type, message))
return self
_cl._Program._get_build_logs = program_get_build_logs
_cl._Program.build = program_build
# }}}
# {{{ Event
class ProfilingInfoGetter:
def __init__(self, event):
self.event = event
def __getattr__(self, name):
info_cls = _cl.profiling_info
try:
inf_attr = getattr(info_cls, name.upper())
except AttributeError:
raise AttributeError("%s has no attribute '%s'"
% (type(self), name))
else:
return self.event.get_profiling_info(inf_attr)
_cl.Event.profile = property(ProfilingInfoGetter)
# }}}
# {{{ Kernel
kernel_old_init = Kernel.__init__
kernel_old_get_info = Kernel.get_info
kernel_old_get_work_group_info = Kernel.get_work_group_info
def kernel_init(self, prg, name):
if not isinstance(prg, _cl._Program):
prg = prg._get_prg()
kernel_old_init(self, prg, name)
self._setup(prg)
def kernel__setup(self, prg):
self._source = getattr(prg, "_source", None)
from pyopencl.invoker import generate_enqueue_and_set_args
self._enqueue, self._set_args = generate_enqueue_and_set_args(
self.function_name, self.num_args, self.num_args,
None,
warn_about_arg_count_bug=None,
work_around_arg_count_bug=None)
self._wg_info_cache = {}
return self
def kernel_set_scalar_arg_dtypes(self, scalar_arg_dtypes):
self._scalar_arg_dtypes = tuple(scalar_arg_dtypes)
# {{{ arg counting bug handling
# For example:
# https://github.com/pocl/pocl/issues/197
# (but Apple CPU has a similar bug)
work_around_arg_count_bug = False
warn_about_arg_count_bug = False
from pyopencl.characterize import has_struct_arg_count_bug
count_bug_per_dev = [
has_struct_arg_count_bug(dev, self.context)
for dev in self.context.devices]
from pytools import single_valued
if any(count_bug_per_dev):
if all(count_bug_per_dev):
work_around_arg_count_bug = single_valued(count_bug_per_dev)
else:
warn_about_arg_count_bug = True
# }}}
from pyopencl.invoker import generate_enqueue_and_set_args
self._enqueue, self._set_args = generate_enqueue_and_set_args(
self.function_name,
len(scalar_arg_dtypes), self.num_args,
self._scalar_arg_dtypes,
warn_about_arg_count_bug=warn_about_arg_count_bug,
work_around_arg_count_bug=work_around_arg_count_bug)
def kernel_get_work_group_info(self, param, device):
try:
return self._wg_info_cache[param, device]
except KeyError:
pass
result = kernel_old_get_work_group_info(self, param, device)
self._wg_info_cache[param, device] = result
return result
def kernel_set_args(self, *args, **kwargs):
# Need to duplicate the 'self' argument for dynamically generated method
return self._set_args(self, *args, **kwargs)
def kernel_call(self, queue, global_size, local_size, *args, **kwargs):
# __call__ can't be overridden directly, so we need this
# trampoline hack.
return self._enqueue(self, queue, global_size, local_size, *args, **kwargs)
def kernel_capture_call(self, filename, queue, global_size, local_size,
*args, **kwargs):
from pyopencl.capture_call import capture_kernel_call
capture_kernel_call(self, filename, queue, global_size, local_size,
*args, **kwargs)
def kernel_get_info(self, param_name):
val = kernel_old_get_info(self, param_name)
if isinstance(val, _Program):
return Program(val)
else:
return val
Kernel.__init__ = kernel_init
Kernel._setup = kernel__setup
Kernel.get_work_group_info = kernel_get_work_group_info
Kernel.set_scalar_arg_dtypes = kernel_set_scalar_arg_dtypes
Kernel.set_args = kernel_set_args
Kernel.__call__ = kernel_call
Kernel.capture_call = kernel_capture_call
Kernel.get_info = kernel_get_info
# }}}
# {{{ ImageFormat
def image_format_repr(self):
return "ImageFormat(%s, %s)" % (
channel_order.to_string(self.channel_order,
"<unknown channel order 0x%x>"),
channel_type.to_string(self.channel_data_type,
"<unknown channel data type 0x%x>"))
def image_format_eq(self, other):
return (self.channel_order == other.channel_order
and self.channel_data_type == other.channel_data_type)
def image_format_ne(self, other):
return not image_format_eq(self, other)
def image_format_hash(self):
return hash((type(self), self.channel_order, self.channel_data_type))
ImageFormat.__repr__ = image_format_repr
ImageFormat.__eq__ = image_format_eq
ImageFormat.__ne__ = image_format_ne
ImageFormat.__hash__ = image_format_hash
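# Illustrative: with the methods attached above, image formats compare and
# print sensibly (a hedged sketch; values are placeholders):
#
#   fmt = cl.ImageFormat(cl.channel_order.RGBA, cl.channel_type.UNSIGNED_INT8)
#   assert fmt == cl.ImageFormat(cl.channel_order.RGBA,
#                                cl.channel_type.UNSIGNED_INT8)
#   print(fmt)  # -> ImageFormat(RGBA, UNSIGNED_INT8)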
# }}}
# {{{ Image
image_old_init = Image.__init__
def image_init(self, context, flags, format, shape=None, pitches=None,
hostbuf=None, is_array=False, buffer=None):
if shape is None and hostbuf is None:
raise Error("'shape' must be passed if 'hostbuf' is not given")
if shape is None and hostbuf is not None:
shape = hostbuf.shape
if hostbuf is not None and not \
(flags & (mem_flags.USE_HOST_PTR | mem_flags.COPY_HOST_PTR)):
from warnings import warn
warn("'hostbuf' was passed, but no memory flags to make use of it.")
if hostbuf is None and pitches is not None:
raise Error("'pitches' may only be given if 'hostbuf' is given")
if context._get_cl_version() >= (1, 2) and get_cl_header_version() >= (1, 2):
if buffer is not None and is_array:
raise ValueError(
"'buffer' and 'is_array' are mutually exclusive")
if len(shape) == 3:
if buffer is not None:
raise TypeError(
"'buffer' argument is not supported for 3D arrays")
elif is_array:
image_type = mem_object_type.IMAGE2D_ARRAY
else:
image_type = mem_object_type.IMAGE3D
elif len(shape) == 2:
if buffer is not None:
raise TypeError(
"'buffer' argument is not supported for 2D arrays")
elif is_array:
image_type = mem_object_type.IMAGE1D_ARRAY
else:
image_type = mem_object_type.IMAGE2D
elif len(shape) == 1:
if buffer is not None:
image_type = mem_object_type.IMAGE1D_BUFFER
elif is_array:
raise TypeError("array of zero-dimensional images not supported")
else:
image_type = mem_object_type.IMAGE1D
else:
raise ValueError("images cannot have more than three dimensions")
desc = ImageDescriptor()
desc.image_type = image_type
desc.shape = shape # also sets desc.array_size
if pitches is None:
desc.pitches = (0, 0)
else:
desc.pitches = pitches
desc.num_mip_levels = 0 # per CL 1.2 spec
desc.num_samples = 0 # per CL 1.2 spec
desc.buffer = buffer
image_old_init(self, context, flags, format, desc, hostbuf)
else:
# legacy init for CL 1.1 and older
if is_array:
raise TypeError("'is_array=True' is not supported for CL < 1.2")
# if num_mip_levels is not None:
# raise TypeError(
# "'num_mip_levels' argument is not supported for CL < 1.2")
# if num_samples is not None:
# raise TypeError(
# "'num_samples' argument is not supported for CL < 1.2")
if buffer is not None:
raise TypeError("'buffer' argument is not supported for CL < 1.2")
image_old_init(self, context, flags, format, shape,
pitches, hostbuf)
class _ImageInfoGetter:
def __init__(self, event):
from warnings import warn
warn("Image.image.attr is deprecated. "
"Use Image.attr directly, instead.")
self.event = event
def __getattr__(self, name):
try:
inf_attr = getattr(_cl.image_info, name.upper())
except AttributeError:
raise AttributeError("%s has no attribute '%s'"
% (type(self), name))
else:
return self.event.get_image_info(inf_attr)
def image_shape(self):
if self.type == mem_object_type.IMAGE2D:
return (self.width, self.height)
elif self.type == mem_object_type.IMAGE3D:
return (self.width, self.height, self.depth)
else:
raise LogicError("only images have shapes")
Image.__init__ = image_init
Image.image = property(_ImageInfoGetter)
Image.shape = property(image_shape)
# }}}
# {{{ Error
def error_str(self):
val = self.what
try:
val.routine
except AttributeError:
return str(val)
else:
result = ""
if val.code() != status_code.SUCCESS:
result = status_code.to_string(
val.code(), "<unknown error %d>")
routine = val.routine()
if routine:
result = "%s failed: %s" % (routine, result)
what = val.what()
if what:
if result:
result += " - "
result += what
return result
def error_code(self):
return self.args[0].code()
def error_routine(self):
return self.args[0].routine()
def error_what(self):
return self.args[0]
Error.__str__ = error_str
Error.code = property(error_code)
Error.routine = property(error_routine)
Error.what = property(error_what)
# }}}
# {{{ MemoryMap
def memory_map_enter(self):
return self
def memory_map_exit(self, exc_type, exc_val, exc_tb):
self.release()
MemoryMap.__doc__ = """
This class may also be used as a context manager in a ``with`` statement.
The memory corresponding to this object will be unmapped when
this object is deleted or :meth:`release` is called.
.. automethod:: release
"""
MemoryMap.__enter__ = memory_map_enter
MemoryMap.__exit__ = memory_map_exit
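# Illustrative sketch of the context-manager behavior added above; `queue`,
# `buf`, shape and dtype are placeholders, and we assume the array returned
# by enqueue_map_buffer carries the MemoryMap as its `.base`:
#
#   ary, evt = cl.enqueue_map_buffer(queue, buf, cl.map_flags.READ,
#                                    0, (1000,), np.float32)
#   with ary.base:        # unmaps (releases) the mapping on exit
#       host_copy = ary.copy()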
# }}}
# {{{ SVMAllocation
if get_cl_header_version() >= (2, 0):
SVMAllocation.__doc__ = """An object whose lifetime is tied to an allocation of shared virtual memory.
.. note::
Most likely, you will not want to use this directly, but rather
:func:`svm_empty` and related functions which allow access to this
functionality using a friendlier, more Pythonic interface.
.. versionadded:: 2016.2
.. automethod:: __init__(self, ctx, size, alignment, flags=None)
.. automethod:: release
.. automethod:: enqueue_release
"""
if get_cl_header_version() >= (2, 0):
svmallocation_old_init = SVMAllocation.__init__
def svmallocation_init(self, ctx, size, alignment, flags, _interface=None):
"""
:arg ctx: a :class:`Context`
:arg flags: some of :class:`svm_mem_flags`.
"""
svmallocation_old_init(self, ctx, size, alignment, flags)
read_write = (
flags & mem_flags.WRITE_ONLY != 0
or flags & mem_flags.READ_WRITE != 0)
_interface["data"] = (
int(self._ptr_as_int()), not read_write)
self.__array_interface__ = _interface
if get_cl_header_version() >= (2, 0):
SVMAllocation.__init__ = svmallocation_init
# }}}
# {{{ SVM
if get_cl_header_version() >= (2, 0):
SVM.__doc__ = """Tags an object exhibiting the Python buffer interface (such as a
:class:`numpy.ndarray`) as referring to shared virtual memory.
Depending on the features of the OpenCL implementation, the following
types of objects may be passed to/wrapped in this type:
* coarse-grain shared memory as returned by (e.g.) :func:`csvm_empty`
for any implementation of OpenCL 2.0.
This is how coarse-grain SVM may be used from both host and device::
svm_ary = cl.SVM(
cl.csvm_empty(ctx, 1000, np.float32, alignment=64))
assert isinstance(svm_ary.mem, np.ndarray)
with svm_ary.map_rw(queue) as ary:
ary.fill(17) # use from host
prg.twice(queue, svm_ary.mem.shape, None, svm_ary)
* fine-grain shared memory as returned by (e.g.) :func:`fsvm_empty`,
if the implementation supports fine-grained shared virtual memory.
This memory may directly be passed to a kernel::
ary = cl.fsvm_empty(ctx, 1000, np.float32)
assert isinstance(ary, np.ndarray)
prg.twice(queue, ary.shape, None, cl.SVM(ary))
queue.finish() # synchronize
print(ary) # access from host
Observe how mapping (as needed in coarse-grain SVM) is no longer
necessary.
* any :class:`numpy.ndarray` (or other Python object with a buffer
interface) if the implementation supports fine-grained *system*
shared virtual memory.
This is how plain :mod:`numpy` arrays may directly be passed to a
kernel::
ary = np.zeros(1000, np.float32)
prg.twice(queue, ary.shape, None, cl.SVM(ary))
queue.finish() # synchronize
print(ary) # access from host
Objects of this type may be passed to kernel calls and
:func:`enqueue_copy`. Coarse-grain shared-memory *must* be mapped
into host address space using :meth:`map` before being accessed
through the :mod:`numpy` interface.
.. note::
This object merely serves as a 'tag' that changes the behavior
of functions to which it is passed. It has no special management
relationship to the memory it tags. For example, it is permissible
to grab a :class:`numpy.ndarray` out of :attr:`SVM.mem` of one
:class:`SVM` instance and use the array to construct another.
Neither of the tags need to be kept alive.
.. versionadded:: 2016.2
.. attribute:: mem
The wrapped object.
.. automethod:: __init__
.. automethod:: map
.. automethod:: map_ro
.. automethod:: map_rw
.. automethod:: as_buffer
"""
if get_cl_header_version() >= (2, 0):
svm_old_init = SVM.__init__
def svm_init(self, mem):
svm_old_init(self, mem)
self.mem = mem
def svm_map(self, queue, flags, is_blocking=True, wait_for=None):
"""
:arg is_blocking: If *False*, subsequent code must wait on
:attr:`SVMMap.event` in the returned object before accessing the
mapped memory.
:arg flags: a combination of :class:`pyopencl.map_flags`, defaults to
read-write.
:returns: an :class:`SVMMap` instance
|std-enqueue-blurb|
"""
return SVMMap(
self,
queue,
_cl._enqueue_svm_map(queue, is_blocking, flags, self, wait_for))
def svm_map_ro(self, queue, is_blocking=True, wait_for=None):
"""Like :meth:`map`, but with *flags* set for a read-only map."""
return self.map(queue, map_flags.READ,
is_blocking=is_blocking, wait_for=wait_for)
def svm_map_rw(self, queue, is_blocking=True, wait_for=None):
"""Like :meth:`map`, but with *flags* set for a read-only map."""
return self.map(queue, map_flags.READ | map_flags.WRITE,
is_blocking=is_blocking, wait_for=wait_for)
def svm__enqueue_unmap(self, queue, wait_for=None):
return _cl._enqueue_svm_unmap(queue, self, wait_for)
def svm_as_buffer(self, ctx, flags=None):
"""
:arg ctx: a :class:`Context`
:arg flags: a combination of :class:`pyopencl.map_flags`, defaults to
read-write.
:returns: a :class:`Buffer` corresponding to *self*.
The memory referred to by this object must not be freed before
the returned :class:`Buffer` is released.
"""
if flags is None:
flags = mem_flags.READ_WRITE
return Buffer(ctx, flags, size=self.mem.nbytes, hostbuf=self.mem)
if get_cl_header_version() >= (2, 0):
SVM.__init__ = svm_init
SVM.map = svm_map
SVM.map_ro = svm_map_ro
SVM.map_rw = svm_map_rw
SVM._enqueue_unmap = svm__enqueue_unmap
SVM.as_buffer = svm_as_buffer
# }}}
# ORDER DEPENDENCY: Some of the above may override get_info, the effect needs
# to be visible through the attributes. So get_info attr creation needs to happen
# after the overriding is complete.
cls_to_info_cls = {
_cl.Platform: (_cl.Platform.get_info, _cl.platform_info, []),
_cl.Device: (_cl.Device.get_info, _cl.device_info,
["PLATFORM", "MAX_WORK_GROUP_SIZE", "MAX_COMPUTE_UNITS"]),
_cl.Context: (_cl.Context.get_info, _cl.context_info, []),
_cl.CommandQueue: (_cl.CommandQueue.get_info, _cl.command_queue_info,
["CONTEXT", "DEVICE"]),
_cl.Event: (_cl.Event.get_info, _cl.event_info, []),
_cl.MemoryObjectHolder:
(MemoryObjectHolder.get_info, _cl.mem_info, []),
Image: (_cl.Image.get_image_info, _cl.image_info, []),
Program: (Program.get_info, _cl.program_info, []),
Kernel: (Kernel.get_info, _cl.kernel_info, []),
_cl.Sampler: (Sampler.get_info, _cl.sampler_info, []),
}
def to_string(cls, value, default_format=None):
for name in dir(cls):
if (not name.startswith("_") and getattr(cls, name) == value):
return name
if default_format is None:
raise ValueError("a name for value %d was not found in %s"
% (value, cls.__name__))
else:
return default_format % value
for cls in CONSTANT_CLASSES:
cls.to_string = classmethod(to_string)
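# Illustrative: every lower-case constant class now supports symbolic lookup
# (a hedged sketch; the second value is deliberately bogus):
#
#   cl.status_code.to_string(cl.status_code.SUCCESS)   # -> "SUCCESS"
#   cl.status_code.to_string(-12345, "<unknown %d>")   # -> "<unknown -12345>"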
# {{{ get_info attributes -------------------------------------------------
def make_getinfo(info_method, info_name, info_attr):
def result(self):
return info_method(self, info_attr)
return property(result)
def make_cacheable_getinfo(info_method, info_name, cache_attr, info_attr):
def result(self):
try:
return getattr(self, cache_attr)
except AttributeError:
pass
result = info_method(self, info_attr)
setattr(self, cache_attr, result)
return result
return property(result)
for cls, (info_method, info_class, cacheable_attrs) \
in six.iteritems(cls_to_info_cls):
for info_name, info_value in six.iteritems(info_class.__dict__):
if info_name == "to_string" or info_name.startswith("_"):
continue
info_lower = info_name.lower()
info_constant = getattr(info_class, info_name)
if info_name in cacheable_attrs:
cache_attr = intern("_info_cache_"+info_lower)
setattr(cls, info_lower, make_cacheable_getinfo(
info_method, info_lower, cache_attr, info_constant))
else:
setattr(cls, info_lower, make_getinfo(
info_method, info_name, info_constant))
# }}}
if _cl.have_gl():
def gl_object_get_gl_object(self):
return self.get_gl_object_info()[1]
GLBuffer.gl_object = property(gl_object_get_gl_object)
GLTexture.gl_object = property(gl_object_get_gl_object)
_add_functionality()
# }}}
# {{{ create_some_context
def create_some_context(interactive=None, answers=None):
import os
if answers is None:
if "PYOPENCL_CTX" in os.environ:
ctx_spec = os.environ["PYOPENCL_CTX"]
answers = ctx_spec.split(":")
if "PYOPENCL_TEST" in os.environ:
from pyopencl.tools import get_test_platforms_and_devices
for plat, devs in get_test_platforms_and_devices():
for dev in devs:
return Context([dev])
if answers is not None:
pre_provided_answers = answers
answers = answers[:]
else:
pre_provided_answers = None
user_inputs = []
if interactive is None:
interactive = True
try:
import sys
if not sys.stdin.isatty():
interactive = False
except Exception:
interactive = False
def cc_print(s):
if interactive:
print(s)
def get_input(prompt):
if answers:
return str(answers.pop(0))
elif not interactive:
return ''
else:
user_input = input(prompt)
user_inputs.append(user_input)
return user_input
# {{{ pick a platform
platforms = get_platforms()
if not platforms:
raise Error("no platforms found")
else:
if not answers:
cc_print("Choose platform:")
for i, pf in enumerate(platforms):
cc_print("[%d] %s" % (i, pf))
answer = get_input("Choice [0]:")
if not answer:
platform = platforms[0]
else:
platform = None
try:
int_choice = int(answer)
except ValueError:
pass
else:
if 0 <= int_choice < len(platforms):
platform = platforms[int_choice]
if platform is None:
answer = answer.lower()
for i, pf in enumerate(platforms):
if answer in pf.name.lower():
platform = pf
if platform is None:
raise RuntimeError("input did not match any platform")
# }}}
# {{{ pick a device
devices = platform.get_devices()
def parse_device(choice):
try:
int_choice = int(choice)
except ValueError:
pass
else:
if 0 <= int_choice < len(devices):
return devices[int_choice]
choice = choice.lower()
for i, dev in enumerate(devices):
if choice in dev.name.lower():
return dev
raise RuntimeError("input did not match any device")
if not devices:
raise Error("no devices found")
elif len(devices) == 1:
pass
else:
if not answers:
cc_print("Choose device(s):")
for i, dev in enumerate(devices):
cc_print("[%d] %s" % (i, dev))
answer = get_input("Choice, comma-separated [0]:")
if not answer:
devices = [devices[0]]
else:
devices = [parse_device(i) for i in answer.split(",")]
# }}}
if user_inputs:
if pre_provided_answers is not None:
user_inputs = pre_provided_answers + user_inputs
cc_print("Set the environment variable PYOPENCL_CTX='%s' to "
"avoid being asked again." % ":".join(user_inputs))
if answers:
raise RuntimeError("not all provided choices were used by "
"create_some_context. (left over: '%s')" % ":".join(answers))
return Context(devices)
_csc = create_some_context
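# Illustrative: non-interactive scripts typically either set the environment
# variable parsed above, e.g. (platform 0, device 1; values are placeholders)
#
#   PYOPENCL_CTX=0:1 python script.py
#
# or create the context directly:
#
#   ctx = cl.create_some_context(interactive=False)  # first platform/device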
# }}}
# {{{ SVMMap
class SVMMap(object):
"""
.. attribute:: event
.. versionadded:: 2016.2
.. automethod:: release
This class may also be used as a context manager in a ``with`` statement.
:meth:`release` will be called upon exit from the ``with`` region.
The value returned to the ``as`` part of the context manager is the
mapped Python object (e.g. a :mod:`numpy` array).
"""
def __init__(self, svm, queue, event):
self.svm = svm
self.queue = queue
self.event = event
def __del__(self):
if self.svm is not None:
self.release()
def __enter__(self):
return self.svm.mem
def __exit__(self, exc_type, exc_val, exc_tb):
self.release()
def release(self, queue=None, wait_for=None):
"""
:arg queue: a :class:`pyopencl.CommandQueue`. Defaults to the one
with which the map was created, if not specified.
:returns: a :class:`pyopencl.Event`
|std-enqueue-blurb|
"""
evt = self.svm._enqueue_unmap(queue or self.queue, wait_for)
self.svm = None
return evt
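# Illustrative use of SVMMap as a context manager (mirrors the SVM docstring;
# `ctx` and `queue` are placeholders):
#
#   svm_ary = cl.SVM(cl.csvm_empty(ctx, 1000, np.float32))
#   with svm_ary.map_rw(queue) as ary:   # ary is the mapped numpy array
#       ary.fill(17)                     # release() runs on exit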
# }}}
# {{{ enqueue_copy
def enqueue_copy(queue, dest, src, **kwargs):
"""Copy from :class:`Image`, :class:`Buffer` or the host to
:class:`Image`, :class:`Buffer` or the host. (Note: host-to-host
copies are unsupported.)
The following keyword arguments are available:
:arg wait_for: (optional, default empty)
:arg is_blocking: Wait for completion. Defaults to *True*.
(Available on any copy involving host memory)
:return: A :class:`NannyEvent` if the transfer involved a
host-side buffer, otherwise an :class:`Event`.
.. note::
If the transfer involves a host-side buffer, deleting the returned
:class:`NannyEvent` will block until the transfer is complete. Be sure
to keep a reference to this :class:`Event` for as long as the
transfer needs to run.
.. note::
Two types of 'buffer' occur in the arguments to this function,
:class:`Buffer` and 'host-side buffers'. The latter are
defined by Python and commonly called `buffer objects
<https://docs.python.org/3.4/c-api/buffer.html>`_. :mod:`numpy`
arrays are a very common example.
Make sure to always be clear on whether a :class:`Buffer` or a
Python buffer object is needed.
.. ------------------------------------------------------------------------
.. rubric :: Transfer :class:`Buffer` ↔ host
.. ------------------------------------------------------------------------
:arg device_offset: offset in bytes (optional)
.. note::
The size of the transfer is controlled by the size
of the host-side buffer. If the host-side buffer
is a :class:`numpy.ndarray`, you can control the transfer size by
transferring into a smaller 'view' of the target array, like this::
cl.enqueue_copy(queue, large_dest_numpy_array[:15], src_buffer)
.. ------------------------------------------------------------------------
.. rubric :: Transfer :class:`Buffer` ↔ :class:`Buffer`
.. ------------------------------------------------------------------------
:arg byte_count: (optional) If not specified, defaults to the
size of the source in versions 2012.x and earlier,
and to the minimum of the size of the source and target
from 2013.1 on.
:arg src_offset: (optional)
:arg dest_offset: (optional)
.. ------------------------------------------------------------------------
.. rubric :: Rectangular :class:`Buffer` ↔ host transfers (CL 1.1 and newer)
.. ------------------------------------------------------------------------
:arg buffer_origin: :class:`tuple` of :class:`int` of length
three or shorter. (mandatory)
:arg host_origin: :class:`tuple` of :class:`int` of length
three or shorter. (mandatory)
:arg region: :class:`tuple` of :class:`int` of length
three or shorter. (mandatory)
:arg buffer_pitches: :class:`tuple` of :class:`int` of length
two or shorter. (optional, "tightly-packed" if unspecified)
:arg host_pitches: :class:`tuple` of :class:`int` of length
two or shorter. (optional, "tightly-packed" if unspecified)
.. ------------------------------------------------------------------------
.. rubric :: Rectangular :class:`Buffer` ↔ :class:`Buffer`
transfers (CL 1.1 and newer)
.. ------------------------------------------------------------------------
:arg src_origin: :class:`tuple` of :class:`int` of length
three or shorter. (mandatory)
:arg dst_origin: :class:`tuple` of :class:`int` of length
three or shorter. (mandatory)
:arg region: :class:`tuple` of :class:`int` of length
three or shorter. (mandatory)
:arg src_pitches: :class:`tuple` of :class:`int` of length
two or shorter. (optional, "tightly-packed" if unspecified)
:arg dst_pitches: :class:`tuple` of :class:`int` of length
two or shorter. (optional, "tightly-packed" if unspecified)
.. ------------------------------------------------------------------------
.. rubric :: Transfer :class:`Image` ↔ host
.. ------------------------------------------------------------------------
:arg origin: :class:`tuple` of :class:`int` of length
three or shorter. (mandatory)
:arg region: :class:`tuple` of :class:`int` of length
three or shorter. (mandatory)
:arg pitches: :class:`tuple` of :class:`int` of length
two or shorter. (optional)
.. ------------------------------------------------------------------------
.. rubric :: Transfer :class:`Buffer` ↔ :class:`Image`
.. ------------------------------------------------------------------------
:arg offset: offset in buffer (mandatory)
:arg origin: :class:`tuple` of :class:`int` of length
three or shorter. (mandatory)
:arg region: :class:`tuple` of :class:`int` of length
three or shorter. (mandatory)
.. ------------------------------------------------------------------------
.. rubric :: Transfer :class:`Image` ↔ :class:`Image`
.. ------------------------------------------------------------------------
:arg src_origin: :class:`tuple` of :class:`int` of length
three or shorter. (mandatory)
:arg dest_origin: :class:`tuple` of :class:`int` of length
three or shorter. (mandatory)
:arg region: :class:`tuple` of :class:`int` of length
three or shorter. (mandatory)
.. ------------------------------------------------------------------------
.. rubric :: Transfer :class:`SVM`/host ↔ :class:`SVM`/host
.. ------------------------------------------------------------------------
:arg byte_count: (optional) If not specified, defaults to the
size of the source in versions 2012.x and earlier,
and to the minimum of the size of the source and target
from 2013.1 on.
|std-enqueue-blurb|
.. versionadded:: 2011.1
"""
if isinstance(dest, MemoryObjectHolder):
if dest.type == mem_object_type.BUFFER:
if isinstance(src, MemoryObjectHolder):
if src.type == mem_object_type.BUFFER:
if "src_origin" in kwargs:
return _cl._enqueue_copy_buffer_rect(
queue, src, dest, **kwargs)
else:
kwargs["dst_offset"] = kwargs.pop("dest_offset", 0)
return _cl._enqueue_copy_buffer(queue, src, dest, **kwargs)
elif src.type in [mem_object_type.IMAGE2D, mem_object_type.IMAGE3D]:
return _cl._enqueue_copy_image_to_buffer(
queue, src, dest, **kwargs)
else:
raise ValueError("invalid src mem object type")
else:
# assume from-host
if "buffer_origin" in kwargs:
return _cl._enqueue_write_buffer_rect(queue, dest, src, **kwargs)
else:
return _cl._enqueue_write_buffer(queue, dest, src, **kwargs)
elif dest.type in [mem_object_type.IMAGE2D, mem_object_type.IMAGE3D]:
if isinstance(src, MemoryObjectHolder):
if src.type == mem_object_type.BUFFER:
return _cl._enqueue_copy_buffer_to_image(
queue, src, dest, **kwargs)
elif src.type in [mem_object_type.IMAGE2D, mem_object_type.IMAGE3D]:
return _cl._enqueue_copy_image(queue, src, dest, **kwargs)
else:
raise ValueError("invalid src mem object type")
else:
# assume from-host
origin = kwargs.pop("origin")
region = kwargs.pop("region")
pitches = kwargs.pop("pitches", (0, 0))
if len(pitches) == 1:
kwargs["row_pitch"], = pitches
else:
kwargs["row_pitch"], kwargs["slice_pitch"] = pitches
return _cl._enqueue_write_image(
queue, dest, origin, region, src, **kwargs)
else:
raise ValueError("invalid dest mem object type")
elif get_cl_header_version() >= (2, 0) and isinstance(dest, SVM):
# to SVM
if not isinstance(src, SVM):
src = SVM(src)
is_blocking = kwargs.pop("is_blocking", True)
return _cl._enqueue_svm_memcpy(queue, is_blocking, dest, src, **kwargs)
else:
# assume to-host
if isinstance(src, MemoryObjectHolder):
if src.type == mem_object_type.BUFFER:
if "buffer_origin" in kwargs:
return _cl._enqueue_read_buffer_rect(queue, src, dest, **kwargs)
else:
return _cl._enqueue_read_buffer(queue, src, dest, **kwargs)
elif src.type in [mem_object_type.IMAGE2D, mem_object_type.IMAGE3D]:
origin = kwargs.pop("origin")
region = kwargs.pop("region")
pitches = kwargs.pop("pitches", (0, 0))
if len(pitches) == 1:
kwargs["row_pitch"], = pitches
else:
kwargs["row_pitch"], kwargs["slice_pitch"] = pitches
return _cl._enqueue_read_image(
queue, src, origin, region, dest, **kwargs)
else:
raise ValueError("invalid src mem object type")
elif isinstance(src, SVM):
# from svm
# dest is not a SVM instance, otherwise we'd be in the branch above
is_blocking = kwargs.pop("is_blocking", True)
return _cl._enqueue_svm_memcpy(
queue, is_blocking, SVM(dest), src, **kwargs)
else:
# assume from-host
raise TypeError("enqueue_copy cannot perform host-to-host transfers")
# }}}
# {{{ image creation
DTYPE_TO_CHANNEL_TYPE = {
np.dtype(np.float32): channel_type.FLOAT,
np.dtype(np.int16): channel_type.SIGNED_INT16,
np.dtype(np.int32): channel_type.SIGNED_INT32,
np.dtype(np.int8): channel_type.SIGNED_INT8,
np.dtype(np.uint16): channel_type.UNSIGNED_INT16,
np.dtype(np.uint32): channel_type.UNSIGNED_INT32,
np.dtype(np.uint8): channel_type.UNSIGNED_INT8,
}
try:
np.float16
except Exception:
pass
else:
DTYPE_TO_CHANNEL_TYPE[np.dtype(np.float16)] = channel_type.HALF_FLOAT
DTYPE_TO_CHANNEL_TYPE_NORM = {
np.dtype(np.int16): channel_type.SNORM_INT16,
np.dtype(np.int8): channel_type.SNORM_INT8,
np.dtype(np.uint16): channel_type.UNORM_INT16,
np.dtype(np.uint8): channel_type.UNORM_INT8,
}
def image_from_array(ctx, ary, num_channels=None, mode="r", norm_int=False):
if not ary.flags.c_contiguous:
raise ValueError("array must be C-contiguous")
dtype = ary.dtype
if num_channels is None:
import pyopencl.cltypes
try:
dtype, num_channels = \
pyopencl.cltypes.vec_type_to_scalar_and_count[dtype]
except KeyError:
# It must be a scalar type then.
num_channels = 1
shape = ary.shape
strides = ary.strides
elif num_channels == 1:
shape = ary.shape
strides = ary.strides
else:
if ary.shape[-1] != num_channels:
raise RuntimeError("last dimension must be equal to number of channels")
shape = ary.shape[:-1]
strides = ary.strides[:-1]
if mode == "r":
mode_flags = mem_flags.READ_ONLY
elif mode == "w":
mode_flags = mem_flags.WRITE_ONLY
else:
raise ValueError("invalid value '%s' for 'mode'" % mode)
img_format = {
1: channel_order.R,
2: channel_order.RG,
3: channel_order.RGB,
4: channel_order.RGBA,
}[num_channels]
assert ary.strides[-1] == ary.dtype.itemsize
if norm_int:
channel_type = DTYPE_TO_CHANNEL_TYPE_NORM[dtype]
else:
channel_type = DTYPE_TO_CHANNEL_TYPE[dtype]
return Image(ctx, mode_flags | mem_flags.COPY_HOST_PTR,
ImageFormat(img_format, channel_type),
shape=shape[::-1], pitches=strides[::-1][1:],
hostbuf=ary)
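# Illustrative: turn a C-contiguous RGBA uint8 array into a read-only,
# normalized image (`ctx` is a placeholder):
#
#   rgba = np.zeros((128, 64, 4), dtype=np.uint8)
#   img = cl.image_from_array(ctx, rgba, num_channels=4, mode="r",
#                             norm_int=True)  # UNORM_INT8 channels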
# }}}
# {{{ enqueue_* compatibility shims
def enqueue_marker(queue, wait_for=None):
if queue._get_cl_version() >= (1, 2) and get_cl_header_version() >= (1, 2):
return _cl._enqueue_marker_with_wait_list(queue, wait_for)
else:
if wait_for:
_cl._enqueue_wait_for_events(queue, wait_for)
return _cl._enqueue_marker(queue)
def enqueue_barrier(queue, wait_for=None):
if queue._get_cl_version() >= (1, 2) and get_cl_header_version() >= (1, 2):
return _cl._enqueue_barrier_with_wait_list(queue, wait_for)
else:
_cl._enqueue_barrier(queue)
if wait_for:
_cl._enqueue_wait_for_events(queue, wait_for)
return _cl._enqueue_marker(queue)
def enqueue_fill_buffer(queue, mem, pattern, offset, size, wait_for=None):
if not (queue._get_cl_version() >= (1, 2) and get_cl_header_version() >= (1, 2)):
from warnings import warn
warn("The context for this queue does not declare OpenCL 1.2 support, so "
"the next thing you might see is a crash")
if _PYPY and isinstance(pattern, np.generic):
pattern = np.asarray(pattern)
return _cl._enqueue_fill_buffer(queue, mem, pattern, offset, size, wait_for)
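# Illustrative (CL 1.2+; `queue` and `buf` are placeholders): zero a buffer
# by repeating a single-float pattern across its full size in bytes.
#
#   cl.enqueue_fill_buffer(queue, buf, np.float32(0), 0, buf.size)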
# }}}
# {{{ numpy-like svm allocation
def enqueue_svm_memfill(queue, dest, pattern, byte_count=None, wait_for=None):
"""Fill shared virtual memory with a pattern.
:arg dest: a Python buffer object, optionally wrapped in an :class:`SVM` object
:arg pattern: a Python buffer object (e.g. a :class:`numpy.ndarray`) with the
fill pattern to be used.
:arg byte_count: The size of the memory to be filled. Defaults to the
entirety of *dest*.
|std-enqueue-blurb|
.. versionadded:: 2016.2
"""
if not isinstance(dest, SVM):
dest = SVM(dest)
return _cl._enqueue_svm_memfill(
queue, dest, pattern, byte_count=byte_count, wait_for=wait_for)
def enqueue_svm_migratemem(queue, svms, flags, wait_for=None):
"""
:arg svms: a collection of Python buffer objects (e.g. :mod:`numpy`
arrays), optionally wrapped in :class:`SVM` objects.
:arg flags: a combination of :class:`mem_migration_flags`
|std-enqueue-blurb|
.. versionadded:: 2016.2
This function requires OpenCL 2.1.
"""
return _cl._enqueue_svm_migratemem(
queue,
[svm.mem if isinstance(svm, SVM) else svm
for svm in svms],
flags,
wait_for)
def svm_empty(ctx, flags, shape, dtype, order="C", alignment=None):
"""Allocate an empty :class:`numpy.ndarray` of the given *shape*, *dtype*
and *order*. (See :func:`numpy.empty` for the meaning of these arguments.)
The array will be allocated in shared virtual memory belonging
to *ctx*.
:arg ctx: a :class:`Context`
:arg flags: a combination of flags from :class:`svm_mem_flags`.
:arg alignment: the number of bytes to which the beginning of the memory
is aligned. Defaults to the :attr:`numpy.dtype.itemsize` of *dtype*.
:returns: a :class:`numpy.ndarray` whose :attr:`numpy.ndarray.base` attribute
is a :class:`SVMAllocation`.
To pass the resulting array to an OpenCL kernel or :func:`enqueue_copy`, you
will likely want to wrap the returned array in an :class:`SVM` tag.
.. versionadded:: 2016.2
"""
dtype = np.dtype(dtype)
try:
s = 1
for dim in shape:
s *= dim
except TypeError:
import sys
if sys.version_info >= (3,):
admissible_types = (int, np.integer)
else:
admissible_types = (np.integer,) + six.integer_types
if not isinstance(shape, admissible_types):
raise TypeError("shape must either be iterable or "
"castable to an integer")
s = shape
shape = (shape,)
itemsize = dtype.itemsize
nbytes = s * itemsize
from pyopencl.compyte.array import c_contiguous_strides, f_contiguous_strides
if order in "fF":
strides = f_contiguous_strides(itemsize, shape)
elif order in "cC":
strides = c_contiguous_strides(itemsize, shape)
else:
raise ValueError("order not recognized: %s" % order)
descr = dtype.descr
interface = {
"version": 3,
"shape": shape,
"strides": strides,
}
if len(descr) == 1:
interface["typestr"] = descr[0][1]
else:
interface["typestr"] = "V%d" % itemsize
interface["descr"] = descr
if alignment is None:
alignment = itemsize
svm_alloc = SVMAllocation(ctx, nbytes, alignment, flags, _interface=interface)
return np.asarray(svm_alloc)
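# Illustrative allocation (`ctx` is a placeholder); the returned ndarray's
# lifetime is tied to the SVMAllocation kept in its `.base`:
#
#   ary = cl.svm_empty(ctx, cl.svm_mem_flags.READ_WRITE, 1000, np.float32)
#   assert isinstance(ary.base, cl.SVMAllocation)
#   prg.twice(queue, ary.shape, None, cl.SVM(ary))  # pass wrapped in SVM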
def svm_empty_like(ctx, flags, ary, alignment=None):
"""Allocate an empty :class:`numpy.ndarray` like the existing
:class:`numpy.ndarray` *ary*. The array will be allocated in shared
virtual memory belonging to *ctx*.
:arg ctx: a :class:`Context`
:arg flags: a combination of flags from :class:`svm_mem_flags`.
    :arg alignment: the number of bytes to which the beginning of the memory
        is aligned. Defaults to the :attr:`numpy.dtype.itemsize` of *ary*'s dtype.
:returns: a :class:`numpy.ndarray` whose :attr:`numpy.ndarray.base` attribute
is a :class:`SVMAllocation`.
To pass the resulting array to an OpenCL kernel or :func:`enqueue_copy`, you
will likely want to wrap the returned array in an :class:`SVM` tag.
.. versionadded:: 2016.2
"""
if ary.flags.c_contiguous:
order = "C"
elif ary.flags.f_contiguous:
order = "F"
else:
raise ValueError("array is neither C- nor Fortran-contiguous")
return svm_empty(ctx, flags, ary.shape, ary.dtype, order,
alignment=alignment)
def csvm_empty(ctx, shape, dtype, order="C", alignment=None):
"""
Like :func:`svm_empty`, but with *flags* set for a coarse-grain read-write
buffer.
.. versionadded:: 2016.2
"""
return svm_empty(ctx, svm_mem_flags.READ_WRITE, shape, dtype, order, alignment)
def csvm_empty_like(ctx, ary, alignment=None):
"""
Like :func:`svm_empty_like`, but with *flags* set for a coarse-grain
read-write buffer.
.. versionadded:: 2016.2
"""
    return svm_empty_like(ctx, svm_mem_flags.READ_WRITE, ary, alignment)
def fsvm_empty(ctx, shape, dtype, order="C", alignment=None):
"""
Like :func:`svm_empty`, but with *flags* set for a fine-grain read-write
buffer.
.. versionadded:: 2016.2
"""
return svm_empty(ctx,
svm_mem_flags.READ_WRITE | svm_mem_flags.SVM_FINE_GRAIN_BUFFER,
shape, dtype, order, alignment)
def fsvm_empty_like(ctx, ary, alignment=None):
"""
Like :func:`svm_empty_like`, but with *flags* set for a fine-grain
read-write buffer.
.. versionadded:: 2016.2
"""
    return svm_empty_like(
            ctx,
            svm_mem_flags.READ_WRITE | svm_mem_flags.SVM_FINE_GRAIN_BUFFER,
            ary, alignment=alignment)
# }}}
_KERNEL_ARG_CLASSES = (
MemoryObjectHolder,
Sampler,
CommandQueue,
LocalMemory,
)
if get_cl_header_version() >= (2, 0):
_KERNEL_ARG_CLASSES = _KERNEL_ARG_CLASSES + (SVM,)
# vim: foldmethod=marker
| 32.735493 | 110 | 0.589096 | 8,132 | 67,697 | 4.666011 | 0.114363 | 0.004349 | 0.008565 | 0.008065 | 0.365381 | 0.30408 | 0.254902 | 0.219218 | 0.19513 | 0.180476 | 0 | 0.007415 | 0.30279 | 67,697 | 2,067 | 111 | 32.75133 | 0.79633 | 0.184351 | 0 | 0.240637 | 0 | 0.000797 | 0.155505 | 0.004176 | 0 | 0 | 0 | 0.000968 | 0.00239 | 1 | 0.077291 | false | 0.012749 | 0.046215 | 0.022311 | 0.214343 | 0.007968 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
02fd5a1ea41141f494abc58365b06789410e5a09 | 20,684 | py | Python | src/UI_Code_Q2/UI_V1/BotMidWindow.py | KevinEwoudLee/HU3-UI | 16d63e0be8c515540daf4f9cfcff2d0a85c1cbab | [
"MIT"
] | 1 | 2019-12-11T15:27:53.000Z | 2019-12-11T15:27:53.000Z | src/UI_Code_Q2/UI_V1/BotMidWindow.py | KevinEwoudLee/HU3-UI | 16d63e0be8c515540daf4f9cfcff2d0a85c1cbab | [
"MIT"
] | null | null | null | src/UI_Code_Q2/UI_V1/BotMidWindow.py | KevinEwoudLee/HU3-UI | 16d63e0be8c515540daf4f9cfcff2d0a85c1cbab | [
"MIT"
] | 1 | 2019-12-11T15:23:56.000Z | 2019-12-11T15:23:56.000Z | # -*- coding: utf-8 -*-
"""
Created on Wed Dec 11 11:55:05 2019
@author: Kevin Lee
"""
#import libraries
#import * is import all
from tkinter import *
#math used for calculations
import math
#time used for keeping track of the time
import time
from time import sleep
#import os and datetime used for saving data on excel and writing it to USB
import os
from datetime import datetime
#import classes
import MainMidWindow as mw
# spidev used for SPI communication
#import spidev
#import RPi.GPIO as GPIO
#==============================================Global Variables==============================
global WindowX
WindowX = 1200
global WindowY
WindowY = 840
global FormulaOrange1
FormulaOrange1 = '#ee6d24'
global FormulaBlue1
FormulaBlue1 = '#12bfd7'
global FormulaBlack1
FormulaBlack1 = '#1d323e'
global count
count = 0
global countcheck
countcheck = 0
global spinBrake
spinBrake = 0
global spinGas
spinGas = 0
global degree_sign
degree_sign= u'\N{DEGREE SIGN}'
#===================================================================================================
class BotMidWindow:
def __init__(self, window):
# mydebug(f"WinMid.__init__()") # f-string of Python 3.6+
#Define variables
self.window = window
self.angle = 0
self.counter = 0
self.size = 30
self.choice = 0
self.p_width = 2
self.centerx = 410
self.centery = 210
self.old_choice = 0
self.color = '#000000'
#PointerLengths
self.arrow = [1,1,6,1]
# ALL attributes of class here
self.rect = 0 # no rectangle yet
self.index = 0 # no arrow yet
self.BotCanvas = 0 # no canvas yet
#PointerLengths
self.screen_1 = [3,3,3,-1]
#Setup variables for all temperature based functions
self.temp = 0
self.temp_dir = 1
self.text_temp = 0
#Make a canvas in the bottom half of the screen to place other objects inside of and give it the name BotCanvas.
self.BotCanvas = Canvas(self.window, width= 840, height=170,borderwidth = 0.0, bg='black', highlightthickness=0)
self.BotCanvas.pack() #place the created canvas into the window.
#Sensor simulator
#Load button + spinbox
self.spin = StringVar()
self.spinBox = Spinbox(self.BotCanvas, from_=0, to=100, width = 5, bg = 'snow')
self.spinBox.place(relx=0.05,rely=0.5)
#Make a button for inputting "sensordata"
#Make a button named "Load" in the Canvas named "Botcanvas", with background color "snow" and execute the function useSensor.
self.sensorButton = Button(self.BotCanvas, text = 'Load', command = self.useSensor, bg = 'snow', height = 1)
self.sensorButton.place(relx=0.12, rely=0.5) # Place the button 50 pixels to the right and 150 pixels down (top left is 0,0).
#Make a label (a box to place text in).
#Place the label in BotCanvas, with 10 pixels of empty room to the left and right of the text. Position it at coordinates (360,340) and give it font Courier with size 20 and make it bold.
self.sensorData = Label(self.BotCanvas, padx =10 , textvariable=self.spin, bg = 'black', fg = 'white')
self.sensorData.config(font=("Courier 20 bold"))
self.sensorData.place(relx= 0.5, rely=0.1)
#Brake pedal position
self.spinBrake = StringVar()
self.spinBoxBrake = Spinbox(self.BotCanvas, from_=0, to=120, width = 5, bg = 'snow')
self.spinBoxBrake.place(relx=0.05, rely=0.1)
self.sensorButtonBrake = Button(self.BotCanvas, text = 'Brake', command = self.Update_brake, bg = 'snow', height = 1)
self.sensorButtonBrake.place(relx=0.12, rely=0.1)
#Gas pedal position
self.spinGas = StringVar()
self.spinBoxGas = Spinbox(self.BotCanvas, from_=0, to=180, width = 5, bg = 'snow')
self.spinBoxGas.place(relx=0.05, rely=0.3)
self.sensorButtonGas = Button(self.BotCanvas, text = 'Gas', command = self.Update_gas, bg = 'snow', height = 1)
self.sensorButtonGas.place(relx=0.12, rely=0.3)
#Function useSensor is used to update the value of self.spin to what is currently the value in the spinbox.
def useSensor(self):
self.spin.set(str(self.spinBox.get()))
def Update_brake(self):
global spinBrake #To alter the value of the global variable it has to be specified you are using the global variable first
spinBrake = float(self.spinBoxBrake.get())
def Update_gas(self):
global spinGas #To alter the value of the global variable it has to be specified you are using the global variable first
spinGas = float(self.spinBoxGas.get())
    #Animated polygon (note: currently not updating)
def delete_Poly(self):
# mydebug(f"WinMid.delete_Poly() self.index={self.index}")
if self.index > 0: # avoid list of arrows now for simplicity
self.BotCanvas.delete(self.index)
self.index = 0
#function to delete temperature value in the bottom middle window
def del_temp(self):
if(self.text_temp > 0):
self.BotCanvas.delete(self.text_temp)
self.text_temp = 0
#Function to delete the rectangle in the bottom middle window
def delete_rect(self):
# mydebug(f"WinMid.delete_rect()")
        if self.rect > 0: # avoid list of rects now for simplicity
            self.BotCanvas.delete(self.rect)
            self.rect = 0
#Function to remove all objects on the bottom half of the screen and reset the background color.
def screen_clear(self):
# mydebug(f"WinMid.screen_clear()")
self.delete_rect() #remove rectangular object
self.delete_Poly() #remove rotating object
        self.BotCanvas.configure(bg = 'black') #reset background color to 'black'
self.del_temp() #remove temperature text from choice 6
#Function to determine what to do when a button is pressed.
def function_choose(self):
self.update_val()
# mydebug(f"WinMid.function_choose() self.choice={self.choice} self.old_choice = {self.old_choice}")
self.color_update()
if(self.choice != self.old_choice):
# mydebug(f"WinMid.function_choose() self.screen_clear()")
self.screen_clear()
self.old_choice = self.choice
#option 0 which is the start screen. Make an empty window with a width,height,border and background color and place it in the bottom window
if self.BotCanvas == 0:
self.screen_clear()
if(self.choice != self.old_choice):
self.screen_clear()
self.old_choice = self.choice
if(self.choice == 1): # or GPIO.input() == GPIO.HIGH): ##if button 1 (top left) is pressed do functions below
self.screen_clear() #empty bottom screen
self.rotate_Poly() #put object onto the screen
elif(self.choice == 2): #if button 2 (mid left) is pressed do functions below
self.screen_clear() #empty bottom screen
self.rect = self.BotCanvas.create_rectangle(100, 100, 200, 200, fill='red')
elif(self.choice == 3): #if button 3 (bottom left) is pressed do functions below
self.screen_clear() #empty Bottom screen
self.rect = self.BotCanvas.create_rectangle(200, 200, 200 + (self.angle//10)%1000 , 300, fill='blue')
elif(self.choice == 4): #if button 4 (top right) is pressed do functions below
self.screen_clear() #empty bottom screen
self.rect = self.BotCanvas.create_rectangle(300, 300, 400, 400, fill=self.color)
elif(self.choice == 5): #if button 5 (top mid left) is pressed do functions below
self.screen_clear() #empty bottom screen
self.rect = self.BotCanvas.create_rectangle(200, 200, 200 + (self.angle//10)%1000 , 300, fill=self.color)
elif(self.choice == 6): #if button 6 (bottom mid left) is pressed do functions below
self.screen_clear() #empty bottom screen
self.temp_gradient()
elif(self.choice == 7): #if button 7 (bottom right) is pressed do functions below
self.screen_clear() #empty bottom screen
#add function for button 7 here
        # else:
        #     print("Do Nothing") #if the option is not 1-7, print "Do Nothing" to avoid errors (also for debugging)
#function to adjust color
def colorize(self,a,b,c):
self.color = '#%02x%02x%02x' % (a, b, c)
# print(self.color, b)
# function to update the color in the bottom middle window
def temp_gradient(self):
self.temp += self.temp_dir # .. was 1 (too small to see something)
if(self.temp >= 250 or self.temp <=0 ):
self.temp_dir = -self.temp_dir
self.del_temp()
self.colorize(255, 255-self.temp, 0) #Color goes from yellow to red as temp goes up and down
# mydebug(f"WinMid.temp_gradient() self.temp_gradient={self.angle}")
self.BotCanvas.configure(bg = self.color) #Set the background color to the updated color
self.text_temp = self.BotCanvas.create_text(420,240, text = int(self.temp/250*60) , fill="black", font = ("Purisa", 30)) #Make some text to display the temperature
#function to rotate
def rotate_Poly(self):
# mydebug(f"WinMid.rotate_Poly() self.angle={self.angle} self.index={self.index}")
# .. was 1 (too small to see something)
# delete old arrow
self.delete_Poly()
# draw new arrow
self.index = self.BotCanvas.create_polygon(
[ self.centerx + self.screen_1[0] * self.size * math.cos(math.radians(self.angle)) , self.centery + self.screen_1[0] * self.size* math.sin(math.radians(self.angle)) ,
self.centerx + self.screen_1[1] * self.size * math.cos(math.radians(self.angle + 90)) , self.centery + self.screen_1[1] * self.size *math.sin(math.radians(self.angle + 90)),
self.centerx + self.screen_1[2] * self.size * math.cos(math.radians(self.angle + 180)), self.centery + self.screen_1[2] * self.size * math.sin(math.radians(self.angle + 180)) ,
self.centerx + self.screen_1[3] * self.size * math.cos(math.radians(self.angle + 270)), self.centery + self.screen_1[3] * self.size * math.sin(math.radians(self.angle + 270))
], fill = 'purple')
def _from_rgb(self, rgb):
return "#%02x%02x%02x" % rgb
def color_update(self):
self.color = self._from_rgb((0,0,((self.angle//10)%250)))
def update_val(self):
        WindowY = self.window.winfo_height()
        WindowX = self.window.winfo_width()
self.BotCanvas.delete("all")
self.BotCanvas.create_text(WindowX/4, WindowY/28, text = '{} {}'.format(int(spinBrake), "%") , font=("Purisan", 20), fill="snow")
self.BotCanvas.create_text(WindowX/4, WindowY/14, text = '{} {}'.format(int(spinGas), "%"), font=("Purisan", 20), fill="snow")
# #Make text under gas meter(green bar)
# self.text.append(self.MainMidWindow.create_text(WindowX/1.12, WindowY/1.5, text = '{} {}'.format(int(spinGas),"%"), font=("Purisan", 20), fill="snow"))
self.BotCanvas.create_rectangle(WindowX/3, WindowY/42, WindowX/3+spinBrake, WindowY/21, fill='red3')
# self.my_rectangle = self.round_rectangle(40, 250-((220-self.angle-20)/220)*200, 140, 250 , radius=20, fill="red3")
self.BotCanvas.create_rectangle(WindowX/3, WindowY/16.8, WindowX/3+spinGas, WindowY/12, fill='green2')
# self.rect.append(self.MainMidWindow.create_rectangle(WindowX/(840/720), WindowY/(840/(275*1.83)), WindowX/(840/780), WindowY/(840/((275-(spinGas))*1.83)), fill='green2'))
self.angle += 1
self.color_update()
#Code for layout and buttons
class Layout(Frame, BotMidWindow):
def __init__(self, parent = None):
Frame.__init__(self, parent)
self.master = parent
self.colordict ="navy"
self.angle = 0
self.arrow_dir = 1
self.text = []
self.height_split = 0.333
self.width_split = 0.15
self.time_mark = time.time()
#create buttons on the left side
self.left1_button = Button(self, text = "Sensor1", command = self.left1_, bg = FormulaOrange1) #make a button with name "Sensor1", action when pressed: left1_ and button color "FormulaOrange1"
self.left2_button = Button(self, text = "Sensor2", command = self.left2_, bg = FormulaOrange1) #make a button with name "Sensor2", action when pressed: left2_ and button color "FormulaOrange1"
self.left3_button = Button(self, text = "Sensor3", command = self.left3_, bg = FormulaOrange1) #make a button with name "Sensor3", action when pressed: left3_ and button color "FormulaOrange1"
#create buttons on the right side
self.right1_button = Button(self, text = "Sensor4", command = self.right1_, bg = FormulaBlue1) #make a button with name "Sensor4", action when pressed: right1_ and button color "FormulaBlue1"
self.right2_button = Button(self, text = "Sensor5", command = self.right2_, bg = FormulaBlue1) #make a button with name "Sensor5", action when pressed: right2_ and button color "FormulaBlue1"
self.right3_button = Button(self, text = "Sensor6", command = self.right3_, bg = FormulaBlue1) #make a button with name "Sensor6", action when pressed: right3_ and button color "FormulaBlue1"
        self.right4_button = Button(self, text = "Quit", command = self.right4_, bg = 'red2') #make a button named "Quit", action when pressed: right4_ and button color 'red2'
#bind buttons on keyboard to functions
self.master.bind('1', self.left1b_) #bind button "1" to function left1b_
self.master.bind('2', self.left2b_) #bind button "2" to function left2b_
self.master.bind('3', self.left3b_) #bind button "3" to function left3b_
self.master.bind('4', self.right1b_) #bind button "4" to function right1b_
self.master.bind('5', self.right2b_) #bind button "5" to function right2b_
self.master.bind('6', self.right3b_) #bind button "6" to function right3b_
self.master.bind('7', self.right4b_) #bind button "7" to function right4b_
self.master.bind('a', self.accelerate) #bind button "a" to function accelerate
self.master.bind('d', self.decelerate) #bind button "d" to function decelerate
#Define BotMidWindow and MainMidWindow in this class
self.mid1 = Frame(parent, bd=0, relief=FLAT, bg=FormulaBlack1, height = 840, width = 420, highlightthickness=0)
self.mid2 = Frame(parent, bd=0, relief=FLAT, bg = 'black', height = 400, width = 420, highlightthickness=0)
self.BotMidWindow = BotMidWindow(self.mid2)
self.MainMidWindow = mw.MainMidWindow(self.mid1)
#Has to be changed to the button next to the UI screen. This button starts the timer.
self.master.bind('t', self.MainMidWindow.start)
#GPIO.add_event_detect(4,GPIO.RISING,callback=button_callback) # Setup event on pin 10 rising edge
def display(self):
#place the buttons created to display sensor data on the right place with the right size (rel = relative size compared to window it is placed inside of, so relative x/y position and height/width)
self.pack(fill = BOTH, expand = 1)
self.left1_button.place(relx = 0, rely = 0, relwidth = self.width_split, relheight = self.height_split)
self.left2_button.place(relx = 0, rely = self.height_split, relwidth = self.width_split, relheight = self.height_split)
self.left3_button.place(relx = 0, rely = 2* self.height_split, relwidth = self.width_split, relheight = self.height_split)
self.right1_button.place(relx = 1-self.width_split, rely = 0, relwidth = self.width_split, relheight = self.height_split*0.75)
self.right2_button.place(relx = 1- self.width_split, rely = self.height_split*0.75, relwidth = self.width_split, relheight = self.height_split*0.75)
self.right3_button.place(relx = 1-self.width_split, rely = 2* self.height_split*0.75, relwidth = self.width_split, relheight = self.height_split*0.75)
self.right4_button.place(relx = 1-self.width_split, rely = 2.25* self.height_split, relwidth = self.width_split, relheight = self.height_split*0.75)
#Placement botmidwindow and mainmidwindow
self.mid1.place( relx = self.width_split, rely = 0, relheight =1., relwidth= 1 - 2*self.width_split)
self.mid2.place( relx = self.width_split, rely = 0.8, relheight =0.2, relwidth= 1 - 2*self.width_split)
#Update the screen
self.screen_Updater()
#Call function to execute the object for the second window screen
# def test(self):
# print("Test")
def screen_Updater(self):
self.BotMidWindow.function_choose()
self.MainMidWindow.Update_val(3) #Make sure gas and brake simulation is not affected.
# print(int(time.time()*1000 - self.time_mark))
# self.time_mark = time.time()*1000
self.master.after(10, self.screen_Updater)
#Function to simulate gas is being pressed
def accelerate(self,event):
self.MainMidWindow.Update_val(0)
    #Function to simulate brake is being pressed
def decelerate(self,event):
self.MainMidWindow.Update_val(1)
#functions for the working of clicking on the buttons
def left1_(self):
self.BotMidWindow.choice = 1 #if this left1_ function is called set choice in function_choose to be 1
def left1b_(self,event):
self.BotMidWindow.choice = 1 #if this left1b_ function is called set choice in function_choose to be 1
def left2_(self):
self.BotMidWindow.choice = 2 #if this left2_ function is called set choice in function_choose to be 2
def left2b_(self,event):
self.BotMidWindow.choice = 2 #if this left2b_ function is called set choice in function_choose to be 2
def left3_(self):
self.BotMidWindow.choice = 3 #if this left3_ function is called set choice in function_choose to be 3
def left3b_(self,event):
self.BotMidWindow.choice = 3 #if this left3b_ function is called set choice in function_choose to be 3
def right1_(self):
self.BotMidWindow.choice = 4 #if this right_1 function is called set choice in function_choose to be 4
def right1b_(self,event):
self.BotMidWindow.choice = 4 #if this right1b_ function is called set choice in function_choose to be 4
def right2_(self):
self.BotMidWindow.choice = 5 #if this right_2 function is called set choice in function_choose to be 5
def right2b_(self,event):
self.BotMidWindow.choice = 5 #if this right2b_ function is called set choice in function_choose to be 5
def right3_(self):
self.BotMidWindow.choice = 6 #if this right_3 function is called set choice in function_choose to be 6
def right3b_(self,event):
self.BotMidWindow.choice = 6 #if this right3b_ function is called set choice in function_choose to be 6
def right4_(self):
self.master.destroy()
self.BotMidWindow.choice = 7 #if this right_4 function is called set choice in function_choose to be 7
def right4b_(self,event):
self.master.destroy()
self.BotMidWindow.choice = 7 #if this right4b_ function is called set choice in function_choose to be 7
| 46.376682 | 204 | 0.623767 | 2,747 | 20,684 | 4.610848 | 0.153622 | 0.028738 | 0.017685 | 0.021001 | 0.3946 | 0.338702 | 0.257224 | 0.211116 | 0.173299 | 0.153719 | 0 | 0.042286 | 0.268275 | 20,684 | 445 | 205 | 46.480899 | 0.794582 | 0.368884 | 0 | 0.201681 | 0 | 0 | 0.022477 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0.029412 | 0.004202 | 0.184874 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
02ff78db0d2a4b8d9c44255abafc20aa497d17ff | 1,326 | py | Python | tests/e2e_tests/test_reviews_all.py | jorgecorrea/google-play-scraper | f0d7303f2fe9d9fdb7a10a0de755a90b2d53c7ce | [
"MIT"
] | null | null | null | tests/e2e_tests/test_reviews_all.py | jorgecorrea/google-play-scraper | f0d7303f2fe9d9fdb7a10a0de755a90b2d53c7ce | [
"MIT"
] | null | null | null | tests/e2e_tests/test_reviews_all.py | jorgecorrea/google-play-scraper | f0d7303f2fe9d9fdb7a10a0de755a90b2d53c7ce | [
"MIT"
] | null | null | null | from unittest import TestCase
from unittest.mock import patch
from google_play_scraper.features.reviews import reviews_all, reviews
class TestReviewsAll(TestCase):
def test_request_once(self):
with patch(
"google_play_scraper.features.reviews.reviews", wraps=reviews
) as mock_reviews:
result = reviews_all("co.kr.uaram.userdeliver_")
self.assertEqual(1, mock_reviews.call_count)
result_of_reviews, _ = reviews("co.kr.uaram.userdeliver_", count=10000)
self.assertTrue(0 < len(result) < 10)
self.assertEqual(len(result), len(result_of_reviews))
def test_request_multiple_times(self):
with patch(
"google_play_scraper.features.reviews.reviews", wraps=reviews
) as mock_reviews:
result = reviews_all("co.kr.uaram.userdeliver_", lang="ko", country="kr")
self.assertEqual(2, mock_reviews.call_count)
result_of_reviews, _ = reviews(
"co.kr.uaram.userdeliver_", lang="ko", country="kr", count=10000
)
self.assertTrue(300 < len(result) < 500)
self.assertEqual(len(result), len(result_of_reviews))
def test_no_reviews(self):
result = reviews_all("com.spotify.music", lang="sw", country="it")
self.assertListEqual([], result)
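
# Hedged usage sketch (not part of the test suite): how reviews_all is
# typically invoked outside of unittest; the package id matches the app
# exercised in the tests above. Requires network access.
if __name__ == "__main__":
    fetched = reviews_all("co.kr.uaram.userdeliver_", lang="ko", country="kr")
    print("fetched %d reviews" % len(fetched))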
| 34.894737 | 85 | 0.665913 | 161 | 1,326 | 5.254658 | 0.322981 | 0.06383 | 0.042553 | 0.094563 | 0.602837 | 0.565012 | 0.565012 | 0.565012 | 0.529551 | 0.529551 | 0 | 0.020309 | 0.220211 | 1,326 | 37 | 86 | 35.837838 | 0.797872 | 0 | 0 | 0.296296 | 0 | 0 | 0.160633 | 0.138763 | 0 | 0 | 0 | 0 | 0.259259 | 1 | 0.111111 | false | 0 | 0.111111 | 0 | 0.259259 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f30019770757cc2d738f929cce42e5cf868ea7e3 | 1,014 | py | Python | src/core/data/NpDataFromRaw.py | uab-projects/bayesian-tweets | e207e84fdc1e16b13c71a24bc754e39fa04b48cf | [
"Apache-2.0"
] | 1 | 2018-01-10T05:46:16.000Z | 2018-01-10T05:46:16.000Z | src/core/data/NpDataFromRaw.py | uab-projects/bayesian-tweets | e207e84fdc1e16b13c71a24bc754e39fa04b48cf | [
"Apache-2.0"
] | null | null | null | src/core/data/NpDataFromRaw.py | uab-projects/bayesian-tweets | e207e84fdc1e16b13c71a24bc754e39fa04b48cf | [
"Apache-2.0"
] | null | null | null | # Libraries
import logging
import numpy as np
from .RawDataHandler import RawDataHandler
from .NpDataHandler import NpDataHandler
# Constants
LOGGER = logging.getLogger(__name__)
"""
Default column in the data to look for messages
"""
COL_MESSAGES = 1
"""
Default column in the data to look for messages classes
"""
COL_CLASSES = 2
class NpDataFromRaw(RawDataHandler):
"""
Converts a raw data object into a NumPy data object, knowing that the raw data object contains a matrix with two columns, the first one containing data as a message, and the second, it's classification
"""
def __call__(self):
"""
Returns a NumPy data handler, with the messages and classes extracted from the raw data
@return NpDataHandler object with the data converted
"""
# Create messages
messages = np.array(
[sample[0].split() for sample in self._data]
)
# Create classes
classes = np.array(
[bool(int(sample[1])) for sample in self._data],
dtype=np.bool)
return NpDataHandler(messages,classes)
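
# Hedged usage sketch (assumes RawDataHandler's constructor stores the raw
# matrix in self._data and accepts a parent object -- its real signature
# lives in RawDataHandler.py):
#   converter = NpDataFromRaw(raw_matrix, parent)
#   np_handler = converter()  # NpDataHandler with split messages and bool classes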
| 26 | 202 | 0.742604 | 143 | 1,014 | 5.181818 | 0.454545 | 0.02834 | 0.040486 | 0.048583 | 0.156545 | 0.105263 | 0.105263 | 0.105263 | 0.105263 | 0 | 0 | 0.004802 | 0.178501 | 1,014 | 38 | 203 | 26.684211 | 0.884754 | 0.389546 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0625 | false | 0 | 0.25 | 0 | 0.4375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f300f8f3af62da8c66ff7f6bfbaa17f6a43ac4a8 | 218 | py | Python | python3/lambda.py | eiadshahtout/Python | b2406b0806bc55a9d8f5482a304a8d6968249018 | [
"MIT"
] | null | null | null | python3/lambda.py | eiadshahtout/Python | b2406b0806bc55a9d8f5482a304a8d6968249018 | [
"MIT"
] | null | null | null | python3/lambda.py | eiadshahtout/Python | b2406b0806bc55a9d8f5482a304a8d6968249018 | [
"MIT"
] | null | null | null | Old_list = [1,2,3,4,5,6,7,8,9,10]
New_list = list(map(lambda x: x + 5 , Old_list))
print(New_list)
numbers1 = [1, 2, 3]
numbers2 = [4, 5, 6]
result = map(lambda x, y: x + y, numbers1, numbers2)
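# Equivalent list comprehensions, shown for comparison with the map/lambda
# forms above:
New_list_lc = [x + 5 for x in Old_list]
result_lc = [x + y for x, y in zip(numbers1, numbers2)]
print(New_list_lc)
print(result_lc)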
print(list(result)) | 21.8 | 52 | 0.623853 | 45 | 218 | 2.933333 | 0.466667 | 0.106061 | 0.045455 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.122222 | 0.174312 | 218 | 10 | 53 | 21.8 | 0.611111 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.285714 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f30118842560382b6baf919a3e3788d370994633 | 3,161 | py | Python | web.py | osteele/assignment-tools | 10b7b48965ed363c370a05fdf69876f21bb2fafb | [
"MIT"
] | null | null | null | web.py | osteele/assignment-tools | 10b7b48965ed363c370a05fdf69876f21bb2fafb | [
"MIT"
] | null | null | null | web.py | osteele/assignment-tools | 10b7b48965ed363c370a05fdf69876f21bb2fafb | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import re
import os
from collections import namedtuple
from glob import glob
import flask
from flask import Flask
import nbformat
import nbconvert
import pandas as pd
COURSE_NAME = 'SoftDes Spring 2016'
PROJECT_DIR = os.path.dirname(__file__)
SUMMARY_DIR = os.path.join(PROJECT_DIR, 'summaries')
DATAFRAME_TABLE_CLASSES = 'table-condensed table-striped table-hover'
RESPONSE_SUMMARY_PATH_TEMPLATE_RE = re.compile(
    r'(.+)_reading_journal_(.+)(?:responses|response_counts)?(?:_with_names)?\.csv')
GITHUB_REPO_URL = 'https://github.com/sd16spring/ReadingJournal'
Assignment = namedtuple('Assignment', ['assignment_id', 'name', 'summaries', 'notebook_name'])
app = Flask(__name__)
pd.set_option('display.max_colwidth', -1)
assignments = {}
for path in glob(os.path.join(SUMMARY_DIR, '*.csv')):
m = RESPONSE_SUMMARY_PATH_TEMPLATE_RE.match(os.path.basename(path))
if not m:
continue
assignment_id, summary_type = m.groups()
df = pd.read_csv(path, index_col=0)
assignment = assignments.get(assignment_id)
if not assignment:
assignment_name = assignment_id.replace('day', 'day ').capitalize()
assignment = Assignment(assignment_id, assignment_name, [], '%s_reading_journal.ipynb' % assignment_id)
assignments[assignment_id] = assignment
assignment[2].append((summary_type, df))
def natural_sort_key(s):
int_re = re.compile(r'(-?\d+)')
return tuple(int(c) if int_re.match(c) else c
for c in int_re.split(s))
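
# Quick sanity check of the natural ordering (cheap, runs at import time):
# "day2" must sort before "day10", unlike plain lexicographic order.
assert sorted(["day10", "day2"], key=natural_sort_key) == ["day2", "day10"]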
@app.route('/')
def index():
return flask.render_template(
'index.html',
course_name=COURSE_NAME,
title='Assignments',
assignments=sorted(assignments.values(), key=lambda t: natural_sort_key(t[1]))
)
@app.route('/assignment/<assignment_id>')
def assignment(assignment_id):
def summary_type_to_title(s):
return s.replace('_', ' ').capitalize()
assignment = assignments[assignment_id]
tables = [(summary_type != 'response_counts',
summary_type_to_title(summary_type),
df.to_html(classes=DATAFRAME_TABLE_CLASSES))
for summary_type, df in assignment[2]]
return flask.render_template(
'assignment.html',
assignment=assignment,
notebook_url='/'.join([GITHUB_REPO_URL, 'blob/master', assignment.notebook_name]),
course_name=COURSE_NAME,
title=assignment.name,
tables=[(title, df) for is_poll, title, df in tables if not is_poll],
polls=[(title, df) for is_poll, title, df in tables if is_poll],
)
@app.route('/assignment/<assignment_id>/processed')
def processed_notebook(assignment_id):
with open('processed_notebooks/%s_reading_journal_responses.ipynb' % assignment_id) as f:
nb = nbformat.reads(f.read(), as_version=4)
    body, _ = nbconvert.export_html(nb)
assignment_name = assignments[assignment_id][1]
return flask.render_template(
'processed_notebook.html',
course_name=COURSE_NAME,
title=' '.join([assignment_name, 'Processed Notebook']),
        nb_html=body)
if __name__ == '__main__':
app.run(debug=True)
| 31.61 | 111 | 0.692502 | 407 | 3,161 | 5.09828 | 0.314496 | 0.080964 | 0.053012 | 0.036145 | 0.128675 | 0.059759 | 0.031807 | 0.031807 | 0.031807 | 0.031807 | 0 | 0.005021 | 0.180955 | 3,161 | 99 | 112 | 31.929293 | 0.796447 | 0.006327 | 0 | 0.08 | 0 | 0 | 0.170064 | 0.076433 | 0 | 0 | 0 | 0 | 0 | 1 | 0.066667 | false | 0 | 0.12 | 0.026667 | 0.253333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f302e1772b975b5a39b5ac1249be1af24593a04f | 567 | py | Python | granatum_deeplearning/setup.py | granatumx/gbox-py | b3e264a22bc6a041f2dd631d952eae29c0ecae21 | [
"MIT"
] | 1 | 2021-03-04T13:04:28.000Z | 2021-03-04T13:04:28.000Z | g_packages/official_py_docker/docker/granatum_deeplearning/setup.py | lanagarmire/granatumx | 3dee3a8fb2ba851c31a9f6338aef1817217769f9 | [
"MIT"
] | 16 | 2020-01-28T23:03:40.000Z | 2022-02-10T00:30:16.000Z | g_packages/official_py_docker/docker/granatum_deeplearning/setup.py | lanagarmire/granatumx | 3dee3a8fb2ba851c31a9f6338aef1817217769f9 | [
"MIT"
] | 2 | 2020-06-16T16:42:40.000Z | 2020-08-28T16:59:42.000Z | from setuptools import setup, find_packages
import sys, os
VERSION = '0.0.4'
setup(name='granatum_deeplearning',
version=VERSION,
description="granatum_deeplearning",
long_description="""""",
classifiers=[],
keywords='granatum deeplearning',
author='o_poirion',
author_email='o.poirion@gmail.com',
url='',
license='MIT',
packages=find_packages(exclude=['examples',
'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[],
)
| 24.652174 | 49 | 0.599647 | 55 | 567 | 5.981818 | 0.709091 | 0.182371 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007246 | 0.269841 | 567 | 22 | 50 | 25.772727 | 0.78744 | 0 | 0 | 0 | 0 | 0 | 0.197531 | 0.074074 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.105263 | 0 | 0.105263 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f3056207c4d69bea370b4f4bd3c10eca72c5aef4 | 1,590 | py | Python | ToolChain/PyScripts/ShowEnvironement.py | LaichR/Avr | 58908bd9479637f048e9ff30c1f630d2fe620291 | [
"MIT"
] | 3 | 2021-03-22T07:59:51.000Z | 2021-04-05T18:09:34.000Z | ToolChain/PyScripts/ShowEnvironement.py | LaichR/Avr | 58908bd9479637f048e9ff30c1f630d2fe620291 | [
"MIT"
] | 4 | 2020-02-09T14:37:01.000Z | 2021-03-28T08:12:37.000Z | ToolChain/PyScripts/ShowEnvironement.py | LaichR/Avr | 58908bd9479637f048e9ff30c1f630d2fe620291 | [
"MIT"
] | null | null | null | import os, sys, pathlib, re, itertools, functools, json

# path of this script (printed at the end of main)
p = pathlib.Path(__file__)
def ShowAndCheckVariable( variable, isPath ):
    if variable not in os.environ:
        print("environment variable {0} not set".format(variable))
        return
    value = os.environ[variable]
print( "{0} defined as \t'{1}'".format(variable, value))
if isPath:
isValidPath = "ok"
if not os.path.exists(value):
isValidPath = "not ok"
print( "\t\t-- ? {1}".format(value, isValidPath ))
def CheckDotnetAccess():
print( "Dot Net: importing clr" )
import clr
path = pathlib.Path(os.environ['DotNetLib'])
sys.path.append(str(path))
clangPath = path / "Clang.dll"
if not os.path.exists( clangPath ):
print ( "Dot Net: file {0} not available".format(clangPath))
return
try:
print("Dot Net: adding reference to Clang.dll")
clr.AddReference("Clang")
import Clang
    except Exception:
print ("Dot Net: failed to add reference to Clang.dll" )
return
print ("Dot Net: Library Clang.dll successfully loaded")
if __name__ == "__main__":
print ("show environment:")
print ("*****************")
print ("current python version = {0}".format(sys.version))
ShowAndCheckVariable('ProjectRoot', True)
ShowAndCheckVariable('ToolsRoot', True)
ShowAndCheckVariable('AvrGcc', True)
ShowAndCheckVariable('AvrDude', True)
ShowAndCheckVariable('DotNetLib', True)
print ("current path = {0}".format(str(p)))
CheckDotnetAccess() | 33.125 | 68 | 0.625786 | 180 | 1,590 | 5.461111 | 0.383333 | 0.036623 | 0.055951 | 0.02238 | 0.034588 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005757 | 0.23522 | 1,590 | 48 | 69 | 33.125 | 0.802632 | 0.015723 | 0 | 0.05 | 0 | 0 | 0.261342 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.05 | false | 0 | 0.1 | 0 | 0.2 | 0.3 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f305b2fe01435086400eca61e3b22176b9001099 | 5,637 | py | Python | examples/rl/lift.py | haosulab/SAPIEN | 6bc3f4e2be910199b793f185aea5791d9f193e4c | [
"MIT"
] | 21 | 2021-10-13T11:56:45.000Z | 2022-03-30T16:09:21.000Z | examples/rl/lift.py | haosulab/SAPIEN | 6bc3f4e2be910199b793f185aea5791d9f193e4c | [
"MIT"
] | 25 | 2021-10-20T20:14:37.000Z | 2022-03-30T05:55:15.000Z | examples/rl/lift.py | haosulab/SAPIEN | 6bc3f4e2be910199b793f185aea5791d9f193e4c | [
"MIT"
] | 5 | 2021-10-31T17:43:52.000Z | 2022-03-01T09:45:53.000Z | """Lift environment."""
import numpy as np
from gym import spaces
import sapien.core as sapien
from sapien.core import Pose
from sapien.utils.viewer import Viewer
from sapien_env import SapienEnv
class LiftEnv(SapienEnv):
def __init__(self):
self.init_qpos = [0, 0.19634954084936207, 0.0, -2.617993877991494,
0.0, 2.941592653589793, 0.7853981633974483, 0, 0]
self.table_height = 0.8
super().__init__(control_freq=20, timestep=0.01)
self.robot = self.get_articulation('panda')
self.end_effector = self.robot.get_links()[8]
self.dof = self.robot.dof
assert self.dof == 9, 'Panda should have 9 DoF'
self.active_joints = self.robot.get_active_joints()
self.cube = self.get_actor('cube')
# The arm is controlled by the internal velocity drive
for joint in self.active_joints[:5]:
joint.set_drive_property(stiffness=0, damping=4.8)
for joint in self.active_joints[5:7]:
joint.set_drive_property(stiffness=0, damping=0.72)
# The gripper will be controlled directly by the torque
self.observation_space = spaces.Box(
low=-np.inf, high=np.inf, shape=[self.dof * 2 + 13], dtype=np.float32)
self.action_space = spaces.Box(
low=-1.0, high=1.0, shape=[self.dof], dtype=np.float32)
# ---------------------------------------------------------------------------- #
# Simulation world
# ---------------------------------------------------------------------------- #
def _build_world(self):
physical_material = self._scene.create_physical_material(1.0, 1.0, 0.0)
self._scene.default_physical_material = physical_material
self._scene.add_ground(0.0)
# table top
builder = self._scene.create_actor_builder()
builder.add_box_collision(half_size=[0.4, 0.4, 0.025])
builder.add_box_visual(half_size=[0.4, 0.4, 0.025])
table = builder.build_kinematic(name='table')
table.set_pose(Pose([0, 0, self.table_height - 0.025]))
# cube
builder = self._scene.create_actor_builder()
builder.add_box_collision(half_size=[0.02, 0.02, 0.02])
builder.add_box_visual(half_size=[0.02, 0.02, 0.02], color=[1, 0, 0])
cube = builder.build(name='cube')
cube.set_pose(Pose([0, 0, self.table_height + 0.02]))
# robot
loader = self._scene.create_urdf_loader()
loader.fix_root_link = True
robot = loader.load('../assets/robot/panda/panda.urdf')
robot.set_name('panda')
robot.set_root_pose(Pose([-0.16 - 0.4, 0, self.table_height]))
robot.set_qpos(self.init_qpos)
# ---------------------------------------------------------------------------- #
# RL
# ---------------------------------------------------------------------------- #
def step(self, action):
# Use internal velocity drive
for idx in range(7):
self.active_joints[idx].set_drive_velocity_target(action[idx])
# Control the gripper directly by torque
qf = self.robot.compute_passive_force(True, True, False)
qf[-2:] += action[-2:]
self.robot.set_qf(qf)
for i in range(self.control_freq):
self._scene.step()
obs = self._get_obs()
reward = self._get_reward()
done = self.cube.get_pose().p[2] > self.table_height + 0.04
if done:
reward += 100.0
return obs, reward, done, {}
def reset(self):
self.robot.set_qpos(self.init_qpos)
self.cube.set_pose(Pose(
[np.random.randn() * 0.05, np.random.randn() * 0.05, self.table_height + 0.02]))
self._scene.step()
return self._get_obs()
def _get_obs(self):
qpos = self.robot.get_qpos()
qvel = self.robot.get_qvel()
cube_pose = self.cube.get_pose()
ee_pose = self.end_effector.get_pose()
cube_to_ee = ee_pose.p - cube_pose.p
return np.hstack([qpos, qvel, cube_pose.p, cube_pose.q, cube_to_ee])
def _get_reward(self):
# reaching reward
cube_pose = self.cube.get_pose()
ee_pose = self.end_effector.get_pose()
distance = np.linalg.norm(ee_pose.p - cube_pose.p)
reaching_reward = 1 - np.tanh(10.0 * distance)
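        # Hedged numeric illustration: at 0.05 m the reaching term is
        # 1 - tanh(0.5) ~= 0.54, at 0.30 m it is ~= 0.005, so the shaping
        # concentrates gradient near the cube.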
# lifting reward
lifting_reward = max(
0, self.cube.pose.p[2] - self.table_height - 0.02) / 0.02
return reaching_reward + lifting_reward
# ---------------------------------------------------------------------------- #
# Visualization
# ---------------------------------------------------------------------------- #
def _setup_lighting(self):
self._scene.set_ambient_light([.4, .4, .4])
self._scene.add_directional_light([1, -1, -1], [0.3, 0.3, 0.3])
self._scene.add_directional_light([0, 0, -1], [1, 1, 1])
def _setup_viewer(self):
self._setup_lighting()
self.viewer = Viewer(self._renderer)
self.viewer.set_scene(self._scene)
self.viewer.set_camera_xyz(x=1.5, y=0.0, z=2.0)
self.viewer.set_camera_rpy(y=3.14, p=-0.5, r=0)
def main():
env = LiftEnv()
env.reset()
for episode in range(10):
for step in range(100):
env.render()
action = env.action_space.sample()
obs, reward, done, info = env.step(action)
env.step(action)
if done:
print(f'Done at step {step}')
break
obs = env.reset()
env.close()
if __name__ == '__main__':
main()
| 36.367742 | 92 | 0.55455 | 740 | 5,637 | 4.012162 | 0.245946 | 0.008084 | 0.035365 | 0.032334 | 0.255305 | 0.218255 | 0.185248 | 0.113843 | 0.097676 | 0.078141 | 0 | 0.05502 | 0.245521 | 5,637 | 154 | 93 | 36.603896 | 0.643075 | 0.132163 | 0 | 0.093458 | 0 | 0 | 0.021596 | 0.006582 | 0 | 0 | 0 | 0 | 0.009346 | 1 | 0.084112 | false | 0.009346 | 0.056075 | 0 | 0.186916 | 0.009346 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f306ad1d123b3c7c057a5323cb941264e6d662b6 | 523 | py | Python | python/combinations.py | lukasjoc/scritps | ebcffef0a3977ab8bb1bebf20383c350bd7baa37 | [
"0BSD"
] | null | null | null | python/combinations.py | lukasjoc/scritps | ebcffef0a3977ab8bb1bebf20383c350bd7baa37 | [
"0BSD"
] | null | null | null | python/combinations.py | lukasjoc/scritps | ebcffef0a3977ab8bb1bebf20383c350bd7baa37 | [
"0BSD"
] | null | null | null | #!/usr/bin/env python3
from itertools import combinations_with_replacement, product
def sums(m, with_zeros=False):
    # note: both branches allow zero addends; the flag actually toggles
    # ordered tuples (True) vs. non-decreasing tuples (False)
    if with_zeros:
        # ordered tuples (compositions): every permutation counts separately
        combinations = product(range(m+1), repeat=m)
    else:
        # non-decreasing tuples: each multiset of addends counts once
        combinations = combinations_with_replacement(range(m+1), r=m)
perms = [p for p in list(combinations) if sum(p) == m]
return (perms, len(perms))
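
# Worked example: sums(3) keeps length-3 tuples over 0..3 whose entries sum
# to 3. With with_zeros=False the tuples are non-decreasing -- (0, 0, 3),
# (0, 1, 2), (1, 1, 1) -- so sums(3)[1] == 3; with with_zeros=True every
# ordering counts, giving the 10 compositions of 3 into 3 non-negative parts.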
if __name__ == "__main__":
cz, cz_len = sums(m=5, with_zeros=True)
print(cz)
print(cz_len)
c, c_len = sums(m=5)
print(c)
print(c_len)
| 20.92 | 69 | 0.648184 | 80 | 523 | 4 | 0.4625 | 0.046875 | 0.16875 | 0.05625 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012285 | 0.221797 | 523 | 24 | 70 | 21.791667 | 0.773956 | 0.040153 | 0 | 0 | 0 | 0 | 0.015968 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.066667 | false | 0 | 0.066667 | 0 | 0.2 | 0.266667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f306b5147c3ae005b8c15a55889bd33adf5acd00 | 1,013 | py | Python | dblmiddleman/user.py | TitanEmbeds/dbl-webhook-middleman | d136a025cc195b77e9c7bae1b553889c9ddf9653 | [
"MIT"
] | null | null | null | dblmiddleman/user.py | TitanEmbeds/dbl-webhook-middleman | d136a025cc195b77e9c7bae1b553889c9ddf9653 | [
"MIT"
] | null | null | null | dblmiddleman/user.py | TitanEmbeds/dbl-webhook-middleman | d136a025cc195b77e9c7bae1b553889c9ddf9653 | [
"MIT"
] | null | null | null | class User:
def __init__(self, *, data):
self.id = data["id"]
self.username = data["username"]
self.discriminator = data["discriminator"]
self.avatar = data.get("avatar")
self.defAvatar = data["defAvatar"]
self.bio = data.get("bio")
self.banner = data.get("banner")
self.social = data.get("social", {})
self.color = data.get("color")
self.supporter = data["supporter"]
self.certified_dev = data["certifiedDev"]
self.mod = data["mod"]
self.webMod = data["webMod"]
self.admin = data["admin"]
@property
def default_avatar(self):
return "https://discordapp.com/assets/{0.defAvatar}.png".format(self)
@property
def avatar_url(self):
if not self.avatar:
return self.default_avatar
return "https://cdn.discordapp.com/avatars/{0.id}/{0.avatar}.png".format(self)
@property
def mention(self):
return "<@{0.id}>".format(self) | 33.766667 | 86 | 0.57848 | 118 | 1,013 | 4.898305 | 0.330508 | 0.060554 | 0.044983 | 0.072664 | 0.083045 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005362 | 0.263574 | 1,013 | 30 | 87 | 33.766667 | 0.769437 | 0 | 0 | 0.111111 | 0 | 0 | 0.20217 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.148148 | false | 0 | 0 | 0.074074 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f3074ca9d7720dad6fd1aac80ae01760e122d89e | 6,451 | py | Python | cookie_repeater.py | byu-imaal/dns-cookies-pam21 | a0e79a3538f2c32aa3f23d89b96ad1afe046d0f3 | [
"BSD-2-Clause"
] | null | null | null | cookie_repeater.py | byu-imaal/dns-cookies-pam21 | a0e79a3538f2c32aa3f23d89b96ad1afe046d0f3 | [
"BSD-2-Clause"
] | null | null | null | cookie_repeater.py | byu-imaal/dns-cookies-pam21 | a0e79a3538f2c32aa3f23d89b96ad1afe046d0f3 | [
"BSD-2-Clause"
] | null | null | null | """
This script sends a number of queries to record the server cookies returned by a given domain.
There are 3 methods of repetition:
1. none: In each query, do not include a server cookie
2. repeat: In each query, include the first server cookie received from the IP
3. follow: In each query, use the most recent server cookie received from the IP
"""
import argparse
import json
import multiprocessing as mp
import signal
import sys
import time
from functools import partial
from typing import Union, Tuple
import dns.resolver
from dns.edns import GenericOption
from dns.message import make_query
from tqdm import tqdm
COOKIE_OPT = 10
CLIENT_COOKIE = "1e4ddeb526a1da40"
json_keys = ["ip", "domain", "num_sent", "queries"]
query_keys = ["sent", "edns", "scook", "rcode", "isbind", "tsdiff", "tsrecv", "tscook", "err", "method"]
def makedict(default=None, keys=json_keys):
return {key: default for key in keys}
def extract_scook(r: dns.message.Message) -> bytes:
for o in r.options:
if o.otype == COOKIE_OPT:
return o.data[8:]
return bytes()
def is_using_bind(scook: str, current_timestamp: int = None) -> Tuple[Union[None, int], int]:
"""
Returns true if the server cookie is 128 bits and has a timestamp at the 5th-8th bytes.
Bind or bind-like implementations have a timestamp at that location.
Tolerance for the timestamp is 1hr in past and 30 min in future being valid. This seemed like a good range to use.
:param scook: the cookie returned by the server
:param current_timestamp: the timestamp to compare against. If none, gets current time
:return: the cookie timestamp or None if not bind-like. Also the current timestamp
"""
if current_timestamp is None:
current_timestamp = int(time.time())
if len(scook) != 32: # bind cookie is 128 bits = 16 bytes = 32 hex characters
return None, current_timestamp
cookie_timestamp = int(scook[8:16], 16)
if (current_timestamp - 60 * 60) <= cookie_timestamp <= (current_timestamp + 60 * 30):
return cookie_timestamp, current_timestamp
return None, current_timestamp
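
# Hedged self-check (not used by the measurement code): a synthetic 128-bit
# cookie carrying the current UNIX time in bytes 5-8 should be flagged as
# BIND-like by is_using_bind().
def _is_using_bind_demo():
    now = int(time.time())
    synthetic = "aabbccdd" + format(now, "08x") + "0123456789abcdef"
    ts, cur = is_using_bind(synthetic, now)
    assert ts == now and cur == now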
def ind_query(domain: str, ip: str, method: str, scookie: str) -> dict:
    # guard against a None server cookie so bytes.fromhex() below cannot fail
    scookie = scookie or ""
    d = makedict(keys=query_keys)
try:
cookie_opt = GenericOption(COOKIE_OPT, bytes.fromhex(CLIENT_COOKIE + scookie))
q = make_query(domain, dns.rdatatype.A, use_edns=True, want_dnssec=False, options=[cookie_opt])
d["sent"] = scookie
r: dns.message.Message = dns.query.udp(q, ip, timeout=5)
except Exception as e:
d["err"] = str(e)
else:
d["scook"] = extract_scook(r).hex()
d["tscook"], d["tsrecv"] = is_using_bind(d["scook"])
if d["tscook"] is not None:
d["tsdiff"] = d["tscook"] - d["tsrecv"]
d["isbind"] = d["tscook"] is not None
d["rcode"] = r.rcode()
d["edns"] = r.edns >= 0
d["method"] = method
return d
def query(params):
if params['method'] == "all":
params["method"] = "none"
none_res = query(params)
params["method"] = "repeat"
repeat_res = query(params)
params["method"] = "follow"
follow_res = query(params)
none_res["num_sent"] += repeat_res["num_sent"] + follow_res["num_sent"]
none_res["queries"].extend(repeat_res['queries'])
none_res["queries"].extend(follow_res['queries'])
return none_res
res = makedict()
res["ip"] = params["ip"]
res["domain"] = params["domain"]
res["num_sent"] = params["number"]
res["queries"] = []
qry = partial(ind_query, params["domain"], params["ip"], params["method"])
prev_query = qry("")
for i in range(params["number"]):
time.sleep(params["delay"] if i % 10 != 0 else params["delay"] * params["delay-mult"])
if params["method"] == "none":
res["queries"].append(qry(""))
elif params["method"] == "repeat":
res["queries"].append(qry(prev_query["scook"]))
else:
q = qry(prev_query["scook"])
res["queries"].append(q)
if q["scook"] is not None:
prev_query = q
return res
def main(args):
parser = argparse.ArgumentParser(description="Running a series of dns queries on a list of IPs")
parser.add_argument('input', help="Input file containing a json lines with ip and domain keys")
parser.add_argument('output', help="Output file to write results to")
parser.add_argument('mode', help="How to send server cookie.\n"
"none = never send server cookie\n"
"repeat = always send first server cookie received\n"
"follow = send last server cookie received\n"
"all = do all three above and combine into single result",
choices=["none", "repeat", "follow", "all"])
parser.add_argument('-t', '--num-threads', help="Number of threads to execute queries", default=64, type=int)
parser.add_argument('-n', '--num-queries', help="Number of queries to run on a single IP", default=20, type=int)
parser.add_argument('-d', '--delay', help="Delay in seconds between queries to a single IP", default=1, type=float)
parser.add_argument('--delay-mult', help="Every 10 queries increase delay by this factor", default=60, type=int)
args = parser.parse_args(args)
with open(args.input, 'r') as in_file:
targets = [json.loads(t) for t in in_file.readlines()]
for t in targets:
t["number"] = args.num_queries
t["method"] = args.mode
t["delay"] = args.delay
t["delay-mult"] = args.delay_mult
if "domain" not in t.keys():
t["domain"] = "cookie-repeat.example.com"
threads = min(args.num_threads, len(targets))
with open(args.output, 'w') as output:
with mp.Pool(processes=threads, initializer=lambda: signal.signal(signal.SIGINT, signal.SIG_IGN)) as p:
try:
for result in tqdm(p.imap_unordered(query, targets), total=len(targets), unit="query"):
output.write(json.dumps(result) + "\n")
except KeyboardInterrupt:
p.terminate()
p.join()
print("Exiting early from queries.")
if __name__ == "__main__":
main(sys.argv[1:])
| 39.09697 | 119 | 0.624554 | 886 | 6,451 | 4.459368 | 0.277652 | 0.040496 | 0.030119 | 0.008605 | 0.048595 | 0.023285 | 0 | 0 | 0 | 0 | 0 | 0.012351 | 0.246938 | 6,451 | 164 | 120 | 39.335366 | 0.800947 | 0.144009 | 0 | 0.050847 | 0 | 0 | 0.202264 | 0.004564 | 0 | 0 | 0 | 0 | 0 | 1 | 0.050847 | false | 0 | 0.101695 | 0.008475 | 0.228814 | 0.008475 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f308d0b0952d26b17bddb537dcb2e585bc67f772 | 674 | py | Python | gtm_gear/variable.py | etolk/gtm-gear | fcaf9cf0a3e0e6bb5c86c0ed68353beca020b561 | [
"MIT"
] | 2 | 2021-09-22T08:22:29.000Z | 2021-12-05T13:14:57.000Z | gtm_gear/variable.py | etolk/gtm-gear | fcaf9cf0a3e0e6bb5c86c0ed68353beca020b561 | [
"MIT"
] | null | null | null | gtm_gear/variable.py | etolk/gtm-gear | fcaf9cf0a3e0e6bb5c86c0ed68353beca020b561 | [
"MIT"
] | 1 | 2021-11-22T16:45:20.000Z | 2021-11-22T16:45:20.000Z | import sys
import logging
logger = logging.getLogger(__name__)
from .entity import Entity
class Variable(Entity):
def __init__(self, data, parent):
Entity.__init__(self, data, parent)
        self.entity_type = 'variables'
self.id_name = "variableId"
self.depended_checks = {
'tags':['dependent_variables'],
'triggers':['dependent_variables'],
'variables':['dependent_variables'],
}
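
    # Hedged usage sketch for the helper below:
    #   Variable.create_constant("GA_ID")
    # returns
    #   {'name': 'GA_ID', 'type': 'c',
    #    'parameter': [{'type': 'template', 'key': 'value', 'value': 'GA_ID'}]}
    # which is the payload this package builds for a GTM constant (type 'c') variable.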
@staticmethod
def create_constant(name):
return {
'name': f"{name}",
'type': 'c',
'parameter': [{'type': 'template', 'key': 'value', 'value': f"{name}"}],
} | 26.96 | 84 | 0.565282 | 64 | 674 | 5.65625 | 0.53125 | 0.149171 | 0.066298 | 0.099448 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.284866 | 674 | 25 | 85 | 26.96 | 0.751037 | 0 | 0 | 0 | 0 | 0 | 0.225185 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.095238 | false | 0 | 0.142857 | 0.047619 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f30966c7bece0bf69ff58ade10f7da0ea683aa1e | 9,687 | py | Python | test_mots.py | JAMJU/MalDim | 01b02fe4f56161c9c09d0d5ac03e26342a586a50 | [
"MIT"
] | null | null | null | test_mots.py | JAMJU/MalDim | 01b02fe4f56161c9c09d0d5ac03e26342a586a50 | [
"MIT"
] | null | null | null | test_mots.py | JAMJU/MalDim | 01b02fe4f56161c9c09d0d5ac03e26342a586a50 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from collections import defaultdict
import numpy as np
def search_list_word(namefile, list_words):
phrases = list()
with open(namefile, 'r') as f:
nb_line = 0
for line in f:
line = line.replace('\n', '')
line = line.split(';')
if nb_line != 0:
statement = line[1].split(" ")
                # keep the statement only if it contains every search word
                tr = all(word in statement for word in list_words)
if tr:
phrases.append(line[0])
nb_line += 1
return phrases
#mots = ["midi", "matin", "soir"]
#mots = ["##emlapatch##"]
#mots = ["problème"]
# mots = ["problème", "##emlapatch##"]
# resultats = search_list_word("input_train_token_normalized.csv", mots)
# print(resultats)
def count_class(namefile, res):
dico = defaultdict(int)
with open(namefile, 'r') as f:
nb_line = 0
for line in f:
line = line.replace('\n', '')
line = line.split(';')
if line[0] in res:
dico[line[1]] += 1
return dico
# solution_file = "challenge_output_data_training_file_predict_the_expected_answer.csv"
# dict_sol = count_class(solution_file, resultats)
# print(dict_sol)
# mots_donnée_base = [
# ["liste 1", "list 2", "liste II", "liste I"],
# ["frigo", "frigidaire"],
# ["generique", "générique", "DCI"],
# ["secu", "sécurité", "sécu", "securite"],
# ["tarif", "prix", "coût", "couter", "coute", "coûte", "coût"],
# ["dose", "doser", "doses"],
# ["rupture", "stock"],
# ["dosage", "mesure"],
# ["bilan"],
# ["sanguin", "sang", "sanguine", "sanguins", "sanguines"],
# ["prise"],
# ["patch", "evra", "elma"],
# ["Comment", "comment"],
# ["Ou", "ou", "où", "Où"],
# ["Combien", "combien"],
# ["Qu'est", "qu'est"],
# ["prescription", "prescris", "prescrit", "prescrire", "préscrit"],
# ["quoi", "Quoi"],
# ["périmé","perime", "permimés", "perimes"]
# ]
mots_donnée_norm = [
["substituer", "substitution", "substituable"],
["générique", "generique"],
["secable", "insécable", "coupable"],
["couper"],
["gélule", "##arkogelules##"],
["danger", "dangereux"],
["risque", "risquer"],
["nocif"],
["rembourser", "remboursement", "remboursable"],
["sécu"],
["charge"],
["tarif", "prix"],
["coût", "coûter"],
["cher"],
["##adcirca##"],
["grocesse", "enceinte"],
["nourisson", "bébé"],
["dosage", "dose", "doser"],
["posologie"],
["par"],
["jour", "semaine", "mois", "année"],
["métabolisation", "métabolisme"],
["élimination", "éliminer"],
["temps"],
["naturel"],
["origine"],
["moment"],
["soir"],
["matin"],
["midi"],
["heure"],
["prendre"],
["mélanger", "mélange"],
["diluer", "dilution"],
["comment"],
["quand"],
["combien"],
["alcool"],
["soleil"],
["cannabis"],
["cigarette"],
["compatible"],
["quoi"],
["ou"],
["où"],
["traitement"],
["traiter"],
["pourquoi"],
["quel"],
["forme"],
["suppositoire"],
["sirop"],
["comprimer"],
["exister"],
["acheter"],
["alternatif"],
["secondaire"],
["durée"],
["pendant"],
["depuis"],
["effet"],
["trouver"],
["disponible"],
["pharmacie"],
["marché"],
["sevrage", "sevrer"],
["arrêt", "arrêter"],
["marque"],
["péremption", "périmer"],
["vaccin"],
["frigo", "frigidaire", "réfrigérateur"],
["température"],
["réchauffer", "chaud"],
["ouvrir", "ouverture"],
["conserver", "conservation"],
["##emlapatch##", "##evra##"],
["plaquette"],
["pilule"],
["oublier", "oubli", "oublie"],
["passage", "passer"],
["continuer"],
["changer", "changement"],
["prescrire", "prescription"],
["ordonnance"],
["sans"],
["lister", "liste"],
["flacon"],
["contenir"],
["composition", "composer"],
["fabriquant"],
["rupture"],
["stock"],
["manque", "manquer"],
["bilan"],
["sang", "sanguin"],
["prise"],
["remplacer", "remplacement"]
]
def add_list_word(namefile_origin, namefile_res, list_dim):
vects = defaultdict(list)
with open(namefile_origin, 'r') as f:
nb_line = 0
for line in f:
line = line.replace('\n', '')
line = line.split(';')
if nb_line != 0:
statement = line[1].split(" ")
for list_word in list_dim:
tr = False
for word in list_word:
if word in statement:
tr = True
if tr:
vects[nb_line].append(1)
else:
vects[nb_line].append(0)
nb_line += 1
with open(namefile_res, 'r') as f_res:
split_res_file = namefile_res.split(".")
rewrite = split_res_file[0] + "_modified." + split_res_file[1]
with open(rewrite, 'w') as f_mod:
nb = 1
for line in f_res:
new_line = line.replace('\n', '')
for val in vects[nb]:
new_line = new_line + "," + str(val)
new_line = new_line + "\n"
f_mod.write(new_line)
nb += 1
# add_list_word("input_train_norm_medoc_corrected_v2.csv", "vector_input_fasttext_and_other_v2.csv", mots_donnée_norm)
def add_size_phrase(namefile_origin, namefile_res):
vects = defaultdict(int)
with open(namefile_origin, 'r') as f:
nb_line = 0
for line in f:
line = line.replace('\n', '')
line = line.split(';')
if nb_line != 0:
statement = line[1].split(" ")
vects[nb_line] = len(statement)
nb_line += 1
with open(namefile_res, 'r') as f_res:
split_res_file = namefile_res.split(".")
rewrite = split_res_file[0] + "_nb." + split_res_file[1]
with open(rewrite, 'w') as f_mod:
nb = 1
for line in f_res:
new_line = line.replace('\n', '')
                # one-hot encode the sentence length into 7 buckets:
                # <5, <10, <15, <20, <30, <50, >=50
                bins = [5, 10, 15, 20, 30, 50]
                bucket = 0
                for limit in bins:
                    if vects[nb] < limit:
                        break
                    bucket += 1
                one_hot = ['0'] * (len(bins) + 1)
                one_hot[bucket] = '1'
                new_line = new_line + "," + ",".join(one_hot) + "\n"
                f_mod.write(new_line)
nb += 1
#add_size_phrase("input_test_norm_medoc_corrected_v2.csv", "vector_input_test_fasttext_and_other_v2_modified.csv")
def get_medoc_used(namefile_med, namefile_data):
    """Return the drug-name markers that occur in more than 10 statements of namefile_data."""
    medocs = defaultdict(int)
    name_medocs = list()
    with open(namefile_med, 'r') as f:
        for line in f:
            line = line.replace('\n', '')
            name = "##" + str(line) + "##"
            medocs[name] = 0
            name_medocs.append(name)
    with open(namefile_data, 'r') as f:
        nb_line = 0
        for line in f:
            line = line.replace('\n', '')
            line = line.split(';')
            if nb_line != 0:
                for med_name in name_medocs:
                    if med_name in line[1]:
                        medocs[med_name] += 1
            nb_line += 1
    s_nb = 0
    final_list = list()
    for med_name in name_medocs:
        if medocs[med_name] > 10:
            s_nb += 1
            final_list.append(med_name)
    print(s_nb)
    print(final_list)
    return final_list
selected_list = get_medoc_used("train_v2_list_medoc.csv", "input_train_norm_medoc_corrected_v2.csv")
def add_list_medocs(namefile_origin, namefile_res, list_medocs):
    vects = defaultdict(list)
    with open(namefile_origin, 'r') as f:
        nb_line = 0
        for line in f:
            line = line.replace('\n', '')
            line = line.split(';')
            if nb_line != 0:
                statement = line[1].split(" ")
                for med in list_medocs:
                    vects[nb_line].append(1 if med in statement else 0)
            nb_line += 1
    with open(namefile_res, 'r') as f_res:
        split_res_file = namefile_res.split(".")
        rewrite = split_res_file[0] + "_meds." + split_res_file[1]
        with open(rewrite, 'w') as f_mod:
            nb = 1
            for line in f_res:
                new_line = line.replace('\n', '')
                for val in vects[nb]:
                    new_line = new_line + "," + str(val)
                new_line = new_line + "\n"
                f_mod.write(new_line)
                nb += 1
# add_list_medocs("input_test_norm_medoc_corrected_v2.csv", "vector_input_test_fasttext_and_other_v2_modified_nb.csv", selected_list)
| 29.898148 | 133 | 0.505729 | 1,120 | 9,687 | 4.159821 | 0.245536 | 0.072118 | 0.012878 | 0.054089 | 0.452887 | 0.431423 | 0.425842 | 0.396652 | 0.387208 | 0.379051 | 0 | 0.017989 | 0.317126 | 9,687 | 323 | 134 | 29.990712 | 0.686319 | 0.142872 | 0 | 0.406716 | 0 | 0 | 0.154116 | 0.007506 | 0 | 0 | 0 | 0 | 0 | 1 | 0.022388 | false | 0.003731 | 0.007463 | 0 | 0.041045 | 0.007463 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f30a9aaecc5773b093760973d5a440714c8d98d2 | 1,044 | py | Python | ezsgame/extra/_sintax_tokens.py | NoxxDev/ezgame | abe7366ceef88b27ac2fbff0aeef4ea6d6cade14 | [
"MIT"
] | 2 | 2021-12-29T21:31:46.000Z | 2021-12-29T21:31:48.000Z | ezsgame/extra/_sintax_tokens.py | NoxxDev/ezgame | abe7366ceef88b27ac2fbff0aeef4ea6d6cade14 | [
"MIT"
] | null | null | null | ezsgame/extra/_sintax_tokens.py | NoxxDev/ezgame | abe7366ceef88b27ac2fbff0aeef4ea6d6cade14 | [
"MIT"
] | null | null | null |
# ----------------------------------------------------------------------------
# TOKENS FOR STRUCTURED OBJECTS MODULE
# ----------------------------------------------------------------------------
# ALL TOKENS MUST BE UNIQUE OR IT CAN CAUSE ERRORS, RUN THIS FILE TO SEE IF ANY TOKEN IS DUPLICATE
# DO NOT REMOVE ANY TOKEN
STYLES_CLASS_TOKEN = "::" # used to define a set of styles that will be applied to the object with the same class
# Example: ::items : {...}
FUNCTION_TOKEN = "on:" # used to define a function
# Example: on:click : {...}
# check if any duplicate token
if __name__ == "__main__":
    tokens = {}
    _globals = {**globals()}
    for k, v in _globals.items():
        if k.startswith("__"):
            continue
        if not k or not v:
            continue
        if v in tokens.values():
            # find which already-registered token holds this same value
            key = [k2 for k2, v2 in tokens.items() if v2 == v][0]
            raise SyntaxError(f"Duplicate token: {k} == {key}, Token : {v}")
        else:
            tokens[k] = v
| 29.828571 | 111 | 0.475096 | 121 | 1,044 | 3.975207 | 0.520661 | 0.012474 | 0.02079 | 0.029106 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00134 | 0.285441 | 1,044 | 35 | 112 | 29.828571 | 0.643432 | 0.482759 | 0 | 0 | 0 | 0 | 0.117409 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f30c5bdedbc04985d2afed4f50b5fad8a60328c2 | 5,655 | py | Python | libs/tools.py | B0Qi/hualubei2020-callingsmoking | 73d1049d95554b5d669afa93132a0fce37461ff4 | [
"MIT"
] | 27 | 2021-04-12T07:19:17.000Z | 2022-03-28T06:25:44.000Z | libs/tools.py | B0Qi/hualubei2020-callingsmoking | 73d1049d95554b5d669afa93132a0fce37461ff4 | [
"MIT"
] | 1 | 2021-04-21T05:33:17.000Z | 2021-12-22T03:41:21.000Z | libs/tools.py | B0Qi/hualubei2020-callingsmoking | 73d1049d95554b5d669afa93132a0fce37461ff4 | [
"MIT"
] | 7 | 2021-04-12T10:56:27.000Z | 2021-08-24T07:24:16.000Z | import os
import random
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F  # needed by LabelSmoothLoss.forward below
def getAllName(file_dir, tail_list=['.png', '.jpg', '.JPG', '.PNG']):
    L = []
    for root, dirs, files in os.walk(file_dir):
        for file in files:
            if os.path.splitext(file)[1] in tail_list:
                L.append(os.path.join(root, file))
    return L
def npSoftmax(x):
    # subtract the row-wise max before exponentiating, for numerical stability
    x_row_max = x.max(axis=-1)
    x_row_max = x_row_max.reshape(list(x.shape)[:-1] + [1])
    x = x - x_row_max
    x_exp = np.exp(x)
    x_exp_row_sum = x_exp.sum(axis=-1).reshape(list(x.shape)[:-1] + [1])
    softmax = x_exp / x_exp_row_sum
    return softmax
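# --- Illustrative check (not part of the original file): each row of the
# npSoftmax output is a probability distribution.
def _demo_npSoftmax():
    probs = npSoftmax(np.array([[1.0, 2.0, 3.0]]))
    assert np.allclose(probs.sum(axis=-1), 1.0)
    print(probs)  # ~[[0.0900, 0.2447, 0.6652]]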
def seed_reproducer(seed=42):
    """Reproducer for pytorch experiment.

    Parameters
    ----------
    seed: int, optional (default = 42)
        Random seed.

    Example
    -------
    seed_reproducer(seed=2019).
    """
    random.seed(seed)
    os.environ["PYTHONHASHSEED"] = str(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed(seed)
        torch.cuda.manual_seed_all(seed)
        torch.backends.cudnn.deterministic = True
        torch.backends.cudnn.benchmark = False
        torch.backends.cudnn.enabled = True
def res2itemClassify(res, img_name):
    # res looks like [[0.60, 0.29, 0.10, 0.01]]: one score per category
    cate_name = {0: "calling", 1: "normal", 2: "smoking", 3: "smoking_calling"}
    scores = res[0]
    category = cate_name[np.argmax(scores)]
    score = round(float(np.max(scores)), 5)
    item = {"image_name": img_name,
            "category": category,
            "score": score}
    return item
def res2itemClassifyTest(res, img_name):
    # res looks like [[0.6018679, 0.3981321, ...]]
    cate_name = {0: "calling", 1: "normal", 2: "smoking", 3: "smoking_calling"}
    # cate_name = {0: "calling", 1: "smoking"}
    scores = res[0]
    category = cate_name[np.argmax(scores)]
    score = round(float(np.max(scores)), 5)
    item = {"image_name": img_name,
            "category": category,
            "score": score}
    return item
def res2item(res, img_name):
    # res looks like [[0.6018679, 0.3981321]]: [calling, smoking] scores
    cate_name = {0: "calling", 1: "smoking"}
    scores = res[0]
    if np.max(scores) > 0.5:
        score = round(float(np.max(scores)), 5)
        category = cate_name[np.argmax(scores)]
    else:
        # neither score clears the 0.5 threshold: fall back to "normal"
        score = round(float(1.0 - max(scores)), 5)
        category = "normal"
    item = {"image_name": img_name,
            "category": category,
            "score": score}
    return item
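# --- Illustrative sketch (not from the original file; the scores are made up)
# of the thresholding in res2item above.
def _demo_res2item():
    print(res2item([[0.7, 0.2]], "img_001.jpg"))  # calling, score 0.7
    print(res2item([[0.3, 0.4]], "img_002.jpg"))  # normal, score 0.6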
def clip_gradient(optimizer, grad_clip=1):
    """
    Clips gradients computed during backpropagation to avoid explosion of gradients.
    :param optimizer: optimizer with the gradients to be clipped
    :param grad_clip: clip value
    """
    for group in optimizer.param_groups:
        for param in group["params"]:
            if param.grad is not None:
                param.grad.data.clamp_(-grad_clip, grad_clip)
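# --- Hedged usage sketch (not from the original file): clip_gradient is meant
# to be called between backward() and step(); the tiny model below is an
# assumption for illustration only.
def _demo_clip_gradient():
    model = nn.Linear(4, 2)
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
    loss = model(torch.randn(8, 4)).sum()
    loss.backward()
    clip_gradient(optimizer, grad_clip=1)
    optimizer.step()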
class LabelSmoothLoss(nn.Module):
    def __init__(self, smoothing=0.1):
        super(LabelSmoothLoss, self).__init__()
        self.smoothing = smoothing

    def forward(self, input, target):
        log_prob = F.log_softmax(input, dim=-1)
        weight = input.new_ones(input.size()) * \
            self.smoothing / (input.size(-1) - 1.)
        weight.scatter_(-1, target.unsqueeze(-1), (1. - self.smoothing))
        loss = (-weight * log_prob).sum(dim=-1).mean()
        return loss
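# --- Minimal sketch of LabelSmoothLoss usage (shapes and values are
# illustrative assumptions, not from the original file).
def _demo_label_smooth_loss():
    criterion = LabelSmoothLoss(smoothing=0.1)
    logits = torch.randn(8, 4)           # batch of 8 samples, 4 classes
    targets = torch.randint(0, 4, (8,))  # integer class labels
    print(criterion(logits, targets))    # scalar label-smoothed loss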
def transferMutilToClass(datalist):
    # n*2 [call, smoke] scores -> n*4 [call, normal, smoke, smoke+call]
    new_list = []
    for data in datalist:
        new_data = [data[0] + 1 - data[1],
                    1 - data[0] + 1 - data[1],
                    data[1] + 1 - data[0],
                    data[0] + data[1]]  # call / normal / smoke / sc
        new_data = npSoftmax(np.array(new_data)).tolist()
        new_list.append(new_data)
    return new_list
def transferMutilLabel(datalist):
    # n*2 [call, smoke] scores -> n*1 class index
    # (0 = calling, 1 = normal, 2 = smoking, 3 = smoking_calling)
    new_list = []
    for data in datalist:
        if data[0] < 0.5:
            if data[1] < 0.5:
                new_data = 1
            else:
                new_data = 2
        else:
            if data[1] < 0.5:
                new_data = 0
            else:
                new_data = 3
        new_list.append(new_data)
    return new_list
class CrossEntropyLossOneHot(nn.Module):
    def __init__(self):
        super(CrossEntropyLossOneHot, self).__init__()
        self.log_softmax = nn.LogSoftmax(dim=-1)

    def forward(self, preds, labels):
        return torch.mean(torch.sum(-labels * self.log_softmax(preds), -1))
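# --- Short sketch of CrossEntropyLossOneHot with one-hot targets (shapes are
# illustrative assumptions, not from the original file).
def _demo_ce_one_hot():
    criterion = CrossEntropyLossOneHot()
    preds = torch.randn(8, 4)
    labels = torch.eye(4)[torch.randint(0, 4, (8,))]  # one-hot targets
    print(criterion(preds, labels))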
if __name__ == '__main__':
    # x = torch.FloatTensor([1.0399, 0.1582])
    # print(nn.Sigmoid()(x))
    x = [[0.0030388175509870052, 0.002976007293909788],
         [1.3112669876136351e-05, 0.9992826581001282],
         [0.5, 0.9992826581001282]]
print(transferMutilToClass(x)) | 29.763158 | 84 | 0.574889 | 749 | 5,655 | 4.198932 | 0.222964 | 0.033386 | 0.038156 | 0.025437 | 0.392687 | 0.349126 | 0.31097 | 0.275676 | 0.243879 | 0.225437 | 0 | 0.06543 | 0.275685 | 5,655 | 190 | 85 | 29.763158 | 0.702393 | 0.213616 | 0 | 0.324561 | 0 | 0 | 0.046635 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.114035 | false | 0 | 0.04386 | 0.008772 | 0.254386 | 0.008772 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f30e933df3d055865f039ca5ebbf80597d1caa7c | 21,784 | py | Python | tests/test_db.py | Kostiantyn-Salnykov/fastapi-mongodb | e8d0edf59632912fe8854df951d81d63eaf3478d | [
"MIT"
] | 7 | 2021-12-25T11:18:59.000Z | 2022-03-29T14:25:19.000Z | tests/test_db.py | Kostiantyn-Salnykov/fastapi-mongodb | e8d0edf59632912fe8854df951d81d63eaf3478d | [
"MIT"
] | 4 | 2021-08-31T22:59:25.000Z | 2021-09-27T06:26:29.000Z | tests/test_db.py | Kostiantyn-Salnykov/fastapi-mongodb | e8d0edf59632912fe8854df951d81d63eaf3478d | [
"MIT"
] | 3 | 2021-09-26T10:40:43.000Z | 2022-02-16T13:57:57.000Z | import datetime
import decimal
import random
import typing
import unittest.mock
import bson
import motor.motor_asyncio
import pymongo.errors
import pytest
import fastapi_mongodb.db
import fastapi_mongodb.helpers
import fastapi_mongodb.logging
pytestmark = [pytest.mark.asyncio]
class TestBaseDocument:
    def setup_method(self) -> None:
        self.base_document = fastapi_mongodb.db.BaseDocument()

    def test__eq__(self, faker):
        data_1, data_2 = faker.pydict(), faker.pydict()
        fake_type = random.choice(
            [faker.pystr, faker.pybool, faker.pyfloat, faker.pyint, faker.pylist, faker.pyset, faker.pydecimal]
        )()
        document_1 = fastapi_mongodb.db.BaseDocument(data=data_1)
        document_2 = fastapi_mongodb.db.BaseDocument(data=data_2)
        assert data_1 == document_1
        assert data_2 == document_2
        assert data_1 != document_2
        assert data_2 != document_1
        assert document_1 != document_2
        assert fake_type != document_1

    def test_properties(self):
        assert self.base_document.oid is None
        assert self.base_document.id is None
        assert self.base_document.data == {}
        oid = bson.ObjectId()
        self.base_document["_id"] = oid
        assert self.base_document.oid == oid
        assert self.base_document.id == str(oid)
        assert self.base_document.data == {"_id": oid}
        assert self.base_document.generated_at == oid.generation_time.astimezone(
            tz=fastapi_mongodb.helpers.get_utc_timezone()
        )

    def test_getitem_setitem_delitem(self, faker):
        # test getitem
        with pytest.raises(KeyError) as exception_context:
            _ = self.base_document["test"]
        assert str(exception_context.value) == str(KeyError("test"))
        assert self.base_document.get("test", None) is None
        fake_test = faker.pystr()
        # test setitem
        self.base_document["test"] = fake_test
        assert self.base_document["test"] == fake_test
        # test delitem
        del self.base_document["test"]
        assert self.base_document.get("test", None) is None
class TestDecimalCodec:
    def setup_method(self) -> None:
        self.codec = fastapi_mongodb.db.DecimalCodec()

    def test_transform(self, faker):
        value = faker.pydecimal()
        bson_value = self.codec.transform_python(value=value)
        python_value = self.codec.transform_bson(value=bson_value)
        assert isinstance(bson_value, bson.Decimal128)
        assert isinstance(python_value, decimal.Decimal)
        assert value == python_value
class TestTimeDeltaCodec:
    def setup_method(self) -> None:
        self.codec = fastapi_mongodb.db.TimeDeltaCodec()

    def test_transform(self, faker):
        value = faker.time_delta(end_datetime=faker.future_datetime())
        bson_value = self.codec.transform_python(value=value)
        python_value = self.codec.transform_bson(value=bson_value)
        assert isinstance(bson_value, str)
        assert isinstance(python_value, datetime.timedelta)
        assert value == python_value

    def test_transform_0_micros(self):
        weeks, days, hours, minutes, seconds = 1, 2, 3, 4, 5
        bson_value = f"P{weeks}W{days}DT{hours}H{minutes}M{seconds}S"
        expected_time_delta = datetime.timedelta(weeks=weeks, days=days, hours=hours, minutes=minutes, seconds=seconds)
        python_value = self.codec.transform_bson(value=bson_value)
        assert expected_time_delta == python_value
@pytest.fixture()
def patch_logging(patcher):
    yield patcher.patch_attr(target=fastapi_mongodb.logging.simple_logger, attribute="debug")


@pytest.fixture()
def event():
    return unittest.mock.MagicMock()
class TestCommandLogger:
    @classmethod
    def setup_class(cls) -> None:
        cls.logger = fastapi_mongodb.db.CommandLogger()

    def test_started(self, patch_logging, event):
        self.logger.started(event=event)
        patch_logging.assert_called_once_with(
            f"Command '{event.command_name}' with request id {event.request_id} started on server "
            f"{event.connection_id}"
        )

    def test_succeeded(self, patch_logging, event):
        self.logger.succeeded(event=event)
        patch_logging.assert_called_once_with(
            f"Command '{event.command_name}' with request id {event.request_id} on server "
            f"{event.connection_id} succeeded in {event.duration_micros} microseconds"
        )

    def test_failed(self, patch_logging, event):
        self.logger.failed(event=event)
        patch_logging.assert_called_once_with(
            f"Command {event.command_name} with request id {event.request_id} on server "
            f"{event.connection_id} failed in {event.duration_micros} microseconds"
        )
class TestConnectionPoolLogger:
    @classmethod
    def setup_class(cls) -> None:
        cls.logger = fastapi_mongodb.db.ConnectionPoolLogger()

    def test_pool_created(self, patch_logging, event):
        self.logger.pool_created(event=event)
        patch_logging.assert_called_once_with(f"[pool {event.address}] pool created")

    def test_pool_cleared(self, patch_logging, event):
        self.logger.pool_cleared(event=event)
        patch_logging.assert_called_once_with(f"[pool {event.address}] pool cleared")

    def test_pool_closed(self, patch_logging, event):
        self.logger.pool_closed(event=event)
        patch_logging.assert_called_once_with(f"[pool {event.address}] pool closed")

    def test_connection_created(self, patch_logging, event):
        self.logger.connection_created(event=event)
        patch_logging.assert_called_once_with(f"[pool {event.address}][conn #{event.connection_id}] connection created")

    def test_connection_ready(self, patch_logging, event):
        self.logger.connection_ready(event=event)
        patch_logging.assert_called_once_with(
            f"[pool {event.address}][conn #{event.connection_id}] connection setup succeeded"
        )

    def test_connection_closed(self, patch_logging, event):
        self.logger.connection_closed(event=event)
        patch_logging.assert_called_once_with(
            f"[pool {event.address}][conn #{event.connection_id}] connection closed, reason: {event.reason}"
        )

    def test_connection_check_out_started(self, patch_logging, event):
        self.logger.connection_check_out_started(event=event)
        patch_logging.assert_called_once_with(f"[pool {event.address}] connection check out started")

    def test_connection_check_out_failed(self, patch_logging, event):
        self.logger.connection_check_out_failed(event=event)
        patch_logging.assert_called_once_with(
            f"[pool {event.address}] connection check out failed, reason: {event.reason}"
        )

    def test_connection_checked_out(self, patch_logging, event):
        self.logger.connection_checked_out(event=event)
        patch_logging.assert_called_once_with(
            f"[pool {event.address}][conn #{event.connection_id}] connection checked out of pool"
        )

    def test_connection_checked_in(self, patch_logging, event):
        self.logger.connection_checked_in(event=event)
        patch_logging.assert_called_once_with(
            f"[pool {event.address}][conn #{event.connection_id}] connection checked into pool"
        )
class TestServerLogger:
    @classmethod
    def setup_class(cls) -> None:
        cls.logger = fastapi_mongodb.db.ServerLogger()

    def test_opened(self, patch_logging, event):
        self.logger.opened(event=event)
        patch_logging.assert_called_once_with(f"Server {event.server_address} added to topology {event.topology_id}")

    def test_description_changed_called(self, patch_logging, event):
        new_mock = unittest.mock.MagicMock()
        event.new_description.server_type = new_mock
        self.logger.description_changed(event=event)
        patch_logging.assert_called_once_with(
            f"Server {event.server_address} changed type from {event.previous_description.server_type_name} to "
            f"{event.new_description.server_type_name}"
        )

    def test_description_changed_not_called(self, patch_logging, event):
        new_mock = unittest.mock.MagicMock()
        event.previous_description.server_type = new_mock
        event.new_description.server_type = new_mock
        self.logger.description_changed(event=event)
        patch_logging.assert_not_called()

    def test_closed(self, patch_logging, event):
        self.logger.closed(event=event)
        patch_logging.assert_called_once_with(
            f"Server {event.server_address} removed from topology {event.topology_id}"
        )
class TestHeartbeatLogger:
    @classmethod
    def setup_class(cls) -> None:
        cls.logger = fastapi_mongodb.db.HeartbeatLogger()

    def test_started(self, patch_logging, event):
        self.logger.started(event=event)
        patch_logging.assert_called_once_with(f"Heartbeat sent to server {event.connection_id}")

    def test_succeeded(self, patch_logging, event):
        self.logger.succeeded(event=event)
        patch_logging.assert_called_once_with(
            f"Heartbeat to server {event.connection_id} succeeded with reply {event.reply.document}"
        )

    def test_failed(self, patch_logging, event):
        self.logger.failed(event=event)
        patch_logging.assert_called_once_with(
            f"Heartbeat to server {event.connection_id} failed with error {event.reply}"
        )
class TestTopologyLogger:
    @classmethod
    def setup_class(cls) -> None:
        cls.logger = fastapi_mongodb.db.TopologyLogger()

    def test_opened(self, patch_logging, event):
        self.logger.opened(event=event)
        patch_logging.assert_called_once_with(f"Topology with id {event.topology_id} opened")

    def test_description_changed(self, patch_logging, event):
        event.new_description.has_writable_server.return_value = False
        event.new_description.has_readable_server.return_value = False
        self.logger.description_changed(event=event)
        patch_logging.assert_has_calls(
            calls=[
                unittest.mock.call(f"Topology description updated for topology id {event.topology_id}"),
                unittest.mock.call(
                    f"Topology {event.topology_id} changed type from {event.previous_description.topology_type_name} "
                    f"to {event.new_description.topology_type_name}"
                ),
                unittest.mock.call("No writable servers available."),
                unittest.mock.call("No readable servers available."),
            ]
        )

    def test_description_changed_not_changed(self, patch_logging, event):
        mock_topology_type = unittest.mock.MagicMock()
        event.previous_description.topology_type = mock_topology_type
        event.new_description.topology_type = mock_topology_type
        self.logger.description_changed(event=event)
        patch_logging.assert_called_once_with(f"Topology description updated for topology id {event.topology_id}")

    def test_closed(self, patch_logging, event):
        self.logger.closed(event=event)
        patch_logging.assert_called_once_with(f"Topology with id {event.topology_id} closed")
class TestDBHandler:
    @classmethod
    def setup_class(cls) -> None:
        cls.test_db = "test_db"

    @pytest.fixture()
    async def setup_indexes(self, db_manager, faker, mongodb_session):
        index_name, col_name = faker.pystr(), faker.pystr()
        await db_manager.create_index(
            col_name=col_name,
            db_name=self.test_db,
            name=index_name,
            index=[("test", pymongo.ASCENDING)],
            session=mongodb_session,
        )
        indexes_names = await db_manager.list_indexes(
            col_name=col_name, db_name=self.test_db, only_names=True, session=mongodb_session
        )
        assert index_name in indexes_names
        return index_name, col_name

    def test_create_client(self, db_manager):
        result = db_manager.create_client()
        assert result is None

    def test_delete_client(self, db_manager):
        result = db_manager.delete_client()
        assert result is None

    def test_retrieve_client(self, db_manager):
        result = db_manager.retrieve_client()
        assert result.__class__ == motor.motor_asyncio.AsyncIOMotorClient

    def test_retrieve_database(self, db_manager):
        result = db_manager.retrieve_database()
        assert result.__class__ == motor.motor_asyncio.AsyncIOMotorDatabase
        assert result.name == "test_db"

    async def test_get_server_info(self, db_manager, mongodb_session):
        result = await db_manager.get_server_info(session=mongodb_session)
        assert dict == result.__class__
        assert 1.0 == result["ok"]

    async def test_list_databases(self, db_manager, mongodb_session):
        required_dbs = ["admin", "local"]
        result: list[dict[str, typing.Any]] = await db_manager.list_databases(session=mongodb_session)
        result_2: list[str] = await db_manager.list_databases(only_names=True, session=mongodb_session)
        assert all(required_db in [db["name"] for db in result] for required_db in required_dbs)
        assert all(required_db in result_2 for required_db in required_dbs)

    async def test_delete_database(self, db_manager, faker, mongodb_session):
        test_db = self.test_db
        await db_manager.create_collection(name=faker.pystr(), db_name=faker.pystr())
        db_names = await db_manager.list_databases(only_names=True, session=mongodb_session)
        assert test_db in db_names
        await db_manager.delete_database(name=test_db, session=mongodb_session)
        updated_db_names = await db_manager.list_databases(only_names=True, session=mongodb_session)
        assert test_db not in updated_db_names

    async def test_set_get_profiling_level(self, db_manager, mongodb_session):
        default_level = 0  # OFF
        new_level = 2  # ALL
        assert default_level == await db_manager.get_profiling_level(db_name=self.test_db, session=mongodb_session)
        result = await db_manager.set_profiling_level(
            db_name=self.test_db, level=new_level, slow_ms=0, session=mongodb_session
        )
        assert default_level == result["was"]
        assert 1.0 == result["ok"]
        assert new_level == await db_manager.get_profiling_level(db_name=self.test_db, session=mongodb_session)

    async def test_get_profiling_info(self, db_manager, faker, mongodb_session):
        col_name = faker.pystr()
        level = 2
        await db_manager.set_profiling_level(db_name=self.test_db, level=level, session=mongodb_session)
        await db_manager.create_collection(name=col_name, db_name=self.test_db, session=mongodb_session)
        result = await db_manager.get_profiling_info(db_name=self.test_db, session=mongodb_session)
        assert list == result.__class__
        assert "command" == result[0]["op"]

    async def test_create_collection(self, db_manager, faker, mongodb_session):
        col_name = faker.pystr()
        col_names = await db_manager.list_collections(db_name=self.test_db, only_names=True, session=mongodb_session)
        assert col_name not in col_names
        collection = await db_manager.create_collection(name=col_name, db_name=self.test_db, session=mongodb_session)
        updated_col_names = await db_manager.list_collections(
            db_name=self.test_db, only_names=True, session=mongodb_session
        )
        assert col_name in updated_col_names
        assert motor.motor_asyncio.AsyncIOMotorCollection == collection.__class__
        assert col_name == collection.name
        assert self.test_db == collection.database.name

    async def test_create_collection_not_safe(self, db_manager, faker, mongodb_session):
        col_name = faker.pystr()
        await db_manager.create_collection(name=col_name, db_name=self.test_db, session=mongodb_session)
        with pytest.raises(pymongo.errors.CollectionInvalid) as exception_context:
            await db_manager.create_collection(name=col_name, db_name=self.test_db, safe=False, session=mongodb_session)
        await db_manager.create_collection(name=col_name, db_name=self.test_db, session=mongodb_session)
        assert f"collection {col_name} already exists" == str(exception_context.value)

    async def test_delete_collection(self, db_manager, faker, mongodb_session):
        col_name = faker.pystr()
        await db_manager.create_collection(name=col_name, db_name=self.test_db, session=mongodb_session)
        col_names = await db_manager.list_collections(db_name=self.test_db, only_names=True, session=mongodb_session)
        assert col_name in col_names
        await db_manager.delete_collection(name=col_name, db_name=self.test_db, session=mongodb_session)
        updated_col_names = await db_manager.list_collections(
            db_name=self.test_db, only_names=True, session=mongodb_session
        )
        assert col_name not in updated_col_names

    async def test_list_collections(self, db_manager, faker, mongodb_session):
        col_name, col_type = faker.pystr(), "collection"
        await db_manager.create_collection(name=col_name, db_name=self.test_db, session=mongodb_session)
        result: list[dict[str, typing.Any]] = await db_manager.list_collections(
            db_name=self.test_db, session=mongodb_session
        )
        for i, col in enumerate(result):
            if col["name"] == col_name:
                assert col_type == result[i]["type"]

    async def test_list_collections_only_names(self, db_manager, faker, mongodb_session):
        col_name = faker.pystr()
        await db_manager.create_collection(name=col_name, db_name=self.test_db, session=mongodb_session)
        result = await db_manager.list_collections(db_name=self.test_db, only_names=True, session=mongodb_session)
        assert col_name in result

    async def test_create_index(self, db_manager, faker, mongodb_session):
        index_name, col_name = faker.pystr(), faker.pystr()
        indexes_names = await db_manager.list_indexes(
            col_name=col_name, db_name=self.test_db, only_names=True, session=mongodb_session
        )
        assert index_name not in indexes_names
        result = await db_manager.create_index(
            col_name=col_name,
            db_name=self.test_db,
            name=index_name,
            index=[("test", pymongo.ASCENDING)],
            session=mongodb_session,
        )
        updated_indexes_names = await db_manager.list_indexes(
            col_name=col_name, db_name=self.test_db, only_names=True, session=mongodb_session
        )
        assert index_name in updated_indexes_names
        assert index_name == result

    async def test_create_indexes(self, db_manager, faker, mongodb_session):
        index_name, index_name2, col_name = faker.pystr(), faker.pystr(), faker.pystr()
        indexes = [
            pymongo.IndexModel(name=index_name, keys=[("test", pymongo.ASCENDING)]),
            pymongo.IndexModel(name=index_name2, keys=[("test2", pymongo.DESCENDING)]),
        ]
        result = await db_manager.create_indexes(
            col_name=col_name, db_name=self.test_db, indexes=indexes, session=mongodb_session
        )
        assert [index_name, index_name2] == result

    async def test_delete_index(self, db_manager, setup_indexes, mongodb_session):
        index_name, col_name = setup_indexes
        await db_manager.delete_index(col_name=col_name, db_name=self.test_db, name=index_name, session=mongodb_session)
        updated_indexes_names = await db_manager.list_indexes(
            col_name=col_name, db_name=self.test_db, only_names=True, session=mongodb_session
        )
        assert index_name not in updated_indexes_names

    async def test_delete_index_not_safe(self, db_manager, faker, mongodb_session):
        index_name, col_name = faker.pystr(), faker.pystr()
        expected_exception_details = {
            "ok": 0.0,
            "errmsg": f"index not found with name [{index_name}]",
            "code": 27,
            "codeName": "IndexNotFound",
        }
        await db_manager.create_collection(name=col_name, db_name=self.test_db, session=mongodb_session)
        with pytest.raises(pymongo.errors.OperationFailure) as exception_context:
            await db_manager.delete_index(
                col_name=col_name, db_name=self.test_db, name=index_name, safe=False, session=mongodb_session
            )
        await db_manager.delete_index(col_name=col_name, db_name=self.test_db, name=index_name, session=mongodb_session)
        for key, value in expected_exception_details.items():
            assert value == exception_context.value.details[key]

    async def test_list_indexes_names(self, db_manager, faker, setup_indexes):
        """Same logic as in setup_indexes"""

    async def test_list_indexes(self, db_manager, faker, mongodb_session):
        index_name, col_name = faker.pystr(), faker.pystr()
        index_key, index_order = "test", pymongo.ASCENDING
        index_keys = [(index_key, index_order)]
        await db_manager.create_index(
            col_name=col_name, db_name=self.test_db, name=index_name, index=index_keys, session=mongodb_session
        )
        result = await db_manager.list_indexes(col_name=col_name, db_name=self.test_db, session=mongodb_session)
        created_index_son = result[-1]
        assert index_name == created_index_son["name"]
        assert index_order == created_index_son["key"][index_key]
        assert created_index_son["background"]
        assert not created_index_son["sparse"]
| 39.824497 | 120 | 0.701708 | 2,810 | 21,784 | 5.13274 | 0.090391 | 0.04056 | 0.038827 | 0.032032 | 0.712057 | 0.63683 | 0.574499 | 0.531582 | 0.515219 | 0.497816 | 0 | 0.002667 | 0.208318 | 21,784 | 546 | 121 | 39.897436 | 0.833643 | 0.002112 | 0 | 0.293233 | 0 | 0.005013 | 0.102231 | 0.034246 | 0 | 0 | 0 | 0 | 0.215539 | 1 | 0.112782 | false | 0 | 0.030075 | 0.002506 | 0.170426 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f30f64f641bc4e5c63d7d1222ed5789b01fcb6c3 | 1,757 | py | Python | july/game/urls.py | jesstess/julython.org | 1c3044b1cca06cf47ab5a603b72533dd69fb094e | [
"MIT"
] | 1 | 2020-08-11T02:42:45.000Z | 2020-08-11T02:42:45.000Z | july/game/urls.py | jesstess/julython.org | 1c3044b1cca06cf47ab5a603b72533dd69fb094e | [
"MIT"
] | null | null | null | july/game/urls.py | jesstess/julython.org | 1c3044b1cca06cf47ab5a603b72533dd69fb094e | [
"MIT"
] | null | null | null | from django.conf.urls import patterns, url
from july.game import views
urlpatterns = patterns(
    'july.game.views',
    url(r'^people/$',
        views.PlayerList.as_view(),
        name='leaderboard'),
    url(r'^people/(?P<year>\d{4})/(?P<month>\d{1,2})/((?P<day>\d{1,2})/)?$',
        views.PlayerList.as_view(),
        name='leaderboard'),
    url(r'^teams/$',
        views.TeamCollection.as_view(),
        name='teams'),
    url(r'^teams/(?P<year>\d{4})/(?P<month>\d{1,2})/((?P<day>\d{1,2})/)?$',
        views.TeamCollection.as_view(),
        name='teams'),
    url(r'^teams/(?P<slug>[a-zA-Z0-9\-]+)/$',
        views.TeamView.as_view(),
        name='team-details'),
    url(r'^location/$',
        views.LocationCollection.as_view(),
        name='locations'),
    url(r'^location/(?P<year>\d{4})/(?P<month>\d{1,2})/((?P<day>\d{1,2})/)?$',
        views.LocationCollection.as_view(),
        name='locations'),
    url(r'^location/(?P<slug>[a-zA-Z0-9\-]+)/$',
        views.LocationView.as_view(),
        name='location-detail'),
    url(r'^projects/$',
        views.BoardList.as_view(),
        name='projects'),
    url(r'^projects/(?P<year>\d{4})/(?P<month>\d{1,2})/((?P<day>\d{1,2})/)?$',
        views.BoardList.as_view(),
        name='projects'),
    url(r'^projects/(?P<slug>.+)/$',
        views.ProjectView.as_view(),
        name='project-details'),
    url(r'^languages/$',
        views.LanguageBoardList.as_view(),
        name='languages'),
    url(r'^languages/(?P<year>\d{4})/(?P<month>\d{1,2})/((?P<day>\d{1,2})/)?$',
        views.LanguageBoardList.as_view(),
        name='languages'),
    # for local only debug purposes
    url(r'^events/(?P<action>pub|sub|ws)/(?P<channel>.*)$',
        'events', name='events'),
)
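# --- Illustrative note (not part of the original file): example paths matched
# by the dated routes above; the values are hypothetical.
#   /people/2012/07/      -> PlayerList scoped to July 2012
#   /people/2012/07/15/   -> PlayerList for July 15, 2012 (the day is optional)
#   /teams/my-team-slug/  -> TeamView detail page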
| 34.45098 | 79 | 0.540125 | 239 | 1,757 | 3.916318 | 0.242678 | 0.059829 | 0.138889 | 0.037393 | 0.632479 | 0.632479 | 0.550214 | 0.518162 | 0.438034 | 0.438034 | 0 | 0.020538 | 0.196357 | 1,757 | 50 | 80 | 35.14 | 0.642351 | 0.016505 | 0 | 0.434783 | 0 | 0.108696 | 0.388181 | 0.269988 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.043478 | 0 | 0.043478 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f310b7a387d88468a25342019df1a36917e0046e | 905 | py | Python | conv_vae/vae_loss.py | gucci-j/utility-load-dataset | f52bdc3cbc96988522aed68d300dce6d8b147136 | [
"MIT"
] | null | null | null | conv_vae/vae_loss.py | gucci-j/utility-load-dataset | f52bdc3cbc96988522aed68d300dce6d8b147136 | [
"MIT"
] | null | null | null | conv_vae/vae_loss.py | gucci-j/utility-load-dataset | f52bdc3cbc96988522aed68d300dce6d8b147136 | [
"MIT"
] | null | null | null | # coding: utf-8
from keras.layers import Layer
from keras import backend as K
from keras import metrics
# loss function layer
class vae_loss(Layer):
    def __init__(self, img_size, **kwargs):
        self.is_placeholder = True
        super(vae_loss, self).__init__(**kwargs)
        self.img_size = img_size

    def vae_loss(self, x, x_decoded_mean, z_sigma, z_mean):
        # reconstruction term: pixel-wise binary cross-entropy, scaled by the image size
        reconst_loss = self.img_size[0] * self.img_size[1] * metrics.binary_crossentropy(K.flatten(x), K.flatten(x_decoded_mean))
        # KL divergence between the approximate posterior and the prior
        kl_loss = - 0.5 * K.sum(1 + K.log(K.square(z_sigma)) - K.square(z_mean) - K.square(z_sigma), axis=-1)
        return K.mean(reconst_loss + kl_loss)
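    # --- Hedged usage sketch (not from the original file; the tensor names
    # are assumptions): the layer is typically appended to the decoder output
    # so Keras picks up the loss registered via add_loss, e.g.
    #   y = vae_loss(img_size=(28, 28))([x, x_decoded_mean, z_sigma, z_mean])
    #   vae = Model(x_input, y)
    #   vae.compile(optimizer='rmsprop', loss=None)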
    def call(self, inputs):
        x = inputs[0]
        x_decoded_mean = inputs[1]
        z_sigma = inputs[2]
        z_mean = inputs[3]
        loss = self.vae_loss(x, x_decoded_mean, z_sigma, z_mean)
        self.add_loss(loss, inputs=inputs)
return x | 33.518519 | 126 | 0.695028 | 152 | 905 | 3.861842 | 0.342105 | 0.059625 | 0.074957 | 0.044293 | 0.081772 | 0.081772 | 0.081772 | 0.081772 | 0 | 0 | 0 | 0.014845 | 0.181215 | 905 | 27 | 127 | 33.518519 | 0.777328 | 0.066298 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.15 | false | 0 | 0.15 | 0 | 0.45 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f3128c9e3b73dcbe1057e8266a1fe66e26d4af08 | 2,334 | py | Python | whisker_serial_order/extra.py | RudolfCardinal/whisker_serial_order | d22f635219ae5ccd554261a3fe2124e560188a0a | [
"Apache-2.0"
] | null | null | null | whisker_serial_order/extra.py | RudolfCardinal/whisker_serial_order | d22f635219ae5ccd554261a3fe2124e560188a0a | [
"Apache-2.0"
] | null | null | null | whisker_serial_order/extra.py | RudolfCardinal/whisker_serial_order | d22f635219ae5ccd554261a3fe2124e560188a0a | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# whisker_serial_order/extra.py
"""
===============================================================================
Copyright © 2016-2018 Rudolf Cardinal (rudolf@pobox.com).
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===============================================================================
Additional functions.
"""
import datetime
import logging
from typing import Any, List, Optional, Union
import arrow
TimeType = Union[datetime.datetime, arrow.Arrow]
log = logging.getLogger(__name__)
def latency_s(t1: Optional[TimeType],
              t2: Optional[TimeType]) -> Optional[float]:
    """
    Calculates the latency in seconds between two datetime-type objects.

    Args:
        t1: start time
        t2: end time

    Returns:
        time difference in seconds, or ``None`` if either were ``None``
    """
    if t1 is None or t2 is None:
        return None
    delta = t2 - t1
    return delta.total_seconds()  # includes whole seconds, not just the microsecond component
def enumerate_to_log(items: List[Any],
                     description: str = "",
                     start: int = 1,
                     linesep: str = "\n",
                     index_suffix: str = ". ",
                     loglevel: int = logging.DEBUG) -> None:
    r"""
    Describes a list to the log.

    Args:
        items: list of items
        start: index to start at (default 1)
        description: description
        linesep: line separator (default '\n')
        index_suffix: index suffix (default '. ')
        loglevel: log level
    """
    msg = description + linesep + linesep.join(
        "{index}{index_suffix}{item}".format(
            index=index,
            index_suffix=index_suffix,
            item=item
        )
        for index, item in enumerate(items, start=start)
    )
    log.log(loglevel, msg)
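# --- Illustrative example (not from the original file; the list is made up):
#   enumerate_to_log(["apple", "banana"], description="Fruit:")
# logs, at DEBUG level:
#   Fruit:
#   1. apple
#   2. banana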
| 28.814815 | 79 | 0.580548 | 272 | 2,334 | 4.933824 | 0.507353 | 0.044709 | 0.019374 | 0.023845 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.017049 | 0.271208 | 2,334 | 80 | 80 | 29.175 | 0.771311 | 0.553556 | 0 | 0 | 0 | 0 | 0.032495 | 0.028302 | 0 | 0 | 0 | 0 | 0 | 1 | 0.071429 | false | 0 | 0.142857 | 0 | 0.285714 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f3130307fb4a2cf95561e93c4dd044a8151a405c | 740 | py | Python | tests/test_model.py | aayaffe/or-shifty | d7530c1ceabd92708271207dec38478e8b56b243 | [
"MIT"
] | 5 | 2020-01-15T23:34:22.000Z | 2020-08-28T07:51:19.000Z | tests/test_model.py | aayaffe/or-shifty | d7530c1ceabd92708271207dec38478e8b56b243 | [
"MIT"
] | 5 | 2020-01-10T22:14:59.000Z | 2022-01-21T19:00:28.000Z | tests/test_model.py | aayaffe/or-shifty | d7530c1ceabd92708271207dec38478e8b56b243 | [
"MIT"
] | 2 | 2020-09-01T11:27:29.000Z | 2021-12-16T10:16:17.000Z | from or_shifty.cli import parse_args
from or_shifty.config import Config
from or_shifty.model import solve
def test_solution_when_all_constraints_cannot_be_satisfied():
    config_file_path = "tests/test_files/no_solution/config.json"
    history_file_path = "tests/test_files/no_solution/history.json"
    inputs = parse_args(["--config", config_file_path, "--history", history_file_path])
    config = Config.build(
        people=inputs.people,
        max_shifts_per_person=inputs.max_shifts_per_person,
        shifts_by_day=inputs.shifts_by_day,
        history=inputs.history,
    )
    solution = solve(
        config=config, objective=inputs.objective, constraints=inputs.constraints,
    )
    assert len(list(solution)) == 2
| 35.238095 | 87 | 0.744595 | 99 | 740 | 5.222222 | 0.40404 | 0.061896 | 0.069633 | 0.065764 | 0.123791 | 0.123791 | 0.123791 | 0 | 0 | 0 | 0 | 0.001613 | 0.162162 | 740 | 20 | 88 | 37 | 0.832258 | 0 | 0 | 0 | 0 | 0 | 0.132432 | 0.109459 | 0 | 0 | 0 | 0 | 0.058824 | 1 | 0.058824 | false | 0 | 0.176471 | 0 | 0.235294 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f3142e91a131324e58f11fe9e3623bb0c474c725 | 7,087 | py | Python | Algorithm-code/Text-Similarity/Input_preprocess.py | cclauss/Knowledge-Graph | 07a1794c20729d5e6bf85b90769266fdc27e3c1e | [
"MIT"
] | 1 | 2019-09-17T00:32:49.000Z | 2019-09-17T00:32:49.000Z | Algorithm-code/Text-Similarity/Input_preprocess.py | jiaolongxue/Knowledge-Graph | 4dcffad1090b902a269f9c85e9004b2014556e94 | [
"MIT"
] | null | null | null | Algorithm-code/Text-Similarity/Input_preprocess.py | jiaolongxue/Knowledge-Graph | 4dcffad1090b902a269f9c85e9004b2014556e94 | [
"MIT"
] | 1 | 2021-02-23T05:51:06.000Z | 2021-02-23T05:51:06.000Z | #coding=utf-8
import numpy as np
import re
import itertools
from collections import Counter
import time
import gc
from tensorflow.contrib import learn
import gensim
import gzip
from random import random
from preprocess import MyVocabularyProcessor
class InputHelper(object):
    pre_emb = dict()
    vocab_processor = None

    def loadW2V(self, emb_path, type="bin"):
        print("Loading W2V data...")
        num_keys = 0
        if type == "textgz":
            # this seems faster than gensim non-binary load
            for line in gzip.open(emb_path):
                l = line.strip().split()
                st = l[0].lower()
                self.pre_emb[st] = np.asarray(l[1:])
            num_keys = len(self.pre_emb)
        elif type == "text":
            # this seems faster than gensim non-binary load
            for line in open(emb_path):
                l = line.strip().split()
                st = l[0].lower()
                self.pre_emb[st] = np.asarray(l[1:])
            num_keys = len(self.pre_emb)
        else:
            self.pre_emb = gensim.models.KeyedVectors.load_word2vec_format(emb_path, binary=True)
            self.pre_emb.init_sims(replace=True)
            num_keys = len(self.pre_emb.vocab)
        print("loaded word2vec len ", num_keys)
        gc.collect()
    def deletePreEmb(self):
        self.pre_emb = dict()
        gc.collect()
    def getTsvData(self, filepath):
        """
        :rtype: object
        """
        print("Loading training data from " + filepath)
        x1 = []
        x2 = []
        y = []
        # positive samples from file
        num_p = 0
        num_n = 0
        for line in open(filepath):
            l = line.strip().split("\t")
            if len(l) < 2:
                continue
            x1.append(l[1])
            x2.append(l[2])
            y.append(int(l[3]))
            if int(l[3]) > 0:
                num_p += 1
            else:
                num_n += 1
        print("p:", num_p)
        print("n:", num_n)
        # the classes are imbalanced, so oversample the positive class
        tmp_s1 = []
        tmp_s2 = []
        tmp_y = []
        add_p_num = num_n - num_p
        while add_p_num > 0:
            for idx, item in enumerate(y):
                if item == 1:
                    tmp_s1.append(x1[idx])
                    tmp_s2.append(x2[idx])
                    tmp_y.append(y[idx])
                    add_p_num -= 1
                if add_p_num <= 0:
                    break
        x1 += tmp_s1
        x2 += tmp_s2
        y += tmp_y
        return np.asarray(x1), np.asarray(x2), np.asarray(y)
    def getTsvTestData(self, filepath):
        print("Loading testing/labelled data from " + filepath)
        x1 = []
        x2 = []
        y = []
        # positive samples from file
        for line in open(filepath):
            l = line.strip().split("\t")
            if len(l) < 3:
                continue
            x1.append(l[1])
            x2.append(l[2])
            y.append(int(l[0]))  # np.array([0,1]))
        return np.asarray(x1), np.asarray(x2), np.asarray(y)
    def batch_iter(self, data, batch_size, num_epochs, shuffle=True):
        """
        Generates a batch iterator for a dataset.
        """
        data = np.asarray(data)
        print(data)
        print(data.shape)
        data_size = len(data)
        num_batches_per_epoch = int(len(data) / batch_size) + 1
        for epoch in range(num_epochs):
            # Shuffle the data at each epoch
            if shuffle:
                shuffle_indices = np.random.permutation(np.arange(data_size))
                shuffled_data = data[shuffle_indices]
            else:
                shuffled_data = data
            for batch_num in range(num_batches_per_epoch):
                start_index = batch_num * batch_size
                end_index = min((batch_num + 1) * batch_size, data_size)
                yield shuffled_data[start_index:end_index]
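    # --- Illustrative sketch (hypothetical data, not from the original file)
    # of consuming batch_iter; each yielded chunk has at most batch_size rows:
    #   helper = InputHelper()
    #   for batch in helper.batch_iter(list(range(10)), batch_size=4, num_epochs=1):
    #       print(len(batch))   # 4, 4, 2 (rows shuffled within the epoch)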
    def dumpValidation(self, x1_text, x2_text, y, shuffled_index, dev_idx, i):
        print("dumping validation " + str(i))
        x1_shuffled = x1_text[shuffled_index]
        x2_shuffled = x2_text[shuffled_index]
        y_shuffled = y[shuffled_index]
        x1_dev = x1_shuffled[dev_idx:]
        x2_dev = x2_shuffled[dev_idx:]
        y_dev = y_shuffled[dev_idx:]
        del x1_shuffled
        del y_shuffled
        with open('validation.txt' + str(i), 'w') as f:
            for text1, text2, label in zip(x1_dev, x2_dev, y_dev):
                f.write(str(label) + "\t" + text1 + "\t" + text2 + "\n")
            f.close()
        del x1_dev
        del y_dev

    # Data Preparation
    # ==================================================
    def getDataSets(self, training_paths, max_document_length, percent_dev, batch_size):
        x1_text, x2_text, y = self.getTsvData(training_paths)
        # Build vocabulary
        print("Building vocabulary")
        vocab_processor = MyVocabularyProcessor(max_document_length, min_frequency=0)
        vocab_processor.fit_transform(np.concatenate((x2_text, x1_text), axis=0))
        print("Length of loaded vocabulary ={}".format(len(vocab_processor.vocabulary_)))
        i1 = 0
        train_set = []
        dev_set = []
        sum_no_of_batches = 0
        x1 = np.asarray(list(vocab_processor.transform(x1_text)))
        x2 = np.asarray(list(vocab_processor.transform(x2_text)))
        # Randomly shuffle data
        np.random.seed(131)
        shuffle_indices = np.random.permutation(np.arange(len(y)))
        x1_shuffled = x1[shuffle_indices]
        x2_shuffled = x2[shuffle_indices]
        y_shuffled = y[shuffle_indices]
        dev_idx = -1 * len(y_shuffled) * percent_dev // 100
        del x1
        del x2
        # Split train/test set
        self.dumpValidation(x1_text, x2_text, y, shuffle_indices, dev_idx, 0)
        # TODO: This is very crude, should use cross-validation
        x1_train, x1_dev = x1_shuffled[:dev_idx], x1_shuffled[dev_idx:]
        x2_train, x2_dev = x2_shuffled[:dev_idx], x2_shuffled[dev_idx:]
        y_train, y_dev = y_shuffled[:dev_idx], y_shuffled[dev_idx:]
        print("Train/Dev split for {}: {:d}/{:d}".format(training_paths, len(y_train), len(y_dev)))
        sum_no_of_batches = sum_no_of_batches + (len(y_train) // batch_size)
        train_set = (x1_train, x2_train, y_train)
        dev_set = (x1_dev, x2_dev, y_dev)
        gc.collect()
        return train_set, dev_set, vocab_processor, sum_no_of_batches
    def getTestDataSet(self, data_path, vocab_path, max_document_length):
        x1_temp, x2_temp = self.getTsvTestData(data_path)
        # Build vocabulary
        vocab_processor = MyVocabularyProcessor(max_document_length, min_frequency=0)
        vocab_processor = vocab_processor.restore(vocab_path)
        len(vocab_processor.vocabulary_)
        x1 = np.asarray(list(vocab_processor.transform(x1_temp)))
        x2 = np.asarray(list(vocab_processor.transform(x2_temp)))
        # Randomly shuffle data
        del vocab_processor
        gc.collect()
        return x1, x2
| 35.084158 | 99 | 0.568224 | 917 | 7,087 | 4.164667 | 0.21265 | 0.051322 | 0.032993 | 0.015711 | 0.348521 | 0.30741 | 0.262896 | 0.241424 | 0.199529 | 0.199529 | 0 | 0.02528 | 0.319035 | 7,087 | 201 | 100 | 35.258706 | 0.766059 | 0.071116 | 0 | 0.229814 | 0 | 0 | 0.037577 | 0 | 0 | 0 | 0 | 0.004975 | 0 | 1 | 0.049689 | false | 0 | 0.074534 | 0 | 0.167702 | 0.074534 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f315c66cdb3ed17ceb5e023a3fb0a854fb7245a5 | 5,495 | py | Python | pareto.py | plundahl/CrowdAnomalyDetection | 4a745545d28861687da5abd057fd9e3e49f576c0 | [
"MIT"
] | 2 | 2020-09-14T05:04:13.000Z | 2020-12-08T13:07:43.000Z | pareto.py | plundahl/CrowdAnomalyDetection | 4a745545d28861687da5abd057fd9e3e49f576c0 | [
"MIT"
] | null | null | null | pareto.py | plundahl/CrowdAnomalyDetection | 4a745545d28861687da5abd057fd9e3e49f576c0 | [
"MIT"
] | null | null | null | import matplotlib.pyplot as plt
from math import cos, sin, radians, floor
import time
import random
import cProfile
length = 100
xs = []
ys = []
xs2 = []
ys2 = []
S = []
n = []
F = [[]]
#plt.subplot(121)
random.seed(2)
for x in range(0, length):
    xs.append(random.uniform(1, 100))
    ys.append(random.uniform(1, 100))
    xs2.append(random.uniform(1, 100))
    ys2.append(random.uniform(1, 100))
    S.append([])
    n.append(0)
    # print([x, xs[x], ys[x]])
plt.plot(xs,ys,'x')
dom_tests = 0
def dominates(a, b):
    global dom_tests
    dom_tests = dom_tests + 1
    return xs[a] <= xs[b] and ys[a] <= ys[b] and (xs[a] < xs[b] or ys[a] < ys[b])
    # 4-objective variant:
    #return xs[a] <= xs[b] and ys[a] <= ys[b] and xs2[a] <= xs2[b] and ys2[a] <= ys2[b] and (xs[a] < xs[b] or ys[a] < ys[b] or xs2[a] < xs2[b] or ys2[a] < ys2[b])
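# --- Descriptive note added for clarity: the block below is a fast
# non-dominated sort in the style of NSGA-II (the attribution is the editor's
# reading, not stated in the original). S[p] collects the points dominated by
# p, n[p] counts how many points dominate p, and F[i] accumulates the i-th
# Pareto front, peeled off front by front as domination counts reach zero.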
for p in range(0, length):
    for q in range(0, length):
        if dominates(p, q):
            S[p].append(q)
        elif dominates(q, p):
            n[p] = n[p] + 1
    if n[p] == 0:
        F[0].append(p)
i = 0
while F[i]:
    H = []
    for p in F[i]:
        for q in S[p]:
            n[q] = n[q] - 1
            if n[q] == 0:
                H.append(q)
        if i <= 9:
            plt.plot(xs[p], ys[p], 'o', color='C' + str(i % 10))
    i = i + 1
    F.append(H)
print("--- FAST: %s ---" % dom_tests)
dom_tests = 0
class Leaf(object):
    def __init__(self, index):
        self.index = index
        self.children = []

    def add(self, newLeaf, log=False):
        if log:
            print(str(self.index) + " ADD " + str(newLeaf.index))
            for child in self.children:
                print("#" + str(child.index))
        dominated = []
        random.shuffle(self.children)
        for child in self.children:
            if log:
                print(">" + str(child.index))
            if dominates(child.index, newLeaf.index):
                if log:
                    print("dominated")
                child.add(newLeaf)
                return
            elif dominates(newLeaf.index, child.index):
                if log:
                    print("dominates")
                dominated.append(child)
                newLeaf.add(child)
        for child in dominated:
            self.children.remove(child)
        self.children.append(newLeaf)
        return

    def merge(self):
        # collapse one tree level so the root's children become the next
        # non-dominated front, merging sibling subtrees pairwise
        if len(self.children) == 1:
            self.children = self.children[0].children
        else:
            if len(self.children) > 1:
                self.children = self.mergeLists([self.children[i:i + 2] for i in range(0, len(self.children), 2)])
                self.merge()

    def mergeLists(self, pairs=[]):
        tmpList = []
        for pair in pairs:
            if len(pair) == 1:
                tmpList.append(pair[0])
            else:
                merged = []
                for child1 in pair[0].children:
                    add = True
                    for child2 in pair[1].children:
                        if dominates(child2.index, child1.index):
                            child2.add(child1)
                            add = False
                            break
                    if add:
                        merged.append(child1)
                for child1 in pair[1].children:
                    add = True
                    for child2 in pair[0].children:
                        if dominates(child2.index, child1.index):
                            child2.add(child1)
                            add = False
                            break
                    if add:
                        merged.append(child1)
                tmpLeaf = Leaf(-2)
                tmpLeaf.children = merged
                tmpList.append(tmpLeaf)
        return tmpList

    def print(self):
        tmp = []
        for child in self.children:
            tmp.append(child.print())
        return {str(self.index): tmp}

    def depth(self):
        if len(self.children) == 0:
            return 1
        tmp = []
        for child in self.children:
            tmp.append(child.depth())
        return max(tmp) + 1

    def size(self):
        tmp = []
        for child in self.children:
            tmp.append(child.size())  # total node count, summing subtree sizes
        return sum(tmp) + 1
start_time = time.time()
def treePareto():
    Root = Leaf(-1)
    for p in range(0, length):
        tmp = Leaf(p)
        Root.add(tmp)
    print("--- First TREE: %s ---" % dom_tests)
    #for child in Root.children:
    #    print("--- %s depth ---" % child.depth())
    #    print("--- %s size ---" % child.size())
    #plt.subplot(122)
    i = 0
    while (len(Root.children) > 0):
        tmp = []
        tmpRoot = Leaf(-1)
        for child in Root.children:
            tmp.append(child.index)
            # for child2 in child.children:
            #     tmpRoot.add(child2)
            # if i <= 9:
            #     plt.plot(xs[child.index]+1, ys[child.index]+1, 'o', color='C'+str(i%10))
        #Root = tmpRoot
        Root.merge()
        #print({i: sorted(tmp)})
        if sorted(tmp) != sorted(F[i]):
            print("ERROR!! on front %s" % i)
            exit()
        i = i + 1
    if (i + 1 != len(F)):
        print("ERROR!! F:{} != TREE:{}".format(len(F), i + 1))
        exit()
    print("--- TREE: %s ---" % dom_tests)
    print("--- %s seconds ---" % (time.time() - start_time))
#cProfile.run("treePareto()")
#treePareto()
i = 0
for x in F:
    #print({i: sorted(x)})
    i = i + 1
#plt.axis([0,2*xmax,0,2*ymax])
#plt.show() | 27.893401 | 162 | 0.470246 | 705 | 5,495 | 3.64539 | 0.160284 | 0.079377 | 0.031128 | 0.027237 | 0.323735 | 0.243969 | 0.209728 | 0.186381 | 0.159922 | 0.159922 | 0 | 0.031397 | 0.3798 | 5,495 | 197 | 163 | 27.893401 | 0.722711 | 0.110464 | 0 | 0.288462 | 0 | 0 | 0.029152 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.057692 | false | 0 | 0.032051 | 0 | 0.147436 | 0.083333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f316f502aec79cdb366e727c8405d8d5a216731f | 643 | py | Python | raccroche/module2/save_mwmoutput.py | Qiaojilim/raccroche_module2 | 0d56d5aa989d5812c8f2e690af6af4335f703603 | [
"BSD-2-Clause"
] | null | null | null | raccroche/module2/save_mwmoutput.py | Qiaojilim/raccroche_module2 | 0d56d5aa989d5812c8f2e690af6af4335f703603 | [
"BSD-2-Clause"
] | null | null | null | raccroche/module2/save_mwmoutput.py | Qiaojilim/raccroche_module2 | 0d56d5aa989d5812c8f2e690af6af4335f703603 | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed May 13 15:16:43 2020
@author: qiaojixu
"""
"""
Module description:
--------------------
Module to save all MWM adjacency output.
"""
def save_simple(WS1, WS2, TreeNode, gf1, gf2, gf1_old, gf2_old, results_dir):
    with open(results_dir + 'InputPyfile/mwmOutput/W' + str(WS1) + TreeNode + '_' + str(gf1_old) + '_' + str(gf2_old) + '.txt') as f:
        with open(results_dir + 'InputPyfile/mwmOutput/W' + str(WS2) + TreeNode + '_' + str(gf1) + '_' + str(gf2) + '.txt', 'a') as f1:
            for line in f:
                f1.write(line)
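# --- Hedged usage sketch (the file names and arguments below are
# hypothetical, not from the original project):
#   save_simple(40, 50, "A1", 3, 7, 2, 5, "results/")
# appends every line of results/InputPyfile/mwmOutput/W40A1_2_5.txt
# onto results/InputPyfile/mwmOutput/W50A1_3_7.txt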
| 24.730769 | 132 | 0.5521 | 85 | 643 | 4.035294 | 0.6 | 0.087464 | 0.087464 | 0.104956 | 0.244898 | 0.244898 | 0.244898 | 0.244898 | 0 | 0 | 0 | 0.058333 | 0.253499 | 643 | 25 | 133 | 25.72 | 0.65625 | 0.152411 | 0 | 0 | 0 | 0 | 0.135011 | 0.105263 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f3171accdb982f5e548856147d91ec883c0eaecf | 3,146 | py | Python | services/pipeline/emission/net/auth/google_auth.py | e-mission/e-mission-ng-aggregator | 0ce43b93192459ac1864b8e88e96b83ea0929aa2 | [
"BSD-3-Clause-Clear",
"BSD-3-Clause"
] | 21 | 2015-02-09T00:35:17.000Z | 2021-12-14T16:41:05.000Z | services/pipeline/emission/net/auth/google_auth.py | e-mission/e-mission-ng-aggregator | 0ce43b93192459ac1864b8e88e96b83ea0929aa2 | [
"BSD-3-Clause-Clear",
"BSD-3-Clause"
] | 672 | 2015-01-29T18:10:56.000Z | 2022-03-24T13:04:51.000Z | services/pipeline/emission/net/auth/google_auth.py | e-mission/e-mission-ng-aggregator | 0ce43b93192459ac1864b8e88e96b83ea0929aa2 | [
"BSD-3-Clause-Clear",
"BSD-3-Clause"
] | 110 | 2015-01-29T18:11:10.000Z | 2022-03-29T17:58:14.000Z | from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import *
from builtins import object
import logging
import json
import traceback
import requests
# For decoding JWTs on the client side
import google.oauth2.id_token as goi
import google.auth.transport.requests as gatr
class GoogleAuthMethod(object):
    def __init__(self):
        key_file = open('conf/net/auth/google_auth.json')
        key_data = json.load(key_file)
        key_file.close()
        self.client_key = key_data["client_key"]
        self.client_key_old = key_data["client_key_old"]
        self.ios_client_key = key_data["ios_client_key"]
        self.ios_client_key_new = key_data["ios_client_key_new"]
        self.valid_keys = [self.client_key, self.client_key_old,
                           self.ios_client_key, self.ios_client_key_new]
    # Code snippet from
    # https://developers.google.com/identity/sign-in/android/backend-auth
    def __verifyTokenFields(self, tokenFields, audienceKey, issKey):
        if audienceKey not in tokenFields:
            raise ValueError("Invalid token %s, does not contain %s" %
                             (tokenFields, audienceKey))
        in_client_key = tokenFields[audienceKey]
        if in_client_key not in self.valid_keys:
            raise ValueError("Incoming client key %s not in valid list %s" %
                             (in_client_key, self.valid_keys))
        if issKey not in tokenFields:
            raise ValueError("Invalid token %s" % tokenFields)
        in_issuer = tokenFields[issKey]
        issuer_valid_list = ['accounts.google.com', 'https://accounts.google.com']
        if in_issuer not in issuer_valid_list:
            raise ValueError('Wrong issuer %s, expected %s' % (in_issuer, issuer_valid_list))
        return tokenFields['email']
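    # --- Illustrative note (values are made up, not from the original file):
    # the decoded token fields checked above look roughly like
    #   {"aud": "<client key>", "iss": "accounts.google.com",
    #    "email": "user@example.com", ...}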
    def verifyUserToken(self, token):
        try:
            # attempt to validate token on the client-side
            logging.debug("Using the google auth library to verify id token of length %d from android phones" % len(token))
            tokenFields = goi.verify_oauth2_token(token, gatr.Request())
            logging.debug("tokenFields from library = %s" % tokenFields)
            verifiedEmail = self.__verifyTokenFields(tokenFields, "aud", "iss")
            logging.debug("Found user email %s" % tokenFields['email'])
            return verifiedEmail
        except:
            logging.debug("OAuth failed to verify id token, falling back to constructedURL")
            # fallback to verifying using Google API
            constructedURL = ("https://www.googleapis.com/oauth2/v1/tokeninfo?id_token=%s" % token)
            r = requests.get(constructedURL)
            tokenFields = json.loads(r.content)
            logging.debug("tokenFields from constructedURL= %s" % tokenFields)
            verifiedEmail = self.__verifyTokenFields(tokenFields, "audience", "issuer")
            logging.debug("Found user email %s" % tokenFields['email'])
            return verifiedEmail
| 43.694444 | 123 | 0.680547 | 383 | 3,146 | 5.362924 | 0.313316 | 0.070107 | 0.035054 | 0.031159 | 0.24148 | 0.228822 | 0.15482 | 0.133398 | 0.06037 | 0.06037 | 0 | 0.001658 | 0.233312 | 3,146 | 71 | 124 | 44.309859 | 0.849917 | 0.06548 | 0 | 0.070175 | 0 | 0 | 0.202864 | 0.010228 | 0 | 0 | 0 | 0 | 0 | 1 | 0.052632 | false | 0 | 0.22807 | 0 | 0.350877 | 0.017544 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f31871509a2a2e434064addfa2e3bf5cd255c848 | 4,011 | py | Python | gateway_testcases/init_gw.py | sandmars/web | f301bce6ecd018709efd6d76167d47cdbdaab21e | [
"CC0-1.0"
] | null | null | null | gateway_testcases/init_gw.py | sandmars/web | f301bce6ecd018709efd6d76167d47cdbdaab21e | [
"CC0-1.0"
] | null | null | null | gateway_testcases/init_gw.py | sandmars/web | f301bce6ecd018709efd6d76167d47cdbdaab21e | [
"CC0-1.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''Configure the gateway environment: DAHDI, SIP/IAX2, and routes'''
from selenium import webdriver
from python_lib import gateway_func
import ConfigParser,sys,time,codecs
def __config_dgw_endpoint_route(config_file, section):
config = ConfigParser.ConfigParser()
# 读取中文参数
with codecs.open(config_file, encoding='utf-8-sig') as f:
config.readfp(f)
#python3: config.read(config_file, encoding='utf-8-sig')
hostname = config.get('gateway', 'hostname')
port = config.getint('gateway', 'web_port')
username = config.get('gateway', 'web_username')
password = config.get('gateway', 'web_password')
baseurl = 'http://%s:%s@%s:%s' % (username, password, hostname, port)
#baseurl = 'http://admin:admin@demo.openvox.cn:65325'
driver = webdriver.Firefox()
driver.set_window_size(1024, 768)
driver.implicitly_wait(5)
driver.get(baseurl)
# 读取配置文件中的dahdi,针对E1网关配置
if config.has_option(section, 'dahdi'):
dahdi = gateway_func.dahdi(driver)
args = config.get(section, 'dahdi').split(';')
dahdi.general(*args)
time.sleep(10)
gw_type = config.get(section, 'gw_type')
# Delete all existing SIP and IAX endpoints
if config.has_option(section, 'gw_sip') or config.has_option(section, 'gw_iax'):
endpoint = gateway_func.endpoint_func(driver, gw_type)
endpoint.delete_all_endpoints()
# Read gw_sip from the config file and configure the gateway's SIP endpoints
if config.has_option(section, 'gw_sip'):
sip = gateway_func.endpoint_func(driver, gw_type)
for gw_sip in config.get(section, 'gw_sip').split(';'):
args = config.get(section, gw_sip).split(';')
# Delete the endpoint with the same name (left disabled)
#sip.delete_same_sip(args[1])
sip.add_sip_endpoint(*args)
if config.has_option(section, 'gw_iax'):
iax = gateway_func.endpoint_func(driver, gw_type)
for gw_iax in config.get(section, 'gw_iax').split(';'):
args = config.get(section, gw_iax).split(';')
# Delete the endpoint with the same name (left disabled)
#iax.delete_same_iax(args[1])
iax.add_iax_endpoint(*args)
if config.has_option(section, 'groups'):
group = gateway_func.route_func(driver, gw_type)
# Delete all existing groups (left disabled)
#group.delete_all_groups()
for groups in config.get(section, 'groups').split(';'):
args = config.get(section, groups).split(';')
# Delete the group with the same name (left disabled)
#group.delete_same_group(args[0])
group.add_group(*args)
if config.has_option(section, 'gw_route'):
route = gateway_func.route_func(driver, gw_type)
# Delete all existing routes
route.delete_all_routes()
for gw_route in config.get(section, 'gw_route').split(';'):
args = config.get(section, gw_route).split(';')
manipulation = args.pop()
if manipulation != '':
manipulation_sec = config.get(section, manipulation).split(';')
count = 0
manipulation_args = ''
for sec in manipulation_sec:
manipulation_args += config.get(section, sec)
if count != (len(manipulation_sec) - 1):
manipulation_args += ':'
count += 1
args.append(manipulation_args)
else:
args.append('')
# Delete the route with the same name (left disabled)
#route.delete_same_route(args[0])
route.add_routing_rule(*args)
if config.has_option(section, 'gw_route_exchange'):
route = gateway_func.route_func(driver, gw_type)
for route_pair in config.get(section, 'gw_route_exchange').split(';'):
pair = config.get(section, route_pair).split(';')
route.exchange_routes(*pair)
driver.quit()
__config_dgw_endpoint_route(sys.argv[1], sys.argv[2])
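# --- Illustrative config layout (an assumption reconstructed from the keys
# read above; real values and section names are site-specific) ---
#
# [gateway]
# hostname = 192.168.1.20
# web_port = 80
# web_username = admin
# web_password = admin
#
# [case_e1]                 (section name is passed as sys.argv[2])
# gw_type = E1
# dahdi = arg1;arg2
# gw_sip = sip_trunk1
# sip_trunk1 = arg1;arg2
# groups = group1
# group1 = arg1;arg2
# gw_route = route1
# route1 = arg1;arg2;manipulation1   (last field names a manipulation section)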
| 38.2 | 142 | 0.615059 | 486 | 4,011 | 4.853909 | 0.230453 | 0.068673 | 0.101738 | 0.102586 | 0.408224 | 0.370496 | 0.236117 | 0.094955 | 0.033913 | 0 | 0 | 0.009983 | 0.25081 | 4,011 | 104 | 143 | 38.567308 | 0.775042 | 0.157816 | 0 | 0.030303 | 0 | 0 | 0.066547 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.015152 | false | 0.030303 | 0.045455 | 0 | 0.060606 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f318ca2c074f8c219ffe514ffcab67f9fbf4241a | 692 | py | Python | driver/code/save_training_data.py | khanshehjad/ND113-Self-Driving-Car | f8007ecf4c68cbf7be01a45d41ed6865751f0ea2 | [
"MIT"
] | null | null | null | driver/code/save_training_data.py | khanshehjad/ND113-Self-Driving-Car | f8007ecf4c68cbf7be01a45d41ed6865751f0ea2 | [
"MIT"
] | null | null | null | driver/code/save_training_data.py | khanshehjad/ND113-Self-Driving-Car | f8007ecf4c68cbf7be01a45d41ed6865751f0ea2 | [
"MIT"
] | null | null | null | import cv2
import sys
from hand_coded_lane_follower import HandCodedLaneFollower
def save_image_and_steering_angle(video_file):
lane_follower = HandCodedLaneFollower()
cap = cv2.VideoCapture(video_file + '.avi')
try:
i = 0
while cap.isOpened():
ret, frame = cap.read()
if not ret:
break
lane_follower.follow_lane(frame)
cv2.imwrite("%s_%03d_%03d.png" % (video_file, i, lane_follower.curr_steering_angle), frame)
i += 1
if cv2.waitKey(1) & 0xFF == ord('q'):
break
finally:
cap.release()
cv2.destroyAllWindows()
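# Illustrative invocation (an assumption based on the argv handling below):
#   python save_training_data.py car_video
# reads "car_video.avi" and writes one image per frame, named like
# "car_video_012_090.png" (zero-padded frame index and steering angle).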
if __name__ == '__main__':
save_image_and_steering_angle(sys.argv[1]) | 27.68 | 103 | 0.625723 | 83 | 692 | 4.843373 | 0.554217 | 0.119403 | 0.059701 | 0.099502 | 0.124378 | 0 | 0 | 0 | 0 | 0 | 0 | 0.027613 | 0.267341 | 692 | 25 | 104 | 27.68 | 0.765286 | 0 | 0 | 0 | 0 | 0 | 0.041847 | 0 | 0 | 0 | 0.005772 | 0 | 0 | 1 | 0.05 | false | 0 | 0.15 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f31c51a220e8384ce1d1fccecca8fc7c7e76f79d | 2,535 | py | Python | bot/bot/tools.py | boulayb/tarkov-helper | f189940598cc61442bbff0ecbaca89aa76570bdf | [
"Unlicense"
] | null | null | null | bot/bot/tools.py | boulayb/tarkov-helper | f189940598cc61442bbff0ecbaca89aa76570bdf | [
"Unlicense"
] | null | null | null | bot/bot/tools.py | boulayb/tarkov-helper | f189940598cc61442bbff0ecbaca89aa76570bdf | [
"Unlicense"
] | null | null | null | # -*- coding: utf-8 -*-
from datetime import datetime
from settings import *
# delta in days between today and a late date
def days_since(late_date):
if late_date != '':
try:
days_since = datetime.today() - late_date
if days_since.days <= 0:
hours_since = int(days_since.total_seconds() / 3600)
if hours_since <= 0:
return 'this hour'
else:
return str(hours_since) + ' hours ago'
elif days_since.days == 1:
return 'yesterday'
else:
return str(days_since.days) + ' days ago'
except Exception as e:
logger.info("Warning: Days since failed for date: " + str(late_date) + " - Reason: " + e)
return ''
# 'YYYYMMDDHHmmss' (a particularly awkward format) to a proper datetime
def convert_date_loot_goblin(date):
try:
year = int(date[0:4])
date_without_year = date[4:]
split = [int(date_without_year[i:i+2]) for i in range(0, len(date_without_year), 2)] # split line every two characters
proper_date = datetime(year, split[0], split[1], split[2], split[3], split[4])
except Exception as e:
logger.info("Warning: Date conversion failed for date: " + str(date) + " - Reason: " + e)
return ''
return proper_date
def convert_date_tarkov_market(date_str):
try:
proper_date = datetime.strptime(date_str, '%Y-%m-%dT%H:%M:%S.%fZ')
except Exception as e:
logger.info("Warning: Date conversion failed for date: " + str(date_str) + " - Reason: " + e)
return ''
return proper_date
# build an embed string from a list of strings
def build_string(string_list, item_url, prefix='', see_more=True):
rest_str = None
embed_str = prefix + ('\n' + prefix).join(string_list) # one string per line
if len(embed_str) > 1024: # one field can only contain a maximum of 1024 characters
if see_more is True:
see_more_str = "\n- See the remainings [here](" + item_url + ")"
last_line = embed_str[:1024-len(see_more_str)].rfind('\n')
rest_str = embed_str[last_line:]
embed_str = embed_str[:last_line]
embed_str += see_more_str
else:
last_line = embed_str[:1024].rfind('\n')
rest_str = embed_str[last_line:]
embed_str = embed_str[:last_line]
result = {"embed_str": embed_str, "rest_str": rest_str}
return result
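# --- Minimal usage sketch (illustrative; assumes the package's ``settings``
# module is importable and any https URL works as ``item_url``) ---
if __name__ == '__main__':
    demo_lines = ['item %02d' % i for i in range(120)]
    chunks = build_string(demo_lines, 'https://example.com/item', prefix='- ')
    print(len(chunks['embed_str']))        # stays within the 1024-char field limit
    print(chunks['rest_str'] is not None)  # True: overflow captured separately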
| 33.8 | 126 | 0.595266 | 347 | 2,535 | 4.146974 | 0.319885 | 0.072272 | 0.04517 | 0.055594 | 0.267547 | 0.23975 | 0.199444 | 0.169562 | 0.169562 | 0.169562 | 0 | 0.019488 | 0.291519 | 2,535 | 74 | 127 | 34.256757 | 0.781737 | 0.115976 | 0 | 0.346154 | 0 | 0 | 0.119176 | 0.009409 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076923 | false | 0 | 0.038462 | 0 | 0.307692 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f31d2e94e5aaef1e4dc0503f5fa86ff2b2966d56 | 1,710 | py | Python | ihna/kozhukhov/imageanalysis/gui/dataprocessing/cutmap.py | serik1987/ihna_kozhuhov_image_analysis | ccfb3b48cbf6b351acb10f8b99315c65281f8ab8 | [
"Unlicense"
] | null | null | null | ihna/kozhukhov/imageanalysis/gui/dataprocessing/cutmap.py | serik1987/ihna_kozhuhov_image_analysis | ccfb3b48cbf6b351acb10f8b99315c65281f8ab8 | [
"Unlicense"
] | null | null | null | ihna/kozhukhov/imageanalysis/gui/dataprocessing/cutmap.py | serik1987/ihna_kozhuhov_image_analysis | ccfb3b48cbf6b351acb10f8b99315c65281f8ab8 | [
"Unlicense"
] | null | null | null | # -*- coding: utf-8
import wx
from ihna.kozhukhov.imageanalysis import ImagingMap
from ihna.kozhukhov.imageanalysis.gui.complexmapviewerdlg import ComplexMapViewerDlg
from .datatodataprocessor import DataToDataProcessor
class CutMap(DataToDataProcessor):
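    """Data-to-data processor that cuts an imaging map to a selected ROI.

    A short summary inferred from the methods below: the user picks one of
    the case's ROIs from a wx.Choice control, the ROI is applied to the input
    ImagingMap's data, and the result is wrapped in a new, non-main
    ImagingMap whose minor name is the chosen output file name.
    """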
__roi_selector = None
def _get_default_minor_name(self):
return "cut"
def _check_input_data(self):
if not isinstance(self._input_data, ImagingMap):
raise ValueError("The processor is available for imaging maps only")
if len(self._considering_case['roi']) == 0:
raise AttributeError("Please, specify at least one ROI using Use Case -> ROI manager facility")
def _place_additional_options(self, parent):
roi_names = [roi.get_name() for roi in self._considering_case['roi']]
sizer = wx.BoxSizer(wx.HORIZONTAL)
caption = wx.StaticText(parent, label="ROI")
sizer.Add(caption, 0, wx.RIGHT | wx.ALIGN_CENTER_VERTICAL, 5)
self.__roi_selector = wx.Choice(parent, choices=roi_names, style=wx.CB_SORT)
self.__roi_selector.SetSelection(0)
sizer.Add(self.__roi_selector, 1, wx.EXPAND)
return sizer
def _process(self):
roi_name = self.__roi_selector.GetStringSelection()
roi = self._considering_case['roi'][roi_name]
features = self._input_data.get_features().copy()
features['minor_name'] = self.get_output_file()
features['original_map'] = self._input_data.get_full_name()
features['is_main'] = 'no'
data = roi.apply(self._input_data.get_data())
self._output_data = ImagingMap(features, data)
def _get_result_viewer(self):
return ComplexMapViewerDlg
| 38.863636 | 107 | 0.69883 | 212 | 1,710 | 5.34434 | 0.45283 | 0.048544 | 0.045896 | 0.058252 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004386 | 0.2 | 1,710 | 43 | 108 | 39.767442 | 0.82383 | 0.009942 | 0 | 0 | 0 | 0 | 0.097575 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.151515 | false | 0 | 0.121212 | 0.060606 | 0.424242 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f31d3f3dc86eded747465b5f8f28839b3cc86877 | 844 | py | Python | RecoTauTag/TauTagTools/python/tauDecayModes_cfi.py | nistefan/cmssw | ea13af97f7f2117a4f590a5e654e06ecd9825a5b | [
"Apache-2.0"
] | 3 | 2018-08-24T19:10:26.000Z | 2019-02-19T11:45:32.000Z | RecoTauTag/TauTagTools/python/tauDecayModes_cfi.py | nistefan/cmssw | ea13af97f7f2117a4f590a5e654e06ecd9825a5b | [
"Apache-2.0"
] | 3 | 2018-08-23T13:40:24.000Z | 2019-12-05T21:16:03.000Z | RecoTauTag/TauTagTools/python/tauDecayModes_cfi.py | nistefan/cmssw | ea13af97f7f2117a4f590a5e654e06ecd9825a5b | [
"Apache-2.0"
] | 5 | 2018-08-21T16:37:52.000Z | 2020-01-09T13:33:17.000Z | import FWCore.ParameterSet.Config as cms
#--------------------------------------------------------------------------------
# define tau lepton hadronic decay modes
#
# NOTE: the values defined in the following need to match
# the 'hadronicDecayMode' enum defined in DataFormats/TauReco/interface/PFTau.h
#
#--------------------------------------------------------------------------------
tauToOneProng0PiZero = 0
tauToOneProng1PiZero = 1
tauToOneProng2PiZero = 2
tauToOneProng3PiZero = 3
tauToOneProngNPiZero = 4
tauToTwoProng0PiZero = 5
tauToTwoProng1PiZero = 6
tauToTwoProng2PiZero = 7
tauToTwoProng3PiZero = 8
tauToTwoProngNPiZero = 9
tauToThreeProng0PiZero = 10
tauToThreeProng1PiZero = 11
tauToThreeProng2PiZero = 12
tauToThreeProng3PiZero = 13
tauToThreeProngNPiZero = 14
tauToRareDecayMode = 15
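# Naming sketch (an interpretation of the enum names, not stated in this
# file): "<N>Prong" counts charged hadrons and "<M>PiZero" counts neutral
# pions, e.g. tauToOneProng1PiZero is one charged hadron plus one pi0; the
# "...NPiZero" entries cover decays with more pi0s than explicitly listed.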
| 31.259259 | 85 | 0.626777 | 64 | 844 | 8.265625 | 0.9375 | 0.034026 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.047157 | 0.145735 | 844 | 26 | 86 | 32.461538 | 0.686546 | 0.401659 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.058824 | 0 | 0.058824 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f31d439d624c4701aded2be1db42a6ffb1a2ae91 | 8,857 | py | Python | ehelply_python_sdk/services/access/auth_rules.py | eHelply/Python-eHelply-SDK | b46f4408b25d85e2f869fa37cf882e20139b1beb | [
"Apache-2.0"
] | null | null | null | ehelply_python_sdk/services/access/auth_rules.py | eHelply/Python-eHelply-SDK | b46f4408b25d85e2f869fa37cf882e20139b1beb | [
"Apache-2.0"
] | null | null | null | ehelply_python_sdk/services/access/auth_rules.py | eHelply/Python-eHelply-SDK | b46f4408b25d85e2f869fa37cf882e20139b1beb | [
"Apache-2.0"
] | null | null | null | from __future__ import annotations
from typing import List, Tuple, Callable
from ehelply_python_sdk.services.access.sdk import AuthModel
import asyncio
class AuthException(Exception):
pass
class AuthRule:
"""
Provides a nice interface into developing authorization rules for endpoints
"""
# Global config of whether to exception if unauthorized. Useful for development
exception_if_unauthorized: bool = True
# The exception to throw when auth fails
exception_to_throw: Exception = AuthException
# Global config of whether to override auth rules. Essentially, bypass authorization. Useful for development
override: bool = False
def __init__(
self,
auth_model: AuthModel,
*rules,
exception_if_unauthorized: bool = None,
exception_to_throw: Exception = None,
override: bool = None
):
if exception_if_unauthorized is None:
exception_if_unauthorized = AuthRule.exception_if_unauthorized
self.local_exception_if_unauthorized: bool = exception_if_unauthorized
if exception_to_throw is None:
exception_to_throw = AuthRule.exception_to_throw
self.local_exception_to_throw: Exception = exception_to_throw
if override is None:
override = AuthRule.override
self.local_override: bool = override
self.rules: List[AuthRule] = list(rules)
self.handlers: List[Tuple[Callable, dict]] = []
self.auth_model: AuthModel = auth_model
async def verify(self) -> bool:
"""
Verifies that each chained handler passes using a logical AND.
If nested AuthRule instances were passed in, those are evaluated first as
a logical OR (any single passing rule is sufficient).
Returns:
    bool: True if authorized; False otherwise (or the configured
    exception is raised)
"""
rules_passed: bool = False
for rule in self.rules:
try:
result: bool = await rule.verify()
if result:
rules_passed = True
break
except:
pass
if not rules_passed and len(self.rules) != 0:
if self.local_exception_if_unauthorized:
raise self.local_exception_to_throw
else:
return False
async_handlers: list = []
for handler in self.handlers:
try:
async_handlers.append(asyncio.create_task(handler[0](**handler[1])))
except:
if self.local_exception_if_unauthorized:
raise self.local_exception_to_throw
else:
return False
async_results = await asyncio.gather(*async_handlers)
for result in async_results:
try:
if not result:
if self.local_exception_if_unauthorized:
raise self.local_exception_to_throw
else:
return False
except:
if self.local_exception_if_unauthorized:
raise self.local_exception_to_throw
else:
return False
return True
async def __handler_entity_identifier_eq(self, entity_identifier: str) -> bool:
return self.auth_model.entity_identifier == entity_identifier
def entity_identifier_eq(self, entity_identifier: str) -> AuthRule:
self.handlers.append((
self.__handler_entity_identifier_eq,
{
"entity_identifier": entity_identifier
}
))
return self  # keep the fluent (chainable) interface consistent
async def __handler_entity_identifier_neq(self, entity_identifier: str) -> bool:
return self.auth_model.entity_identifier != entity_identifier
def entity_identifier_neq(self, entity_identifier: str) -> AuthRule:
self.handlers.append((
self.__handler_entity_identifier_neq,
{
"entity_identifier": entity_identifier
}
))
return self
async def __handler_entity_has_node_on_target(self, node: str, target_identifier: str, partition: str) -> bool:
return await self.auth_model.access_sdk.is_allowed(
auth_model=self.auth_model,
target_identifier=target_identifier,
node=node,
partition=partition
)
def entity_has_node_on_target(self, node: str, target_identifier: str, partition: str = None) -> AuthRule:
self.handlers.append((
self.__handler_entity_has_node_on_target,
{
"node": node,
"target_identifier": target_identifier,
"partition": partition
}
))
return self
async def __handler_has_entity(self) -> bool:
return self.auth_model.entity_identifier is not None
def has_entity(self) -> AuthRule:
self.handlers.append((
self.__handler_has_entity,
{}
))
return self
async def __handler_has_participant(self) -> bool:
return self.auth_model.active_participant_uuid is not None
def has_participant(self) -> AuthRule:
self.handlers.append((
self.__handler_has_participant,
{}
))
return self
async def __handler_participant_has_node_on_target(self, node: str, target_identifier: str, partition: str) -> bool:
temp_model: AuthModel = AuthModel(
access_sdk=self.auth_model.access_sdk,
active_participant_uuid=self.auth_model.active_participant_uuid,
entity_identifier=self.auth_model.active_participant_uuid,
project_uuid=self.auth_model.project_uuid,
access_token=self.auth_model.access_token,
secret_token=self.auth_model.secret_token,
claims=self.auth_model.claims,
)
return await self.auth_model.access_sdk.is_allowed(
auth_model=temp_model,
target_identifier=target_identifier,
node=node,
partition=partition
)
def participant_has_node_on_target(self, node: str, target_identifier: str, partition: str = None) -> AuthRule:
self.handlers.append((
self.__handler_participant_has_node_on_target,
{
"node": node,
"target_identifier": target_identifier,
"partition": partition
}
))
return self
async def __handler_participant_below_limit(self) -> bool:
return True
def participant_below_limit(self, limit: str) -> AuthRule:
self.handlers.append((
self.__handler_participant_below_limit,
{
"limit": limit
}
))
return self
async def __handler_customentity_has_node_on_target(
self,
node: str,
target_identifier: str,
partition: str,
entity_identifier: str
) -> bool:
temp_model: AuthModel = AuthModel(
access_sdk=self.auth_model.access_sdk,
active_participant_uuid=self.auth_model.active_participant_uuid,
entity_identifier=entity_identifier,
project_uuid=self.auth_model.project_uuid,
access_token=self.auth_model.access_token,
secret_token=self.auth_model.secret_token,
claims=self.auth_model.claims,
)
return await self.auth_model.access_sdk.is_allowed(
auth_model=temp_model,
target_identifier=target_identifier,
node=node,
partition=partition
)
def customentity_has_node_on_target(
self,
node: str,
target_identifier: str,
partition: str,
entity_identifier: str
) -> AuthRule:
self.handlers.append((
self.__handler_customentity_has_node_on_target,
{
"node": node,
"target_identifier": target_identifier,
"partition": partition,
"entity_identifier": entity_identifier
}
))
return self
async def __handler_project_uuid_eq(self, project_uuid: str) -> bool:
return self.auth_model.project_uuid == project_uuid
def project_uuid_eq(self, project_uuid: str) -> AuthRule:
self.handlers.append((
self.__handler_project_uuid_eq,
{
"project_uuid": project_uuid
}
))
return self
async def __handler_project_uuid_neq(self, project_uuid: str) -> bool:
return self.auth_model.project_uuid != project_uuid
def project_uuid_neq(self, project_uuid: str) -> AuthRule:
self.handlers.append((
self.__handler_project_uuid_neq,
{
"project_uuid": project_uuid
}
))
return self
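# --- Composition sketch (illustrative; auth_model construction is omitted
# and the node/target names are placeholders) ---
# Handlers chained on a single AuthRule are AND-ed together, while AuthRule
# instances passed positionally are OR-ed, so the check below passes when the
# entity holds the node on the target *or* an active participant is present:
#
#   rule = AuthRule(
#       auth_model,
#       AuthRule(auth_model).entity_has_node_on_target(
#           node='products.read', target_identifier='store-123'),
#       AuthRule(auth_model).has_participant(),
#   )
#   authorized = await rule.verify()  # may raise AuthException instead of
#                                     # returning False, per the class flags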
| 32.925651 | 122 | 0.608332 | 928 | 8,857 | 5.457974 | 0.132543 | 0.05153 | 0.064166 | 0.043435 | 0.670089 | 0.629418 | 0.594274 | 0.553603 | 0.516486 | 0.516486 | 0 | 0.000502 | 0.325505 | 8,857 | 268 | 123 | 33.048507 | 0.847338 | 0.033872 | 0 | 0.5 | 0 | 0 | 0.020477 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.051887 | false | 0.023585 | 0.018868 | 0 | 0.193396 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f31e33c292a6ef177019c192974bea10474523a6 | 8,385 | py | Python | lldb/packages/Python/lldbsuite/test/test_runner/test/test_process_control.py | dan-zheng/llvm-project | 6b792850da0345274758c9260fda5df5e57ab486 | [
"Apache-2.0"
] | 765 | 2015-12-03T16:44:59.000Z | 2022-03-07T12:41:10.000Z | lldb/packages/Python/lldbsuite/test/test_runner/test/test_process_control.py | dan-zheng/llvm-project | 6b792850da0345274758c9260fda5df5e57ab486 | [
"Apache-2.0"
] | 1,815 | 2015-12-11T23:56:05.000Z | 2020-01-10T19:28:43.000Z | lldb/packages/Python/lldbsuite/test/test_runner/test/test_process_control.py | dan-zheng/llvm-project | 6b792850da0345274758c9260fda5df5e57ab486 | [
"Apache-2.0"
] | 284 | 2015-12-03T16:47:25.000Z | 2022-03-12T05:39:48.000Z | #!/usr/bin/env python
"""
Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
See https://llvm.org/LICENSE.txt for license information.
SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
Provides classes used by the test results reporting infrastructure
within the LLDB test suite.
Tests the process_control module.
"""
from __future__ import print_function
# System imports.
import os
import os.path
import unittest
import sys
import threading
# Our imports.
from test_runner import process_control
class TestInferiorDriver(process_control.ProcessDriver):
def __init__(self, soft_terminate_timeout=None):
super(TestInferiorDriver, self).__init__(
soft_terminate_timeout=soft_terminate_timeout)
self.started_event = threading.Event()
self.started_event.clear()
self.completed_event = threading.Event()
self.completed_event.clear()
self.was_timeout = False
self.returncode = None
self.output = None
def write(self, content):
# We'll swallow this to keep tests non-noisy.
# Uncomment the following line if you want to see it.
# sys.stdout.write(content)
pass
def on_process_started(self):
self.started_event.set()
def on_process_exited(self, command, output, was_timeout, exit_status):
self.returncode = exit_status
self.was_timeout = was_timeout
self.output = output
self.returncode = exit_status
self.completed_event.set()
class ProcessControlTests(unittest.TestCase):
@classmethod
def _suppress_soft_terminate(cls, command):
# Do the right thing for your platform here.
# Right now only POSIX-y systems are reporting
# soft terminate support, so this is set up for
# those.
helper = process_control.ProcessHelper.process_helper()
signals = helper.soft_terminate_signals()
if signals is not None:
for signum in helper.soft_terminate_signals():
command.extend(["--ignore-signal", str(signum)])
@classmethod
def inferior_command(
cls,
ignore_soft_terminate=False,
options=None):
# Base command.
script_name = "{}/inferior.py".format(os.path.dirname(__file__))
if not os.path.exists(script_name):
raise Exception(
"test inferior python script not found: {}".format(script_name))
command = ([sys.executable, script_name])
if ignore_soft_terminate:
cls._suppress_soft_terminate(command)
# Handle options as string or list.
if isinstance(options, str):
command.extend(options.split())
elif isinstance(options, list):
command.extend(options)
# Return full command.
return command
class ProcessControlNoTimeoutTests(ProcessControlTests):
"""Tests the process_control module."""
def test_run_completes(self):
"""Test that running completes and gets expected stdout/stderr."""
driver = TestInferiorDriver()
driver.run_command(self.inferior_command())
self.assertTrue(
driver.completed_event.wait(5), "process failed to complete")
self.assertEqual(driver.returncode, 0, "return code does not match")
def test_run_completes_with_code(self):
"""Test that running completes and gets expected stdout/stderr."""
driver = TestInferiorDriver()
driver.run_command(self.inferior_command(options="-r10"))
self.assertTrue(
driver.completed_event.wait(5), "process failed to complete")
self.assertEqual(driver.returncode, 10, "return code does not match")
class ProcessControlTimeoutTests(ProcessControlTests):
def test_run_completes(self):
"""Test that running completes and gets expected return code."""
driver = TestInferiorDriver()
timeout_seconds = 5
driver.run_command_with_timeout(
self.inferior_command(),
"{}s".format(timeout_seconds),
False)
self.assertTrue(
driver.completed_event.wait(2 * timeout_seconds),
"process failed to complete")
self.assertEqual(driver.returncode, 0)
def _soft_terminate_works(self, with_core):
# Skip this test if the platform doesn't support soft terminate.
helper = process_control.ProcessHelper.process_helper()
if not helper.supports_soft_terminate():
self.skipTest("soft terminate not supported by platform")
driver = TestInferiorDriver()
timeout_seconds = 5
driver.run_command_with_timeout(
# Sleep twice as long as the timeout interval. This
# should force a timeout.
self.inferior_command(
options="--sleep {}".format(timeout_seconds * 2)),
"{}s".format(timeout_seconds),
with_core)
# We should complete, albeit with a timeout.
self.assertTrue(
driver.completed_event.wait(2 * timeout_seconds),
"process failed to complete")
# Ensure we received a timeout.
self.assertTrue(driver.was_timeout, "expected to end with a timeout")
self.assertTrue(
helper.was_soft_terminate(driver.returncode, with_core),
("timeout didn't return expected returncode "
"for soft terminate with core: {}").format(driver.returncode))
def test_soft_terminate_works_core(self):
"""Driver uses soft terminate (with core request) when process times out.
"""
self._soft_terminate_works(True)
def test_soft_terminate_works_no_core(self):
"""Driver uses soft terminate (no core request) when process times out.
"""
self._soft_terminate_works(False)
def test_hard_terminate_works(self):
"""Driver falls back to hard terminate when soft terminate is ignored.
"""
driver = TestInferiorDriver(soft_terminate_timeout=2.0)
timeout_seconds = 1
driver.run_command_with_timeout(
# Sleep much longer than the timeout interval, forcing a
# timeout. Do whatever is needed to have the inferior
# ignore soft terminate calls.
self.inferior_command(
ignore_soft_terminate=True,
options="--never-return"),
"{}s".format(timeout_seconds),
True)
# We should complete, albeit with a timeout.
self.assertTrue(
driver.completed_event.wait(60),
"process failed to complete")
# Ensure we received a timeout.
self.assertTrue(driver.was_timeout, "expected to end with a timeout")
helper = process_control.ProcessHelper.process_helper()
self.assertTrue(
helper.was_hard_terminate(driver.returncode),
("timeout didn't return expected returncode "
"for hard teriminate: {} ({})").format(
driver.returncode,
driver.output))
def test_inferior_exits_with_live_child_shared_handles(self):
"""inferior exit detected when inferior children are live with shared
stdout/stderr handles.
"""
# Requires review D13362 or equivalent to be implemented.
self.skipTest("http://reviews.llvm.org/D13362")
driver = TestInferiorDriver()
# Create the inferior (I1), and instruct it to create a child (C1)
# that shares the stdout/stderr handles with the inferior.
# C1 will then loop forever.
driver.run_command_with_timeout(
self.inferior_command(
options="--launch-child-share-handles --return-code 3"),
"5s",
False)
# We should complete without a timeout. I1 should end
# immediately after launching C1.
self.assertTrue(
driver.completed_event.wait(5),
"process failed to complete")
# Ensure we didn't receive a timeout.
self.assertFalse(
driver.was_timeout, "inferior should have completed normally")
self.assertEqual(
driver.returncode, 3,
"expected inferior process to end with expected returncode")
if __name__ == "__main__":
unittest.main()
| 34.64876 | 81 | 0.649135 | 953 | 8,385 | 5.5383 | 0.271773 | 0.061576 | 0.030315 | 0.032967 | 0.362448 | 0.313187 | 0.266768 | 0.251989 | 0.239674 | 0.233422 | 0 | 0.006678 | 0.26774 | 8,385 | 241 | 82 | 34.792531 | 0.852932 | 0.236613 | 0 | 0.345324 | 0 | 0 | 0.117171 | 0.00444 | 0 | 0 | 0 | 0 | 0.107914 | 1 | 0.100719 | false | 0.007194 | 0.05036 | 0 | 0.18705 | 0.007194 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f31f43fbc4066fb4626b24686a0e0be76a434d3d | 946 | py | Python | lib/watchopticalanalysis/tests/unit/conftest.py | davehadley/watchoptical | a3a999b1318b021a319497c6c23624051a1b1cb3 | [
"MIT"
] | null | null | null | lib/watchopticalanalysis/tests/unit/conftest.py | davehadley/watchoptical | a3a999b1318b021a319497c6c23624051a1b1cb3 | [
"MIT"
] | null | null | null | lib/watchopticalanalysis/tests/unit/conftest.py | davehadley/watchoptical | a3a999b1318b021a319497c6c23624051a1b1cb3 | [
"MIT"
] | null | null | null | import os
import subprocess
import tempfile
from pathlib import Path
import pytest
from watchopticalmc import AnalysisDataset
from watchopticalutils.client import ClientType, client
@pytest.fixture()
def signaldatasetfixture() -> AnalysisDataset:
with client(ClientType.SINGLE):
dirname = (
f"{tempfile.gettempdir()}"
f"{os.sep}wm{os.sep}tmp{os.sep}"
"tmp_watchoptical_unittest_signaldataset_2"
)
if not os.path.exists(dirname):
subprocess.run(
[
"python",
"-m",
"watchopticalmc",
"--signal-only",
"--num-events-per-job=20",
"--num-jobs=1",
"--client=local",
f"--directory={dirname}",
]
)
return AnalysisDataset.load(Path(dirname) / "analysisdataset.pickle")
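# --- Illustrative consumer (a sketch; the test body is an assumption) ---
# def test_signal_dataset_loads(signaldatasetfixture: AnalysisDataset) -> None:
#     assert signaldatasetfixture is not None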
| 28.666667 | 73 | 0.530655 | 82 | 946 | 6.073171 | 0.585366 | 0.03012 | 0.032129 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006612 | 0.360465 | 946 | 32 | 74 | 29.5625 | 0.816529 | 0 | 0 | 0 | 0 | 0 | 0.232558 | 0.168076 | 0 | 0 | 0 | 0 | 0 | 1 | 0.034483 | false | 0 | 0.241379 | 0 | 0.310345 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f31f80d02cfe920cbb047c39c8637f95cdea3f33 | 4,425 | py | Python | KI4RoboFleetUI.py | keim-hs-esslingen/ki4robofleet | 1ff1df5d53ab80c0dcd7b84d87c2df0071e0bf9f | [
"MIT"
] | 4 | 2021-07-06T03:55:25.000Z | 2022-03-27T17:05:59.000Z | KI4RoboFleetUI.py | keim-hs-esslingen/ki4robofleet | 1ff1df5d53ab80c0dcd7b84d87c2df0071e0bf9f | [
"MIT"
] | null | null | null | KI4RoboFleetUI.py | keim-hs-esslingen/ki4robofleet | 1ff1df5d53ab80c0dcd7b84d87c2df0071e0bf9f | [
"MIT"
] | 1 | 2022-02-23T11:53:05.000Z | 2022-02-23T11:53:05.000Z | #!/usr/bin/env python3
# =============================================================================
# Created at Hochschule Esslingen - University of Applied Sciences
# Department: Anwendungszentrum KEIM
# Contact: emanuel.reichsoellner@hs-esslingen.de
# Date: April 2021
# License: MIT License
# =============================================================================
# This Script is the Entrypoint for the KI4RoboFleet SUMO Simulation to analyze
# customized Scenarios for cities with autonomous driving cars
# =============================================================================
import sys
import os
sys.path.append("SumoOsmPoiTools")
from PyQt5 import QtWidgets
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5.QtCore import *
from SumoOsmPoiTools.SumoOsmPoiTools import PoiToolMainWindow
from SumoOsmPoiTools.ScenarioBuilder import ScenarioBuilderWindow
from SimulationToolsUI.SimulationInputUI import SimulationInputWindow
from SimulationToolsUI.ResultsViewerUI import ResultsViewerWindow
class KI4RoboFleetUI(QMainWindow):
def __init__(self):
super().__init__()
self.setWindowTitle("KI4ROBOFLEET User Interface v0.1")
self.setGeometry(100, 100, 500, 370)
self.uiInit()
self.show()
def uiInit(self):
self.buttonCreateList = QPushButton("SUMO Model Tools", self)
self.buttonCreateList.resize(300, 40)
self.buttonCreateList.move(100, 50)
self.buttonCreateList.clicked.connect(self.osmPoiTools)
self.buttonCreateList = QPushButton("Scenario Builder", self)
self.buttonCreateList.resize(300, 40)
self.buttonCreateList.move(100, 120)
self.buttonCreateList.clicked.connect(self.scenarioBuilder)
self.buttonCreateList = QPushButton("Simulation Settings", self)
self.buttonCreateList.resize(300, 40)
self.buttonCreateList.move(100, 190)
self.buttonCreateList.clicked.connect(self.simulation)
self.buttonCreateList = QPushButton("Results Viewer", self)
self.buttonCreateList.resize(300, 40)
self.buttonCreateList.move(100, 260)
self.buttonCreateList.clicked.connect(self.results)
def osmPoiTools(self):
print("SUMO Model Tools")
if "SumoOsmPoiTools" not in os.getcwd():
os.chdir("./SumoOsmPoiTools")
self.poiToolMainWindow = PoiToolMainWindow()
self.poiToolMainWindow.show()
def scenarioBuilder(self):
print("Scenario Builder")
if "SumoOsmPoiTools" not in os.getcwd():
os.chdir("./SumoOsmPoiTools")
# Use a distinct attribute name so the scenarioBuilder method is not shadowed
self.scenarioBuilderWindow = ScenarioBuilderWindow()
self.scenarioBuilderWindow.show()
def simulation(self):
print("Simulation Settings")
if "SumoOsmPoiTools" in os.getcwd():
os.chdir("../")
self.simulationInputWindow = SimulationInputWindow()
self.simulationInputWindow.show()
def results(self):
print("Results Viewer")
self.resultsViewerWindow = ResultsViewerWindow()
self.resultsViewerWindow.show()
if __name__ == "__main__":
print(
" __ __ ______ __ __ ____ __ ___ ___ __ "
)
print(
"/\\ \\/\\ \\ /\\__ _\\ /\\ \\\\ \\ /\\ _`\\ /\\ \\ /'___\\ /\\_ \\ /\\ \\__ "
)
print(
"\\ \\ \\/'/' \\/_/\\ \\/ \\ \\ \\\\ \\ \\ \\ \\L\\ \\ ___ \\ \\ \\____ ___ /\\ \\__/ \\//\\ \\ __ __ \\ \\ ,_\\ "
)
print(
" \\ \\ , < \\ \\ \\ \\ \\ \\\\ \\_ \\ \\ , / / __`\\ \\ \\ '__`\\ / __`\\ \\ \\ ,__\\ \\ \\ \\ /'__`\\ /'__`\\ \\ \\ \\/ "
)
print(
" \\ \\ \\\\`\\ \\_\\ \\__ \\ \\__ ,__\\ \\ \\ \\\\ \\ /\\ \\L\\ \\ \\ \\ \\L\\ \\/\\ \\L\\ \\ \\ \\ \\_/ \\_\\ \\_ /\\ __/ /\\ __/ \\ \\ \\_ "
)
print(
" \\ \\_\\ \\_\\ /\\_____\\ \\/_/\\_\\_/ \\ \\_\\ \\_\\\\ \\____/ \\ \\_,__/\\ \\____/ \\ \\_\\ /\\____\\\\ \\____\\\\ \\____\\ \\ \\__\\"
)
print(
" \\/_/\\/_/ \\/_____/ \\/_/ \\/_/\\/ / \\/___/ \\/___/ \\/___/ \\/_/ \\/____/ \\/____/ \\/____/ \\/__/"
)
app = QApplication(sys.argv)
kI4RoboFleetUI = KI4RoboFleetUI()
kI4RoboFleetUI.show()
sys.exit(app.exec_())
| 38.815789 | 168 | 0.524972 | 308 | 4,425 | 6.983766 | 0.37013 | 0.148768 | 0.055788 | 0.055788 | 0.245932 | 0.167364 | 0.167364 | 0.167364 | 0.167364 | 0.167364 | 0 | 0.021951 | 0.258757 | 4,425 | 113 | 169 | 39.159292 | 0.633841 | 0.130847 | 0 | 0.180723 | 0 | 0.072289 | 0.32751 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.072289 | false | 0 | 0.120482 | 0 | 0.204819 | 0.13253 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f31ffb883c91f65fcda8270d1d2671fcfc3684a9 | 1,069 | py | Python | code/cloudmanager/region_mapping.py | Hybrid-Cloud/cloud_manager | 5f4087ef8753dcb4f542e930b5d8642fe5591c1a | [
"Apache-2.0"
] | null | null | null | code/cloudmanager/region_mapping.py | Hybrid-Cloud/cloud_manager | 5f4087ef8753dcb4f542e930b5d8642fe5591c1a | [
"Apache-2.0"
] | 3 | 2016-03-16T03:26:44.000Z | 2016-03-16T03:46:22.000Z | code/cloudmanager/region_mapping.py | Hybrid-Cloud/orchard | 5f4087ef8753dcb4f542e930b5d8642fe5591c1a | [
"Apache-2.0"
] | null | null | null | # -*- coding:utf-8 -*-
__author__ = 'q00222219@huawei'
class RegionMapException(Exception):
pass
__REGION_MAP = {"tokyo": "ap-northeast-1",
"singapore": "ap-southeast-1",
"sydney": "ap-southeast-2",
"ireland": "eu-west-1",
"sao-paulo": "sa-east-1",
"virginia": "us-east-1",
"california": "us-west-1",
"oregon": "us-west-2",
"frankfurt": "eu-central-1"}
def get_region_name_list():
return __REGION_MAP.keys()
def get_region_id(region_name):
if region_name in __REGION_MAP.keys():
return __REGION_MAP[region_name]
raise RegionMapException("get region id, region name: %s, no such region"
% region_name)
def get_region_name(region_id):
for region_name in __REGION_MAP.keys():
if region_id == __REGION_MAP[region_name]:
return region_name
raise RegionMapException("get region name, region id: %s, no such region"
% region_id)
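if __name__ == '__main__':
    # Minimal self-check of both lookup directions (illustrative only).
    assert get_region_id('tokyo') == 'ap-northeast-1'
    assert get_region_name('us-west-2') == 'oregon'
    print(sorted(get_region_name_list()))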
| 29.694444 | 77 | 0.568756 | 125 | 1,069 | 4.552 | 0.376 | 0.193322 | 0.063269 | 0.056239 | 0.418278 | 0.235501 | 0 | 0 | 0 | 0 | 0 | 0.024194 | 0.304022 | 1,069 | 35 | 78 | 30.542857 | 0.740591 | 0.018709 | 0 | 0 | 0 | 0 | 0.26361 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.12 | false | 0.04 | 0 | 0.04 | 0.28 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f32245c924591a72b6c6640d549ed08d89605897 | 2,042 | py | Python | pixel-f/pixelf_c_utils.py | Ilu-Vatar/pixel | 701f3e74f3f118078dc066e0e802036f2475e3a7 | [
"MIT"
] | 2 | 2021-01-15T14:44:00.000Z | 2021-06-08T20:32:34.000Z | pixel-f/pixelf_c_utils.py | Nicolas-Reyland/pixel | 701f3e74f3f118078dc066e0e802036f2475e3a7 | [
"MIT"
] | null | null | null | pixel-f/pixelf_c_utils.py | Nicolas-Reyland/pixel | 701f3e74f3f118078dc066e0e802036f2475e3a7 | [
"MIT"
] | null | null | null | # Pixel Project C code implementation usage utils
import os
from platform import uname
SRC_CODE_FILE = os.path.join(os.path.abspath(__file__).replace(os.path.basename(__file__), ''), 'pixelf.c')
WORK_DIR = os.path.dirname(os.path.abspath(__file__))
DEFINE_START_LINE = 11
os_name = uname()[0]
if os_name == 'Linux':
compiler = 'gcc'
run_prefix = './'
elif os_name == 'Windows':
compiler = 'cl'
run_prefix = ''
else:
raise NotImplementedError('Only Windows/Linux are supported at the time')
def change_src_code(src_code_file, params):
src_code = open(src_code_file, 'r')
lines = src_code.readlines()
src_code.close()
# all the #define
for i in range(DEFINE_START_LINE,DEFINE_START_LINE + 4):
line = lines[i]
line = line[8:-1]
name = line[:line.index(' ')]
newvalue = params[name]
lines[i] = '#define {} {}\n'.format(name, newvalue)
# static const int
newvalue = '{' + ', '.join(map(str, params['TESTRGB'])) + '}'
lines[DEFINE_START_LINE + 4] = 'static const int TESTRGB[] = {};\n'.format(newvalue)
# create path var for new src code file
src_code_file_basename = os.path.basename(src_code_file)
src_code_dir = os.path.dirname(src_code_file)
new_src_code_file = os.path.join(src_code_dir, '_tmp_' + src_code_file_basename)
# write the new code to it
new_src_code = open(new_src_code_file, 'w')
new_src_code.writelines(lines)
new_src_code.close()
return new_src_code_file
def compile_c(src_code_file, executable_file, options=''):
result = os.system('{} {} -o {} {}'.format(compiler, src_code_file, executable_file, options))
return result
def run_exec(executable_file, options=''):
if os_name == 'Linux' and executable_file[0] == '/':
result = os.system('/./' + executable_file[1:] + options)
else:
result = os.system(run_prefix + executable_file + options)
return result
if __name__ == '__main__':
change_src_code(SRC_CODE_FILE, {
'SENSITIVITY': 45,
'SRC_IMG_PATH': '"SRC IMG.dat"',
'BG_IMG_PATH': '"BG IMG.dat"',
'RESULT_IMG_PATH': '"RESULT IMG.dat"',
'TESTRGB': [255,255,255]
})
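    # End-to-end sketch (illustrative; the output path is a placeholder, and
    # change_src_code returns the path of the temporary source file):
    #   tmp_src = change_src_code(SRC_CODE_FILE, params)
    #   exe = os.path.join(WORK_DIR, 'pixelf_tmp')
    #   if compile_c(tmp_src, exe) == 0:
    #       run_exec(exe)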
| 30.939394 | 107 | 0.70617 | 311 | 2,042 | 4.315113 | 0.318328 | 0.125186 | 0.114754 | 0.041729 | 0.175112 | 0.114754 | 0 | 0 | 0 | 0 | 0 | 0.01137 | 0.13859 | 2,042 | 65 | 108 | 31.415385 | 0.751563 | 0.06954 | 0 | 0.08 | 0 | 0 | 0.141046 | 0 | 0.02 | 0 | 0 | 0 | 0 | 1 | 0.06 | false | 0 | 0.04 | 0 | 0.16 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f322922ce37d13b4fe0c914e94d7a409cb6351bf | 41,673 | py | Python | smc/core/route.py | gabstopper/smc-python | 54386c8a710727cc1acf69334a57b155d2f5408c | [
"Apache-2.0"
] | 30 | 2016-09-01T03:48:04.000Z | 2021-06-23T19:44:05.000Z | smc/core/route.py | ad1rie1/smc-python | 51788783ce14dbeb4e5c90d7f5bba5d9720dcc0e | [
"Apache-2.0"
] | 75 | 2017-04-22T10:56:37.000Z | 2021-12-01T05:13:08.000Z | smc/core/route.py | ad1rie1/smc-python | 51788783ce14dbeb4e5c90d7f5bba5d9720dcc0e | [
"Apache-2.0"
] | 16 | 2016-05-09T16:56:00.000Z | 2020-07-06T11:11:46.000Z | """
Route module encapsulates functions related to static routing and
related configurations on NGFW.
When retrieving routing, it is done from the engine context.
For example, retrieve all routing for an engine in context::
>>> engine = Engine('sg_vm')
>>> for route_node in engine.routing:
... print(route_node)
...
Routing(name=Interface 0,level=interface)
Routing(name=Interface 1,level=interface)
Routing(name=Interface 2,level=interface)
Routing(name=Tunnel Interface 2000,level=interface)
Routing(name=Tunnel Interface 2001,level=interface)
Routing nodes are nested, starting with the engine level.
Routing node nesting is made up of 'levels' and can be
represented as a tree::
engine (root)
|
--> interface
|
--> network
|
--> gateway
|
--> any
You can get a representation of the routing or antispoofing tree nodes
by calling as_tree::
>>> print(engine.routing.as_tree())
Routing(name=myfw,level=engine_cluster)
--Routing(name=Interface 0,level=interface)
----Routing(name=network-1.1.1.0/24,level=network)
------Routing(name=mypeering,level=gateway)
------Routing(name=mynetlink,level=gateway)
--------Routing(name=router-1.1.1.1,level=any)
------Routing(name=mystatic,level=gateway)
--Routing(name=Interface 1,level=interface)
----Routing(name=network-10.10.10.0/24,level=network)
------Routing(name=anotherpeering,level=gateway)
--Routing(name=Tunnel Interface 1000,level=interface)
----Routing(name=network-2.2.2.0/24,level=network)
--Routing(name=Tunnel Interface 1001,level=interface)
--Routing(name=Interface 2,level=interface)
----Routing(name=Network (IPv4),level=network)
------Routing(name=dynamic_netlink-myfw-Interface 2,level=gateway)
--------Routing(name=Any network,level=any)
If nested routes exist, you can iterate a given node to get specific
information::
>>> interface = engine.routing.get(1)
>>> for routes in interface:
... print(routes)
...
Routing(name=network-10.0.0.0/24,level=network)
...
>>> for networks in interface:
... networks
... for gateways in networks:
... print(gateways, gateways.ip)
...
Routing(name=network-172.18.1.0/24,level=network)
Routing(name=asus-wireless,level=gateway) 172.18.1.200
If BGP, OSPF or a Traffic Handler (netlink) needs to be added to an interface
that has multiple IP addresses assigned and you want to bind to only one, you
can provide the ``network`` parameter to ``add_`` methods. The network can be
obtained for an interface::
>>> engine = Engine('sg_vm')
>>> interface0 = engine.routing.get(0)
>>> for network in interface0:
... network, network.ip
...
(Routing(name=network-172.18.1.0/24,level=network), '172.18.1.0/24')
Then add using::
>>> engine = Engine('sg_vm')
>>> interface0 = engine.routing.get(0)
>>> interface0.add_traffic_handler(StaticNetlink('foo'), network='172.18.1.0/24')
.. note:: If the ``network`` keyword is omitted and the interface has multiple
IP addresses assigned, this will bind OSPF, BGP or the Traffic Handler
to all assigned addresses.
Adding a basic static route can be done from the engine directly if it is a
simple source network to destination route::
engine.add_route(gateway='192.168.1.254/32', network='172.18.1.0/24')
The route gateway will be mapped to an interface with an address range in
the 192.168.1.x network automatically.
For more complex static routes such as ones that may use group elements, use
the routing node::
>>> engine = Engine('ve-1')
>>> interface0 = engine.routing.get(0)
>>> interface0.add_static_route(Router('tmprouter'), destination=[Group('routegroup')])
When a routing gateway is added to an IPv6 network, the gateway is validated before
adding. For example, if you have a single interface that has both an IPv4 and IPv6
address assigned, a static route using a Router gateway with only an IPv4 address will
only bind to the IPv4 network. In this case, you can optionally add both an IPv4 and
IPv6 to the router element, or run this operation for each network respectively.
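For example, to bind a static route only to the IPv6 network of a dual-stack
interface (the element names here are illustrative)::

    interface0 = engine.routing.get(0)
    interface0.add_static_route(
        Router('myv6router'), destination=[Network('remote-v6')],
        network='2001:db8::/64')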
.. seealso:: :meth:`.Routing.add_static_route`
.. note:: When changing are made to a routing node, i.e. adding OSPF, BGP, Netlink's, the
configuration is updated immediately without calling .update()
"""
import collections
from smc.base.model import SubElement, Element, ElementCache
from smc.base.util import element_resolver
from smc.api.exceptions import InterfaceNotFound, ModificationAborted
from smc.base.structs import SerializedIterable
def flush_parent_cache(node):
"""
Flush parent cache will recurse back up the tree and
wipe the cache from each parent node reference on the
given element. This allows the objects to be reused
and a clean way to force the object to update itself
if attributes or methods are referenced after update.
"""
if node._parent is None:
node._del_cache()
return
node._del_cache()
flush_parent_cache(node._parent)
class RoutingTree(SubElement):
"""
RoutingTree is the base class for both Routing and Antispoofing nodes.
This provides a common API for operations that affect how the routing
table and antispoofing operate.
"""
def __init__(self, data=None, **meta):
super(RoutingTree, self).__init__(**meta)
if data is not None:
self.data = ElementCache(data)
def __iter__(self):
for node in self.data[self.typeof]:
data = ElementCache(node)
yield(self.__class__(
href=data.get_link('self'),
type=self.__class__.__name__,
data=node,
parent=self))
@property
def name(self):
"""
Interface name / ID for routing level
:return: name of routing node
:rtype: str
"""
return self.data.get('name')
@property
def nicid(self):
"""
NIC id for this interface
:return: nic identifier
:rtype: str
"""
return self.data.get('nic_id')
@property
def dynamic_nicid(self):
"""
NIC id for this dynamic interface
:return: nic identifier, if this is a DHCP interface
:rtype: str or None
"""
return self.data.get('dynamic_nicid')
@property
def ip(self):
"""
IP network / host for this route
:return: IP address of this routing level
:rtype: str
"""
return self.data.get('ip')
@property
def level(self):
"""
Routing nodes have multiple 'levels' where routes can
be nested. Most routes are placed at the interface level.
This setting can mostly be ignored, but provides an
informative view of how the route is nested.
:return: routing node level (interface,network,gateway,any)
:rtype: str
"""
return self.data.get('level')
@property
def related_element_type(self):
"""
.. versionadded:: 0.6.0
Requires SMC version >= 6.4
Related element type defines the 'type' of element at this
routing or antispoofing node level.
:rtype: str
"""
if 'related_element_type' in self.data:
return self.data.get('related_element_type')
return None if self.dynamic_nicid or (self.nicid and '.' in self.nicid) else \
Element.from_href(self.data.get('href')).typeof # pre-6.4
def as_tree(self, level=0):
"""
Display the routing tree representation in string
format
:rtype: str
"""
ret = '--' * level + repr(self) + '\n'
for routing_node in self:
ret += routing_node.as_tree(level+1)
return ret
def get(self, interface_id):
"""
Obtain routing configuration for a specific interface by
ID.
.. note::
If interface is a VLAN, you must use a str to specify the
interface id, such as '3.13' (interface 3, VLAN 13)
:param str,int interface_id: interface identifier
:raises InterfaceNotFound: invalid interface for engine
:return: Routing element, or None if not found
:rtype: Routing
"""
for interface in self:
if interface.nicid == str(interface_id) or \
interface.dynamic_nicid == str(interface_id):
return interface
raise InterfaceNotFound('Specified interface {} does not exist on '
'this engine.'.format(interface_id))
def delete(self):
super(RoutingTree, self).delete()
flush_parent_cache(self._parent)
def update(self):
super(RoutingTree, self).update()
flush_parent_cache(self._parent)
def all(self):
"""
Return all routes for this engine.
:return: current route entries as :class:`.Routing` element
:rtype: list
"""
return [node for node in self]
def __str__(self):
return '{}(name={},level={},type={})'.format(
self.__class__.__name__, self.name, self.level, self.related_element_type)
def __repr__(self):
return str(self)
class Routing(RoutingTree):
"""
Routing represents the Engine routing configuration and provides the
ability to view and add features to routing nodes such as OSPF.
"""
typeof = 'routing_node'
def __init__(self, data=None, **meta):
self._parent = meta.pop('parent', None)
super(Routing, self).__init__(data, **meta)
@property
def routing_node_element(self):
"""
A routing node element will reference the element used to represent
the node (i.e. router, host, network, netlink, bgp peering, etc).
Although the routing node already resolves the element and provides
the `ip` property to obtain the address/network, use this property
to obtain access to modifying the element itself::
>>> interface0 = engine.routing.get(0)
>>> for networks in interface0:
... for gateway in networks:
... gateway.routing_node_element
...
Router(name=router-1.1.1.1)
StaticNetlink(name=mystatic)
BGPPeering(name=anotherpeering)
BGPPeering(name=mypeering)
>>>
"""
return from_meta(self)
@property
def bgp_peerings(self):
"""
BGP Peerings applied to a routing node. This can be called from
the engine, interface or network level. Return is a tuple
of (interface, network, bgp_peering). This simplifies viewing
and removing BGP Peers from the routing table::
>>> for bgp in engine.routing.bgp_peerings:
... bgp
...
(Routing(name=Interface 0,level=interface,type=physical_interface),
Routing(name=network-1.1.1.0/24,level=network,type=network),
Routing(name=mypeering,level=gateway,type=bgp_peering))
(Routing(name=Interface 1,level=interface,type=physical_interface),
Routing(name=network-2.2.2.0/24,level=network,type=network),
Routing(name=mypeering,level=gateway,type=bgp_peering))
.. seealso:: :meth:`~netlinks` and :meth:`~ospf_areas` for obtaining
other routing element types
:rtype: tuple(Routing)
"""
return gateway_by_type(self, 'bgp_peering')
@property
def netlinks(self):
"""
Netlinks applied to a routing node. This can be called
from the engine, interface or network level. Return is a
tuple of (interface, network, netlink). This simplifies
viewing and removing Netlinks from the routing table::
>>> interface = engine.routing.get(1)
>>> for static_netlink in interface.netlinks:
... interface, network, netlink = static_netlink
... netlink
... netlink.delete()
...
Routing(name=mylink,level=gateway,type=netlink)
.. seealso:: :meth:`~bgp_peerings` and :meth:`~ospf_areas` for obtaining
other routing element types
:rtype: tuple(Routing)
"""
return gateway_by_type(self, 'netlink')
@property
def ospf_areas(self):
"""
OSPFv2 areas applied to a routing node. This can be called from
the engine, interface or network level. Return is a tuple
of (interface, network, ospf_area). This simplifies viewing
and removing OSPF areas from the routing table::
>>> for ospf in engine.routing.ospf_areas:
... ospf
...
(Routing(name=Interface 0,level=interface,type=physical_interface),
Routing(name=network-1.1.1.0/24,level=network,type=network),
Routing(name=area10,level=gateway,type=ospfv2_area))
.. seealso:: :meth:`~bgp_peerings` and :meth:`~netlinks` for obtaining
other routing element types
:rtype: tuple(Routing)
"""
return gateway_by_type(self, 'ospfv2_area')
def add_traffic_handler(self, netlink, netlink_gw=None, network=None):
"""
Add a traffic handler to a routing node. A traffic handler can be
either a static netlink or a multilink traffic handler. If ``network``
is not specified and the interface has multiple IP addresses, the
traffic handler will be added to all ipv4 addresses.
Add a pre-defined netlink to the route table of interface 0::
engine = Engine('vm')
rnode = engine.routing.get(0)
rnode.add_traffic_handler(StaticNetlink('mynetlink'))
Add a pre-defined netlink only to a specific network on an interface
with multiple addresses. Specify a netlink_gw for the netlink::
rnode = engine.routing.get(0)
rnode.add_traffic_handler(
StaticNetlink('mynetlink'),
netlink_gw=[Router('myrtr'), Host('myhost')],
network='172.18.1.0/24')
:param StaticNetlink,Multilink netlink: netlink element
:param list(Element) netlink_gw: list of elements that should be destinations
for this netlink. Typically these may be of type host, router, group, server,
network or engine.
:param str network: if network specified, only add the traffic handler to this network on the interface
:raises UpdateElementFailed: failure updating routing
:raises ModificationAborted: Change must be made at the interface level
:raises ElementNotFound: ospf area not found
:return: Status of whether the route table was updated
:rtype: bool
"""
routing_node_gateway = RoutingNodeGateway(netlink,
destinations=[] if not netlink_gw else netlink_gw)
return self._add_gateway_node('netlink', routing_node_gateway, network)
def add_ospf_area(self, ospf_area, ospf_interface_setting=None, network=None,
communication_mode='NOT_FORCED', unicast_ref=None):
"""
Add OSPF Area to this routing node.
Communication mode specifies how the interface will interact with the
adjacent OSPF environment. Please see SMC API documentation for more
in depth information on each option.
If the interface has multiple networks nested below, all networks
will receive the OSPF area by default unless the ``network`` parameter
is specified. OSPF cannot be applied to IPv6 networks.
Example of adding an area to interface routing node::
area = OSPFArea('area0') #obtain area resource
#Set on routing interface 0
interface = engine.routing.get(0)
interface.add_ospf_area(area)
.. note:: If UNICAST is specified, you must also provide a unicast_ref
of element type Host to identify the remote host. If no
unicast_ref is provided, this is skipped
:param OSPFArea ospf_area: OSPF area instance or href
:param OSPFInterfaceSetting ospf_interface_setting: used to override the
OSPF settings for this interface (optional)
:param str network: if network specified, only add OSPF to this network
on interface
:param str communication_mode: NOT_FORCED|POINT_TO_POINT|PASSIVE|UNICAST
:param Element unicast_ref: Element used as unicast gw (required for UNICAST)
:raises ModificationAborted: Change must be made at the interface level
:raises UpdateElementFailed: failure updating routing
:raises ElementNotFound: ospf area not found
:return: Status of whether the route table was updated
:rtype: bool
"""
communication_mode = communication_mode.upper()
destinations = [] if not ospf_interface_setting else [ospf_interface_setting]
if communication_mode == 'UNICAST' and unicast_ref:
destinations.append(unicast_ref)
routing_node_gateway = RoutingNodeGateway(
ospf_area, communication_mode=communication_mode,
destinations=destinations)
return self._add_gateway_node('ospfv2_area', routing_node_gateway, network)
def add_bgp_peering(self, bgp_peering, external_bgp_peer=None,
network=None):
"""
Add a BGP configuration to this routing interface.
If the interface has multiple ip addresses, all networks will receive
the BGP peering by default unless the ``network`` parameter is
specified.
Example of adding BGP to an interface by ID::
interface = engine.routing.get(0)
interface.add_bgp_peering(
BGPPeering('mypeer'),
ExternalBGPPeer('neighbor'))
:param BGPPeering bgp_peering: BGP Peer element
:param ExternalBGPPeer,Engine external_bgp_peer: peer element or href
:param str network: if network specified, only add the BGP peering to this
network on the interface
:raises ModificationAborted: Change must be made at the interface level
:raises UpdateElementFailed: failed to add BGP
:return: Status of whether the route table was updated
:rtype: bool
"""
destination = [external_bgp_peer] if external_bgp_peer else []
routing_node_gateway = RoutingNodeGateway(bgp_peering,
destinations=destination)
return self._add_gateway_node('bgp_peering', routing_node_gateway, network)
def add_static_route(self, gateway, destination, network=None):
"""
Add a static route to this route table. Destination can be any element
type supported in the routing table such as a Group of network members.
Since a static route gateway needs to be on the same network as the
interface, provide a value for `network` if an interface has multiple
addresses on different networks.
::
>>> engine = Engine('ve-1')
>>> itf = engine.routing.get(0)
>>> itf.add_static_route(
gateway=Router('tmprouter'),
destination=[Group('routegroup')])
:param Element gateway: gateway for this route (Router, Host)
:param Element destination: destination network/s for this route.
:type destination: list(Host, Router, ..)
:raises ModificationAborted: Change must be made at the interface level
:raises UpdateElementFailed: failure to update routing table
:return: Status of whether the route table was updated
:rtype: bool
"""
routing_node_gateway = RoutingNodeGateway(gateway,
destinations=destination)
return self._add_gateway_node('router', routing_node_gateway, network)
def add_dynamic_gateway(self, networks):
"""
A dynamic gateway object creates a router object that is
attached to a DHCP interface. You can associate networks with
this gateway address to identify networks for routing on this
interface.
::
route = engine.routing.get(0)
route.add_dynamic_gateway([Network('mynetwork')])
:param list networks: list of Network elements to add to
this DHCP gateway
:raises ModificationAborted: Change must be made at the interface level
:raises UpdateElementFailed: failure to update routing table
:return: Status of whether the route table was updated
:rtype: bool
"""
routing_node_gateway = RoutingNodeGateway(dynamic_classid='gateway',
destinations=networks or [])
return self._add_gateway_node('dynamic_netlink', routing_node_gateway)
def _add_gateway_node_on_tunnel(self, routing_node_gateway):
"""
Add a gateway node on a tunnel interface. Tunnel interface elements
are attached to the interface level and not directly nested under
the networks node.
        :param RoutingNodeGateway routing_node_gateway: routing node gateway instance
:return: Whether a change was made or not
:rtype: bool
"""
modified = False
peering = [next_hop for next_hop in self
if next_hop.routing_node_element == routing_node_gateway.routing_node_element]
if not peering:
self.data.setdefault('routing_node', []).append(
routing_node_gateway)
modified = True
# Have peering
else:
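            # Gateway already present; collect the destination elements it routes to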
peers = [node.routing_node_element for peer in peering
for node in peer]
for destination in routing_node_gateway.destinations:
if destination not in peers:
peering[0].data.setdefault('routing_node', []).append(
{'level': 'any', 'href': destination.href,
'name': destination.name})
modified = True
if modified:
self.update()
return modified
def _add_gateway_node(self, gw_type, routing_node_gateway, network=None):
"""
Add a gateway node to existing routing tree. Gateways are only added if
they do not already exist. If they do exist, check the destinations of
the existing gateway and add destinations that are not already there.
        A current limitation: if the gateway does not yet exist, the
        specified destinations are added without validating their IP
        protocol, so e.g. an IPv6 destination may be attached to an
        IPv4 gateway even though that combination is invalid.
:param Routing self: the routing node, should be the interface routing node
:param str gw_type: type of gateway, i.e. netlink, ospfv2_area, etc
        :param RoutingNodeGateway routing_node_gateway: gateway element
:param str network: network to bind to. If none, all networks
:return: Whether a change was made or not
:rtype: bool
"""
if self.level != 'interface':
raise ModificationAborted('You must make this change from the '
'interface routing level. Current node: {}'.format(self))
if self.related_element_type == 'tunnel_interface':
return self._add_gateway_node_on_tunnel(routing_node_gateway)
# Find any existing gateways
routing_node = list(gateway_by_type(self, type=gw_type, on_network=network))
        _networks = list(self) if network is None else \
            [netwk for netwk in self if netwk.ip == network]
# Routing Node Gateway to add as Element
gateway_element_type = routing_node_gateway.routing_node_element
modified = False
for network in _networks:
# Short circuit for dynamic interfaces
if getattr(network, 'dynamic_classid', None):
network.data.setdefault('routing_node', []).append(
routing_node_gateway)
modified = True
break
            # Used for comparison against existing gateway entries on this network
this_network_node = network.routing_node_element
if routing_node and any(netwk for _intf, netwk, gw in routing_node
if netwk.routing_node_element == this_network_node and
gateway_element_type == gw.routing_node_element):
# A gateway exists on this network
for gw in network:
if gw.routing_node_element == gateway_element_type:
existing_dests = [node.routing_node_element for node in gw]
for destination in routing_node_gateway.destinations:
is_valid_destination = False
if destination not in existing_dests:
dest_ipv4, dest_ipv6 = _which_ip_protocol(destination)
if len(network.ip.split(':')) > 1: # IPv6
if dest_ipv6:
is_valid_destination = True
else:
if dest_ipv4:
is_valid_destination = True
if is_valid_destination:
gw.data.setdefault('routing_node', []).append(
{'level': 'any', 'href': destination.href,
'name': destination.name})
modified = True
else: # Gateway doesn't exist
gw_ipv4, gw_ipv6 = _which_ip_protocol(gateway_element_type) # ipv4, ipv6 or both
if len(network.ip.split(':')) > 1:
if gw_ipv6:
network.data.setdefault('routing_node', []).append(
routing_node_gateway)
modified = True
else: # IPv4
if gw_ipv4:
network.data.setdefault('routing_node', []).append(
routing_node_gateway)
modified = True
if modified:
self.update()
return modified
def remove_route_gateway(self, element, network=None):
"""
Remove a route element by href or Element. Use this if you want to
remove a netlink or a routing element such as BGP or OSPF. Removing
is done from within the routing interface context.
::
interface0 = engine.routing.get(0)
interface0.remove_route_gateway(StaticNetlink('mynetlink'))
Only from a specific network on a multi-address interface::
interface0.remove_route_gateway(
StaticNetlink('mynetlink'),
network='172.18.1.0/24')
:param str,Element element: element to remove from this routing node
        :param str network: if network specified, only remove the gateway
            from this network on the interface
:raises ModificationAborted: Change must be made at the interface level
:raises UpdateElementFailed: failure to update routing table
        :return: Status of whether the entry was removed (False if the entry was not found)
:rtype: bool
"""
if self.level not in ('interface',):
raise ModificationAborted('You must make this change from the '
'interface routing level. Current node: {}'.format(self))
node_changed = False
element = element_resolver(element)
for network in self:
# Tunnel Interface binds gateways to the interface
if network.level == 'gateway' and network.data.get('href') == element:
network.delete()
node_changed = True
break
for gateway in network:
if gateway.data.get('href') == element:
gateway.delete()
node_changed = True
return node_changed
class RoutingNodeGateway(Routing):
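    """Routing node representing a gateway element and its route destinations."""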
def __init__(self, element=None, level='gateway', **kwargs):
self.destinations = kwargs.pop('destinations', [])
self.data = ElementCache(kwargs)
self.data.update(
level=level,
routing_node=[])
if element:
self.data.update(
href=element.href,
name=element.name)
#related_element_type=element.typeof)
for destination in self.destinations:
self.data['routing_node'].append(
{'href': destination.href,
'name': destination.name,
'level': 'any'})
class Antispoofing(RoutingTree):
"""
Anti-spoofing is configured by default based on
interface networks directly attached. It is possible
to override these settings by adding additional
networks as valid source networks on a given
interface.
Antispoofing is nested similar to routes. Iterate the
antispoofing configuration::
for entry in engine.antispoofing.all():
print(entry)
"""
typeof = 'antispoofing_node'
def __init__(self, data=None, **meta):
self._parent = meta.pop('parent', None)
super(Antispoofing, self).__init__(data, **meta)
@property
def autogenerated(self):
"""
        Whether the entry was auto-generated from a route entry or
        added manually as an override
:rtype: bool
"""
return self.data.get('auto_generated') == 'true'
@property
def validity(self):
"""
Enabled or disabled antispoofing entry
:return: validity of this entry (enable,disable,absolute)
:rtype: str
"""
return self.data.get('validity')
def add(self, element):
"""
Add an entry to this antispoofing node level.
Entry can be either href or network elements specified
in :py:class:`smc.elements.network`
::
if0 = engine.antispoofing.get(0)
if0.add(Network('foonet'))
:param Element element: entry to add, i.e. Network('mynetwork'), Host(..)
:raises CreateElementFailed: failed adding entry
:raises ElementNotFound: element entry specified not in SMC
:return: whether entry was added
:rtype: bool
"""
if self.level == 'interface':
for network in self:
if from_meta(network) == element:
return False
self.data['antispoofing_node'].append({
'antispoofing_node': [],
'auto_generated': 'false',
'href': element.href,
'level': self.level,
'validity': 'enable',
'name': element.name})
self.update()
return True
return False
def __len__(self):
return len(self.data.get('antispoofing_node', []))
def remove(self, element):
"""
Remove a specific user added element from the antispoofing tables of
a given interface. This will not remove autogenerated or system level
entries.
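        Example of removing a user added entry (a sketch; assumes
        ``Network('foonet')`` was added earlier with :meth:`add`)::
            if0 = engine.antispoofing.get(0)
            if0.remove(Network('foonet'))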
:param Element element: element to remove
        :return: True if the element existed and was removed, otherwise False
:rtype: bool
"""
if self.level == 'interface':
len_before_change = len(self)
_nodes = []
for network in self:
if from_meta(network) != element:
_nodes.append(network.data)
else:
                    if network.autogenerated: # Keep autogenerated entries; only user added entries are removable
_nodes.append(network.data)
if len(_nodes) != len_before_change:
self.data['antispoofing_node'] = _nodes
self.update()
return True
return False
def from_meta(node):
"""
    Helper method that resolves a routing node to an element. Rather than doing
a lookup and fetch, the routing node provides the information to
build the element from meta alone.
:rtype: Element
"""
# Version SMC < 6.4
if 'related_element_type' not in node.data:
return Element.from_href(
node.data.get('href'))
# SMC Version >= 6.4 - more efficient because it builds the
# element by meta versus requiring a query
return Element.from_meta(
name=node.data.get('name'),
type=node.related_element_type,
href=node.data.get('href'))
def route_level(root, level):
"""
Helper method to recurse the current node and return
the specified routing node level.
"""
def recurse(nodes):
for node in nodes:
if node.level == level:
routing_node.append(node)
else:
recurse(node)
routing_node = []
recurse(root)
return routing_node
def gateway_by_type(self, type=None, on_network=None): # @ReservedAssignment
"""
Return gateways for the specified node. You can also
specify type to find only gateways of a specific type.
Valid types are: bgp_peering, netlink, ospfv2_area.
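    Example of iterating BGP peering gateways (a sketch; assumes an
    interface level routing node)::
        for interface, network, gateway in gateway_by_type(
                engine.routing.get(0), type='bgp_peering'):
            print(interface, network, gateway)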
:param RoutingNode self: the routing node to check
:param str type: bgp_peering, netlink, ospfv2_area
:param str on_network: if network is specified, should be CIDR and
specifies a filter to only return gateways on that network when
an interface has multiple
:return: tuple of RoutingNode(interface,network,gateway)
:rtype: list
"""
gateways = route_level(self, 'gateway')
if not type:
for gw in gateways:
yield gw
else:
for node in gateways:
#TODO: Change to type == node.related_element_type when
# only supporting SMC >= 6.4
if type == node.routing_node_element.typeof:
# If the parent is level interface, this is a tunnel interface
# where the gateway is bound to interface versus network
parent = node._parent
if parent.level == 'interface':
interface = parent
network = None
else:
network = parent
interface = network._parent
if on_network is not None:
if network and network.ip == on_network:
yield (interface, network, node)
else:
yield (interface, network, node)
def _which_ip_protocol(element):
"""
Validate the protocol addresses for the element. Most elements can
have an IPv4 or IPv6 address assigned on the same element. This
allows elements to be validated and placed on the right network.
:return: boolean tuple
:rtype: tuple(ipv4, ipv6)
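    Example (a sketch)::
        ipv4, ipv6 = _which_ip_protocol(Host('myhost'))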
"""
try:
if element.typeof in ('host', 'router'):
return getattr(element, 'address', False), getattr(element, 'ipv6_address', False)
elif element.typeof == 'netlink':
gateway = element.gateway
if gateway.typeof == 'router':
return getattr(gateway, 'address', False), getattr(gateway, 'ipv6_address', False)
# It's an engine, return true
elif element.typeof == 'network':
return getattr(element, 'ipv4_network', False), getattr(element, 'ipv6_network', False)
except AttributeError:
pass
# Always return true so that the calling function assumes the element
# is valid for the routing node. This could fail when submitting but
# we don't want to prevent adding elements yet since this could change
return True, True
def del_invalid_routes(engine, nicids):
"""
Helper method to run through and delete any routes that are tagged
as invalid or to_delete by a list of nicids. Since we could have a
list of routes, iterate from top level engine routing node to avoid
fetch exceptions. Route list should be a list of nicids as str.
:param list nicids: list of nicids
:raises DeleteElementFailed: delete element failed with reason
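    Example (a sketch; the nicid values are hypothetical)::
        del_invalid_routes(engine, [1, 2])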
"""
nicids = map(str, nicids)
for interface in engine.routing:
if interface.nicid in nicids:
if getattr(interface, 'to_delete', False): # Delete the invalid interface
interface.delete()
continue
for network in interface:
if getattr(network, 'invalid', False) or \
getattr(network, 'to_delete', False):
network.delete()
route = collections.namedtuple('Route',
'route_network route_netmask route_gateway route_type dst_if src_if')
route.__new__.__defaults__ = (None,) * len(route._fields)
class Route(SerializedIterable):
"""
Active routes obtained from a running engine.
Obtain routes from an engine reference::
>>> engine = Engine('sg_vm')
>>> for route in engine.routing_monitoring:
... route
:ivar str route_network: network for this route
:ivar int route_netmask: netmask for the route
:ivar str route_gateway: route gateway, may be None if it's a local network only
:ivar str route_type: status of the route
:ivar int dst_if: destination interface index
:ivar int src_if: source interface index
"""
def __init__(self, data):
routes = data.get('routing_monitoring_entry', [])
data = [{k: v for k, v in d.items()
if k != 'cluster_ref'} for d in routes]
super(Route, self).__init__(data, route)
policy_route = collections.namedtuple('PolicyRoute',
'source destination gateway_ip comment')
policy_route.__new__.__defaults__ = (None,) * len(policy_route._fields)
class PolicyRoute(SerializedIterable):
"""
An iterable providing an interface to policy based routing on the
engine.
    You must call engine.update() after performing an add or delete::
>>> engine = Engine('myfw')
>>> engine.policy_route
PolicyRoute(items: 1)
>>> for rt in engine.policy_route:
... rt
...
PolicyRoute(source=u'172.18.1.0/24', destination=u'172.18.1.0/24', gateway_ip=u'172.18.1.1', comment=None)
>>> engine.policy_route.create(source='172.18.2.0/24', destination='192.168.3.0/24', gateway_ip='172.18.2.1')
>>> engine.update()
'http://172.18.1.151:8082/6.4/elements/single_fw/746'
>>> for rt in engine.policy_route:
... rt
...
PolicyRoute(source=u'172.18.1.0/24', destination=u'172.18.1.0/24', gateway_ip=u'172.18.1.1', comment=None)
PolicyRoute(source=u'172.18.2.0/24', destination=u'192.168.3.0/24', gateway_ip=u'172.18.2.1', comment=None)
>>> engine.policy_route.delete(source='172.18.2.0/24')
>>> engine.update()
'http://172.18.1.151:8082/6.4/elements/single_fw/746'
>>> for rt in engine.policy_route:
... rt
...
PolicyRoute(source=u'172.18.1.0/24', destination=u'172.18.1.0/24', gateway_ip=u'172.18.1.1', comment=None)
:ivar str source: source network/cidr for the route
:ivar str destination: destination network/cidr for the route
:ivar str gateway_ip: gateway IP address, must be on source network
:ivar str comment: optional comment
"""
def __init__(self, engine):
data = engine.data.get('policy_route')
super(PolicyRoute, self).__init__(data, policy_route)
def create(self, source, destination, gateway_ip, comment=None):
"""
Add a new policy route to the engine.
:param str source: network address with /cidr
:param str destination: network address with /cidr
        :param str gateway_ip: IP address, must be on source network
:param str comment: optional comment
"""
self.items.append(dict(
source=source, destination=destination,
gateway_ip=gateway_ip, comment=comment))
def delete(self, **kw):
"""
Delete a policy route from the engine. You can delete using a
single field or multiple fields for a more exact match.
Use a keyword argument to delete a route by any valid attribute.
:param kw: use valid Route keyword values to delete by exact match
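        Example of deleting by an exact source and gateway match (a sketch;
        call ``engine.update()`` afterwards to commit)::
            engine.policy_route.delete(
                source='172.18.2.0/24', gateway_ip='172.18.2.1')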
"""
delete_by = []
for field, val in kw.items():
if val is not None:
delete_by.append(field)
self.items[:] = [route for route in self.items
if not all(route.get(field) == kw.get(field)
for field in delete_by)]
| 39.129577 | 117 | 0.610635 | 4,988 | 41,673 | 4.995188 | 0.112069 | 0.033111 | 0.017338 | 0.003652 | 0.307995 | 0.27087 | 0.21123 | 0.186226 | 0.173463 | 0.16383 | 0 | 0.015255 | 0.306289 | 41,673 | 1,064 | 118 | 39.166353 | 0.846622 | 0.527464 | 0 | 0.257062 | 0 | 0 | 0.071171 | 0.003174 | 0 | 0 | 0 | 0.00094 | 0 | 1 | 0.129944 | false | 0.002825 | 0.014124 | 0.008475 | 0.282486 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f322d97c5bfce91348ef67bf61cb2f314102bbed | 597 | py | Python | python/widgets/SpinBox.py | hillnet/Supersonic | 3f3c94eff1d82b85614850d567777c2d6a32bc0e | [
"BSD-2-Clause"
] | null | null | null | python/widgets/SpinBox.py | hillnet/Supersonic | 3f3c94eff1d82b85614850d567777c2d6a32bc0e | [
"BSD-2-Clause"
] | null | null | null | python/widgets/SpinBox.py | hillnet/Supersonic | 3f3c94eff1d82b85614850d567777c2d6a32bc0e | [
"BSD-2-Clause"
] | null | null | null | from PyQt5.QtWidgets import QSpinBox
from PyQt5.QtGui import QFont
from python.Constants import *
class SpinBox(QSpinBox):
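    """QSpinBox preconfigured with the shared font, range, default value and step."""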
def __init__(self, frame, name, q_rect, min_val, max_val, default, step):
super().__init__(frame)
self.setGeometry(q_rect)
font = QFont()
font.setFamily(FONT)
font.setPointSize(FONT_SIZE)
self.setFont(font)
self.setMinimum(min_val)
self.setMaximum(max_val)
self.setProperty("value", default)
self.setSingleStep(step)
self.setDisplayIntegerBase(10)
self.setObjectName(name) | 31.421053 | 77 | 0.670017 | 70 | 597 | 5.5 | 0.542857 | 0.046753 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008734 | 0.232831 | 597 | 19 | 78 | 31.421053 | 0.831878 | 0 | 0 | 0 | 0 | 0 | 0.008361 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.058824 | false | 0 | 0.176471 | 0 | 0.294118 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f3244f63e20dadefb7956b5f0df805a79eea4ef8 | 844 | py | Python | panopuppet/pano/views/dashboard.py | propyless/panopuppet | 6beea45ad25ea1e2ed7dbd5b60210880cd8aab2a | [
"Apache-2.0"
] | 60 | 2015-03-26T14:24:47.000Z | 2016-08-09T17:48:00.000Z | panopuppet/pano/views/dashboard.py | propyless/panopuppet | 6beea45ad25ea1e2ed7dbd5b60210880cd8aab2a | [
"Apache-2.0"
] | 108 | 2015-04-17T12:05:46.000Z | 2016-08-23T14:42:19.000Z | panopuppet/pano/views/dashboard.py | propyless/panopuppet | 6beea45ad25ea1e2ed7dbd5b60210880cd8aab2a | [
"Apache-2.0"
] | 27 | 2015-03-30T13:23:03.000Z | 2016-10-25T20:18:27.000Z | import pytz
from django.contrib.auth.decorators import login_required
from django.shortcuts import redirect, render
from django.views.decorators.cache import cache_page
from panopuppet.pano.puppetdb.puppetdb import set_server
from panopuppet.pano.settings import AVAILABLE_SOURCES, CACHE_TIME
__author__ = 'etaklar'
@login_required
@cache_page(CACHE_TIME)
def dashboard(request):
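    """Render the dashboard; GET selects the PuppetDB source, POST sets the timezone."""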
context = {'timezones': pytz.common_timezones,
'SOURCES': AVAILABLE_SOURCES}
if request.method == 'GET':
if 'source' in request.GET:
source = request.GET.get('source')
set_server(request, source)
if request.method == 'POST':
request.session['django_timezone'] = request.POST['timezone']
return redirect(request.POST['url'])
return render(request, 'pano/dashboard.html', context)
| 31.259259 | 69 | 0.721564 | 101 | 844 | 5.871287 | 0.425743 | 0.05059 | 0.060708 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.175355 | 844 | 26 | 70 | 32.461538 | 0.852011 | 0 | 0 | 0 | 0 | 0 | 0.103081 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.05 | false | 0 | 0.3 | 0 | 0.45 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f3248e0ac72e5f8956cce10805a1c4852f01aadf | 3,649 | py | Python | _fred-v1/model/test_net.py | elviva404/frontend-regression-validator | 21df2a127712bdf0688dc9aedf478c6a2a90a3c3 | [
"ECL-2.0",
"Apache-2.0"
] | 70 | 2019-09-16T13:30:49.000Z | 2022-02-25T17:46:23.000Z | _fred-v1/model/test_net.py | elviva404/frontend-regression-validator | 21df2a127712bdf0688dc9aedf478c6a2a90a3c3 | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2020-01-13T09:15:47.000Z | 2020-07-29T11:49:25.000Z | _fred-v1/model/test_net.py | elviva404/frontend-regression-validator | 21df2a127712bdf0688dc9aedf478c6a2a90a3c3 | [
"ECL-2.0",
"Apache-2.0"
] | 10 | 2019-10-06T08:22:05.000Z | 2022-02-03T18:45:08.000Z | from models.nnet import NNet
import torch
from PIL import Image
import numpy as np
import os
import argparse
from config.config import VALID_MODELS
CHANNELS = sorted(['images', 'section', 'buttons', 'forms', 'textblock'])
CHANNELS_DICT = dict(zip(CHANNELS, range(len(CHANNELS))))
def prepare_for_input(pilim, flip_lr=False, flip_ud=False):
input_array = np.asarray(pilim) / 255
if flip_lr:
input_array = np.fliplr(input_array)
if flip_ud:
input_array = np.flipud(input_array)
return input_array
def get_tensor(input_array):
tensor = torch.tensor(input_array.copy()).permute(2, 0, 1).unsqueeze(0).float()
return tensor
def get_output(output):
output = output[0].permute(1, 2, 0)
out_image_array = output.detach().numpy()
return out_image_array
def test_net(model_name, model_file, trained_with_residuals, trained_with_out_layer, image_file, channel):
assert model_name in VALID_MODELS, 'Please choose a valid model: {}'.format(', '.join(VALID_MODELS))
assert os.path.exists(model_file), 'No such file {}'.format(model_file)
assert os.path.exists(image_file), 'No such file {}'.format(image_file)
channel = int(channel)
assert channel in list(range(len(CHANNELS))), 'Please choose a valid channel: {}'.format(CHANNELS_DICT)
model = NNet(out_channels=5, use_residuals=trained_with_residuals, model_name=model_name, out_layer=trained_with_out_layer)
model.load_state_dict(torch.load(model_file, map_location='cpu'))
model.eval()
pilim = Image.open(image_file).convert('L').convert('RGB')
pilim.thumbnail((512, pilim.size[1]), Image.ANTIALIAS)
new_h = pilim.size[1] - pilim.size[1] % 32
pilim = pilim.resize((512, new_h), Image.ANTIALIAS)
pilim.show()
correct_input_array = prepare_for_input(pilim)
lr_flipped_input_array = prepare_for_input(pilim, flip_lr=True)
if trained_with_out_layer:
        _, output = model(get_tensor(correct_input_array))
        correct_out_image_array = get_output(output)
        _, output = model(get_tensor(lr_flipped_input_array))
        lr_out_image_array = np.fliplr(get_output(output))
else:
correct_out_image_array = get_output(model(get_tensor(correct_input_array)))
lr_out_image_array = np.fliplr(get_output(model(get_tensor(lr_flipped_input_array))))
out_image_array = (correct_out_image_array + lr_out_image_array) / 2
out_image_array[out_image_array > 0.5] = 1
out_image_array[out_image_array <= 0.5] = 0
out_image_array *= 255
out_image_array = np.array(out_image_array, dtype='uint8')
out_pilim = Image.fromarray(out_image_array[:, :, channel])
out_pilim.show()
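# Example invocation (a sketch; the model name and file names are hypothetical):
#   python test_net.py --model-name unet --model-file model.pth \
#       --image-file page.png --trained-with-residuals y \
#       --trained-with-out-layer n --channel 0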
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--model-name', help='Name of the model from {}'.format(', '.join(VALID_MODELS)))
parser.add_argument('--model-file', help='.pth file containing the state dict of the model')
parser.add_argument('--image-file', help='Image file to test on')
parser.add_argument('--trained-with-residuals', help='True if the model was trained with residuals')
parser.add_argument('--channel', help='What channel to show: {}'.format(CHANNELS_DICT))
parser.add_argument('--trained-with-out-layer', help='Trained with extra out layer')
args = parser.parse_args()
    trained_with_residuals = args.trained_with_residuals == 'y'
    trained_with_out_layer = args.trained_with_out_layer == 'y'
    test_net(args.model_name, args.model_file, trained_with_residuals,
             trained_with_out_layer, args.image_file, channel=args.channel)
| 39.663043 | 133 | 0.727597 | 537 | 3,649 | 4.640596 | 0.227188 | 0.054575 | 0.088684 | 0.053371 | 0.274478 | 0.20305 | 0.145265 | 0.119583 | 0.072231 | 0.033708 | 0 | 0.010975 | 0.151 | 3,649 | 91 | 134 | 40.098901 | 0.793415 | 0 | 0 | 0 | 0 | 0 | 0.119759 | 0.013154 | 0 | 0 | 0 | 0 | 0.060606 | 1 | 0.060606 | false | 0 | 0.106061 | 0 | 0.212121 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f324d60461124541b7b9f7a1dc8266ea50c5560a | 5,503 | py | Python | Lib/site-packages/qutepart/brackethlighter.py | fochoao/cpython | 3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9 | [
"bzip2-1.0.6",
"0BSD"
] | null | null | null | Lib/site-packages/qutepart/brackethlighter.py | fochoao/cpython | 3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9 | [
"bzip2-1.0.6",
"0BSD"
] | 20 | 2021-05-03T18:02:23.000Z | 2022-03-12T12:01:04.000Z | Lib/site-packages/qutepart/brackethlighter.py | fochoao/cpython | 3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9 | [
"bzip2-1.0.6",
"0BSD"
] | null | null | null | """Bracket highlighter.
Calculates list of QTextEdit.ExtraSelection
"""
import time
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QTextCursor
from PyQt5.QtWidgets import QTextEdit
class _TimeoutException(UserWarning):
"""Operation timeout happened
"""
pass
class BracketHighlighter:
"""Bracket highliter.
Calculates list of QTextEdit.ExtraSelection
Currently, this class might be just a set of functions.
Probably, it will contain instance specific selection colors later
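    A typical use (a sketch; ``qpart`` is the editor widget)::
        highlighter = BracketHighlighter()
        selections = highlighter.extraSelections(qpart, block, columnIndex)
        qpart.setExtraSelections(selections)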
"""
_MAX_SEARCH_TIME_SEC = 0.02
_START_BRACKETS = '({['
_END_BRACKETS = ')}]'
_ALL_BRACKETS = _START_BRACKETS + _END_BRACKETS
    _OPPOSITE_BRACKET = dict((bracket, opposite)
                             for (bracket, opposite) in zip(_START_BRACKETS + _END_BRACKETS,
                                                            _END_BRACKETS + _START_BRACKETS))
currentMatchedBrackets = None # instance variable. None or ((block, columnIndex), (block, columnIndex))
def _iterateDocumentCharsForward(self, block, startColumnIndex):
"""Traverse document forward. Yield (block, columnIndex, char)
Raise _TimeoutException if time is over
"""
# Chars in the start line
endTime = time.time() + self._MAX_SEARCH_TIME_SEC
for columnIndex, char in list(enumerate(block.text()))[startColumnIndex:]:
yield block, columnIndex, char
block = block.next()
# Next lines
while block.isValid():
for columnIndex, char in enumerate(block.text()):
yield block, columnIndex, char
if time.time() > endTime:
raise _TimeoutException('Time is over')
block = block.next()
def _iterateDocumentCharsBackward(self, block, startColumnIndex):
"""Traverse document forward. Yield (block, columnIndex, char)
Raise _TimeoutException if time is over
"""
# Chars in the start line
endTime = time.time() + self._MAX_SEARCH_TIME_SEC
for columnIndex, char in reversed(list(enumerate(block.text()[:startColumnIndex]))):
yield block, columnIndex, char
block = block.previous()
# Next lines
while block.isValid():
for columnIndex, char in reversed(list(enumerate(block.text()))):
yield block, columnIndex, char
if time.time() > endTime:
raise _TimeoutException('Time is over')
block = block.previous()
def _findMatchingBracket(self, bracket, qpart, block, columnIndex):
"""Find matching bracket for the bracket.
Return (block, columnIndex) or (None, None)
Raise _TimeoutException, if time is over
"""
if bracket in self._START_BRACKETS:
charsGenerator = self._iterateDocumentCharsForward(block, columnIndex + 1)
else:
charsGenerator = self._iterateDocumentCharsBackward(block, columnIndex)
depth = 1
        opposite = self._OPPOSITE_BRACKET[bracket]
for block, columnIndex, char in charsGenerator:
if qpart.isCode(block, columnIndex):
                if char == opposite:
depth -= 1
if depth == 0:
return block, columnIndex
elif char == bracket:
depth += 1
else:
return None, None
def _makeMatchSelection(self, block, columnIndex, matched):
"""Make matched or unmatched QTextEdit.ExtraSelection
"""
selection = QTextEdit.ExtraSelection()
if matched:
bgColor = Qt.green
else:
bgColor = Qt.red
selection.format.setBackground(bgColor)
selection.cursor = QTextCursor(block)
selection.cursor.setPosition(block.position() + columnIndex)
selection.cursor.movePosition(QTextCursor.Right, QTextCursor.KeepAnchor)
return selection
def _highlightBracket(self, bracket, qpart, block, columnIndex):
"""Highlight bracket and matching bracket
Return tuple of QTextEdit.ExtraSelection's
"""
try:
matchedBlock, matchedColumnIndex = self._findMatchingBracket(bracket, qpart, block, columnIndex)
except _TimeoutException: # not found, time is over
            return []  # highlight nothing
if matchedBlock is not None:
self.currentMatchedBrackets = ((block, columnIndex), (matchedBlock, matchedColumnIndex))
return [self._makeMatchSelection(block, columnIndex, True),
self._makeMatchSelection(matchedBlock, matchedColumnIndex, True)]
else:
self.currentMatchedBrackets = None
return [self._makeMatchSelection(block, columnIndex, False)]
def extraSelections(self, qpart, block, columnIndex):
"""List of QTextEdit.ExtraSelection's, which highlighte brackets
"""
blockText = block.text()
if columnIndex < len(blockText) and \
blockText[columnIndex] in self._ALL_BRACKETS and \
qpart.isCode(block, columnIndex):
return self._highlightBracket(blockText[columnIndex], qpart, block, columnIndex)
elif columnIndex > 0 and \
blockText[columnIndex - 1] in self._ALL_BRACKETS and \
qpart.isCode(block, columnIndex - 1):
return self._highlightBracket(blockText[columnIndex - 1], qpart, block, columnIndex - 1)
else:
self.currentMatchedBrackets = None
return []
| 37.182432 | 116 | 0.636198 | 526 | 5,503 | 6.541825 | 0.252852 | 0.120895 | 0.040686 | 0.043592 | 0.390294 | 0.275211 | 0.26562 | 0.26562 | 0.26562 | 0.205754 | 0 | 0.00404 | 0.280393 | 5,503 | 147 | 117 | 37.435374 | 0.864899 | 0.182628 | 0 | 0.280899 | 0 | 0 | 0.006893 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.067416 | false | 0.011236 | 0.044944 | 0 | 0.292135 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f3252f602ce8accd065b078684489e66b7308d98 | 7,943 | py | Python | sdrf_pipelines/sdrf_merge/add_datanalysis_param.py | ypriverol/sdrf-openms | 65aebe6ed33b7574911a84f627e04a36890ae60c | [
"Apache-2.0"
] | 1 | 2020-04-05T16:49:51.000Z | 2020-04-05T16:49:51.000Z | sdrf_pipelines/sdrf_merge/add_datanalysis_param.py | ypriverol/sdrf-openms | 65aebe6ed33b7574911a84f627e04a36890ae60c | [
"Apache-2.0"
] | null | null | null | sdrf_pipelines/sdrf_merge/add_datanalysis_param.py | ypriverol/sdrf-openms | 65aebe6ed33b7574911a84f627e04a36890ae60c | [
"Apache-2.0"
] | 1 | 2020-04-02T10:42:06.000Z | 2020-04-02T10:42:06.000Z | import pandas as pd
import re
import yaml
import os.path
from sdrf_pipelines.zooma.zooma import OlsClient
from sdrf_pipelines.openms.unimod import UnimodDatabase
from sdrf_pipelines.sdrf.sdrf import SdrfDataFrame
# Accessing ontologies and CVs
unimod = UnimodDatabase()
olsclient = OlsClient()
# print(ols_out)
field_types = {"boolean": bool, "str": str, "integer": int, "float": (float, int)}
# Function for consistency checks
def verify_content(pname, pvalue, ptype, allowed=None):
# for each type: check consistency
# print(type(pvalue))
if ptype in field_types.keys():
if not isinstance(pvalue, field_types[ptype]):
exit("ERROR: " + pname + " needs to be " + ptype + "!!")
# if ptype == "boolean":
# if not isinstance(pvalue, bool):
# exit("ERROR: " + pname + " needs to be either \"true\" or \"false\"!!")
# elif ptype == "str":
# if not isinstance(pvalue, str):
# exit("ERROR: " + pname + " needs to be a string!!")
# elif ptype == "integer":
# if not isinstance(pvalue, int):
# exit("ERROR: " + pname + " needs to be a string!!")
# elif ptype == "float":
# if not isinstance(pvalue, (float, int)):
# exit("ERROR: " + pname + " needs to be a numeric value!!")
elif ptype == "class":
not_matching = [x for x in pvalue.split(",") if x not in p["value"]]
if not_matching != []:
exit("ERROR: " + pname + " needs to have one of these values: " + ' '.join(p["value"]) + "!!\n" +
' '.join(not_matching) + " did not match")
# Mass tolerances: do they include Da or ppm exclusively?
if pname == "fragment_mass_tolerance" or pname == "precursor_mass_tolerance":
unit = pvalue.split(" ")[1]
if unit != "Da" and unit != "ppm":
exit("ERROR: " + pname + " allows only units of \"Da\" and \"ppm\", separated by space from the \
value!!\nWe found " + unit)
# ENZYME AND MODIFICATIONS: LOOK UP ONTOLOGY VALUES
elif pname == "enzyme":
ols_out = olsclient.search(pvalue, ontology="MS", exact=True)
if ols_out is None:
exit("ERROR: enzyme " + pvalue + " not found in the MS ontology, see \
https://bioportal.bioontology.org/ontologies/MS/?p=classes&conceptid=http%3A%2F%2Fpurl.obolibrary.org%2Fobo%2FMS_1001045 \
for available terms")
pvalue = "NT=" + pvalue + ";AC=" + ols_out[0]["short_form"]
return pvalue
def new_or_default(params_in, pname, p):
if(pname in list(params_in.keys())):
print("Found in parameter file")
pvalue = params_in[pname]
else:
print("Setting to default: " + p["default"])
pvalue = p["default"]
    return pvalue
# Function to load modifications
def add_ptms(mods, pname, mod_columns):
for m in mods:
tmod = m.split(" of ")
if len(tmod) < 2:
exit("ERROR: Something wrong with the modification entry " + m + ". It should be PSI_MS_NAME of RESIDUE. \
Note that it should be single residues")
modname = tmod[0]
modpos = tmod[1]
found = [x for x in unimod.modifications if modname == x.get_name()]
if found == []:
exit("ERROR: " + m + " not found in Unimod. Check the \"PSI-MS Names\" in unimod.org. Also check whether you \
used space between the comma separated modifications")
modtype = pname.replace("_mods", "")
if re.match("[A-Z]", modpos):
mod_columns[len(mod_columns.columns)+1] = "NT=" + modname + ";AC=" + found[0].get_accession() + ";MT=" +\
modtype + ";TA=" + modpos
elif modpos in ["Protein N-term", "Protein C-term", "Any N-term", "Any C-term"]:
mod_columns[len(mod_columns.columns)+1] = "NT=" + modname + ";AC=" + found[0].get_accession() + ";MT=" +\
modtype + ";PP=" + modpos
else:
exit("ERROR: Wrong residue given: " + modpos + ". Should be either one upper case letter or any of \"Protein N-term\", \
\"Protein C-term\", \"Any N-term\", \"Any C-term\"")
return mod_columns
# modifications have the same column name, not working with pandas
# therefore separated
mod_columns = pd.DataFrame()
# For summary at the end
overwritten = set()
with open(r'param2sdrf.yml') as file:
param_mapping = yaml.safe_load(file)
mapping = param_mapping["parameters"]
# READ PARAMETERS FOR RUNNING WORKFLOW
with open(r'params.yml') as file:
tparams_in = yaml.safe_load(file)
params_in = tparams_in["params"]
rawfiles = tparams_in["rawfiles"]
fastafile = tparams_in["fastafile"]
# WE NEED AN SDRF FILE FOR THE EXPERIMENTAL DESIGN, CONTAINING FILE LOCATIONS
sdrf_content = pd.DataFrame()
has_sdrf = os.path.isfile("./sdrf.tsv")
if has_sdrf:
sdrf_content = pd.read_csv("sdrf.tsv", sep="\t")
mod_columns = sdrf_content.filter(like="comment[modification parameters]")
sdrf_content = sdrf_content.drop(columns=mod_columns.columns)
sdrf_content["comment[modification parameters]"] = None
# delete columns with fixed/variable modification info
if "fixed_mods" in params_in.keys():
ttt = [x for x in mod_columns.columns if any(mod_columns[x].str.contains("MT=fixed"))]
mod_columns.drop(ttt, axis=1, inplace=True)
overwritten.add("fixed_mods")
if "variable_mods" in params_in.keys():
ttt = [x for x in mod_columns.columns if any(mod_columns[x].str.contains("MT=variable"))]
mod_columns.drop(ttt, axis=1, inplace=True)
overwritten.add("variable_mods")
else:
# THROW ERROR FOR MISSING SDRF
exit("ERROR: No SDRF file given. Add an at least minimal version\nFor more details, \
see https://github.com/bigbio/proteomics-metadata-standard/tree/master/sdrf-proteomics")
# FIRST STANDARD PARAMETERS
# FOR GIVEN PARAMETERS
# CHECK WHETHER COLUMN IN SDRF TO PUT WARNING AND OVERWRITE
# IF NOT GIVEN, WRITE COLUMN
for p in mapping:
pname = p["name"]
ptype = p["type"]
print("---- Parameter: " + pname + ": ----")
pvalue = new_or_default(params_in, pname, p)
psdrf = "comment[" + p["sdrf"] + "]"
if psdrf in sdrf_content.keys():
        if len(set(sdrf_content[psdrf])) > 1:
exit("ERROR: multiple values for parameter " + pname + " in sdrf file\n We recommend separating \
the file into parts with the same data analysis parameters")
    pvalue = verify_content(pname, pvalue, ptype, p.get("value"))
# Modifications: look up in Unimod
if pname in ["fixed_mods", "variable_mods"] and pname in overwritten:
mods = pvalue.split(",")
print("WARNING: Overwriting " + pname + " values in sdrf file with " + pvalue)
mod_columns = add_ptms(mods, pname, mod_columns)
# Now finally writing the value
elif pname not in ["fixed_mods", "variable_mods"]:
print("WARNING: Overwriting " + pname + " values in sdrf file with " + pvalue)
overwritten.add(pname)
sdrf_content[psdrf] = pvalue
else:
sdrf_content[psdrf] = pvalue
# OVERWRITE RAW FILES IF GIVEN TO DIRECT TO THE CORRECT LOCATION?
# ADD FASTA FILE TO SDRF (COMMENT:FASTA DATABASE FILE)?
# WRITE EXPERIMENTAL DESIGN IF NO SDRF?
# adding modification columns
colnames = list(sdrf_content.columns) + ["comment[modification parameters]"] * len(mod_columns.columns)
sdrf_content = pd.concat([sdrf_content, mod_columns], axis=1)
sdrf_content.columns = colnames
sdrf_content.dropna(how='all', axis=1, inplace=True)
print("--- Writing sdrf file into sdrf_local.tsv ---")
# sdrf_content.to_csv("sdrf_local.tsv", sep="\t", header=colnames, index=False)
sdrf_content.to_csv("sdrf_local.tsv", sep="\t")
# Verify with sdrf-parser
check_sdrf = SdrfDataFrame()
check_sdrf.parse("sdrf_local.tsv")
check_sdrf.validate("mass_spectrometry")
print("########## SUMMARY #########")
print("--- The following parameters have been overwritten in the sdrf file: ---")
for p in overwritten:
print(p)
| 40.116162 | 132 | 0.64497 | 1,071 | 7,943 | 4.685341 | 0.275444 | 0.037864 | 0.01953 | 0.022718 | 0.213432 | 0.177362 | 0.157832 | 0.147469 | 0.141491 | 0.128737 | 0 | 0.004355 | 0.219439 | 7,943 | 197 | 133 | 40.319797 | 0.805 | 0.201183 | 0 | 0.096 | 0 | 0.016 | 0.200159 | 0.007454 | 0 | 0 | 0 | 0 | 0 | 1 | 0.024 | false | 0 | 0.056 | 0 | 0.096 | 0.072 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
f326b64362c4f6bd9042a39c792b7b627ed5ebe2 | 745 | py | Python | Python3/py3_image/see_array_of_image.py | combofish/chips-get | 6005f24d09edda3f1f54c6603205b2f854ec3b3f | [
"MIT"
] | 2 | 2021-11-01T01:56:12.000Z | 2021-11-01T01:56:51.000Z | Python3/py3_image/see_array_of_image.py | combofish/chips-get | 6005f24d09edda3f1f54c6603205b2f854ec3b3f | [
"MIT"
] | null | null | null | Python3/py3_image/see_array_of_image.py | combofish/chips-get | 6005f24d09edda3f1f54c6603205b2f854ec3b3f | [
"MIT"
] | 2 | 2021-06-26T03:32:50.000Z | 2021-07-27T05:29:46.000Z | from PIL import Image
from numpy import *
im = array(Image.open('for_learn.jpeg'))
print(im.shape,im.dtype)
im = array(Image.open('for_learn.jpeg').convert('L'),'f')
print(im.shape,im.dtype)
im2 = 255 - im                  # invert the grayscale image
im3 = (100.0/255) * im + 100    # compress values into the interval 100..200
im4 = 255.0 * (im/255.0)**2     # quadratic transform, darkening low values
## error: NumPy arrays have no save(); convert back to PIL images first
# im2.save('im1.jpg')
# im3.save('im3.jpg')
# im4.save('im4.jpg')
pil_im = Image.fromarray(im)
pil_im2 = Image.fromarray(uint8(im2))
pil_im3 = Image.fromarray(uint8(im3))
pil_im4 = Image.fromarray(uint8(im4))
# pil_im2.save('im2.jpg')
# pil_im3.save('im3.jpg')
# pil_im4.save('im4.jpg')
## resize
def imresize(im, sz):
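    """Resize a NumPy image array via PIL and return the result as an array."""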
pil_im = Image.fromarray(uint8(im))
return array(pil_im.resize(sz))
im5 = imresize(im4,(200,200))
# Image.fromarray(im5).save('im5.jpg')
| 20.694444 | 57 | 0.669799 | 129 | 745 | 3.782946 | 0.286822 | 0.172131 | 0.155738 | 0.065574 | 0.192623 | 0.114754 | 0.114754 | 0 | 0 | 0 | 0 | 0.087156 | 0.122148 | 745 | 35 | 58 | 21.285714 | 0.659021 | 0.242953 | 0 | 0.117647 | 0 | 0 | 0.054446 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.058824 | false | 0 | 0.117647 | 0 | 0.235294 | 0.117647 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b82363adea17e335f5f79ca06a07b8b8639ab1ae | 949 | py | Python | agronet_be/Agronetproject/urls.py | lauraC4MP0/Prueba-github | 291fc266fc0a8efc80ab36dd6eb4bff3e98e7c1f | [
"MIT"
] | 1 | 2021-10-06T00:39:08.000Z | 2021-10-06T00:39:08.000Z | agronet_be/Agronetproject/urls.py | lauraC4MP0/Prueba-github | 291fc266fc0a8efc80ab36dd6eb4bff3e98e7c1f | [
"MIT"
] | null | null | null | agronet_be/Agronetproject/urls.py | lauraC4MP0/Prueba-github | 291fc266fc0a8efc80ab36dd6eb4bff3e98e7c1f | [
"MIT"
] | 1 | 2021-10-03T13:39:31.000Z | 2021-10-03T13:39:31.000Z | from django.contrib import admin
from django.urls import path
from AgronetApp import views
from AgronetApp.views.orderDetailView import OrderDetailDetail, OrderDetailView
from rest_framework_simplejwt.views import (TokenObtainPairView, TokenRefreshView)
urlpatterns = [
path('login/', TokenObtainPairView.as_view()),
path('refresh/', TokenRefreshView.as_view()),
path('user/', views.UserCreateView.as_view()),
path('user/<int:pk>/', views.UserDetailView.as_view()),
path('orderDetail/', OrderDetailView.as_view()),
    path('orderDetail/<int:pk>', OrderDetailDetail.as_view()),  # '<int:pk>' replaces the literal '{id}', which Django would match only as plain text
path('order/', views.OrdersView.as_view()),
path('order/<int:pk>',views.OrdersDetail.as_view()),
path('product/',views.ProductCreateView.as_view()),
path('product/<int:pk>',views.ProductDetailView.as_view()),
path('city/',views.CityViews.as_view()),
path('departament/',views.DepartamentView.as_view()),
]
| 41.26087 | 83 | 0.71549 | 105 | 949 | 6.333333 | 0.361905 | 0.108271 | 0.165414 | 0.042105 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.124341 | 949 | 22 | 84 | 43.136364 | 0.800241 | 0 | 0 | 0 | 0 | 0 | 0.131607 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.263158 | 0 | 0.263158 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b823b9f092d8ef4d3006a8ab8c7826df17ec4b36 | 1,364 | py | Python | extract_patches.py | akx/demxf | c45d06ce88dbd173a13ec6da35869d2117e77fee | [
"MIT"
] | 7 | 2017-11-16T16:01:14.000Z | 2022-03-12T00:43:47.000Z | extract_patches.py | akx/demxf | c45d06ce88dbd173a13ec6da35869d2117e77fee | [
"MIT"
] | null | null | null | extract_patches.py | akx/demxf | c45d06ce88dbd173a13ec6da35869d2117e77fee | [
"MIT"
] | null | null | null | """Extract Max patch (.maxpat) files from an MXF archive's catalog."""
import argparse
import json
import os
from demxf.catalog import read_mxf_catalog
def main():
ap = argparse.ArgumentParser()
ap.add_argument('-i', '--input', help='input MXF file', required=True)
ap.add_argument('-c', '--combined', help='output combined JSON file')
ap.add_argument('-d', '--directory', help='output directory for separate files')
args = ap.parse_args()
combined = {}
with open(args.input, 'rb') as infp:
for ce in read_mxf_catalog(infp):
if ce.filename.endswith('.maxpat'):
print(ce.filename)
patch = json.loads(ce.extract_from(infp).rstrip(b'\x00'))
if args.directory:
out_name = os.path.join(args.directory, ce.filename)
os.makedirs(os.path.dirname(out_name), exist_ok=True)
with open(out_name, 'w') as outfp:
json.dump(patch, outfp, ensure_ascii=False, indent=2, sort_keys=True)
print('-> {}'.format(outfp.name))
if args.combined:
combined[ce.filename] = patch
if args.combined:
print('Writing combined file...')
with open(args.combined, 'w') as outfp:
json.dump(combined, outfp, ensure_ascii=False, indent=2, sort_keys=True)
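# Example invocation (a sketch; file names are hypothetical):
#   python extract_patches.py -i project.mxf -d patches/ -c combined.json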
if __name__ == '__main__':
main()
| 34.1 | 93 | 0.584311 | 169 | 1,364 | 4.568047 | 0.408284 | 0.062176 | 0.050518 | 0.031088 | 0.145078 | 0.103627 | 0.103627 | 0.103627 | 0.103627 | 0 | 0 | 0.004086 | 0.282258 | 1,364 | 39 | 94 | 34.974359 | 0.784474 | 0 | 0 | 0.066667 | 0 | 0 | 0.117302 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.033333 | false | 0 | 0.133333 | 0 | 0.166667 | 0.1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b824a1ae21e2d387e8e625987aa3836b31ce6df6 | 478 | py | Python | binSearch/binsearch.py | romanofski/codesnippets | dbee0bee2ab8a0152137b029f28c2a7981654342 | [
"Unlicense"
] | null | null | null | binSearch/binsearch.py | romanofski/codesnippets | dbee0bee2ab8a0152137b029f28c2a7981654342 | [
"Unlicense"
] | null | null | null | binSearch/binsearch.py | romanofski/codesnippets | dbee0bee2ab8a0152137b029f28c2a7981654342 | [
"Unlicense"
] | null | null | null | SEARCHLIST = [1, 4, 5, 12, 23, 40, 42, 55]
def binarysearch(n, searchlist):
""" binary search.
>>> binarysearch(55, SEARCHLIST)
7
>>> binarysearch(4, SEARCHLIST)
1
"""
    low = 0
    high = len(searchlist) - 1
    while low <= high:
        mid = (low + high) // 2
        x = searchlist[mid]
        if x > n:
            high = mid - 1
        elif x < n:
            low = mid + 1
        else:
            return mid
    return None  # target not in list; the old min/max loop never terminated here
| 19.916667 | 42 | 0.468619 | 62 | 478 | 3.612903 | 0.467742 | 0.026786 | 0.071429 | 0.080357 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.073684 | 0.403766 | 478 | 23 | 43 | 20.782609 | 0.712281 | 0.175732 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.071429 | false | 0 | 0 | 0 | 0.142857 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b824c1218bc4592bcf424624cfb593378d4d810b | 2,004 | py | Python | pylabs/test/server/right/system_tests_learner.py | Incubaid/arakoon | 43a8d0b26e4876ef91d9657149f105c7e57e0cb0 | [
"Apache-2.0"
] | 41 | 2015-02-11T03:23:36.000Z | 2020-12-27T12:13:52.000Z | pylabs/test/server/right/system_tests_learner.py | Incubaid/arakoon | 43a8d0b26e4876ef91d9657149f105c7e57e0cb0 | [
"Apache-2.0"
] | 36 | 2015-01-04T16:58:51.000Z | 2020-11-12T12:05:37.000Z | pylabs/test/server/right/system_tests_learner.py | Incubaid/arakoon | 43a8d0b26e4876ef91d9657149f105c7e57e0cb0 | [
"Apache-2.0"
] | 7 | 2015-07-10T08:04:01.000Z | 2021-09-28T08:09:23.000Z | """
Copyright (2010-2014) INCUBAID BVBA
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from .. import system_tests_common as Common
from nose.tools import assert_true
import time
import logging
from Compat import X
@Common.with_custom_setup(Common.setup_2_nodes, Common.basic_teardown)
def test_learner():
op_count = 54321
Common.iterate_n_times(op_count, Common.simple_set)
cluster = Common._getCluster(Common.cluster_id)
logging.info("adding learner")
name = Common.node_names[2]
(db_dir, log_dir, tlf_dir, head_dir) = Common.build_node_dir_names(name)
cluster.addNode(name,
Common.node_ips[2],
clientPort = Common.node_client_base_port + 2,
messagingPort = Common.node_msg_base_port + 2,
logDir = log_dir,
tlfDir = tlf_dir,
headDir = head_dir,
logLevel = 'debug',
home = db_dir,
isLearner = True,
targets = [Common.node_names[0]])
cfg = cluster._getConfigFile()
logging.info("cfg=%s", X.cfg2str(cfg))
cluster.disableFsync([name])
cluster.addLocalNode(name)
cluster.createDirs(name)
cluster.startOne(name)
time.sleep(1.0)
Common.assert_running_nodes(3)
    time.sleep(op_count / 1000 + 1)  # 1000/s in catchup should be no problem
    # use a client ??
Common.stop_all()
i2 = int(Common.get_last_i_tlog(name))
assert_true(i2 >= op_count - 1)
| 33.966102 | 77 | 0.67016 | 274 | 2,004 | 4.733577 | 0.543796 | 0.046261 | 0.020046 | 0.024672 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.025743 | 0.244012 | 2,004 | 58 | 78 | 34.551724 | 0.830363 | 0.307385 | 0 | 0 | 0 | 0 | 0.018129 | 0 | 0 | 0 | 0 | 0 | 0.083333 | 1 | 0.027778 | false | 0 | 0.138889 | 0 | 0.166667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b82505066293458fd03118895e3c1c8c1e4235a7 | 1,184 | py | Python | utils/Preprocesar.py | anderct105/va-questionnaire-3d | a95650703e650c4c0640ab22d4db325799f15e70 | [
"Apache-2.0"
] | null | null | null | utils/Preprocesar.py | anderct105/va-questionnaire-3d | a95650703e650c4c0640ab22d4db325799f15e70 | [
"Apache-2.0"
] | null | null | null | utils/Preprocesar.py | anderct105/va-questionnaire-3d | a95650703e650c4c0640ab22d4db325799f15e70 | [
"Apache-2.0"
] | null | null | null | import string
import nltk
class Preprocesar:
def __init__(self, corpus):
self.corpus = corpus
def __call__(self, pad='<PAD>'):
"""
Realiza el preproceso del texto para obtener vectores a partir de tokens
a partir del texto, eliminando puntuación y palabras comunes del inglés.
:param corpus: vector de textos
:param pad: valor a utilizar para el padding, el cual se añade al vocabulario
:return: un vector con palabras para cada texto y el vocabulario generado con el ínndice
"""
nltk.download('punkt')
corpus_prep = []
vocab = []
for response in self.corpus:
response_tokenized = nltk.word_tokenize(response)
response_prep = []
            # Remove punctuation and lowercase each token
for word in response_tokenized:
word = word.lower()
if word not in string.punctuation:
response_prep.append(word)
vocab.append(word)
corpus_prep.append(response_prep)
vocab.append(pad)
vocab = {x: index for index, x in enumerate(set(vocab))}
return corpus_prep, vocab
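# Example usage (a sketch):
#   prep = Preprocesar(["Hello, world!", "Hello again."])
#   corpus_prep, vocab = prep()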
| 33.828571 | 96 | 0.60473 | 139 | 1,184 | 5.028777 | 0.482014 | 0.042918 | 0.042918 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.326858 | 1,184 | 34 | 97 | 34.823529 | 0.877039 | 0.309122 | 0 | 0 | 0 | 0 | 0.013123 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.095238 | false | 0 | 0.095238 | 0 | 0.285714 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b825117a011da8d40feabbac092b817ee38234d0 | 2,615 | py | Python | dephell/commands/package_purge.py | OliverHofkens/dephell | 6303f416018910668f1635b70cd828a2fd2b2d9e | [
"MIT"
] | 1,880 | 2019-03-21T10:08:25.000Z | 2022-03-31T12:41:55.000Z | dephell/commands/package_purge.py | rachmadaniHaryono/dephell | 0ef500c8f2d5f05244bac191b1b1383f68464cd2 | [
"MIT"
] | 356 | 2019-03-21T19:08:56.000Z | 2021-01-08T17:45:43.000Z | dephell/commands/package_purge.py | rachmadaniHaryono/dephell | 0ef500c8f2d5f05244bac191b1b1383f68464cd2 | [
"MIT"
] | 157 | 2019-04-23T01:13:37.000Z | 2022-03-24T22:41:18.000Z | # built-in
from argparse import ArgumentParser
# external
from packaging.utils import canonicalize_name
# app
from ..actions import get_python_env
from ..config import builders
from ..controllers import Graph, Mutator, Resolver, analyze_conflict
from ..converters import InstalledConverter
from ..models import Requirement
from ..package_manager import PackageManager
from .base import BaseCommand
class PackagePurgeCommand(BaseCommand):
"""Remove given packages and their dependencies.
"""
@staticmethod
def build_parser(parser) -> ArgumentParser:
builders.build_config(parser)
builders.build_venv(parser)
builders.build_output(parser)
builders.build_other(parser)
parser.add_argument('name', nargs='+', help='names of packages to remove')
return parser
def __call__(self) -> bool:
python = get_python_env(config=self.config)
manager = PackageManager(executable=python.path)
converter = InstalledConverter()
# get installed packages
root = converter.load(paths=python.lib_paths)
names = set(self.args.name) & {canonicalize_name(dep.name) for dep in root.dependencies}
if not names:
            self.logger.error('packages are not installed', extra=dict(python=python.path))
return False
# resolve graph
self.logger.info('build dependencies graph...')
resolver = Resolver(
graph=Graph(root),
mutator=Mutator(),
)
resolved = resolver.resolve(silent=self.config['silent'])
if not resolved:
conflict = analyze_conflict(resolver=resolver)
self.logger.warning('conflict was found')
print(conflict)
return False
# get packages to remove
reqs = []
for name in names:
parent = resolver.graph.get(name=name)
reqs.append(Requirement(dep=parent, lock=True))
for dep in resolver.graph.get_children(dep=parent).values():
if not dep:
raise LookupError('cannot find dep in graph')
if dep.constraint.sources - {root.name} - names:
continue
reqs.append(Requirement(dep=dep, lock=True))
# remove installed packages
self.logger.info('removing packages...', extra=dict(
python=python.path,
packages=[req.name for req in reqs],
))
code = manager.remove(reqs=reqs)
if code != 0:
return False
self.logger.info('removed')
return True
| 33.961039 | 96 | 0.63174 | 285 | 2,615 | 5.722807 | 0.37193 | 0.030656 | 0.034948 | 0.025751 | 0.030656 | 0 | 0 | 0 | 0 | 0 | 0 | 0.000528 | 0.275717 | 2,615 | 76 | 97 | 34.407895 | 0.860612 | 0.060803 | 0 | 0.052632 | 0 | 0 | 0.065057 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.035088 | false | 0 | 0.157895 | 0 | 0.298246 | 0.017544 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b826bd2587c73dbb93bb6116280f6307823985b8 | 734 | py | Python | pdf_bsw_gui/main.py | NSLS-II-PDF/pdf-bsw-gui | 6db847986d9bad6c59bdf3bca3b559959019ff46 | [
"BSD-3-Clause"
] | 3 | 2021-05-19T16:43:04.000Z | 2021-08-10T17:59:24.000Z | pdf_bsw_gui/main.py | NSLS-II-PDF/pdf-bsw-gui | 6db847986d9bad6c59bdf3bca3b559959019ff46 | [
"BSD-3-Clause"
] | 14 | 2021-04-01T18:40:52.000Z | 2021-07-19T19:31:54.000Z | pdf_bsw_gui/main.py | NSLS-II-PDF/pdf-bsw-gui | 6db847986d9bad6c59bdf3bca3b559959019ff46 | [
"BSD-3-Clause"
] | 5 | 2021-04-01T22:05:35.000Z | 2021-06-03T09:43:09.000Z | """Qt entry point for the bluesky-widgets demo viewer."""
import argparse
from bluesky_widgets.qt import gui_qt
from .viewer import Viewer
from .settings import SETTINGS
def main(argv=None):
print(__doc__)
parser = argparse.ArgumentParser(description="bluesky-widgets demo")
parser.add_argument("--zmq", help="0MQ address")
parser.add_argument("--catalog", help="Databroker catalog")
args = parser.parse_args(argv)
with gui_qt("Demo App"):
if args.catalog:
import databroker
SETTINGS.catalog = databroker.catalog[args.catalog]
# Optional: Receive live streaming data.
if args.zmq:
SETTINGS.subscribe_to.append(args.zmq)
        viewer = Viewer()  # noqa: F841
if __name__ == "__main__":
main()
| 22.9375 | 72 | 0.66485 | 87 | 734 | 5.390805 | 0.494253 | 0.059701 | 0.072495 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007067 | 0.228883 | 734 | 31 | 73 | 23.677419 | 0.821555 | 0.065395 | 0 | 0 | 0 | 0 | 0.115666 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.052632 | false | 0 | 0.263158 | 0 | 0.315789 | 0.052632 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b828ae5b21a1a8666ed7fb1a78aa595e63d35d22 | 3,625 | py | Python | unitypack/modding.py | CakeLancelot/UnityPackFF | ee3368b16aec3c6b95c70778105dfcbf7379647f | [
"MIT"
] | 6 | 2020-11-03T13:23:40.000Z | 2021-10-06T15:25:29.000Z | unitypack/modding.py | CakeLancelot/UnityPackFF | ee3368b16aec3c6b95c70778105dfcbf7379647f | [
"MIT"
] | 1 | 2021-02-15T20:16:40.000Z | 2021-02-15T20:16:40.000Z | unitypack/modding.py | CakeLancelot/UnityPackFF | ee3368b16aec3c6b95c70778105dfcbf7379647f | [
"MIT"
] | 10 | 2020-11-03T15:08:10.000Z | 2022-02-13T07:32:52.000Z | from io import BytesIO
from wand.image import Image
from .utils import BinaryWriter
from .object import FFOrderedDict
from .engine.object import Object
from .engine.mesh import SubMesh
def import_audio(obj, audiopath, length, name=None, freq=44100):
if not isinstance(obj, Object):
raise ValueError('Invalid target object')
with open(audiopath, 'rb') as f:
obj.audio_data = f.read()
obj.size = len(obj.audio_data)
obj.length = length # in seconds; float
obj.frequency = freq
if name is not None:
obj.name = name
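# Example (a sketch; the target object and file name are hypothetical):
#   import_audio(clip_obj, 'voice.wav', length=2.5, name='newVoice')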
def import_texture(obj, imgpath, name=None, fmt='dxt1'):
if not isinstance(obj, Object):
raise ValueError('Invalid target object')
img = Image(filename=imgpath)
if name is not None:
obj.name = name
obj.height = img.height
obj.width = img.width
# DXT1 or DXT5
obj.format = 12 if fmt == 'dxt5' else 10
obj.image_count = 1
img.flip()
# HACK: ImageMagick apparently thinks it knows better than you and will
# give you a DXT1 if there's no transparency *even if you ask for DXT5*
buf = img.make_blob(fmt)
if chr(buf[87]) == '1':
obj.format = 10 # DXT1
# load image as DDS, stripping 128-byte header
obj.data = buf[128:]
obj.complete_image_size = len(obj.data)
# these are all the same across all Texture2Ds in CharTexture and Icons
# but only m_TextureDimension = 2 seems to be mandatory
obj._obj['m_Limit'] = -1
obj._obj['m_TextureDimension'] = 2
obj._obj['m_TextureSettings']['m_FilterMode'] = 1
obj._obj['m_TextureSettings']['m_Aniso'] = 1
obj._obj['m_TextureSettings']['m_MipBias'] = 0.0
obj._obj['m_TextureSettings']['m_WrapMode'] = 0
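# Example (a sketch; the target object and file name are hypothetical):
#   import_texture(tex_obj, 'icon.png', name='newIcon', fmt='dxt5')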
def import_mesh(obj, meshpath, name=None):
if not isinstance(obj, Object):
raise ValueError('Invalid target object')
# read obj file
with open(meshpath) as f:
lines = [line for line in f.read().split('\n') if line != '']
lines = [line.split(' ') for line in lines]
_vertices = []
_normals = []
_uvs = []
vertices = []
normals = []
uvs = []
indices = []
idxdict = dict()
idxbuf = BytesIO()
buf = BinaryWriter(idxbuf)
# parse obj file
nextidx = 0
for line in lines:
if line[0] == 'v':
vert = FFOrderedDict()
vert['x'] = -float(line[1])
vert['y'] = float(line[2])
vert['z'] = float(line[3])
_vertices.append(vert)
elif line[0] == 'vn':
norm = FFOrderedDict()
norm['x'] = -float(line[1])
norm['y'] = float(line[2])
norm['z'] = float(line[3])
_normals.append(norm)
elif line[0] == 'vt':
uv = FFOrderedDict()
uv['x'] = float(line[1])
uv['y'] = float(line[2])
_uvs.append(uv)
elif line[0] == 'f':
if len(line) != 4:
raise ValueError('Mesh is not triangulated')
_indices = []
for col in line[1:]:
tmp = col.split('/')
v = int(tmp[0]) - 1
t = int(tmp[1]) - 1
n = int(tmp[2]) - 1
if (v, t, n) in idxdict.keys():
idx = idxdict[(v, t, n)]
else:
idx = nextidx
nextidx += 1
idxdict[(v, t, n)] = idx
vertices.append(_vertices[v])
normals.append(_normals[n])
uvs.append(_uvs[t])
_indices.append(idx)
# reorder vertices to flip faces
indices.extend(_indices[::-1])
for i in indices:
buf.write_uint16(i)
# assign to mesh object
if name is not None:
obj.name = name
obj.mesh_compression = 0
obj.use_16bit_indices = 1
obj.vertices = vertices
obj.normals = normals
obj.uvs = uvs
obj.index_buffer = idxbuf.getvalue()
if len(obj.submeshes) == 0:
obj.submeshes.append(SubMesh(FFOrderedDict()))
obj.submeshes[0].first_byte = 0
obj.submeshes[0].index_count = len(indices)
obj.submeshes[0].is_tri_strip = 0
obj.submeshes[0].triangle_count = len(indices) // 3
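# A minimal usage sketch. Assumptions: `asset` is a unitypack asset opened
# elsewhere, `path_id`/`mesh_path_id` resolve to Texture2D and Mesh objects,
# and the file names are placeholders -- illustration only, not part of the
# original module.
#
#   tex = asset.objects[path_id].read()
#   import_texture(tex, 'replacement.png', name='NewSkin', fmt='dxt5')
#   mesh = asset.objects[mesh_path_id].read()
#   import_mesh(mesh, 'replacement.obj', name='NewMesh')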
| 24.166667 | 72 | 0.656552 | 554 | 3,625 | 4.209386 | 0.306859 | 0.030875 | 0.01801 | 0.037736 | 0.150943 | 0.131218 | 0.110635 | 0.110635 | 0.099485 | 0.074614 | 0 | 0.025377 | 0.195586 | 3,625 | 149 | 73 | 24.328859 | 0.774348 | 0.117517 | 0 | 0.110092 | 0 | 0 | 0.077527 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.027523 | false | 0 | 0.082569 | 0 | 0.110092 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b82926a9b7f2d0c3c3f01fcfda9d84e36ba690a2 | 1,844 | py | Python | code/day6.py | Artemis21/AOC19 | d4c671ab86c3a3291a3ab2e6421288cddeb6a65a | [
"MIT"
] | null | null | null | code/day6.py | Artemis21/AOC19 | d4c671ab86c3a3291a3ab2e6421288cddeb6a65a | [
"MIT"
] | null | null | null | code/day6.py | Artemis21/AOC19 | d4c671ab86c3a3291a3ab2e6421288cddeb6a65a | [
"MIT"
] | null | null | null | def orbits(inp):
tree = []
index = {'COM': tree}
for a, b in inp:
if b not in index:
index[b] = []
if a not in index:
index[a] = []
index[a].append(index[b])
return tree, index
def inp():
with open('code/6.txt') as f:
raw = f.read()
return [i.split(')') for i in raw.split('\n')]
def count(tree):
done = {}
def recurse(tree, depth):
if not tree:
return depth
name = str((depth, tree))
if name in done:
return done[name]
val = sum(recurse(i, depth+1) for i in tree) + depth
done[name] = val
return val
return recurse(tree, 0)
def distance(tree, index, a='YOU', b='SAN'):
def find(obj, path=[tree], tree=tree):
if tree is obj:
return path
path = list(path)
path.append(tree)
for i in tree:
found = find(obj, path, i)
if found:
return found
return None
apath = find(index[a])
bpath = find(index[b])
common = []
for ap in apath:
for bp in bpath:
if ap is bp:
common.append(ap)
break
nca = common[-1]
def depth(find, tree, cur=0):
if tree is find:
return cur
cur += 1
for i in tree:
found = depth(find, i, cur)
if found:
return found
return None
return depth(index[a], nca) + depth(index[b], nca) - 2
def part_a():
return count(orbits(inp())[0])
def part_b():
return distance(*orbits(inp()))
if __name__ == '__main__':
print('6A:', part_a())
print('6B:', part_b())
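# Sanity check against the worked example from the puzzle statement
# (kept as a comment so it does not run on import):
#   sample = [p.split(')') for p in
#             'COM)B B)C C)D D)E E)F B)G G)H D)I E)J J)K K)L'.split()]
#   tree, index = orbits(sample)
#   assert count(tree) == 42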
| 21.694118 | 60 | 0.485358 | 250 | 1,844 | 3.524 | 0.252 | 0.034052 | 0.034052 | 0.045403 | 0.131669 | 0.063564 | 0 | 0 | 0 | 0 | 0 | 0.008826 | 0.385575 | 1,844 | 84 | 61 | 21.952381 | 0.768756 | 0 | 0 | 0.132353 | 0 | 0 | 0.019523 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.147059 | false | 0 | 0 | 0.029412 | 0.367647 | 0.029412 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b82eadf2c3c944b8ea6ce90f57517ac899982a19 | 2,687 | py | Python | backend/app/api.py | podaac/docx-to-html | b094faaa740ede68779d78739f0957668db2bb8e | [
"Apache-2.0"
] | null | null | null | backend/app/api.py | podaac/docx-to-html | b094faaa740ede68779d78739f0957668db2bb8e | [
"Apache-2.0"
] | null | null | null | backend/app/api.py | podaac/docx-to-html | b094faaa740ede68779d78739f0957668db2bb8e | [
"Apache-2.0"
] | null | null | null | from app import app
# dependencies
import os
from flask import Flask, request, jsonify
from werkzeug.utils import secure_filename
from flask_cors import CORS
# converter files
from converter import handle_input, parse_html
app.secret_key = os.urandom(24) # for cors to work
UPLOAD_FOLDER = './converter'
ALLOWED_EXTENSIONS = set(['docx'])
app.config['MAX_CONTENT_LENGTH'] = 32 * \
1024 * 1024 # limit file uploads to 32 mb
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
# checks file type
def allowed_file(filename):
return '.' in filename and \
filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
# handles main post request from react frontend
@app.route('/', methods=['GET', 'POST'])
def upload_file():
try:
# handles doing only bootstrap
only_bootstrap = request.values['onlybootstrap']
if only_bootstrap == 'true':
only_bootstrap_html_output = request.values['htmloutput']
file = False
else:
file = request.files['file']
only_bootstrap_html_output = False
# handles whether to make new table of contents
make_toc = request.values['toc']
make_toc = True if make_toc == 'true' else False
# convert ftp links to drive links
ftp = request.values['ftp']
ftp = True if ftp == 'true' else False
# handles whether or not to do NLP
do_nlp = request.values['donlp']
do_nlp = True if do_nlp == 'true' else False
css_type = request.values['csstype']
# send file to be converted
if file and allowed_file(file.filename):
filename = secure_filename(file.filename)
try:
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
except OSError as err:
print(err)
# convert file and run all parsing operations
# gets back and string of html
html = handle_input.check_file_type_and_process(
filename, make_toc, ftp, do_nlp, css_type)
return jsonify(html)
# sends html back through to add bootstrap after its been in the frontend WYSIWYG editor
elif only_bootstrap_html_output:
only_bootstrap_html_output = parse_html.only_bootstrap(
only_bootstrap_html_output)
return jsonify(only_bootstrap_html_output)
# just in case a file gets through the frontend file type checks
else:
print('wrong file type')
return jsonify('error')
    except Exception:
        return jsonify('error')
# Enable CORS before the app starts serving so the headers apply when this
# module is run directly.
CORS(app, expose_headers='Authorization')
if __name__ == '__main__':
    app.run()
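# A client-side sketch for exercising this endpoint (assumptions: the server
# listens on Flask's default http://localhost:5000, `example.docx` exists, and
# `requests` is installed -- none of this comes from the original module):
#
#   import requests
#   files = {'file': open('example.docx', 'rb')}
#   data = {'onlybootstrap': 'false', 'toc': 'true', 'ftp': 'false',
#           'donlp': 'false', 'csstype': 'default'}
#   print(requests.post('http://localhost:5000/', files=files, data=data).json())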
| 31.611765 | 96 | 0.641608 | 340 | 2,687 | 4.885294 | 0.376471 | 0.078266 | 0.061409 | 0.083082 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008209 | 0.274656 | 2,687 | 84 | 97 | 31.988095 | 0.844023 | 0.195757 | 0 | 0.113208 | 0 | 0 | 0.082051 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.037736 | false | 0 | 0.113208 | 0.018868 | 0.245283 | 0.037736 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b83145aef102f51d9f30c17cf33296a686849da6 | 795 | py | Python | RNN_model_test/rnn_test.py | AJamal27891/LSTM-BPE | 5007c0b3cab2e19e9ae03282e134e0eef47398f7 | [
"Apache-2.0"
] | 1 | 2021-05-10T05:52:06.000Z | 2021-05-10T05:52:06.000Z | RNN_model_test/rnn_test.py | AJamal27891/LSTM-BPE | 5007c0b3cab2e19e9ae03282e134e0eef47398f7 | [
"Apache-2.0"
] | null | null | null | RNN_model_test/rnn_test.py | AJamal27891/LSTM-BPE | 5007c0b3cab2e19e9ae03282e134e0eef47398f7 | [
"Apache-2.0"
] | null | null | null | # create lstm
import torch
class RNN(torch.nn.Module):
def __init__(self, input_size, hidden_size, num_layers, num_classes, sequence_length, device):
super(RNN, self).__init__()
self.hidden_size = hidden_size
self.num_layers = num_layers
self.rnn = torch.nn.LSTM(input_size, hidden_size, num_layers, batch_first=True)
self.fc = torch.nn.Linear(hidden_size*sequence_length, num_classes)
self.device = device
def forward(self, x):
h0 = torch.zeros(self.num_layers, x.size(0), self.hidden_size).to(self.device)
c0 = torch.zeros(self.num_layers, x.size(0), self.hidden_size).to(self.device)
out, _ = self.rnn(x, (h0, c0))
        out = out.reshape(out.shape[0], -1)  # flatten (batch, seq, hidden) to (batch, seq*hidden)
out = self.fc(out)
return out
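# Smoke test: one forward pass on random data to check shapes. The
# hyperparameter values below are illustrative, not from the original file.
if __name__ == '__main__':
    model = RNN(input_size=28, hidden_size=64, num_layers=2,
                num_classes=10, sequence_length=28, device='cpu')
    x = torch.randn(4, 28, 28)  # (batch, sequence_length, input_size)
    print(model(x).shape)  # expected: torch.Size([4, 10])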
| 34.565217 | 98 | 0.655346 | 118 | 795 | 4.169492 | 0.313559 | 0.142276 | 0.085366 | 0.077236 | 0.337398 | 0.337398 | 0.223577 | 0.223577 | 0.223577 | 0.223577 | 0 | 0.012862 | 0.21761 | 795 | 22 | 99 | 36.136364 | 0.778135 | 0.013836 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0.0625 | 0 | 0.3125 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b8316f20a9148ce1d88583d6ee76bd46f805fae7 | 4,310 | py | Python | pyqt5/QListView/CustomListModel.py | gookeryoung/pylessons | c9d7b3899b565b16753c1be5723de617a468f3c7 | [
"MIT"
] | null | null | null | pyqt5/QListView/CustomListModel.py | gookeryoung/pylessons | c9d7b3899b565b16753c1be5723de617a468f3c7 | [
"MIT"
] | null | null | null | pyqt5/QListView/CustomListModel.py | gookeryoung/pylessons | c9d7b3899b565b16753c1be5723de617a468f3c7 | [
"MIT"
] | null | null | null | import sys
import typing
from PyQt5.QtCore import QAbstractListModel, QPoint
from PyQt5.QtCore import QModelIndex
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QColor, QPainter, QBrush, QPolygon, QPen
from PyQt5.QtGui import QIcon
from PyQt5.QtGui import QPixmap
from PyQt5.QtWidgets import QApplication, QTreeView, QComboBox, QTableView
from PyQt5.QtWidgets import QListView
from PyQt5.QtWidgets import QSplitter
class CustomPalette(QAbstractListModel):
"""Custom list model inherit from QAbstractListModel class."""
def __init__(self, colors=(), parent=None):
super(CustomPalette, self).__init__(parent)
self._colors = colors
def rowCount(self, parent: QModelIndex = ...) -> int:
"""Must be implemented, returns the count of data in row"""
return len(self._colors)
def headerData(self, section: int, orientation: Qt.Orientation, role: int = ...) -> typing.Any:
"""Controls the header of each row and column"""
if role == Qt.DisplayRole:
if orientation == Qt.Horizontal:
return "Palette"
else:
return f"Color [{section}#]"
if role == Qt.DecorationRole:
pixmap = QPixmap(30, 30)
painter = QPainter(pixmap)
painter.setPen(QPen(Qt.NoPen))
painter.setBrush(QBrush(Qt.white))
painter.drawRect(0, 0, 30, 30)
painter.setBrush(QBrush(Qt.cyan))
points = QPolygon([QPoint(0, 0), QPoint(30, 0), QPoint(0, 30)])
painter.drawPolygon(points)
painter.end()
icon = QIcon(pixmap)
return icon
if role == Qt.ToolTipRole:
return f'color in section:{section}, orientation: {orientation}'
def data(self, index: QModelIndex, role: int = ...) -> typing.Any:
"""Controls the data in each cells.
EditRole: when double click and edit cells
ToolTipRole: when hover on cells
DecorationRole: decorate before cells
DisplayRole: content in cells
"""
if role == Qt.EditRole:
return self._colors[index.row()].name()
if role == Qt.ToolTipRole:
return 'Hex code: ' + self._colors[index.row()].name()
if role == Qt.DecorationRole:
row = index.row()
value = self._colors[row]
pixmap = QPixmap(60, 30)
pixmap.fill(value)
icon = QIcon(pixmap)
return icon
if role == Qt.DisplayRole:
row = index.row()
value = self._colors[row]
return value
def flags(self, index: QModelIndex) -> Qt.ItemFlags:
return Qt.ItemIsEnabled | Qt.ItemIsEditable | Qt.ItemIsSelectable
def setData(self, index: QModelIndex, value: typing.Any, role: int = ...) -> bool:
if role == Qt.EditRole:
row = index.row()
color = QColor(value)
if color.isValid():
self._colors[row] = color
self.dataChanged.emit(index, index)
return True
return False
def insertRows(self, row: int, count: int, parent: QModelIndex = ...) -> bool:
self.beginInsertRows(QModelIndex(), row, row + count - 1)
for i in range(count):
self._colors.insert(row, QColor("#000000"))
self.endInsertRows()
return True
def removeRows(self, row: int, count: int, parent: QModelIndex = ...) -> bool:
self.beginRemoveRows(QModelIndex(), row, row + count - 1)
        for i in range(count):
            # Delete by index rather than by value so duplicate colors
            # elsewhere in the list are not removed by mistake.
            del self._colors[row]
self.endRemoveRows()
return True
if __name__ == '__main__':
app = QApplication(sys.argv)
app.setStyle("cleanlooks")
red = QColor(255, 0, 0)
green = QColor(0, 255, 0)
blue = QColor(0, 0, 255)
model = CustomPalette([red, green, blue])
splitter = QSplitter()
splitter.resize(1200, 300)
list_view = QListView(splitter)
tree_view = QTreeView(splitter)
table_view = QTableView(splitter)
combo_box = QComboBox(splitter)
list_view.setModel(model)
tree_view.setModel(model)
table_view.setModel(model)
combo_box.setModel(model)
splitter.show()
app.exec_()
| 31.459854 | 99 | 0.60348 | 483 | 4,310 | 5.312629 | 0.308489 | 0.042868 | 0.024942 | 0.024552 | 0.169914 | 0.156664 | 0.13562 | 0.113016 | 0.063913 | 0.030398 | 0 | 0.01985 | 0.287007 | 4,310 | 136 | 100 | 31.691176 | 0.815164 | 0.079582 | 0 | 0.239583 | 0 | 0 | 0.029193 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0 | 0.114583 | 0.010417 | 0.354167 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b833ed8f6ed3594f0fe9d1bc58f3ad7c701ef903 | 950 | py | Python | docs/tutorials/content_hps_ml_basic/test_config.py | Z223I/deephyper | 4fd1054dc22f15197567bdd93c6e7a95a614b8e2 | [
"BSD-3-Clause"
] | 1 | 2021-09-03T18:24:31.000Z | 2021-09-03T18:24:31.000Z | docs/tutorials/content_hps_ml_basic/test_config.py | Z223I/deephyper | 4fd1054dc22f15197567bdd93c6e7a95a614b8e2 | [
"BSD-3-Clause"
] | null | null | null | docs/tutorials/content_hps_ml_basic/test_config.py | Z223I/deephyper | 4fd1054dc22f15197567bdd93c6e7a95a614b8e2 | [
"BSD-3-Clause"
] | 1 | 2021-08-31T13:47:27.000Z | 2021-08-31T13:47:27.000Z | def test_config(config):
import numpy as np
from sklearn.utils import check_random_state
from sklearn.ensemble import RandomForestClassifier
from deephyper.benchmark.datasets import airlines as dataset
rs_data = np.random.RandomState(seed=42)
ratio_test = 0.33
ratio_valid = (1 - ratio_test) * 0.33
train, valid, test = dataset.load_data(
random_state=rs_data,
test_size=ratio_test,
valid_size=ratio_valid,
categoricals_to_integers=True,
)
rs_classifier = check_random_state(42)
classifier = RandomForestClassifier(n_jobs=8, random_state=rs_classifier, **config)
classifier.fit(*train)
acc_train = classifier.score(*train)
acc_valid = classifier.score(*valid)
acc_test = classifier.score(*test)
print(f"Accuracy on Training: {acc_train:.3f}")
print(f"Accuracy on Validation: {acc_valid:.3f}")
print(f"Accuracy on Testing: {acc_test:.3f}")
| 30.645161 | 87 | 0.708421 | 126 | 950 | 5.119048 | 0.412698 | 0.068217 | 0.065116 | 0.074419 | 0.055814 | 0 | 0 | 0 | 0 | 0 | 0 | 0.019582 | 0.193684 | 950 | 30 | 88 | 31.666667 | 0.822454 | 0 | 0 | 0 | 0 | 0 | 0.116842 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.043478 | false | 0 | 0.173913 | 0 | 0.217391 | 0.130435 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b83456bcaeb46d36f4b62fbc8173c039d571bf73 | 6,739 | py | Python | tests/test_listing.py | ChasNelson1990/pyzoopla | d22ceb7f443016e0ce92436741fa3b27de3c53b3 | [
"MIT"
] | 1 | 2020-08-29T01:41:23.000Z | 2020-08-29T01:41:23.000Z | tests/test_listing.py | ChasNelson1990/pyzoopla | d22ceb7f443016e0ce92436741fa3b27de3c53b3 | [
"MIT"
] | 4 | 2019-10-24T14:48:50.000Z | 2021-06-17T13:57:27.000Z | tests/test_listing.py | ChasNelson1990/pyzoopla | d22ceb7f443016e0ce92436741fa3b27de3c53b3 | [
"MIT"
] | 2 | 2018-07-11T12:13:44.000Z | 2022-03-24T11:14:26.000Z | from httmock import all_requests, HTTMock, response
from pyzoopla.listing import PropertyHistoricalListing, PropertyListing
def test_listing_few_details():
@all_requests
def zoopla_mock(url, request):
content = open('tests/test_data/listing.txt', 'r').read()
return response(content=content, request=request)
with HTTMock(zoopla_mock):
results = PropertyListing(47902463)
assert str(results) == 'https://ww2.zoopla.co.uk/for-sale/details/47902463'
assert results.listing_id == 47902463
assert results.slug == 'for-sale/details'
data = results.details(dataframe=False)
del data['date_generated']
assert data == {
'listing_id': 47902463,
'description': "\n A lovely three, three bathroom bedroom third floor Marylebone "
"apartment in a prestigious mansion block with lift and porter. Beautifully presented "
"throughout, comprising a master bedroom with en suite shower room, two further double "
"bedrooms, two further shower rooms, and a large semi open plan kitchen/reception room with "
"dining area. Further features bright and charming rooms, neutral décor and ample storage.You "
"may download, store and use the material for your own personal use and research. You may not "
"republish, retransmit, redistribute or otherwise make the material available to any party or "
"make the same available on any website, online service or bulletin board of your own or of "
"any other party or make the same available in hard copy or in any other media without the "
"website owner's express prior written consent. The website owner's copyright must remain on "
"all reproductions of material taken from this website.\n ",
'main_features': ['3 bedrooms', '3 bathrooms', '1 reception room',
'floor area1,163 sq. ft'],
'more_features': [],
'price_history': {'date': ['7th Jun 2018'],
'price': [2295000],
'detail': ['First listed']}
}
def test_listing_more_details():
@all_requests
def zoopla_mock(url, request):
content = open('tests/test_data/listing2.txt', 'r').read()
return response(content=content, request=request)
with HTTMock(zoopla_mock):
results = PropertyListing(38834402)
assert str(results) == 'https://ww2.zoopla.co.uk/for-sale/details/38834402'
assert results.listing_id == 38834402
assert results.slug == 'for-sale/details'
data = results.details(dataframe=True).to_dict()
del data['date_generated']
assert data == {
'listing_id': {0: 38834402},
'description': {0: '\n Set within a superb portered building just south of Oxford '
'Street, this fantastic two bedroom, two bathroom apartment offers beautifully presented '
'living space with classic décor.A wealth of exclusive boutiques and eateries can be found '
'throughout Mayfair, Oxford Street and Regent Street offer world class shops and department'
' stores. Hyde Park is also moments away.\n '},
'main_features': {0: ['2 bedrooms']},
'more_features': {0: ['Secure entry and lift access to the second floor',
'Generous reception room with lots of natural light',
'Separate modern kitchen with ample storage space',
'Master bedroom with fitted wardrobe and en suite',
'Good-sized second bedroom with fitted wardrobe',
'Well presented shower room',
'Large entrance hall with storage cupboards']},
'price_history': {0: {'date': ['18th Apr 2018', '22nd Dec 2015', '19th Feb 2015', '29th Oct 2014'],
'price': [2300000, 2599000, 2800000, 3000000],
'detail': ['Price reduced by £299,000', 'Price reduced by £201,000',
'Price reduced by £200,000', 'First listed']}}
}
def test_historical_listing_details():
@all_requests
def zoopla_mock(url, request):
content = open('tests/test_data/historical.txt', 'r').read()
return response(content=content, request=request)
with HTTMock(zoopla_mock):
results = PropertyHistoricalListing(37047136)
assert str(results) == 'Property history of 108 Shoreditch High Street, London E1 6JN, \n29th May 2015'
assert results.listing_id == 37047136
assert results.slug == 'property-history'
data = results.details(dataframe=False)
del data['date_generated']
assert data == {
'listing_id': 37047136,
'description': ". . . This wonderfully bright and spacious one bedroom apartment occupies the "
"third floor of a sympathetically restored Victorian building.Offering approximately 734 sq. "
"Ft. Of space this larger than average one bedroom boasts a stylish finish in the form of "
"exposed brick work, wood flooring, double glazed sash windows and a bespoke kitchen with "
"Siemens ovens and induction hob.Comprising an open plan dual aspect kitchen and living space, "
"generous double bedroom with fitted wardrobes and a high quality bathroom with a large "
"walk-in shower and storage spaces.Enjoying a fantastic locationin the heart of vibrant "
"Shoreditch, home to an increasing number of boutique clothing shops, the Ace Hotel and an "
"array of excellent bars and restaurants. Fashionable Brick Lane and Columbia Road are also "
"close by.A number of transport links serve the property including Shoreditch High Street "
"(Overground) just a stone's throw away, Old Street Station (National Rail, Northern Line) and "
"the major hub of Liverpool Street.Offered with no onward chain.. . . . . . ",
'features': '. . 734 sq. Ft. One bedroom apartment. Victorian conversion. High specification finish. Exposed '
'brick/sash windows/wood flooring. Central Shoreditch location. . . . '
}
| 58.6 | 119 | 0.602612 | 759 | 6,739 | 5.304348 | 0.43083 | 0.014903 | 0.01391 | 0.015648 | 0.227273 | 0.218331 | 0.204918 | 0.204918 | 0.195231 | 0.195231 | 0 | 0.044217 | 0.318742 | 6,739 | 114 | 120 | 59.114035 | 0.832063 | 0 | 0 | 0.231579 | 0 | 0.010526 | 0.560321 | 0.012613 | 0 | 0 | 0 | 0 | 0.126316 | 1 | 0.063158 | false | 0 | 0.021053 | 0 | 0.115789 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b834d6e8336d0648e0ac507b9667ca5ee527219d | 6,180 | py | Python | incident/views.py | dihyat/serviceDesk | 6f54ebec800a6e27b2293ac87b342ce3914e3e62 | [
"Apache-2.0"
] | null | null | null | incident/views.py | dihyat/serviceDesk | 6f54ebec800a6e27b2293ac87b342ce3914e3e62 | [
"Apache-2.0"
] | null | null | null | incident/views.py | dihyat/serviceDesk | 6f54ebec800a6e27b2293ac87b342ce3914e3e62 | [
"Apache-2.0"
] | null | null | null | from django.shortcuts import render, get_object_or_404
from django.http import JsonResponse, HttpResponseBadRequest, HttpResponse
from django.core import serializers
from .forms import IncidentForm, UpdateForm, DeveloperForm
from .models import Incident,Developers
# Create your views here.
def indexView(request):
form = IncidentForm()
update_form = UpdateForm()
developer_form = DeveloperForm()
incidents = Incident.objects.all()
developers = []
dev_info = Developers.objects.all()
for i in incidents:
developers += [i.developer.all()]
zip_incident = zip(incidents,developers)
return render(request, 'index.html', {"form": form, "incident": zip_incident, "update_form":update_form, "dev_info":dev_info, "developer_form": developer_form})
def postIncident(request):
    if request.is_ajax() and request.method == "POST":
form = IncidentForm(request.POST)
if form.is_valid():
instance = form.save()
dev_data = instance.developer.all()
ser_instance = serializers.serialize('json',[instance,])
ser_dev = serializers.serialize('json', dev_data)
return JsonResponse({"instance": ser_instance, 'dev_instance':ser_dev},status=200)
else:
return JsonResponse({"error": form.errors},status=400)
return JsonResponse({"error":"error"},status=400)
def checkName(request):
    if request.is_ajax() and request.method == "GET":
company_name = request.GET.get("company_name",None)
if Incident.objects.filter(company_name=company_name).exists():
return JsonResponse({"valid":False},status = 200)
else:
return JsonResponse({"valid":True},status = 200)
return JsonResponse({},status = 400)
def delete_post(request, test_id):
remv_post = Incident.objects.get(id = test_id)
    if request.method == 'DELETE':
remv_post.delete()
return JsonResponse({
'valid':True
})
return HttpResponseBadRequest('invalid')
def update_post(request, test_id):
if request.method == "PUT":
all_data = request.body.decode('utf-8').split('&')
dev_team = list(filter(None,all_data[3].split('=')[1].split('+')))
spc_name = all_data[1].split('=')[1].split('+')
spc_comp = all_data[0].split('=')[1].split('+')
        # allow names that contain spaces ('+' is the urlencoded space)
str_spc_name = ''
str_comp_name = ''
for val in spc_name:
str_spc_name += val + ' '
for dal in spc_comp:
str_comp_name += dal + ' '
        clean_data = {
            'company_name': str_comp_name.strip(),
            'first_name': str_spc_name.strip(),
            'last_name': all_data[2].split('=')[1],
        }
form = UpdateForm(clean_data)
if form.is_valid():
obj, was_created = Incident.objects.update_or_create(id = test_id, defaults = clean_data)
obj.developer.clear()
            if obj is not None:
for i in dev_team:
dev_obj = Developers.objects.get(id = i)
obj.developer.add(dev_obj)
obj.save()
dev_data = obj.developer.all()
ser_dev = serializers.serialize('json', dev_data)
ser_instance = serializers.serialize('json',[obj])
return JsonResponse({"instance": ser_instance, 'dev_instance':ser_dev},status=200)
else:
return JsonResponse({"error": form.errors},status=400)
else:
return JsonResponse({"error":"error"},status=400)
#Requests for the developer model starts here
def create_developer(request):
    if request.is_ajax() and request.method == "POST":
form = DeveloperForm(request.POST)
if form.is_valid():
id_list = request.POST.getlist('incidents')
instance = form.save()
for id in id_list:
vari = Incident.objects.get(id = id)
instance.developer_teams.add(vari)
instance.save()
ser_instance = serializers.serialize('json',[instance,])
return JsonResponse({"instance": ser_instance},status=200)
else:
return JsonResponse({"error": form.errors},status=400)
return JsonResponse({"error":"error"},status=400)
def delete_developer(request, test_id):
remv_dev = Developers.objects.get(id = test_id)
    if request.method == 'DELETE':
remv_dev.delete()
return JsonResponse({
'valid':True
})
return HttpResponseBadRequest('invalid')
def checkTeamName(request):
    if request.is_ajax() and request.method == "GET":
team_name = request.GET.get("team_name",None)
if Developers.objects.filter(team_name=team_name).exists():
return JsonResponse({"valid":False},status = 200)
else:
return JsonResponse({"valid":True},status = 200)
return JsonResponse({},status = 400)
def update_developer(request, test_id):
if request.method == "PUT":
all_data = request.body.decode('utf-8').split('&')
spc_name = all_data[0].split('=')[1].split('+')[0]
spc_email = all_data[1].split('=')[1].split('%40')
        # rebuild the email address ('%40' is the urlencoded '@')
str_spc_email = spc_email[0]+'@'+spc_email[1]
clean_data = {
'team_name': spc_name,
'team_email': str_spc_email,
'team_number': all_data[2].split('=')[1],
}
form = DeveloperForm(clean_data)
if form.is_valid():
obj, was_created = Developers.objects.update_or_create(id = test_id, defaults = clean_data)
            if obj is not None:
obj.save()
ser_instance = serializers.serialize('json',[obj])
return JsonResponse({"dev_instance": ser_instance,},status=200)
else:
return JsonResponse({"error": form.errors},status=400)
else:
return JsonResponse({"error":"error"},status=400) | 33.225806 | 164 | 0.597896 | 699 | 6,180 | 5.105866 | 0.171674 | 0.100869 | 0.049314 | 0.031942 | 0.553376 | 0.543009 | 0.498179 | 0.451667 | 0.407397 | 0.325021 | 0 | 0.017387 | 0.27411 | 6,180 | 186 | 165 | 33.225806 | 0.778199 | 0.020874 | 0 | 0.454545 | 0 | 0 | 0.065322 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.068182 | false | 0 | 0.037879 | 0 | 0.280303 | 0.007576 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b83524056aa3bf0c73f3bfc9f086621f2abd8202 | 1,132 | py | Python | my_nn/optimizers/optimizers.py | zerowing-ex/Machine_Learning | 2c540cf25588ddf598749362d461f131c17581ce | [
"MIT"
] | null | null | null | my_nn/optimizers/optimizers.py | zerowing-ex/Machine_Learning | 2c540cf25588ddf598749362d461f131c17581ce | [
"MIT"
] | null | null | null | my_nn/optimizers/optimizers.py | zerowing-ex/Machine_Learning | 2c540cf25588ddf598749362d461f131c17581ce | [
"MIT"
] | null | null | null | from abc import ABCMeta, abstractmethod
import numpy as np
class Optimizer(metaclass=ABCMeta):
@abstractmethod
def minimize(self, w, g):
pass
class SGD(Optimizer):
def __init__(self,
learning_rate=0.01,
momentum=0.0,
nesterov=False,
name="SGD",
**kwargs
):
self.learning_rate = learning_rate
self.momentum = momentum
self.nesterov = nesterov
self.name = name
self.kwargs = kwargs
self.velocity = None
    def minimize(self, w, g):
        # Lazily create the velocity buffer the first time we see a gradient,
        # so it matches the shape of the weights being updated.
        if self.velocity is None:
            self.velocity = np.zeros_like(w)
        if self.momentum <= 0:
            w -= self.learning_rate * g
        elif not self.nesterov:
            self.velocity = self.momentum * self.velocity - self.learning_rate * g
            w += self.velocity
        else:
            # Nesterov momentum: update the velocity, then apply the
            # look-ahead step to the weights.
            self.velocity = self.momentum * self.velocity - self.learning_rate * g
            w += self.momentum * self.velocity - self.learning_rate * g
optimizers_dict: dict = {
'sgd': SGD,
}
def get(optimizer_name):
optimizer_name = optimizer_name.lower()
return optimizers_dict[optimizer_name]
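# Quick demonstration: minimise f(w) = w**2 with momentum SGD. The values
# below are chosen for illustration only.
if __name__ == '__main__':
    opt = get('sgd')(learning_rate=0.1, momentum=0.9)
    w = np.array([5.0])
    for _ in range(100):
        g = 2 * w  # gradient of w**2
        opt.minimize(w, g)
    print(w)  # should be close to [0.]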
| 25.155556 | 77 | 0.571555 | 124 | 1,132 | 5.080645 | 0.314516 | 0.133333 | 0.152381 | 0.107937 | 0.252381 | 0.15873 | 0.15873 | 0.15873 | 0.15873 | 0.15873 | 0 | 0.008032 | 0.340106 | 1,132 | 44 | 78 | 25.727273 | 0.835341 | 0 | 0 | 0.114286 | 0 | 0 | 0.0053 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.114286 | false | 0.028571 | 0.057143 | 0 | 0.257143 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b837d3db636a90a998f050c9c219ccdfff526540 | 403 | py | Python | addition of matrices.py | hhimmmmii/Python-for-Beginners | 82d1fecb5174d2d36dc8b547de13af8ed8a6ef70 | [
"MIT"
] | 6 | 2020-10-02T13:18:33.000Z | 2020-11-07T20:42:39.000Z | addition of matrices.py | virendrasingal/Python-for-Beginners | a8dc40c169fab921f55c1b5aa818a59a316caf34 | [
"MIT"
] | 5 | 2020-10-03T10:01:44.000Z | 2020-10-30T16:56:35.000Z | addition of matrices.py | virendrasingal/Python-for-Beginners | a8dc40c169fab921f55c1b5aa818a59a316caf34 | [
"MIT"
] | 42 | 2020-09-30T18:47:49.000Z | 2021-10-01T04:10:31.000Z | # Program to add two matrices using nested loop
X = [[16,71,33],
[14 ,15,60],
[71 ,81,99]]
Y = [[55,8,1],
[6,17,3],
[4,5,92]]
result = [[0,0,0],
[0,0,0],
[0,0,0]]
# iterates along the rows
for i in range(len(X)):
# iterates along the columns
for j in range(len(X[0])):
result[i][j] = X[i][j] + Y[i][j]
for r in result:
print(r) | 18.318182 | 48 | 0.473945 | 74 | 403 | 2.581081 | 0.567568 | 0.08377 | 0.109948 | 0.125654 | 0.04712 | 0.04712 | 0.04712 | 0.04712 | 0 | 0 | 0 | 0.145985 | 0.320099 | 403 | 22 | 49 | 18.318182 | 0.551095 | 0.238213 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.071429 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
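# The same addition in one step with NumPy, for comparison (numpy is not used
# above, so this is left as a comment):
#   import numpy as np
#   print(np.array(X) + np.array(Y))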
b839e91cbc1b50e7916c464f7be1a596a4a43767 | 5,332 | py | Python | data_transformer.py | dozsam13/LHYP | af7115c115fdff51399b83cd1a515bf2c6f7879d | [
"MIT"
] | null | null | null | data_transformer.py | dozsam13/LHYP | af7115c115fdff51399b83cd1a515bf2c6f7879d | [
"MIT"
] | null | null | null | data_transformer.py | dozsam13/LHYP | af7115c115fdff51399b83cd1a515bf2c6f7879d | [
"MIT"
] | null | null | null | from con_reader import CONreaderVM
from dicom_reader import DCMreaderVM
from utils import get_logger
from domain.patient_data import PatientData
import numpy as np
import pickle
import os
import sys
import cv2 as cv
logger = get_logger(__name__)
def create_path_for_file(pickle_file_path):
os.makedirs(os.path.dirname(pickle_file_path), exist_ok=True)
def collect_contour_slices_by_frames(contours):
frameSliceDict = {}
for slc in contours:
for frm in contours[slc]:
if not(frm in frameSliceDict):
frameSliceDict[frm] = []
frameSliceDict[frm].append(slc)
return frameSliceDict
def left_ventricle_contours(contours):
left_ventricle_color_modes = {"ln", "lp"}
left_ventricle_contours = {}
for slc, frames in contours.items():
for frm, modes in frames.items():
filtered_contours = dict(filter(lambda contour: contour[0] in left_ventricle_color_modes, modes.items()))
if len(filtered_contours) == 0:
continue
if not(slc in left_ventricle_contours):
left_ventricle_contours[slc] = {}
left_ventricle_contours[slc][frm] = filtered_contours
return left_ventricle_contours
def frame_of_diastole(frame_slice_dict, contours):
frame1 = list(frame_slice_dict.keys())[0]
frame2 = list(frame_slice_dict.keys())[1]
slice_dict_1 = list(frame_slice_dict.values())[0]
slice_dict_2 = list(frame_slice_dict.values())[1]
slice_intersection = list(set(slice_dict_1).intersection(set(slice_dict_2)))
slice_intersection.sort()
mid_slice_index = slice_intersection[len(slice_intersection)//2]
common_contour_mode = next(iter(set(contours[mid_slice_index][frame1].keys()).intersection(contours[mid_slice_index][frame2])))
area1 = cv.contourArea(contours[mid_slice_index][frame1][common_contour_mode].astype(int))
area2 = cv.contourArea(contours[mid_slice_index][frame2][common_contour_mode].astype(int))
return frame1 if area1 > area2 else frame2
def calculate_sampling_slices(frame_slice_dict, diastole_frame):
diastole_slice_indexes = frame_slice_dict[diastole_frame]
return np.percentile(np.array(diastole_slice_indexes), (19,50,83), interpolation='lower')
def create_contour_diff_matricies(sampling_contours, shape):
contour_diff_matricies = []
for contours in sampling_contours:
contour_diff_mx = np.zeros(shape)
cv.drawContours(contour_diff_mx, [contours["lp"].astype(np.int32)],0, color=255, thickness=-1)
cv.drawContours(contour_diff_mx, [contours["ln"].astype(np.int32)],0, color=0, thickness=-1)
contour_diff_mx = cv.resize(contour_diff_mx, (200,200), interpolation = cv.INTER_AREA)
contour_diff_matricies.append(contour_diff_mx.astype('uint8'))
return contour_diff_matricies
def read_pathology(meta_txt):
pathology = ""
with open(meta_txt, "r") as f:
pathology = f.readline().split(": ")[1]
return pathology.rstrip()
def create_pickle_for_patient(in_dir, out_dir):
scan_id = os.path.basename(in_dir)
image_folder = os.path.join(in_dir, "sa", "images")
con_file = os.path.join(in_dir, "sa", "contours.con")
meta_txt = os.path.join(in_dir, "meta.txt")
if not os.path.isdir(image_folder):
logger.error("Could not find image folder for: {}".format(scan_id))
return
if not os.path.isfile(con_file):
logger.error("Could not find .con file for: {}".format(scan_id))
return
if not os.path.isfile(meta_txt):
logger.error("Could not find meta.txt file for: {}".format(scan_id))
return
dr = DCMreaderVM(image_folder)
    if dr.num_frames == 0 or dr.broken:
logger.error("Could not create pickle file for {}".format(scan_id))
return
cr = CONreaderVM(con_file)
contours = left_ventricle_contours(cr.get_hierarchical_contours())
frame_slice_dict = collect_contour_slices_by_frames(contours)
    if len(frame_slice_dict) != 2:
logger.error("Too many contour frames for {}".format(scan_id))
return
pickle_file_path = os.path.join(out_dir, scan_id + ".p")
create_path_for_file(pickle_file_path)
diastole_frame = frame_of_diastole(frame_slice_dict, contours)
sampling_slices = calculate_sampling_slices(frame_slice_dict, diastole_frame)
sampling_contours = []
for slice_index in sampling_slices:
shape = dr.get_image(slice_index,diastole_frame).shape
sampling_contours.append(contours[slice_index][diastole_frame])
pathology = read_pathology(meta_txt)
shape = dr.get_image(sampling_slices[0],diastole_frame).shape
contour_diff_matricies = create_contour_diff_matricies(sampling_contours, shape)
patient_data = PatientData(scan_id, pathology, cr.get_volume_data(), contour_diff_matricies)
with (open(pickle_file_path, "wb")) as pickleFile:
pickle.dump(patient_data, pickleFile)
in_dir = sys.argv[1]
out_dir = sys.argv[2]
if not os.path.isdir(in_dir):
logger.error("Invalid input directory: {}".format(in_dir))
else:
patient_folders = sorted(os.listdir(in_dir))
for patient_folder in patient_folders:
create_pickle_for_patient(os.path.join(in_dir, patient_folder), out_dir)
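# Usage sketch, inferred from the path checks above:
#   python data_transformer.py /path/to/patients /path/to/output
# where each patient folder contains sa/images/, sa/contours.con and meta.txt.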
| 41.015385 | 131 | 0.722243 | 742 | 5,332 | 4.889488 | 0.216981 | 0.037211 | 0.042448 | 0.020673 | 0.311742 | 0.194046 | 0.111907 | 0.048512 | 0.020948 | 0.020948 | 0 | 0.012653 | 0.169917 | 5,332 | 129 | 132 | 41.333333 | 0.807049 | 0 | 0 | 0.046296 | 0 | 0 | 0.046887 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.074074 | false | 0 | 0.083333 | 0 | 0.259259 | 0.009259 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b83bb1b0a0583d209ec7d065b804047b17a60c10 | 1,822 | py | Python | pyeccodes/defs/grib2/local/1098/template_2_0_def.py | ecmwf/pyeccodes | dce2c72d3adcc0cb801731366be53327ce13a00b | [
"Apache-2.0"
] | 7 | 2020-04-14T09:41:17.000Z | 2021-08-06T09:38:19.000Z | pyeccodes/defs/grib2/local/1098/template_2_0_def.py | ecmwf/pyeccodes | dce2c72d3adcc0cb801731366be53327ce13a00b | [
"Apache-2.0"
] | null | null | null | pyeccodes/defs/grib2/local/1098/template_2_0_def.py | ecmwf/pyeccodes | dce2c72d3adcc0cb801731366be53327ce13a00b | [
"Apache-2.0"
] | 3 | 2020-04-30T12:44:48.000Z | 2020-12-15T08:40:26.000Z | import pyeccodes.accessors as _
def load(h):
h.add(_.Codetable('tiggeModel', 2, "grib2/local/[localSubSectionCentre:l]/models.table"))
h.add(_.Codetable('tiggeCentre', 2, "grib2/local/[localSubSectionCentre:l]/centres.table"))
def tiggeLAMName_inline_concept(h):
def wrapped(h):
tiggeCentre = h.get_l('tiggeCentre')
tiggeModel = h.get_l('tiggeModel')
if tiggeCentre == 0 and tiggeModel == 0:
return 'MOGREPS-MO- EUA'
if tiggeCentre == 1 and tiggeModel == 1:
return 'AEMet-SREPS-MM-EUAT'
if tiggeCentre == 1 and tiggeModel == 2:
return 'SRNWP-PEPS'
if tiggeCentre == 2 and tiggeModel == 3:
return 'COSMOLEPS-ARPASIMC-EU'
if tiggeCentre == 3 and tiggeModel == 4:
return 'NORLAMEPS'
if tiggeCentre == 4 and tiggeModel == 5:
return 'ALADIN-LAEF'
if tiggeCentre == 5 and tiggeModel == 6:
return 'COSMO-DE EPS'
if tiggeCentre == 2 and tiggeModel == 7:
return 'COSMO-SREPS-BO-EU'
if tiggeCentre == 6 and tiggeModel == 8:
return 'GLAMEPS'
if tiggeCentre == 7 and tiggeModel == 9:
return 'PEARCE'
if tiggeCentre == 8 and tiggeModel == 10:
return 'DMI- HIRLAM'
if tiggeCentre == 9 and tiggeModel == 11:
return 'OMSZ- ALADIN-EPS'
if tiggeCentre == 10 and tiggeModel == 11:
return 'OMSZ- ALADIN-EPS'
if tiggeCentre == 11 and tiggeModel == 11:
return 'OMSZ- ALADIN-EPS'
return wrapped
h.add(_.Concept('tiggeLAMName', None, concepts=tiggeLAMName_inline_concept(h)))
| 29.868852 | 95 | 0.542261 | 192 | 1,822 | 5.09375 | 0.333333 | 0.186094 | 0.04908 | 0.064417 | 0.308793 | 0.130879 | 0.130879 | 0.096115 | 0.096115 | 0 | 0 | 0.032231 | 0.352909 | 1,822 | 60 | 96 | 30.366667 | 0.797286 | 0 | 0 | 0.078947 | 0 | 0 | 0.18726 | 0.066996 | 0 | 0 | 0 | 0 | 0 | 1 | 0.078947 | false | 0 | 0.026316 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b83c62b15e8098f3d53f5fb6d6e12e231734b7f2 | 788 | py | Python | tests/test_server.py | PaulGregor/evelink | dc1ca05725bf81c7f066cf4abcb51ab503759aaa | [
"MIT"
] | null | null | null | tests/test_server.py | PaulGregor/evelink | dc1ca05725bf81c7f066cf4abcb51ab503759aaa | [
"MIT"
] | null | null | null | tests/test_server.py | PaulGregor/evelink | dc1ca05725bf81c7f066cf4abcb51ab503759aaa | [
"MIT"
] | 1 | 2019-12-11T10:31:09.000Z | 2019-12-11T10:31:09.000Z | import mock
import unittest2 as unittest
import evelink.server as evelink_server
from tests.utils import APITestCase
class ServerTestCase(APITestCase):
def setUp(self):
super(ServerTestCase, self).setUp()
self.server = evelink_server.Server(api=self.api)
def test_server_status(self):
self.api.get.return_value = self.make_api_result("server/server_status.xml")
result, current, expires = self.server.server_status()
self.assertEqual(result, {'online':True, 'players':38102})
self.assertEqual(current, 12345)
self.assertEqual(expires, 67890)
self.assertEqual(self.api.mock_calls, [
mock.call.get('server/ServerStatus', params={}),
])
if __name__ == "__main__":
unittest.main()
| 29.185185 | 84 | 0.680203 | 92 | 788 | 5.630435 | 0.456522 | 0.11583 | 0.061776 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.025518 | 0.204315 | 788 | 26 | 85 | 30.307692 | 0.800638 | 0 | 0 | 0 | 0 | 0 | 0.081218 | 0.030457 | 0 | 0 | 0 | 0 | 0.210526 | 1 | 0.105263 | false | 0 | 0.210526 | 0 | 0.368421 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b83cc0040431175b961c5d973d39f8e1b2c4ea44 | 4,326 | py | Python | src/ZEO/tests/ZEO4/zrpc/server.py | azmeuk/ZEO | 8de475763467b054d87ad310f1696cc713db9135 | [
"ZPL-2.1"
] | 40 | 2015-11-26T18:40:29.000Z | 2022-03-15T06:45:43.000Z | src/ZEO/tests/ZEO4/zrpc/server.py | azmeuk/ZEO | 8de475763467b054d87ad310f1696cc713db9135 | [
"ZPL-2.1"
] | 138 | 2015-01-05T16:05:09.000Z | 2022-03-31T14:02:40.000Z | src/ZEO/tests/ZEO4/zrpc/server.py | azmeuk/ZEO | 8de475763467b054d87ad310f1696cc713db9135 | [
"ZPL-2.1"
] | 24 | 2015-04-03T07:05:13.000Z | 2021-12-24T06:10:54.000Z | ##############################################################################
#
# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
import asyncore
import socket
import time
# _has_dualstack: True if the dual-stack sockets are supported
try:
# Check whether IPv6 sockets can be created
s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
except (socket.error, AttributeError):
_has_dualstack = False
else:
# Check whether enabling dualstack (disabling v6only) works
try:
s.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, False)
except (socket.error, AttributeError):
_has_dualstack = False
else:
_has_dualstack = True
s.close()
del s
from .connection import Connection
from .log import log
from .log import logger
import logging
# Export the main asyncore loop
loop = asyncore.loop
class Dispatcher(asyncore.dispatcher):
"""A server that accepts incoming RPC connections"""
__super_init = asyncore.dispatcher.__init__
def __init__(self, addr, factory=Connection, map=None):
self.__super_init(map=map)
self.addr = addr
self.factory = factory
self._open_socket()
def _open_socket(self):
if type(self.addr) == tuple:
if self.addr[0] == '' and _has_dualstack:
# Wildcard listen on all interfaces, both IPv4 and
# IPv6 if possible
self.create_socket(socket.AF_INET6, socket.SOCK_STREAM)
self.socket.setsockopt(
socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, False)
elif ':' in self.addr[0]:
self.create_socket(socket.AF_INET6, socket.SOCK_STREAM)
if _has_dualstack:
# On Linux, IPV6_V6ONLY is off by default.
# If the user explicitly asked for IPv6, don't bind to IPv4
self.socket.setsockopt(
socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, True)
else:
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
else:
self.create_socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.set_reuse_addr()
log("listening on %s" % str(self.addr), logging.INFO)
for i in range(25):
try:
self.bind(self.addr)
except Exception as exc:
log("bind failed %s waiting", i)
if i == 24:
raise
else:
time.sleep(5)
else:
break
self.listen(5)
def writable(self):
return 0
def readable(self):
return 1
def handle_accept(self):
try:
sock, addr = self.accept()
except socket.error as msg:
log("accepted failed: %s" % msg)
return
# We could short-circuit the attempt below in some edge cases
# and avoid a log message by checking for addr being None.
# Unfortunately, our test for the code below,
# quick_close_doesnt_kill_server, causes addr to be None and
# we'd have to write a test for the non-None case, which is
# *even* harder to provoke. :/ So we'll leave things as they
# are for now.
# It might be better to check whether the socket has been
# closed, but I don't see a way to do that. :(
# Drop flow-info from IPv6 addresses
if addr: # Sometimes None on Mac. See above.
addr = addr[:2]
try:
c = self.factory(sock, addr)
except:
if sock.fileno() in asyncore.socket_map:
del asyncore.socket_map[sock.fileno()]
logger.exception("Error in handle_accept")
else:
log("connect from %s: %s" % (repr(addr), c))
| 34.608 | 79 | 0.586223 | 533 | 4,326 | 4.643527 | 0.403377 | 0.022626 | 0.028283 | 0.035556 | 0.179394 | 0.179394 | 0.156768 | 0.142626 | 0.079192 | 0 | 0 | 0.013921 | 0.302589 | 4,326 | 124 | 80 | 34.887097 | 0.80643 | 0.319926 | 0 | 0.263158 | 0 | 0 | 0.035623 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.065789 | false | 0 | 0.078947 | 0.026316 | 0.210526 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b83cea6c09e50f1fd185b929136df6a5dcf2ccb5 | 557 | py | Python | src/Classes/SystemManager.py | erick-dsnk/Electric | 7e8aad1f792321d7839717ed97b641bee7a4a64e | [
"Apache-2.0"
] | null | null | null | src/Classes/SystemManager.py | erick-dsnk/Electric | 7e8aad1f792321d7839717ed97b641bee7a4a64e | [
"Apache-2.0"
] | null | null | null | src/Classes/SystemManager.py | erick-dsnk/Electric | 7e8aad1f792321d7839717ed97b641bee7a4a64e | [
"Apache-2.0"
] | null | null | null | from timeit import default_timer as timer
from psutil import *
from subprocess import *
class SystemManager:
@staticmethod
def get_pc_config():
configuration = {}
mem = virtual_memory()
cpu_name, _ = Popen('wmic cpu get name', stdin=PIPE, stdout=PIPE, stderr=PIPE, shell=True).communicate()
configuration['cpu-info'] = cpu_name.decode('utf-8').replace('Name', '').replace('\r', '').replace('\n', '')[33:][:-2]
configuration['ram-availiable'] = round(mem.total / 1000000000, 1)
return configuration
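# Example (Windows only, since the CPU name is read via `wmic`):
if __name__ == '__main__':
    print(SystemManager.get_pc_config())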
| 37.133333 | 126 | 0.64991 | 66 | 557 | 5.378788 | 0.69697 | 0.039437 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.033482 | 0.195691 | 557 | 14 | 127 | 39.785714 | 0.758929 | 0 | 0 | 0 | 0 | 0 | 0.093357 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0 | 0.25 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b83d4b9941915701b52d345aa42be332ed594b86 | 1,467 | py | Python | boilerplate/python/client.py | aulonm/stromstad-ws | e5963489c5bee99ca9761d8fb0dd01649b1d2f13 | [
"ISC"
] | null | null | null | boilerplate/python/client.py | aulonm/stromstad-ws | e5963489c5bee99ca9761d8fb0dd01649b1d2f13 | [
"ISC"
] | null | null | null | boilerplate/python/client.py | aulonm/stromstad-ws | e5963489c5bee99ca9761d8fb0dd01649b1d2f13 | [
"ISC"
] | null | null | null | #!/usr/bin/env python3
import sys
import json
import socket
HOST, PORT = ('localhost', 3876)
class MyClient:
def _send_request(self, data):
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
sock.connect((HOST, PORT))
sock.sendall('{}\n'.format(json.dumps(data)).encode('utf-8'))
received = str(sock.recv(1024), 'utf-8')
print("Teller received: {}".format(received))
return received
def check_balance(self):
data = {'cmd': 'balance'}
self._send_request(data)
def deposit(self, amount):
data = {'cmd': 'deposit', 'amount': int(amount)}
self._send_request(data)
def withdraw(self, amount):
data = {'cmd': 'withdraw', 'amount': int(amount)}
self._send_request(data)
def unknown(self, command):
data = {'cmd': command}
self._send_request(data)
def execute(self, command, *args):
args = tuple([arg for arg in args if arg])
commands = {
'deposit': self.deposit,
'balance': self.check_balance,
'withdraw': self.withdraw
}
func = commands.get(command, None)
if func:
return func(*args)
else:
return self.unknown(command)
def main(argv):
client = MyClient()
command = argv[1]
args = argv[2:] or []
client.execute(command, *args)
if __name__ == '__main__':
main(sys.argv)
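# Example invocations (a teller server must already be listening on
# localhost:3876):
#   python client.py balance
#   python client.py deposit 100
#   python client.py withdraw 50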
| 24.04918 | 73 | 0.574642 | 172 | 1,467 | 4.773256 | 0.395349 | 0.066991 | 0.073082 | 0.09257 | 0.143727 | 0.090134 | 0.090134 | 0.090134 | 0 | 0 | 0 | 0.012369 | 0.283572 | 1,467 | 60 | 74 | 24.45 | 0.768792 | 0.014315 | 0 | 0.093023 | 0 | 0 | 0.081717 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.162791 | false | 0 | 0.069767 | 0 | 0.325581 | 0.023256 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b83d4c84931d8605c0121f12ebc937eead577628 | 3,305 | py | Python | problems/017.py | 6112/project-euler | b7478d14aa6defe347ab12178c7ffe90efdcb867 | [
"MIT"
] | null | null | null | problems/017.py | 6112/project-euler | b7478d14aa6defe347ab12178c7ffe90efdcb867 | [
"MIT"
] | null | null | null | problems/017.py | 6112/project-euler | b7478d14aa6defe347ab12178c7ffe90efdcb867 | [
"MIT"
] | null | null | null | # encoding=utf-8
## SOLVED 2013/12/21
## 21124
# If the numbers 1 to 5 are written out in words: one, two, three, four, five,
# then there are 3 + 3 + 5 + 4 + 4 = 19 letters used in total.
# If all the numbers from 1 to 1000 (one thousand) inclusive were written out in
# words, how many letters would be used?
# NOTE: Do not count spaces or hyphens. For example, 342 (three hundred and
# forty-two) contains 23 letters and 115 (one hundred and fifteen) contains 20
# letters. The use of "and" when writing out numbers is in compliance with
# British usage.
import re
MAX = 1000
def euler():
# accumulator for the number of letters used
accumulator = 0
# for each number in the given range
for number in range(1, MAX + 1):
# get the number's name
name = number_name(number)
# remove the whitespace and dashes
name = re.sub('\\s|-', '', name)
# add the length of the anme to the number of letters used
accumulator += len(name)
# return the number of letters used
return accumulator
# used for direct access to some number names
number_name_dictionary = {
0: 'zero',
1: 'one',
2: 'two',
3: 'three',
4: 'four',
5: 'five',
6: 'six',
7: 'seven',
8: 'eight',
9: 'nine',
10: 'ten',
11: 'eleven',
12: 'twelve',
13: 'thirteen',
15: 'fifteen',
18: 'eighteen',
20: 'twenty',
30: 'thirty',
40: 'forty',
50: 'fifty',
80: 'eighty',
1000: 'one thousand'
}
def number_name(number):
"""Return the full name, in letters, of a given number.
Args:
number: number whose name should be returned.
Returns:
the full name of that number (twenty-three, one hundred and two...), as
a string.
Raises:
ValueError: if number is not between 0 and 1000.
"""
if not isinstance(number, int):
raise TypeError("number is not an integer")
elif number < 0 or number > 1000:
raise ValueError("number out of range (must be between 0 and 1000)")
elif number in number_name_dictionary:
# return directly if it's simply a dictionary lookup -- used for
# exceptions and small numbers
        return number_name_dictionary[number]
elif number > 10 and number < 20:
# sixteen, nineteen...
        return number_name_dictionary[number - 10] + 'teen'
elif number >= 20 and number < 100:
# twenty-three, forty-nine...
if number // 10 * 10 in number_name_dictionary:
# exceptions for the tens: twenty, forty, fifty...
            name = number_name_dictionary[number // 10 * 10]
else:
# regular tens: sixty, seventy...
name = number_name(number // 10) + 'ty'
if number % 10:
# if has a non-zero unit, add a dash, then the name of the units
# (twenty-three, ninety-eight...)
name += '-' + number_name(number % 10)
return name
elif number >= 100 and number < 1000:
# nine hundred, two hundred...
name = number_name(number // 100) + ' hundred'
# if has tens or units
if number % 100:
# add 'and ...', as in four hundred and ninety-eight
name += ' and ' + number_name(number % 100)
return name
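# Spot checks against the examples in the problem statement, plus the answer:
if __name__ == '__main__':
    assert number_name(342) == 'three hundred and forty-two'  # 23 letters
    assert number_name(115) == 'one hundred and fifteen'      # 20 letters
    print(euler())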
| 32.087379 | 80 | 0.598185 | 459 | 3,305 | 4.267974 | 0.363834 | 0.061256 | 0.049005 | 0.040837 | 0.115365 | 0.033691 | 0 | 0 | 0 | 0 | 0 | 0.059026 | 0.297731 | 3,305 | 102 | 81 | 32.401961 | 0.785006 | 0.450832 | 0 | 0.036364 | 0 | 0 | 0.123139 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.036364 | false | 0 | 0.018182 | 0 | 0.145455 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b83fa590864e49920cb4637d5c3ae6d714ff1858 | 19,430 | py | Python | widgets.py | markusrobertjonsson/lesim2 | 05e171dbb7f1f4046b4363083030dfc6195f5a03 | [
"MIT"
] | null | null | null | widgets.py | markusrobertjonsson/lesim2 | 05e171dbb7f1f4046b4363083030dfc6195f5a03 | [
"MIT"
] | 107 | 2019-04-12T13:21:08.000Z | 2020-11-16T20:41:53.000Z | widgets.py | markusrobertjonsson/lesim2 | 05e171dbb7f1f4046b4363083030dfc6195f5a03 | [
"MIT"
] | 9 | 2019-04-17T19:48:19.000Z | 2020-10-25T20:12:48.000Z | import tkinter as tk
import tkinter.ttk as ttk
import tkinter.font as tkFont
from tkinter import Canvas # , Frame
from tkinter.scrolledtext import ScrolledText
from tkinter.constants import YES # , BOTH
from tkinter import messagebox
# import threading
# import time
class TextBoxLineNumbers(Canvas):
def __init__(self, font, *args, **kwargs):
super().__init__(*args, **kwargs)
self.text_box = None
self.font = font
def redraw(self):
self.delete("all")
i = self.text_box.index("@0,0")
while True:
dline = self.text_box.dlineinfo(i)
if dline is None:
break
y = dline[1]
line_number = str(i).split(".")[0]
self.create_text(2, y, anchor="nw", text=line_number, font=self.font)
i = self.text_box.index("%s+1line" % i)
def set_font(self, font):
self.font = font
self.redraw()
class TextBox(ScrolledText):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
super().config(undo=True)
events_to_bind = ['<Key>', '<MouseWheel>', '<Return>', '<Control-Home>',
'<Button-1>',
'<Button-2>', '<Button-3>',
'<Button-4>', # scroll up
'<Button-5>', # scroll down
'<Configure>',
'<B1-Motion>']
for event in events_to_bind:
super().bind(event, self.redraw_line_numbers)
super().bind("<Control-y>", self.redo)
super().bind("<Control-Y>", self.redo)
super().bind("<Control-z>", self.undo)
super().bind("<Control-Z>", self.undo)
# self['yscrollcommand'] = self.yscroll
# self.vbar.config(command=self.yview)
super().focus_set() # Set focus to the TextBox
def set_font(self, font):
super().config(font=font)
def get_current_font(self):
font_obj = tkFont.Font(font=self['font']).actual()
return (font_obj['family'], font_obj['size'])
def undo(self, event=None):
try:
super().edit_undo()
except tk.TclError: # nothing to undo
pass
return "break"
def redo(self, event=None):
try:
super().edit_redo()
except tk.TclError: # nothing to redo
pass
return "break"
def yview(self, *args):
super().yview(*args)
self.redraw_line_numbers()
# def yscroll(self, *args):
# # super().yview(*args)
# print("In yscroll")
def attach(self, line_numbers):
self.line_numbers = line_numbers
self.line_numbers.text_box = self
def redraw_line_numbers(self, event=None):
self.after(10, self.line_numbers.redraw)
# self.line_numbers.redraw()
class LineNumberedTextBox():
def __init__(self, frame):
self.text_box = TextBox(frame)
# self.text_box.vbar.config(command=self.text_box.yview)
self.font = self.text_box.get_current_font()
self.line_numbers = TextBoxLineNumbers(self.font, frame, width=30, highlightthickness=1, bd=1)
self.text_box.attach(self.line_numbers)
self.line_numbers.pack(side="left", fill="y")
self.text_box.pack(side="right", fill="both", expand=YES)
self.text_box.bind("<Control-plus>", self.increase_fontsize)
self.text_box.bind("<Control-minus>", self.decrease_fontsize)
def redraw_line_numbers(self):
self.text_box.redraw_line_numbers()
def bind(self, acc, fcn):
self.text_box.bind(acc, fcn)
def undo(self, event=None):
self.text_box.undo()
def redo(self, event=None):
self.text_box.redo()
def increase_fontsize(self, event=None):
self.font = (self.font[0], self.font[1] + 1)
self._update_font()
def decrease_fontsize(self, event=None):
self.font = (self.font[0], self.font[1] - 1)
self._update_font()
def _update_font(self):
self.text_box.set_font(self.font)
self.line_numbers.set_font(self.font)
self.redraw_line_numbers()
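# Usage sketch (kept as a comment; assumes a standard Tk event loop):
#
#   root = tk.Tk()
#   editor = LineNumberedTextBox(root)
#   root.mainloop()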
class ErrorDlg(tk.Toplevel):
    def __init__(self, title, message, detail):
        # tk.Toplevel.__init__(self)
        super().__init__()
        self.details_expanded = False
        self.title(title)
        self.geometry("500x100")
        self.minsize(500, 100)
        self.maxsize(1000, 1000)
        self.rowconfigure(0, weight=0)
        self.rowconfigure(1, weight=1)
        self.columnconfigure(0, weight=1)

        button_frame = tk.Frame(self)
        button_frame.grid(row=0, column=0, sticky="nsew")
        button_frame.columnconfigure(0, weight=1)
        button_frame.columnconfigure(1, weight=1)

        text_frame = tk.Frame(self)
        text_frame.grid(row=1, column=0, padx=(7, 7), pady=(7, 7), sticky="nsew")
        text_frame.rowconfigure(0, weight=1)
        text_frame.columnconfigure(0, weight=1)

        lbl = ttk.Label(button_frame, text=message)
        lbl.grid(row=0, column=0, columnspan=3, pady=(7, 7), padx=(7, 7), sticky="w")

        ok_button = ttk.Button(button_frame, text="OK", command=self.destroy)
        ok_button.grid(row=1, column=1, sticky="e")
        self.details_button = ttk.Button(button_frame, text="Details >>",
                                         command=self.toggle_details)
        self.details_button.grid(row=1, column=2, padx=(7, 7), sticky="e")

        self.textbox = tk.scrolledtext.ScrolledText(text_frame, height=6)
        self.textbox.insert("1.0", detail)
        self.textbox.config(state="disabled")
        # self.scrollb = tk.Scrollbar(text_frame, command=self.textbox.yview)
        # self.textbox.config(yscrollcommand=self.scrollb.set)

        ok_button.focus_set()
        self.grab_set()  # Make this dialog box modal

    def toggle_details(self):
        curr_x, curr_y = self._get_current_pos()
        if not self.details_expanded:
            self.textbox.grid(row=0, column=0, sticky='nsew')
            # self.scrollb.grid(row=0, column=1, sticky='nsew')
            self.resizable(True, True)
            self.geometry('700x500' + '+' + curr_x + '+' + curr_y)
            self.details_button.config(text="<< Details")
            self.details_expanded = True
        else:
            self.textbox.grid_forget()
            # self.scrollb.grid_forget()
            self.resizable(False, False)
            self.geometry('500x85' + '+' + curr_x + '+' + curr_y)
            self.details_button.config(text="Details >>")
            self.details_expanded = False

    def _get_current_pos(self):
        current_geometry = self.geometry()
        first_plus_ind = current_geometry.index('+')
        pos_xy = current_geometry[(first_plus_ind + 1):].split('+')
        assert len(pos_xy) == 2
        return pos_xy[0], pos_xy[1]
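

# Usage sketch (illustrative, not from the original source): the dialog is
# typically raised from an exception handler, e.g.
#
#   import traceback
#   try:
#       run_simulation()  # hypothetical caller
#   except Exception as ex:
#       ErrorDlg("Error", str(ex), traceback.format_exc())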
class ProgressDlg(tk.Toplevel):
    def __init__(self, progress_obj):
        super().__init__()
        self.progress_obj = progress_obj
        self._create_widgets()
        self.is_visible2 = True

    def _create_widgets(self):
        self.title("Simulation Progress")
        self.label1 = ttk.Label(self, textvariable=self.progress_obj.message1)
        self.label1.grid(row=0, column=0, columnspan=2, padx=(10, 0), pady=(10, 4), sticky="w")
        self.progressbar1 = ttk.Progressbar(self,
                                            mode='determinate',  # indeterminate
                                            variable=self.progress_obj.progress1,
                                            length=500)
        self.progressbar1.grid(row=1, column=0, padx=(10, 10), pady=(0, 10), sticky="nsew")

        self.label2 = ttk.Label(self, textvariable=self.progress_obj.message2)
        self.label2.grid(row=2, column=0, padx=(10, 0), pady=(0, 4), sticky="w")
        self.progressbar2 = ttk.Progressbar(self,
                                            mode='determinate',  # indeterminate
                                            variable=self.progress_obj.progress2,
                                            length=500)
        self.progressbar2.grid(row=3, column=0, padx=(10, 10), pady=(0, 5), sticky="nsew")

        # XXX Address in issue 70
        # self.details_box = tk.scrolledtext.ScrolledText(self, height=10)
        # self.details_box.insert("1.0", "Lots of info...")
        # self.details_box.config(state="disabled")
        # self.details_box.grid(row=4, column=0, padx=(10, 10), pady=(5, 5), sticky="nsew")

        button_frame = tk.Frame(self)
        button_frame.grid(row=5, column=0, padx=(10, 10), pady=(0, 0), sticky="e")
        self.stop_button = ttk.Button(button_frame, text="Stop", command=self.stop)
        self.stop_button.grid(row=0, column=0, padx=(10, 0), pady=(0, 5), sticky="w")
        self.close_button = ttk.Button(button_frame, text="Close", command=self.destroy)
        self.close_button.grid(row=0, column=1, padx=(5, 0), pady=(0, 5), sticky="e")
        self.close_button.config(state=tk.DISABLED)
        # stop_button.focus_set()
        self.grab_set()  # Make this dialog box modal

    def set_title(self, title):
        self.title(title)

    def set_visibility2(self, visible):
        if visible:
            if not self.is_visible2:
                self.progressbar2.grid()
                self.label2.grid()
                self.is_visible2 = True
        else:
            if self.is_visible2:
                self.progressbar2.grid_remove()
                self.label2.grid_remove()
                self.is_visible2 = False

    def stop(self):
        self.progress_obj.stop()
        self.close_button.config(state=tk.NORMAL)
        self.stop_button.config(state=tk.DISABLED)

    def update_progress(self, level, fraction_done):
        self.progress_obj.update(level, fraction_done)

    def report1(self, message):
        self.label1.config(text=message)

    def report2(self, message):
        self.label2.config(text=message)


class WarningDlg():
    def __init__(self, msg):
        messagebox.showwarning(title="Warning", message=msg)


class LicenseDlg(tk.Toplevel):
    def __init__(self, gui, include_agree_buttons=True):
        super().__init__()
        self.gui = gui
        self.title("License")
        self.geometry("500x100")
        self.minsize(700, 400)
        self.maxsize(1000, 700)
        self.rowconfigure(0, weight=0)
        self.rowconfigure(1, weight=1)
        self.rowconfigure(2, weight=0)
        self.columnconfigure(0, weight=1)

        lbl_frame = tk.Frame(self)
        lbl_frame.grid(row=0, column=0, sticky="nsew")
        lbl_frame.rowconfigure(0, weight=0)
        lbl = ttk.Label(lbl_frame, text=self._get_text(), background=gui.root['bg'])
        lbl.grid(row=0, column=0, pady=(3, 0), padx=(7, 7), sticky="w")

        text_frame = tk.Frame(self)
        text_frame.grid(row=1, column=0, padx=(7, 7), pady=(5, 0), sticky="nsew")
        text_frame.rowconfigure(0, weight=1)
        text_frame.columnconfigure(0, weight=1)
        textbox = tk.scrolledtext.ScrolledText(text_frame, height=6)
        textbox.config(wrap=tk.WORD)
        textbox.grid(row=0, column=0, sticky='nsew')
        textbox.insert("1.0", self._get_license_text())
        textbox.config(state="disabled")

        question_frame = tk.Frame(self)
        question_frame.grid(row=2, column=0, padx=(7, 7), pady=(7, 7), sticky="nsew")
        question_frame.columnconfigure(0, weight=1)
        question = ttk.Label(question_frame, text="Do you agree to these terms and conditions?",
                             background=gui.root['bg'])
        question.grid(row=0, column=0, padx=(0, 7), sticky="w")
        button_frame = tk.Frame(question_frame)
        button_frame.grid(row=0, column=1, sticky="e")
        yes_button = ttk.Button(button_frame, text="Yes", command=self.destroy)
        yes_button.grid(row=0, column=0)
        no_button = ttk.Button(button_frame, text="No", command=self.no)
        no_button.grid(row=0, column=1, padx=(5, 0))

        self.resizable(True, True)
        self.grab_set()  # Make this dialog box modal
        if include_agree_buttons:
            yes_button.focus_set()
            self.protocol("WM_DELETE_WINDOW", self.no)
        else:
            question.grid_remove()
            no_button.grid_remove()
            yes_button.config(text="Close")
    @staticmethod
    def _get_text():
        return """Learning Simulator is developed at Centre for Cultural Evolution at Stockholm University.
When using this software in research, please cite it as
Jonsson, Ghirlanda, Lind, Enquist, Learning Simulator, (2020), GitHub repository,
https://github.com/learningsimulator/learningsimulator.
The source code for this software is hosted on GitHub under the MIT license stated below. It is built using
- Python(R) (Copyright © 2001-2020 Python Software Foundation (PSF); All Rights Reserved)
- Matplotlib (Copyright © 2012-2020 Matplotlib Development Team (MDT); All Rights Reserved)
The terms and conditions for these products can be found below."""
    @staticmethod
    def _get_license_text():
        return """MIT License for Learning Simulator
----------------------------------
Copyright (c) 2018 markusrobertjonsson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Terms and conditions for accessing or otherwise using Python
------------------------------------------------------------
PSF LICENSE AGREEMENT FOR PYTHON 3.8.2rc2
1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and
the Individual or Organization ("Licensee") accessing and otherwise using Python
3.8.2rc2 software in source or binary form and its associated documentation.
2. Subject to the terms and conditions of this License Agreement, PSF hereby
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
analyze, test, perform and/or display publicly, prepare derivative works,
distribute, and otherwise use Python 3.8.2rc2 alone or in any derivative
version, provided, however, that PSF's License Agreement and PSF's notice of
copyright, i.e., "Copyright © 2001-2020 Python Software Foundation; All Rights
Reserved" are retained in Python 3.8.2rc2 alone or in any derivative version
prepared by Licensee.
3. In the event Licensee prepares a derivative work that is based on or
incorporates Python 3.8.2rc2 or any part thereof, and wants to make the
derivative work available to others as provided herein, then Licensee hereby
agrees to include in any such work a brief summary of the changes made to Python
3.8.2rc2.
4. PSF is making Python 3.8.2rc2 available to Licensee on an "AS IS" basis.
PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF
EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR
WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE
USE OF PYTHON 3.8.2rc2 WILL NOT INFRINGE ANY THIRD PARTY RIGHTS.
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON 3.8.2rc2
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF
MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 3.8.2rc2, OR ANY DERIVATIVE
THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material breach of
its terms and conditions.
7. Nothing in this License Agreement shall be deemed to create any relationship
of agency, partnership, or joint venture between PSF and Licensee. This License
Agreement does not grant permission to use PSF trademarks or trade name in a
trademark sense to endorse or promote products or services of Licensee, or any
third party.
8. By copying, installing or otherwise using Python 3.8.2rc2, Licensee agrees
to be bound by the terms and conditions of this License Agreement.
License agreement for matplotlib 3.1.3
--------------------------------------
1. This LICENSE AGREEMENT is between the Matplotlib Development Team ("MDT"), and the Individual or Organization ("Licensee") accessing and otherwise using matplotlib software in source or binary form and its associated documentation.
2. Subject to the terms and conditions of this License Agreement, MDT hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use matplotlib 3.1.3 alone or in any derivative version, provided, however, that MDT's License Agreement and MDT's notice of copyright, i.e., "Copyright (c) 2012-2013 Matplotlib Development Team; All Rights Reserved" are retained in matplotlib 3.1.3 alone or in any derivative version prepared by Licensee.
3. In the event Licensee prepares a derivative work that is based on or incorporates matplotlib 3.1.3 or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in any such work a brief summary of the changes made to matplotlib 3.1.3.
4. MDT is making matplotlib 3.1.3 available to Licensee on an "AS IS" basis. MDT MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, MDT MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF MATPLOTLIB 3.1.3 WILL NOT INFRINGE ANY THIRD PARTY RIGHTS.
5. MDT SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF MATPLOTLIB 3.1.3 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING MATPLOTLIB 3.1.3, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material breach of its terms and conditions.
7. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint venture between MDT and Licensee. This License Agreement does not grant permission to use MDT trademarks or trade name in a trademark sense to endorse or promote products or services of Licensee, or any third party.
8. By copying, installing or otherwise using matplotlib 3.1.3, Licensee agrees to be bound by the terms and conditions of this License Agreement.
"""
def no(self):
self.gui.file_quit()
| 43.565022 | 562 | 0.658157 | 2,665 | 19,430 | 4.700188 | 0.179362 | 0.013412 | 0.014929 | 0.015647 | 0.529698 | 0.468146 | 0.398771 | 0.359093 | 0.328996 | 0.309995 | 0 | 0.027023 | 0.232476 | 19,430 | 445 | 563 | 43.662921 | 0.812714 | 0.050232 | 0 | 0.18429 | 0 | 0.030211 | 0.398263 | 0.007166 | 0 | 0 | 0 | 0 | 0.003021 | 1 | 0.10574 | false | 0.006042 | 0.021148 | 0.006042 | 0.166163 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b843743ad830c5014f1df6f18d3fbf09a03f50d6 | 21,177 | py | Python | somenlp/NER/trainer.py | BeTKH/SoMeNLP | 0f22931b20f7c2f21c255410984257f0e3d225f6 | [
"MIT"
] | null | null | null | somenlp/NER/trainer.py | BeTKH/SoMeNLP | 0f22931b20f7c2f21c255410984257f0e3d225f6 | [
"MIT"
] | 3 | 2022-02-07T11:56:37.000Z | 2022-02-08T10:04:45.000Z | somenlp/NER/trainer.py | BeTKH/SoMeNLP | 0f22931b20f7c2f21c255410984257f0e3d225f6 | [
"MIT"
] | null | null | null | import torch
import time

from .seqeval_custom import precision_recall_fscore_support


class Trainer():
    def __init__(self, device, model_wrapper, data_handler, output_handler, train_conf):
        self.device = device
        self.model_w = model_wrapper
        self.data_handler = data_handler
        self.output_handler = output_handler
        self.train_config = train_conf

    def _weighted_averages(self, support, *arrays):
        res = []
        for arr in arrays:
            if sum(support) == 0:
                weighted_average = 0
            else:
                weighted_average = sum([a * b for a, b in zip(support, arr)]) / sum(support)
            res.append(weighted_average)
        return res

    def _eval_fct(self, labels, predictions, data_set_name, loss, meta_name=''):
        precision_all, recall_all, fscore_all, support, names = precision_recall_fscore_support(labels, predictions, average=None)
        w_precision, w_recall, w_fscore = self._weighted_averages(support, precision_all, recall_all, fscore_all)
        scalars = {}
        for p, r, f, n in zip(precision_all, recall_all, fscore_all, names):
            scalars['{}{}/Precision/{}'.format(meta_name, n, data_set_name)] = p
            scalars['{}{}/Recall/{}'.format(meta_name, n, data_set_name)] = r
            scalars['{}{}/FScore/{}'.format(meta_name, n, data_set_name)] = f
            if (not meta_name or meta_name.rstrip('/') == 'software') and data_set_name == self.train_config['eval_dataset_name'] and n == 'Application':
                self.model_w.current_performance = f
                if self.model_w.best_performance <= f:
                    self.model_w.current_is_best = True
                    self.model_w.best_performance = f
        scalars['{}Total/Precision/{}'.format(meta_name, data_set_name)] = w_precision
        scalars['{}Total/Recall/{}'.format(meta_name, data_set_name)] = w_recall
        scalars['{}Total/FScore/{}'.format(meta_name, data_set_name)] = w_fscore
        scalars['{}Total/Loss/{}'.format(meta_name, data_set_name)] = loss
        self.output_handler.print_scalars(scalars, self.model_w.global_epoch, data_set_name, meta_name)
        self.output_handler.write_scalars(scalars, self.model_w.global_epoch)
        self.output_handler.c_matrix(names, labels, predictions, self.train_config['tag_mode'])

    def _eval(self, labels, predictions, data_set_name, loss):
        if not isinstance(labels, dict):
            self._eval_fct(labels, predictions, data_set_name, loss)
        else:
            for k in labels.keys():
                self._eval_fct(labels[k], predictions[k], data_set_name, loss, meta_name=k + '/')

    def _get_train_depth(self, ep, hierarchy, max_depth=3):
        for _, v in hierarchy.items():
            if ep <= v['limit']:
                return v['depth']
        return max_depth

    def _train_model(self, train_loader, epochs):
        print("Starting training")
        for ep in range(1, epochs + 1):
            self.model_w.model.train()
            self.model_w.current_is_best = False
            print("Epoch {}".format(self.model_w.global_epoch))
            if self.data_handler.multi_task_mapping:
                if 'hierarchy_depth' in self.model_w.config['model']['gen']:
                    train_depth = self._get_train_depth(ep, self.model_w.config['model']['gen']['hierarchy_depth'])
                else:
                    train_depth = 4
                print("Training multi-label model with max depth {}".format(train_depth))
            start = time.time()
            ep_loss, running_batch_loss, running_batch_count = 0, 0, 0
            for step, batch in enumerate(train_loader):
                running_batch_count += 1
                batch = {k: (t.to(self.device) if t is not None else None) for k, t in batch.items()}
                if self.model_w.optim_grouped_params is None:
                    loss = self.model_w.model.neg_log_likelihood(batch['tags'], char_sentence=batch['chars'], sentence=batch['ids'], lengths=batch['lengths'], feature_sentence=batch['features'].float())
                else:
                    if not self.data_handler.multi_task_mapping:
                        outputs = self.model_w.model(batch['ids'], token_type_ids=None, attention_mask=batch['masks'], labels=batch['tags'])
                        loss = outputs[0]
                    else:
                        if len(self.data_handler.encoding['tag2idx']) == 4:
                            outputs = self.model_w.model(
                                batch['ids'],
                                token_type_ids=None,
                                attention_mask=batch['masks'],
                                software_labels=batch['software'],
                                soft_type_labels=batch['soft_type'],
                                mention_type_labels=batch['mention_type'],
                                soft_purpose_labels=batch['soft_purpose'],
                                sequence_lengths=batch['lengths'],
                                train_depth=train_depth,
                                teacher_forcing=True)
                        elif len(self.data_handler.encoding['tag2idx']) == 3:
                            outputs = self.model_w.model(
                                batch['ids'],
                                token_type_ids=None,
                                attention_mask=batch['masks'],
                                software_labels=batch['software'],
                                soft_type_labels=batch['soft_type'],
                                soft_purpose_labels=batch['soft_purpose'],
                                sequence_lengths=batch['lengths'],
                                train_depth=train_depth,
                                teacher_forcing=True)
                        else:
                            raise RuntimeError("Unsupported data transformation configuration")
                        loss = outputs[0]
                loss.backward()
                if self.model_w.optim_grouped_params is None:
                    self.model_w.optim.step()
                    self.model_w.optim.zero_grad()
                else:
                    torch.nn.utils.clip_grad_norm_(parameters=self.model_w.model.parameters(), max_norm=self.model_w.config['model']['gen']['max_grad_norm'])
                    self.model_w.optim.step()
                    self.model_w.optim.zero_grad()
                    self.model_w.scheduler.step()
                ep_loss += loss.item()
                running_batch_loss += loss.item()
                if step > 0:
                    if step % self.train_config['print_batches'] == 0:
                        print("At batch {}".format(step))
                        print("Average loss over last batches: {}".format(running_batch_loss / running_batch_count))
                        running_batch_count = 0
                        running_batch_loss = 0
                    if step % self.train_config['save_batches'] == 0:
                        self.model_w.save_checkpoint(step)
                    if step % self.train_config['test_batches'] == 0:
                        self._test_model()
            end = time.time()
            print("Epoch took {} seconds".format(round(end - start, 3)))
            if self.model_w.global_epoch % self.train_config['test_epochs'] == 0:
                self._test_model()
            if self.model_w.global_epoch >= self.train_config['save_from']:
                if self.model_w.global_epoch % self.train_config['save_epochs'] == 0:
                    self.model_w.save_checkpoint()
            if 'save_max' in self.train_config and self.train_config['save_max'] and self.model_w.current_is_best:
                print("Saving model with best performance..")
                self.model_w.save_checkpoint()
            self.model_w.global_epoch += 1

    def _test_model(self):
        self.model_w.model.eval()
        for idx, dataset in enumerate(self.data_handler.data_config['sets']['test']):
            print("Start testing on corpus {}".format(idx))
            ep_loss = 0
            if not self.data_handler.multi_task_mapping:
                predictions, true_labels, input_masks, input_ids = [], [], [], []
            else:
                predictions, true_labels = {}, {}
                for k in self.data_handler.tag_remapping.keys():
                    predictions[k] = []
                    true_labels[k] = []
                input_masks, input_ids = [], []
            start = time.time()
            for step, batch in enumerate(dataset['dataloader']):
                batch = {k: (t.to(self.device) if t is not None else None) for k, t in batch.items()}
                with torch.no_grad():
                    if self.model_w.optim_grouped_params is None:
                        tag_seq, score, input_mask = self.model_w.model(char_sentence=batch['chars'], sentence=batch['ids'], lengths=batch['lengths'], feature_sentence=batch['features'].float())
                        predictions.extend(tag_seq.tolist())
                        true_labels.extend(batch['tags'].tolist())
                    else:
                        if not self.data_handler.multi_task_mapping:
                            outputs = self.model_w.model(batch['ids'], token_type_ids=None, attention_mask=batch['masks'], labels=batch['tags'])
                            logits = outputs[1]
                            tag_seq = torch.argmax(logits, axis=2)
                            predictions.extend(tag_seq.tolist())
                            true_labels.extend(batch['tags'].tolist())
                        else:
                            if len(self.data_handler.encoding['tag2idx']) == 4:
                                outputs = self.model_w.model(
                                    batch['ids'],
                                    token_type_ids=None,
                                    attention_mask=batch['masks'],
                                    software_labels=batch['software'],
                                    soft_type_labels=batch['soft_type'],
                                    mention_type_labels=batch['mention_type'],
                                    soft_purpose_labels=batch['soft_purpose'],
                                    sequence_lengths=batch['lengths'],
                                    train_depth=3,
                                    teacher_forcing=False)
                                logits = {
                                    'software': outputs[1],
                                    'soft_type': outputs[2],
                                    'mention_type': outputs[3],
                                    'soft_purpose': outputs[4]
                                }
                            elif len(self.data_handler.encoding['tag2idx']) == 3:
                                outputs = self.model_w.model(
                                    batch['ids'],
                                    token_type_ids=None,
                                    attention_mask=batch['masks'],
                                    software_labels=batch['software'],
                                    soft_type_labels=batch['soft_type'],
                                    soft_purpose_labels=batch['soft_purpose'],
                                    sequence_lengths=batch['lengths'],
                                    train_depth=3,
                                    teacher_forcing=False)
                                logits = {
                                    'software': outputs[1],
                                    'soft_type': outputs[2],
                                    'soft_purpose': outputs[3]
                                }
                            else:
                                raise RuntimeError("Unsupported data transformation configuration")
                            for k in predictions.keys():
                                if self.model_w.model_type in ['MultiSciBERTCRF', 'MultiOpt2SciBERTCRF']:
                                    predictions[k].extend(logits[k].tolist())
                                else:
                                    predictions[k].extend(torch.argmax(logits[k], axis=2).tolist())
                                true_labels[k].extend(batch[k].tolist())
                        input_mask = (
                            (batch['ids'] != self.data_handler.special_toks['cls_tok']) &
                            (batch['ids'] != self.data_handler.special_toks['pad_tok']) &
                            (batch['ids'] != self.data_handler.special_toks['sep_tok'])
                        )
                input_masks.extend(input_mask.tolist())
                input_ids.extend(batch['ids'].tolist())
            end = time.time()
            print("Testing on corpus {} took {} seconds".format(idx, round(end - start, 3)))
            if not self.data_handler.multi_task_mapping:
                sentences = []
                pred_tags = []
                valid_tags = []
                for j_p, j_t, j_s, j_m in zip(predictions, true_labels, input_ids, input_masks):
                    pred_tags.append([])
                    valid_tags.append([])
                    sentences.append([])
                    for i_p, i_t, i_s, i_m in zip(j_p, j_t, j_s, j_m):
                        if i_m:
                            pred_tags[-1].append(self.data_handler.encoding['tag2name'][i_p])
                            valid_tags[-1].append(self.data_handler.encoding['tag2name'][i_t])
                            sentences[-1].append(i_s)
            else:
                sentences = []
                pred_tags, valid_tags = {}, {}
                for k in self.data_handler.encoding['tag2idx'].keys():
                    pred_tags[k] = []
                    valid_tags[k] = []
                for top_idx, (j_s, j_m) in enumerate(zip(input_ids, input_masks)):
                    for k in pred_tags.keys():
                        pred_tags[k].append([])
                        valid_tags[k].append([])
                    sentences.append([])
                    for bottom_idx, (i_s, i_m) in enumerate(zip(j_s, j_m)):
                        if i_m:
                            sentences[-1].append(i_s)
                            for k in pred_tags.keys():
                                pred_tags[k][-1].append(self.data_handler.encoding['tag2name'][k][predictions[k][top_idx][bottom_idx]])
                                valid_tags[k][-1].append(self.data_handler.encoding['tag2name'][k][true_labels[k][top_idx][bottom_idx]])
            self._eval(valid_tags, pred_tags, dataset['name'], ep_loss)
            if self.train_config['print_errors']:
                token_convert = self.data_handler.encoding['word2name'] if self.data_handler.tokenizer is None else self.data_handler.tokenizer
                self.output_handler.print_errors(valid_tags, pred_tags, sentences, self.train_config['max_output_length'], dataset['name'], token_convert)

    def train(self):
        for idx, dataset in enumerate(self.data_handler.data_config['sets']['train']):
            print("Training on {} dataset from train set".format(idx))
            if dataset["epochs"] > 0:
                self.model_w.set_optim(dataset['optimizer'])
                if self.model_w.optim_grouped_params is not None:
                    self.model_w.set_scheduler((len(dataset['dataloader']) * dataset['epochs']), dataset['scheduler'])
                self._train_model(dataset['dataloader'], dataset['epochs'])

    def prediction(self, bio=True, summary=True):
        self.model_w.model.eval()
        start = time.time()
        iterator = self.data_handler.stream_files()
        for out_path, data_loader, text in iterator:
            if not self.data_handler.multi_task_mapping:
                ids, predictions, input_masks = [], [], []
            else:
                predictions = {}
                for k in self.data_handler.encoding['tag2idx'].keys():
                    predictions[k] = []
                input_masks, ids = [], []
            for batch in data_loader:
                batch = {k: (t.to(self.device) if t is not None else None) for k, t in batch.items()}
                with torch.no_grad():
                    if self.model_w.optim_grouped_params is None:
                        tag_seq, score, input_mask = self.model_w.model(char_sentence=batch['chars'], sentence=batch['ids'], lengths=batch['lengths'], feature_sentence=batch['features'].float())
                    else:
                        if not self.data_handler.multi_task_mapping:
                            outputs = self.model_w.model(batch['ids'], token_type_ids=None, attention_mask=batch['masks'], labels=batch['tags'])
                            logits = outputs[1]
                            tag_seq = torch.argmax(logits, axis=2)
                            predictions.extend(tag_seq.tolist())
                        else:
                            outputs = self.model_w.model(
                                batch['ids'],
                                token_type_ids=None,
                                sequence_lengths=batch['lengths'],
                                attention_mask=batch['masks'])
                            if len(self.data_handler.encoding['tag2idx']) == 4:
                                logits = {
                                    'software': outputs[1],
                                    'soft_type': outputs[2],
                                    'mention_type': outputs[3],
                                    'soft_purpose': outputs[4]
                                }
                            elif len(self.data_handler.encoding['tag2idx']) == 3:
                                logits = {
                                    'software': outputs[1],
                                    'soft_type': outputs[2],
                                    'soft_purpose': outputs[3]
                                }
                            for k, v in logits.items():
                                if self.model_w.model_type in ['MultiSciBERTCRF', 'MultiOpt2SciBERTCRF']:
                                    predictions[k].extend(v.tolist())
                                else:
                                    predictions[k].extend(torch.argmax(v, axis=2).tolist())
                        input_mask = (
                            (batch['ids'] != self.data_handler.special_toks['cls_tok']) &
                            (batch['ids'] != self.data_handler.special_toks['pad_tok']) &
                            (batch['ids'] != self.data_handler.special_toks['sep_tok'])
                        )
                ids.extend(batch['ids'].tolist())
                input_masks.extend(input_mask.tolist())
            if not self.data_handler.multi_task_mapping:
                pred_tags = []
                n_text = []
                for j, j_p, j_m in zip(ids, predictions, input_masks):
                    pred_tags.append([])
                    n_text.append([])
                    for i, i_p, i_m in zip(j, j_p, j_m):
                        if i_m:
                            pred_tags[-1].append(self.data_handler.encoding['tag2name'][i_p])
                            n_text[-1].append(i)
                if self.data_handler.tokenizer is None:
                    n_text = [[self.data_handler.encoding['word2name'][word] for word in sent] for sent in n_text]
                else:
                    n_text = [[self.data_handler.tokenizer.convert_ids_to_tokens(word) for word in sent] for sent in n_text]
            else:
                pred_tags = {}
                for k in self.data_handler.encoding['tag2idx'].keys():
                    pred_tags[k] = []
                n_text = []
                for top_idx, (j, j_m) in enumerate(zip(ids, input_masks)):
                    for k in pred_tags.keys():
                        pred_tags[k].append([])
                    n_text.append([])
                    for bottom_idx, (i, i_m) in enumerate(zip(j, j_m)):
                        if i_m:
                            n_text[-1].append(i)
                            for k in pred_tags.keys():
                                pred_tags[k][-1].append(self.data_handler.encoding['tag2name'][k][predictions[k][top_idx][bottom_idx]])
                if self.data_handler.tokenizer is None:
                    n_text = [[self.data_handler.encoding['word2name'][word] for word in sent] for sent in n_text]
                else:
                    n_text = [[self.data_handler.tokenizer.convert_ids_to_tokens(word) for word in sent] for sent in n_text]
            if bio:
                self.output_handler.save_predictions(out_path, pred_tags, n_text)
            if summary:
                self.output_handler.summarize_predictions(out_path, pred_tags, n_text)
        end = time.time()
        print("Predicting all files took {} seconds".format(round(end - start, 3)))
| 56.321809 | 202 | 0.496482 | 2,229 | 21,177 | 4.459847 | 0.102288 | 0.044362 | 0.049291 | 0.02716 | 0.665124 | 0.610703 | 0.536968 | 0.469973 | 0.447239 | 0.427824 | 0 | 0.006558 | 0.395193 | 21,177 | 375 | 203 | 56.472 | 0.769597 | 0 | 0 | 0.563584 | 0 | 0 | 0.074656 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.026012 | false | 0 | 0.008671 | 0 | 0.046243 | 0.043353 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b848b8a8c138a85e4a5d372b6de985f62d5b679c | 2,641 | py | Python | tools/Polygraphy/tests/tools/args/onnx/test_loader.py | hwkyai/TensorRT | d04182cd0086c70db4a8ad30e0d7675c4eb33782 | [
"Apache-2.0"
] | null | null | null | tools/Polygraphy/tests/tools/args/onnx/test_loader.py | hwkyai/TensorRT | d04182cd0086c70db4a8ad30e0d7675c4eb33782 | [
"Apache-2.0"
] | null | null | null | tools/Polygraphy/tests/tools/args/onnx/test_loader.py | hwkyai/TensorRT | d04182cd0086c70db4a8ad30e0d7675c4eb33782 | [
"Apache-2.0"
] | null | null | null | #
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tempfile

from polygraphy.backend.onnx import onnx_from_path
from polygraphy.tools.args import (DataLoaderArgs, ModelArgs, OnnxLoaderArgs,
                                   OnnxSaveArgs, OnnxShapeInferenceArgs)

from tests.helper import check_file_non_empty
from tests.models.meta import ONNX_MODELS
from tests.tools.args.helper import ArgGroupTestHelper


class TestOnnxLoaderArgs(object):
    def test_basic(self):
        arg_group = ArgGroupTestHelper(OnnxLoaderArgs(), deps=[ModelArgs()])
        arg_group.parse_args([ONNX_MODELS["identity_identity"].path, "--onnx-outputs=identity_out_0"])
        model = arg_group.load_onnx()

        assert len(model.graph.output) == 1
        assert model.graph.output[0].name == "identity_out_0"

    def test_external_data(self):
        arg_group = ArgGroupTestHelper(OnnxLoaderArgs(), deps=[ModelArgs()])
        model = ONNX_MODELS["ext_weights"]
        arg_group.parse_args([model.path, "--load-external-data", model.ext_data])
        model = arg_group.load_onnx()
        assert len(model.graph.node) == 3


class TestOnnxSaveArgs(object):
    def test_external_data(self):
        model = onnx_from_path(ONNX_MODELS["const_foldable"].path)
        arg_group = ArgGroupTestHelper(OnnxSaveArgs(), deps=[ModelArgs(), OnnxLoaderArgs()])
        with tempfile.NamedTemporaryFile() as path, tempfile.NamedTemporaryFile() as data:
            arg_group.parse_args(["-o", path.name, "--save-external-data", data.name])
            arg_group.save_onnx(model)

            check_file_non_empty(path.name)
            check_file_non_empty(data.name)


class TestOnnxShapeInferenceArgs(object):
    def test_shape_inference_disabled_on_fallback(self):
        arg_group = ArgGroupTestHelper(OnnxShapeInferenceArgs(default=True, enable_force_fallback=True), deps=[DataLoaderArgs()])
        arg_group.parse_args([])
        assert arg_group.do_shape_inference

        arg_group.parse_args(["--force-fallback-shape-inference"])
        assert not arg_group.do_shape_inference
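

# Note (not part of the original file): these classes follow pytest
# conventions, so a typical local invocation from the Polygraphy root is:
#
#   pytest tests/tools/args/onnx/test_loader.py -v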
| 40.015152 | 129 | 0.72359 | 332 | 2,641 | 5.566265 | 0.400602 | 0.060606 | 0.035173 | 0.045996 | 0.15368 | 0.104978 | 0.104978 | 0.04329 | 0.04329 | 0 | 0 | 0.005988 | 0.177963 | 2,641 | 65 | 130 | 40.630769 | 0.845233 | 0.220371 | 0 | 0.166667 | 0 | 0 | 0.077827 | 0.029858 | 0 | 0 | 0 | 0 | 0.138889 | 1 | 0.111111 | false | 0 | 0.166667 | 0 | 0.361111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b849e1665151f630f2ada935b9ec6dddcc0bdb46 | 3,326 | py | Python | sdr_availability/data_manips.py | enavu/sdr_avail | d2f5ea4dd927df99e2161b632f382a902761c302 | [
"MIT"
] | null | null | null | sdr_availability/data_manips.py | enavu/sdr_avail | d2f5ea4dd927df99e2161b632f382a902761c302 | [
"MIT"
] | null | null | null | sdr_availability/data_manips.py | enavu/sdr_avail | d2f5ea4dd927df99e2161b632f382a902761c302 | [
"MIT"
] | null | null | null | import glob
import os
import pandas as pd
import datetime as dt
import re


def get_sdr_list(min, office, time_selected):
    path = "sdr_availability/data/**/"
    all_files = glob.glob(os.path.join(path, "*.csv"))

    no_sz = []
    ## Check each file's size
    for i in range(0, len(all_files)):
        sz = os.path.getsize(all_files[i])
        if sz == 0:
            no_sz.append(i)
    ## Remove the bad files (pop in reverse so earlier indices stay valid)
    for sz in sorted(no_sz, reverse=True):
        all_files.pop(sz)

    # Create the lookup for the office selected
    path = "sdr_availability/data/"
    file = "hr.csv"
    hr_list = pd.read_csv(path + file)
    sdrs = hr_list[hr_list['Office'].str.contains(office)]
    sdrs_email = list(sdrs['Email'])

    ## Merge each file.
    df_from_each_file = (pd.read_csv(f, sep=',', quotechar='"', skipinitialspace=True, header=None, names=['Email', 'Slots', 'UnkA', 'UnkB']) for f in all_files)
    df_merged = pd.concat(df_from_each_file, ignore_index=True)
    # Pandas automatically fills in with NaN; replace it to deal with strings only
    df_merged = df_merged.fillna('')

    ## There is some bad data in these files, and columns are not the same because some fields are quoted and some are not.
    ## Let them separate out into columns to work with pandas, then add them back together.
    df_merged['combined_slots'] = (df_merged['Slots'].astype(str) + df_merged['UnkA'].astype(str) + ' ,' + df_merged['UnkB'].astype(str))

    ## Create another dataframe - with only selected locations
    df_selected = df_merged[df_merged['Email'].isin(sdrs_email)]

    ## Evaluate each combined time slot:
    ## break out the list on commas; when > 0, evaluate whether the begin and end times are in the slots.
    # print(df_selected['combined_slots'])
    list_time = []
    email_list = []
    for index, row in df_selected.iterrows():
        list_time.clear()
        ## How many times does the : appear
        count_time = row['combined_slots'].count(":")
        ## Use re to loop through slots and add to string
        i = 0
        while count_time > 0:
            # print(count_time)
            try:
                timeInStr = re.findall('[\d ]\d:\d\d \w\w', row['combined_slots'])[i].strip()
                list_time.append(timeInStr)
                count_time -= 1
                i += 1
            except:
                try:
                    print("SECOND TRY: " + row['combined_slots'])
                    timeInStr = re.findall('[\d ]\d:\d\d\w\w', row['combined_slots'])[i].strip()
                    list_time.append(timeInStr)
                    count_time -= 1
                    i += 1
                except:
                    # no more matches in either format; break to avoid an infinite loop
                    break
        n = 2
        split_list = [list_time[i * n:(i + 1) * n] for i in range((len(list_time) + n - 1) // n)]
        for s in split_list:
            begin_time = dt.datetime.strptime(s[0], '%I:%M %p').time()
            end_time = dt.datetime.strptime(s[1], '%I:%M %p').time()
            # print(str(time_selected[0]) + " " + str(begin_time) + " " + str(time_selected[1]) + " " + str(end_time))
            test = time_selected[0] > begin_time and time_selected[1] < end_time
            if test:
                email_list.append(row['Email'])

    df_selected_time = df_selected[df_selected['Email'].isin(email_list)]
    return df_selected_time, df_selected | 39.595238 | 161 | 0.585087 | 468 | 3,326 | 3.991453 | 0.320513 | 0.038544 | 0.034261 | 0.028908 | 0.17666 | 0.093148 | 0.093148 | 0.093148 | 0.093148 | 0.093148 | 0 | 0.007576 | 0.285628 | 3,326 | 84 | 162 | 39.595238 | 0.77862 | 0.227601 | 0 | 0.214286 | 0 | 0 | 0.098664 | 0.018475 | 0 | 0 | 0 | 0 | 0 | 1 | 0.017857 | false | 0.017857 | 0.089286 | 0 | 0.125 | 0.017857 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0
b84b4cf63ff5b8b6727fbae3a108388343ee987f | 403 | py | Python | instagram.py | test692/InstaBurste | 74d3a25cede9e4ed8ab1f3ca98582539d3d4ea2d | [
"MIT"
] | 19 | 2019-09-17T21:12:39.000Z | 2022-02-12T01:54:27.000Z | instagram.py | test692/InstaBurste | 74d3a25cede9e4ed8ab1f3ca98582539d3d4ea2d | [
"MIT"
] | 1 | 2020-09-12T16:33:49.000Z | 2020-09-12T16:33:49.000Z | instagram.py | test692/InstaBurste | 74d3a25cede9e4ed8ab1f3ca98582539d3d4ea2d | [
"MIT"
] | 13 | 2019-10-22T21:16:22.000Z | 2022-02-27T07:30:51.000Z | # Date: 02/20/2018
# Author: Ethical-H4CK3R
# Description: Interactive Bruter
from lib.tor import tor_exists
from lib.console import Console
from lib.session import Database
class Instagram(Console, Database):
    def run(self):
        self.create_table()
        self.cmdloop()
        self.exit()


if __name__ == '__main__':
    exit('Run: chmod +x install.sh && ./install.sh') if not tor_exists() else Instagram().run() | 23.705882 | 92 | 0.729529 | 58 | 403 | 4.87931 | 0.62069 | 0.074205 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.028902 | 0.141439 | 403 | 17 | 92 | 23.705882 | 0.789017 | 0.176179 | 0 | 0 | 0 | 0 | 0.145897 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0 | 0.3 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0
b85010e24cb0643794873949b89886b38ed7da1a | 6,209 | py | Python | bk7231tools/analysis/rbl.py | khalednassar/bk7231tools | b2c5b5bb9f861a154654b89aa69fee0dfacb9ffa | [
"MIT"
] | 6 | 2022-02-12T11:08:28.000Z | 2022-03-25T23:41:51.000Z | bk7231tools/analysis/rbl.py | khalednassar/bk7231tools | b2c5b5bb9f861a154654b89aa69fee0dfacb9ffa | [
"MIT"
] | null | null | null | bk7231tools/analysis/rbl.py | khalednassar/bk7231tools | b2c5b5bb9f861a154654b89aa69fee0dfacb9ffa | [
"MIT"
] | null | null | null | import io
import os
import struct
from dataclasses import astuple, dataclass
from enum import IntFlag
from typing import ClassVar, List
from zlib import crc32

from .flash import FlashLayout
from .utils import block_crc_check


class OTAAlgorithm(IntFlag):
    NONE = 0
    CRYPT_XOR = 1
    CRYPT_AES256 = 2
    COMPRESS_GZIP = 256
    COMPRESS_QUICKLZ = 512
    COMPRESS_FASTLZ = 768


@dataclass
class Header:
    magic: bytes
    algo: OTAAlgorithm
    timestamp: int
    name: str
    version: str
    sn: str
    crc32: int
    hash: int
    size_raw: int
    size_package: int
    info_crc32: int

    FORMAT: ClassVar[struct.Struct] = struct.Struct("<4sII16s24s24sIIIII")
    MAGIC: ClassVar[str] = b"RBL\x00"

    @classmethod
    def from_bytes(cls, data: bytes):
        header = cls(*cls.FORMAT.unpack(data))
        header.algo = OTAAlgorithm(header.algo)
        cls.__validate_data(data, info_crc32=header.info_crc32)

        def __clean_c_string(x): return x[:x.index(b"\x00")].decode()
        header.name, header.version, header.sn = tuple(
            map(__clean_c_string, [header.name, header.version, header.sn]))
        return header

    def to_bytes(self) -> bytes:
        data_tuple = astuple(self)

        def encode_str(x): return x if not isinstance(x, str) else x.encode('utf-8')
        data_tuple = tuple(map(encode_str, data_tuple))
        return self.FORMAT.pack(*data_tuple)

    @classmethod
    def __validate_data(cls, data: bytes, info_crc32: int):
        calculated_crc = crc32(data[:-4])
        if calculated_crc != info_crc32:
            raise ValueError(
                f"Header crc32 {info_crc32:#x} does not match calculated header crc32 {calculated_crc:#x}")


__HEADER_MAGIC_NEEDLE = bytes([Header.MAGIC[0]]), Header.MAGIC[1:]


class Container(object):
    def __init__(self, header: Header, payload: bytes):
        self.header = header
        self.payload = payload

    @classmethod
    def from_bytestream(cls, bytestream: io.BytesIO, flash_layout: FlashLayout = None):
        magic = bytestream.read(len(Header.MAGIC))
        if magic != Header.MAGIC:
            raise ValueError(
                f"Given bytestream magic {magic.hex()}[hex] does not match an RBL container magic")

        if flash_layout and flash_layout.with_crc:
            bytestream.seek(bytestream.tell() - len(magic), os.SEEK_SET)
            headerstream = cls.__create_bytestream_without_crc(bytestream)
            header_byte_count = Header.FORMAT.size
            crc_byte_count = (header_byte_count // 32) * 2
            header = Header.from_bytes(headerstream.read(header_byte_count))
            bytestream.seek(bytestream.tell() + header_byte_count + crc_byte_count, os.SEEK_SET)
        else:
            header_byte_count = Header.FORMAT.size - len(magic)
            header = Header.from_bytes(
                magic + bytestream.read(header_byte_count))

        bytestream = cls.__create_bytestream_for_layout(
            header, bytestream, flash_layout)
        payload = bytestream.read(header.size_package)

        # TODO: implement AES and GZIP support
        if header.algo == OTAAlgorithm.NONE:
            padding = header.size_package - header.size_raw
            payload = payload[:header.size_raw] + (bytes([padding]) * padding)

        payload_crc = crc32(payload)
        if payload_crc != header.crc32:
            payload = None
        return cls(header, payload)

    def write_to_bytestream(self, bytestream: io.BytesIO, payload_only=True):
        if self.payload is None:
            raise ValueError("Container has invalid payload")
        if not payload_only:
            bytestream.write(self.header.to_bytes())
        bytestream.write(self.payload)

    @classmethod
    def __create_bytestream_for_layout(cls, header: Header, bytestream: io.BytesIO, flash_layout: FlashLayout) -> io.BytesIO:
        if flash_layout is None:
            return bytestream

        partition = filter(lambda x: x.name == header.name,
                           flash_layout.partitions).__next__()
        start_position = bytestream.tell()
        package_position = start_position - partition.size
        if package_position < 0:
            raise ValueError(
                f"Partition {header.name} does not have enough bytes for payload")

        new_stream = io.BytesIO()
        package_read_bytes = partition.size - Header.FORMAT.size
        if flash_layout.with_crc:
            package_read_bytes -= (Header.FORMAT.size // 32) * 2
        bytestream.seek(package_position)
        new_stream.write(bytestream.read(package_read_bytes))
        bytestream.seek(start_position, os.SEEK_SET)
        new_stream.seek(0, os.SEEK_SET)
        return new_stream if not flash_layout.with_crc else cls.__create_bytestream_without_crc(new_stream)

    @classmethod
    def __create_bytestream_without_crc(cls, bytestream: io.BytesIO) -> io.BytesIO:
        new_stream = io.BytesIO()
        start_position = bytestream.tell()
        crc_blocks = bytestream.read(36)
        if block_crc_check(crc_blocks[:32], crc_blocks[32:34]):
            bytestream.seek(start_position, os.SEEK_SET)
        elif block_crc_check(crc_blocks[2:34], crc_blocks[34:36]):
            bytestream.seek(start_position + 2, os.SEEK_SET)
        else:
            pass

        block = bytestream.read(32)
        while block:
            new_stream.write(block)
            bytestream.read(2)
            block = bytestream.read(32)
        bytestream.seek(start_position, os.SEEK_SET)
        new_stream.seek(0, os.SEEK_SET)
        return new_stream


def find_rbl_containers_indices(bytestream: io.BytesIO) -> List[int]:
    oldpos = bytestream.tell()
    rbl_locations = []
    magic_needle = __HEADER_MAGIC_NEEDLE[0]
    magic_remainder = __HEADER_MAGIC_NEEDLE[1]
    c = bytestream.read(len(magic_needle))
    while c:
        location = bytestream.tell() - 1
        if c == magic_needle:
            remainder = bytestream.read(len(magic_remainder))
            if remainder == magic_remainder:
                rbl_locations.append(location)
        c = bytestream.read(len(magic_needle))
    bytestream.seek(oldpos, os.SEEK_SET)
    return rbl_locations
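

# Usage sketch (illustrative, not part of the original module): scan a raw
# flash dump for RBL containers and extract their payloads. The file names
# below are assumptions.
#
#   with open("flash_dump.bin", "rb") as f:
#       stream = io.BytesIO(f.read())
#   for index in find_rbl_containers_indices(stream):
#       stream.seek(index, os.SEEK_SET)
#       container = Container.from_bytestream(stream)  # flash_layout optional
#       if container.payload is not None:
#           with open(container.header.name + ".bin", "wb") as out:
#               container.write_to_bytestream(out)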
| 36.523529 | 125 | 0.657111 | 767 | 6,209 | 5.080834 | 0.195567 | 0.039518 | 0.020785 | 0.027714 | 0.156017 | 0.11496 | 0.047216 | 0.037978 | 0.037978 | 0.037978 | 0 | 0.019107 | 0.249799 | 6,209 | 169 | 126 | 36.739645 | 0.817518 | 0.005798 | 0 | 0.160839 | 0 | 0 | 0.047318 | 0 | 0 | 0 | 0 | 0.005917 | 0 | 1 | 0.076923 | false | 0.006993 | 0.062937 | 0.013986 | 0.342657 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b851c01b53dbe9cab90203922096752ff6dcb84f | 2,109 | py | Python | connector/main.py | twindebank/DrHue | 9a957af37196f87804c5259169fb826409d18705 | [
"MIT"
] | null | null | null | connector/main.py | twindebank/DrHue | 9a957af37196f87804c5259169fb826409d18705 | [
"MIT"
] | null | null | null | connector/main.py | twindebank/DrHue | 9a957af37196f87804c5259169fb826409d18705 | [
"MIT"
] | null | null | null | import base64
import datetime
import json
import sys
import traceback

import iso8601
from google.cloud import bigquery
from google.cloud.bigquery import TableReference, DatasetReference, Table, SchemaField
from google.cloud.bigquery.enums import SqlTypeNames

DATASET = 'raw_events'

bq_types = {
    str: SqlTypeNames.STRING,
    datetime.datetime: SqlTypeNames.DATETIME,
    float: SqlTypeNames.FLOAT,
    int: SqlTypeNames.INTEGER
}


def main(event, context):
    try:
        pubsub_to_bq(event, context)
    except Exception as e:
        print(repr(e), file=sys.stderr)
        print(traceback.format_exc(), file=sys.stderr)


def pubsub_to_bq(event, context):
    print(f"CONTEXT: {context}")
    pubsub_message = decode_event(event)
    table_name, row = create_row(pubsub_message, context)
    print(f"ROW: {row}")
    send_to_bq(
        dataset=DATASET,
        table=table_name,
        row=row
    )


def decode_event(event):
    return base64.b64decode(event['data']).decode('utf-8')


def create_row(raw, context):
    """
    message type can be state or telemetry
    namespace can be hue only atm
    """
    message = json.loads(raw)
    message_type = message.get("type", "unknown")
    message_source = message.get("source", "unknown")
    table_name = f"raw_{message_type}_{message_source}"
    row = {
        "payload": raw,
        "event_id": context.event_id,
        "insertion_datetime": iso8601.parse_date(context.timestamp),
        "resource_name": context.resource['name']
    }
    return table_name, row


def send_to_bq(dataset, table, row):
    bigquery_client = bigquery.Client(project='theo-home')
    table_ref = TableReference(
        dataset_ref=DatasetReference(dataset_id=dataset, project='theo-home'),
        table_id=table,
    )
    schema = [SchemaField(name=field, field_type=bq_types[type(data)]) for field, data in row.items()]
    table = bigquery_client.create_table(
        Table(table_ref, schema=schema),
        exists_ok=True
    )
    errors = bigquery_client.insert_rows(table, [row])
    if errors:
        print(errors, file=sys.stderr)
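

# Local smoke-test sketch (illustrative; the fake context below is an
# assumption, not part of the deployed Cloud Function):
#
#   class FakeContext:
#       event_id = "1"
#       timestamp = "2021-01-01T00:00:00Z"
#       resource = {"name": "projects/theo-home/topics/test"}
#
#   payload = base64.b64encode(
#       json.dumps({"type": "telemetry", "source": "hue"}).encode("utf-8"))
#   main({"data": payload}, FakeContext())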
| 25.719512 | 102 | 0.684685 | 265 | 2,109 | 5.279245 | 0.354717 | 0.011437 | 0.032166 | 0.032881 | 0.031451 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008934 | 0.203888 | 2,109 | 81 | 103 | 26.037037 | 0.8243 | 0.032243 | 0 | 0 | 0 | 0 | 0.086096 | 0.017318 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0 | 0.15 | 0.016667 | 0.266667 | 0.083333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b856e3cad75b58cb76f2832c9deeeb5918bb6779 | 1,000 | py | Python | remote/__init__.py | 89jd/pi-bike-python-client | 24c76ad15b3621dbb065849490875b15cd6fa25e | [
"Apache-2.0"
] | null | null | null | remote/__init__.py | 89jd/pi-bike-python-client | 24c76ad15b3621dbb065849490875b15cd6fa25e | [
"Apache-2.0"
] | null | null | null | remote/__init__.py | 89jd/pi-bike-python-client | 24c76ad15b3621dbb065849490875b15cd6fa25e | [
"Apache-2.0"
] | null | null | null | import evdev
import threading
import time
class RemoteControlThread(threading.Thread):
def __init__(self, device_id:str, on_key, debug: bool = False) -> None:
super().__init__(daemon=True)
self.device_id = device_id
self.on_key = on_key
self.debug = debug
def print_debug_log(self, s: str):
if self.debug:
print(s)
def run(self) -> None:
while True:
try:
device = evdev.InputDevice(self.device_id)
self.print_debug_log('Input device found')
for event in device.read_loop():
if event.type == evdev.ecodes.EV_KEY:
self.on_key(event.code, event.value)
except FileNotFoundError:
if self.debug:
self.print_debug_log('Input device not found')
time.sleep(1)
if __name__ == "__main__":
RemoteControlThread(print).start()
while True:
time.sleep(1)
| 29.411765 | 75 | 0.57 | 118 | 1,000 | 4.559322 | 0.423729 | 0.05948 | 0.066915 | 0.063197 | 0.104089 | 0.104089 | 0 | 0 | 0 | 0 | 0 | 0.003012 | 0.336 | 1,000 | 33 | 76 | 30.30303 | 0.807229 | 0 | 0 | 0.214286 | 0 | 0 | 0.048 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.107143 | false | 0 | 0.107143 | 0 | 0.25 | 0.178571 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b856e4841aac9d0613a32fb9673cce53941c59ba | 2,175 | py | Python | pelicanconf.py | janithl/blog | 9b0a69aace559c1f031f124f9d111a45e9678887 | [
"MIT"
] | null | null | null | pelicanconf.py | janithl/blog | 9b0a69aace559c1f031f124f9d111a45e9678887 | [
"MIT"
] | null | null | null | pelicanconf.py | janithl/blog | 9b0a69aace559c1f031f124f9d111a45e9678887 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = 'Janith'
SITENAME = "Janith's Blog"
SITEURL = ''
RELATIVE_URLS = True
DEFAULT_LANG = 'en'
TIMEZONE = 'Asia/Colombo'
THEME = 'pelican-readable'
USER_LOGO_URL = SITEURL + '/static/images/avatar.png'
PATH = 'content'
ARTICLE_URL = '{date:%Y}/{date:%m}/{slug}/'
ARTICLE_SAVE_AS = '{date:%Y}/{date:%m}/{slug}/index.html'
ARTICLE_LANG_URL = '{date:%Y}/{date:%m}/{slug}-{lang}/'
ARTICLE_LANG_SAVE_AS = '{date:%Y}/{date:%m}/{slug}-{lang}/index.html'
CATEGORY_URL = 'category/{slug}/'
CATEGORY_SAVE_AS = 'category/{slug}/index.html'
AUTHOR_URL = 'author/{slug}/'
AUTHOR_SAVE_AS = 'author/{slug}/index.html'
TAG_URL = 'tag/{slug}/'
TAG_SAVE_AS = 'tag/{slug}/index.html'
TAGS_URL = 'tags/'
TAGS_SAVE_AS = 'tags/index.html'
DEFAULT_PAGINATION = 5
PAGINATION_PATTERNS = (
    (1, '{base_name}/', '{base_name}/index.html'),
    (2, '{base_name}/page/{number}/', '{base_name}/page/{number}/index.html'),
)
EXTRA_PATH_METADATA = {'images/favicon.ico': {'path': 'favicon.ico'}}
DISQUS_SITENAME = 'janithl'
GITHUB_URL = 'https://github.com/janithl'
GOOGLE_ANALYTICS = 'UA-7602960-7'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
#LINKS = (('Pelican', 'http://getpelican.com/'),
# ('Python.org', 'http://python.org/'),
# ('Jinja2', 'http://jinja.pocoo.org/'),
# ('You can modify those links in your config file', '#'),)
# Social widget
SOCIAL = (('Github', 'https://github.com/janithl'),)
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
# Turn off syntax highlights
MARKDOWN = {
    'extension_configs': {
        'markdown.extensions.codehilite': {'guess_lang': False, 'css_class': 'highlight'},
        'markdown.extensions.extra': {},
        'markdown.extensions.meta': {},
    },
    'output_format': 'html5',
}
| 29.794521 | 90 | 0.628966 | 267 | 2,175 | 4.917603 | 0.483146 | 0.054836 | 0.027418 | 0.030465 | 0.092917 | 0.062452 | 0.030465 | 0 | 0 | 0 | 0 | 0.007991 | 0.194483 | 2,175 | 72 | 91 | 30.208333 | 0.741438 | 0.206437 | 0 | 0 | 0 | 0 | 0.410631 | 0.234229 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.022222 | 0 | 0.022222 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b8588ca8b4874137dfd4d8625bf6f4c06e56b9aa | 4,784 | py | Python | src/ralph_scrooge/models/_history.py | ar4s/ralph_pricing | 40127e9450edc91ba0be725d63bf691dde16a137 | [
"Apache-2.0"
] | 4 | 2016-05-06T19:28:53.000Z | 2018-01-26T21:13:40.000Z | src/ralph_scrooge/models/_history.py | ar4s/ralph_pricing | 40127e9450edc91ba0be725d63bf691dde16a137 | [
"Apache-2.0"
] | 283 | 2015-01-07T15:06:34.000Z | 2019-08-08T10:43:47.000Z | src/ralph_scrooge/models/_history.py | ar4s/ralph_pricing | 40127e9450edc91ba0be725d63bf691dde16a137 | [
"Apache-2.0"
] | 16 | 2015-01-27T10:33:20.000Z | 2020-06-25T07:04:21.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import copy
from datetime import datetime
from django.db import models
from django.forms.models import model_to_dict
from simple_history.models import HistoricalRecords, transform_field
try:
    from django.utils.timezone import now
except ImportError:
    now = datetime.now


class IntervalHistoricalRecords(HistoricalRecords):
    """
    Historical record with date intervals in which the record was active.
    """
    def get_extra_fields(self, model, fields):
        """
        Add active_from and active_to fields to historical records.
        """
        result = super(IntervalHistoricalRecords, self).get_extra_fields(
            model,
            fields,
        )
        result['active_from'] = models.DateTimeField(default=now)
        result['active_to'] = models.DateTimeField(default=datetime.max)
        result['__str__'] = lambda self: '%s active from %s to %s' % (
            self.history_object,
            self.active_from,
            self.active_to,
        )
        return result

    def copy_fields(self, model):
        """
        Copy fields with foreign key relations.
        """
        fields = {}
        for field in model._meta.fields:
            field = copy.deepcopy(field)
            if isinstance(field, models.ForeignKey):
                field.rel.related_name = '+'
                field.rel.related_query_name = None
                field.attname = field.name
                transform_field(field)
            fields[field.name] = field
        return fields

    def _update_most_recent(self, manager, **fields):
        """
        Updates the last historical record with the passed field values
        (ex. active_to).
        """
        try:
            # get last historical record
            most_recent = manager.all()[:1].get()
        except manager.model.DoesNotExist:
            return
        # update fields values
        for field, value in fields.items():
            setattr(most_recent, field, value)
        most_recent.save()

    def create_historical_record(self, instance, type):
        """
        Creates a historical record (just the original method).
        """
        current_now = now()
        history_date = getattr(instance, '_history_date', current_now)
        history_user = getattr(instance, '_history_user', None)
        active_from = current_now
        # update most recent history record
        manager = getattr(instance, self.manager_name)
        self._update_most_recent(manager, active_to=current_now)
        attrs = {}
        for field in instance._meta.fields:
            attrs[field.attname] = getattr(instance, field.attname)
        manager.create(
            history_date=history_date,
            history_type=type,
            history_user=history_user,
            active_from=active_from,
            **attrs
        )

    def post_delete(self, instance, **kwargs):
        """
        Updates the most recent history record's active_to date.
        """
        manager = getattr(instance, self.manager_name)
        self._update_most_recent(
            manager,
            active_to=now(),
            history_type='-'
        )


class ModelDiffMixin(object):
    """
    A model mixin that "tracks" model field values and provides a useful API
    to know what fields have been changed.
    """
    class Meta:
        abstract = True
        app_label = 'ralph_scrooge'

    def __init__(self, *args, **kwargs):
        super(ModelDiffMixin, self).__init__(*args, **kwargs)
        self.__initial = self._dict

    @property
    def diff(self):
        d1 = self.__initial
        d2 = self._dict
        diffs = [(k, (v, d2[k])) for k, v in d1.items() if v != d2[k]]
        return dict(diffs)

    @property
    def has_changed(self):
        return bool(self.diff)

    @property
    def _dict(self):
        return model_to_dict(
            self,
            fields=[field.name for field in self._meta.fields]
        )

    def save(self, *args, **kwargs):
        """
        Saves the model and sets the initial state.
        """
        # set skip_history_when_saving if historical record should not be saved
        # (historical record should be saved when instance is created or
        # modified (but only when some field value is changed))
        if self.pk and not self.has_changed:
            self.skip_history_when_saving = True
        try:
            super(ModelDiffMixin, self).save(*args, **kwargs)
        finally:
            if hasattr(self, 'skip_history_when_saving'):
                del self.skip_history_when_saving
            self.__initial = self._dict
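

# Usage sketch (the example model below is an assumption, not from this app):
# combining both helpers gives interval-stamped history rows:
#
#   class Venture(ModelDiffMixin, models.Model):
#       name = models.CharField(max_length=255)
#       history = IntervalHistoricalRecords()
#
#   v = Venture.objects.create(name="a")  # history row, active_to=datetime.max
#   v.name = "b"
#   v.save()   # previous row's active_to is closed, a new row is opened
#   v.save()   # no field changed -> skip_history_when_saving, no new row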
| 30.864516 | 79 | 0.610159 | 540 | 4,784 | 5.185185 | 0.3 | 0.028571 | 0.022857 | 0.03 | 0.078214 | 0.051429 | 0.051429 | 0.051429 | 0.051429 | 0.051429 | 0 | 0.002099 | 0.302885 | 4,784 | 154 | 80 | 31.064935 | 0.837481 | 0.166806 | 0 | 0.09901 | 0 | 0 | 0.030279 | 0.006319 | 0 | 0 | 0 | 0 | 0 | 1 | 0.09901 | false | 0 | 0.108911 | 0.019802 | 0.29703 | 0.009901 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b859097b626f2734ab0bcd4e95bb569b2b8a207c | 1,372 | py | Python | pycoflow/packet.py | anzigly/pycoflow | cd8071d7f17d3a78a2b0d028f1ddf7d838d6840c | [
"Apache-2.0"
] | 1 | 2016-09-07T11:50:33.000Z | 2016-09-07T11:50:33.000Z | pycoflow/packet.py | anzigly/pycoflow | cd8071d7f17d3a78a2b0d028f1ddf7d838d6840c | [
"Apache-2.0"
] | null | null | null | pycoflow/packet.py | anzigly/pycoflow | cd8071d7f17d3a78a2b0d028f1ddf7d838d6840c | [
"Apache-2.0"
] | null | null | null | from utils.time import TimeUtils


class Packet(object):
    """
    a packet abstraction
    """
    def __init__(self, shuffle_id, packet_time, src_ip, src_port, dst_ip, dst_port, packet_size):
        self.stage_id = str(shuffle_id)
        self.packet_time = packet_time
        self.src_ip = src_ip
        self.src_port = str(src_port)
        self.dst_ip = dst_ip
        self.dst_port = str(dst_port)
        self.packet_size = packet_size

    def __str__(self):
        return str(self.stage_id) + " " + TimeUtils.time_to_string(self.packet_time) + " "\
            + self.src_ip + ":" + self.src_port + " " + self.dst_ip + ":" + self.dst_port\
            + " " + str(self.packet_size)

    @classmethod
    def from_line_str(cls, flow_line):
        """
        get packet object from a line in captured file.
        :param flow_line: a line in captured file.
        :return: a packet object
        """
        try:
            [shuffle_code, packet_time, src_ip, src_port, dst_ip, dst_port, packet_size] = flow_line.split("\t")
            stage_id = str((int(shuffle_code) / 4) - 1)
            packet_time = TimeUtils.time_convert(packet_time)
            packet_size = int(packet_size)
        except ValueError:
            return None
        else:
            return cls(stage_id, packet_time, src_ip, src_port, dst_ip, dst_port, packet_size) | 37.081081 | 112 | 0.604956 | 186 | 1,372 | 4.11828 | 0.247312 | 0.104439 | 0.041775 | 0.058747 | 0.387728 | 0.227154 | 0.177546 | 0.177546 | 0.177546 | 0.177546 | 0 | 0.002053 | 0.290087 | 1,372 | 37 | 113 | 37.081081 | 0.784394 | 0.099125 | 0 | 0 | 0 | 0 | 0.006768 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.12 | false | 0 | 0.04 | 0.04 | 0.32 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0
b85bf7ae76c0c5a06e68958f1e4c6f2fe3564483 | 3,530 | py | Python | notebooks/get_unique_kmers_per_celltype.py | czbiohub/scrnaseq-for-the-99-percent | 616e35c7596e2ce060d3ffaa84904b0ba0f235f4 | [
"MIT"
] | 2 | 2021-07-03T17:56:36.000Z | 2021-07-04T20:03:25.000Z | notebooks/get_unique_kmers_per_celltype.py | czbiohub/scrnaseq-for-the-99-percent | 616e35c7596e2ce060d3ffaa84904b0ba0f235f4 | [
"MIT"
] | null | null | null | notebooks/get_unique_kmers_per_celltype.py | czbiohub/scrnaseq-for-the-99-percent | 616e35c7596e2ce060d3ffaa84904b0ba0f235f4 | [
"MIT"
] | null | null | null |
import argparse
import glob
import os
import pandas as pd
import scanpy as sc
from joblib import Parallel, delayed
from IPython.display import display
from tqdm import tqdm
SHARED_CELLTYPES = [
"Alveolar Epithelial Type 2",
"B cell",
"Capillary",
"Dendritic",
"Fibroblast",
"Macrophage",
"Monocyte",
"Natural Killer T cell",
"Smooth Muscle and Myofibroblast",
"T cell",
]
def describe(df, random=False):
print(df.shape)
print("--- First 5 entries ---")
display(df.head())
if random:
print("--- Random subset ---")
display(df.sample(5))
def process_hash2kmer(parquet, adata_shared, celltype_col):
hash2kmer = pd.read_parquet(parquet)
describe(hash2kmer)
hash2kmer_with_celltypes = hash2kmer.join(
adata_shared.obs[celltype_col], on="cell_id"
)
hash2kmer_celltype_unique_hashvals = hash2kmer_with_celltypes.drop_duplicates(
[
"kmer_in_sequence",
"kmer_in_alphabet",
"hashval",
"gene_name",
"alignment_status",
"broad_group",
"cell_id",
]
)
describe(hash2kmer_celltype_unique_hashvals)
parquet_out = parquet.replace(".parquet", "__unique_kmers_per_celltype.parquet")
hash2kmer_celltype_unique_hashvals.to_parquet(parquet_out)
# Show number of aligned/unaligned k-mers per celltype
per_celltype_alignment_status_kmers = hash2kmer_celltype_unique_hashvals.groupby(
celltype_col, observed=True
).alignment_status.value_counts()
print(per_celltype_alignment_status_kmers)
def main():
p = argparse.ArgumentParser()
# base directory containing a 2--single-cell-kmers folder, which holds per-sketch-id directories with hash2kmer parquet files
p.add_argument("species_base_dir")
p.add_argument(
"--kmer-subdir",
default="2--single-cell-kmers",
type=str,
help="Subdirectory containing csvs within each per-sketch id subdirectory",
)
p.add_argument(
"--h5ad",
default="/home/olga/data_sm/immune-evolution/h5ads/human-lemur-mouse-bat/human-lemur-mouse-bat__lung_only.h5ad",
help=("Location of the AnnData h5ad object of single-cell data"),
)
p.add_argument(
"--n-jobs",
default=3,
type=int,
help=(
"Number of jobs to do in parallel. By default, 3 for the 3 molecule types (DNA, protein, Dayhoff)"
),
)
p.add_argument(
"--celltype-col",
default="broad_group",
help=(
"Column name endcoding the cell type in the h5ad AnnData object, i.e. an adata.obs column"
),
)
args = p.parse_args()
adata = sc.read(args.h5ad)
adata.obs = adata.obs.reset_index().set_index("cell_id")
adata_shared = adata[adata.obs[args.celltype_col].isin(SHARED_CELLTYPES)]
parquets = glob.iglob(
os.path.join(
args.species_base_dir,
args.kmer_subdir,
"*", # This is the sketch_id, e.g. alphabet-DNA__ksize-21__scaled-10
"hash2kmer.parquet",
)
)
if args.n_jobs > 1:
Parallel(n_jobs=args.n_jobs)(
delayed(process_hash2kmer)(parquet, adata_shared, args.celltype_col)
for parquet in parquets
)
else:
for parquet in tqdm(parquets):
print("hash2kmer parquet:", parquet)
process_hash2kmer(parquet, adata_shared, args.celltype_col)
if __name__ == "__main__":
main()
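# Example invocation (paths and values are illustrative, not from the repo):
#
#     python get_unique_kmers_per_celltype.py /data/human \
#         --h5ad /data/lung.h5ad --n-jobs 3 --celltype-col broad_group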
| 28.015873 | 120 | 0.64051 | 425 | 3,530 | 5.094118 | 0.407059 | 0.035566 | 0.027714 | 0.057275 | 0.089607 | 0.045266 | 0.045266 | 0.045266 | 0 | 0 | 0 | 0.012913 | 0.254108 | 3,530 | 125 | 121 | 28.24 | 0.809343 | 0.064023 | 0 | 0.07767 | 0 | 0.029126 | 0.260079 | 0.041225 | 0 | 0 | 0 | 0 | 0 | 1 | 0.029126 | false | 0 | 0.07767 | 0 | 0.106796 | 0.048544 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b85c06f9d0a8133d01212d5db1a25774ca5a7ab5 | 11,631 | py | Python | train/inference.py | sjtu-tcloud/Tiny-OFA | 4b0c3228d96e0a0a16b6a73d8c65afddea7bad49 | [
"MIT"
] | null | null | null | train/inference.py | sjtu-tcloud/Tiny-OFA | 4b0c3228d96e0a0a16b6a73d8c65afddea7bad49 | [
"MIT"
] | null | null | null | train/inference.py | sjtu-tcloud/Tiny-OFA | 4b0c3228d96e0a0a16b6a73d8c65afddea7bad49 | [
"MIT"
] | null | null | null | import argparse
import cv2
import numpy as np
import torch.nn.functional as F
from torch.utils.data import DataLoader
import torch.nn as nn
import torch
import matplotlib.pyplot as plt
def select_device(device='', apex=False, batch_size=None):
# device = 'cpu' or '0' or '0,1,2,3'
cpu_request = device.lower() == 'cpu'
if device and not cpu_request: # if device requested other than 'cpu'
os.environ['CUDA_VISIBLE_DEVICES'] = device # set environment variable
assert torch.cuda.is_available(), 'CUDA unavailable, invalid device %s requested' % device # check availablity
cuda = False if cpu_request else torch.cuda.is_available()
if cuda:
c = 1024 ** 2 # bytes to MB
ng = torch.cuda.device_count()
if ng > 1 and batch_size: # check that batch_size is compatible with device_count
assert batch_size % ng == 0, 'batch-size %g not multiple of GPU count %g' % (batch_size, ng)
x = [torch.cuda.get_device_properties(i) for i in range(ng)]
s = 'Using CUDA ' + ('Apex ' if apex else '') # apex for mixed precision https://github.com/NVIDIA/apex
for i in range(0, ng):
if i == 1:
s = ' ' * len(s)
print("%sdevice%g _CudaDeviceProperties(name='%s', total_memory=%dMB)" %
(s, i, x[i].name, x[i].total_memory / c))
else:
print('Using CPU')
print('') # skip a line
return torch.device('cuda:0' if cuda else 'cpu')
# Modified quantization of the weight data
def uniform_quantize(k):
class qfn(torch.autograd.Function):
@staticmethod
def forward(ctx, input):
if k == 32:
out = input
elif k == 1:
out = torch.sign(input)
else:
n = float(2 ** k - 1)
out = torch.round(input * n) / n
return out
@staticmethod
def backward(ctx, grad_output):
grad_input = grad_output.clone()
return grad_input
return qfn().apply
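# Worked example for k = 2 (so n = 2**2 - 1 = 3):
#   0.40 -> round(0.40 * 3) / 3 = 1/3 ~= 0.333
#   0.90 -> round(0.90 * 3) / 3 = 3/3 = 1.0
# backward() is a straight-through estimator: gradients pass through the
# non-differentiable rounding unchanged.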
class weight_quantize_fn(nn.Module):
def __init__(self, w_bit):
super(weight_quantize_fn, self).__init__()
assert w_bit <= 8 or w_bit == 32
self.w_bit = w_bit
# the sign bit takes one of the k bits
self.uniform_q = uniform_quantize(k=w_bit - 1)
def forward(self, x):
if self.w_bit == 32:
weight = torch.tanh(x)
weight_q = weight / torch.max(torch.abs(weight))
elif self.w_bit == 1:
E = torch.mean(torch.abs(x)).detach()
weight_q = (self.uniform_q(x / E) + 1) / 2 * E
else:
weight = torch.tanh(x)
weight = weight / torch.max(torch.abs(weight))
# quantize to signed k-bit values
weight_q = self.uniform_q(weight)
return weight_q
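# Worked example for w_bit = 8 (uniform_q is built with k = 7, so n = 127):
#   tanh squashes raw weights into (-1, 1); dividing by max |w| rescales to
#   [-1, 1]; round(w * 127) / 127 then snaps each weight to one of 255
#   signed levels.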
class activation_quantize_fn(nn.Module):
def __init__(self, a_bit):
super(activation_quantize_fn, self).__init__()
assert a_bit <= 8 or a_bit == 32
self.a_bit = a_bit
self.uniform_q = uniform_quantize(k=a_bit)
def forward(self, x):
if self.a_bit == 32:
activation_q = torch.clamp(x, 0, 6)
else:
activation_q = self.uniform_q(torch.clamp(x, 0, 1))
# print(np.unique(activation_q.detach().numpy()))
return activation_q
def conv2d_Q_fn(w_bit):
class Conv2d_Q(nn.Conv2d):
def __init__(self, in_channels, out_channels, kernel_size, stride=1,
padding=0, dilation=1, groups=1, bias=True):
super(Conv2d_Q, self).__init__(in_channels, out_channels, kernel_size, stride,
padding, dilation, groups, bias)
self.w_bit = w_bit
self.quantize_fn = weight_quantize_fn(w_bit=w_bit)
def forward(self, input, order=None):
weight_q = self.quantize_fn(self.weight)
return F.conv2d(input, weight_q, self.bias, self.stride,
self.padding, self.dilation, self.groups)
return Conv2d_Q
def create_grids(self, img_size=416, ng=(13, 13), device='cpu', type=torch.float32):
nx, ny = ng # x and y grid size
self.img_size = max(img_size)
self.stride = self.img_size / max(ng)
# build xy offsets
yv, xv = torch.meshgrid([torch.arange(ny), torch.arange(nx)])
self.grid_xy = torch.stack((xv, yv), 2).to(device).type(type).view((1, 1, ny, nx, 2))
# build wh gains
self.anchor_vec = self.anchors.to(device) / self.stride
self.anchor_wh = self.anchor_vec.view(1, self.na, 1, 1, 2).to(device).type(type)
self.ng = torch.Tensor(ng).to(device)
self.nx = nx
self.ny = ny
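# Worked example: img_size = (416, 416) and ng = (13, 13) give
# stride = 416 / 13 = 32, i.e. each grid cell spans a 32x32 pixel region,
# and anchor sizes are divided by 32 into grid-cell units.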
class YOLOLayer(nn.Module):
def __init__(self, anchors):
super(YOLOLayer, self).__init__()
self.anchors = torch.Tensor(anchors)
self.na = len(anchors) # number of anchors (3)
self.no = 6 # number of outputs
self.nx = 0 # initialize number of x gridpoints
self.ny = 0 # initialize number of y gridpoints
def forward(self, p, img_size):
bs, _, ny, nx = p.shape # bs, 255, 13, 13
if (self.nx, self.ny) != (nx, ny):
create_grids(self, img_size, (nx, ny), p.device, p.dtype)
# p.view(bs, 255, 13, 13) -- > (bs, 3, 13, 13, 85) # (bs, anchors, grid, grid, classes + xywh)
p = p.view(bs, self.na, self.no, self.ny, self.nx).permute(0, 1, 3, 4, 2).contiguous() # prediction
if self.training:
return p
else: # inference
# s = 1.5 # scale_xy (pxy = pxy * s - (s - 1) / 2)
io = p.clone() # inference output
io[..., :2] = torch.sigmoid(io[..., :2]) + self.grid_xy # xy
io[..., 2:4] = torch.exp(io[..., 2:4]) * self.anchor_wh # wh yolo method
# io[..., 2:4] = ((torch.sigmoid(io[..., 2:4]) * 2) ** 3) * self.anchor_wh # wh power method
io[..., :4] *= self.stride  # back to original pixel scale
torch.sigmoid_(io[..., 4:])
return io.view(bs, -1, self.no), p
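# Decoding sketch, mirroring the lines above: for the anchor at grid cell
# (gx, gy),
#   bx = sigmoid(tx) + gx          by = sigmoid(ty) + gy    (cell units)
#   bw = exp(tw) * anchor_w        bh = exp(th) * anchor_h  (cell units)
# and multiplying by self.stride maps the boxes back to input pixels.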
class TestNetQua(nn.Module):
def __init__(self):
super(TestNetQua, self).__init__()
W_BIT = 8
A_BIT = 8
conv2d_q = conv2d_Q_fn(W_BIT)
# act_q = activation_quantize_fn(4)
self.layers = nn.Sequential(
conv2d_q(3, 16, kernel_size=3, stride=1, padding=1, bias=False),
nn.BatchNorm2d(16),
activation_quantize_fn(A_BIT),
nn.MaxPool2d(2, stride=2),
conv2d_q(16, 32, kernel_size=3, stride=1, padding=1, bias=False),
nn.BatchNorm2d(32),
activation_quantize_fn(A_BIT),
nn.MaxPool2d(2, stride=2),
conv2d_q(32, 64, kernel_size=3, stride=1, padding=1, bias=False),
nn.BatchNorm2d(64),
activation_quantize_fn(A_BIT),
nn.MaxPool2d(2, stride=2),
conv2d_q(64, 64, kernel_size=3, stride=1, padding=1, bias=False),
nn.BatchNorm2d(64),
activation_quantize_fn(A_BIT),
nn.MaxPool2d(2, stride=2),
conv2d_q(64, 64, kernel_size=3, stride=1, padding=1, bias=False),
nn.BatchNorm2d(64),
activation_quantize_fn(A_BIT),
conv2d_q(64, 64, kernel_size=3, stride=1, padding=1, bias=False),
nn.BatchNorm2d(64),
activation_quantize_fn(A_BIT),
conv2d_q(64, 64, kernel_size=3, stride=1, padding=1, bias=False),
nn.BatchNorm2d(64),
activation_quantize_fn(A_BIT),
conv2d_q(64, 64, kernel_size=3, stride=1, padding=1, bias=False),
nn.BatchNorm2d(64),
activation_quantize_fn(A_BIT),
# nn.Conv2d(256, 18, kernel_size=1, stride=1, padding=0)
conv2d_q(64, 36, kernel_size=1, stride=1, padding=0)
)
self.yololayer = YOLOLayer([[20,20], [20,20], [20,20], [20,20], [20,20], [20,20]])
self.yolo_layers = [self.yololayer]
def forward(self, x):
img_size = x.shape[-2:]
yolo_out, out = [], []
x = self.layers(x)
x = self.yololayer(x, img_size)
yolo_out.append(x)
if self.training: # train
return yolo_out
else: # test
io, p = zip(*yolo_out) # inference output, training output
return torch.cat(io, 1), p
def inference(weights=None,
batch_size=16,
img_size=416,
model=None,
path=None):
# Initialize/load model and set device
if model is None:
device = select_device(opt.device, batch_size=batch_size)
# Initialize model
model = TestNetQua().to(device)
model.nc = 1
model.arc = 'default'
if weights.endswith('.pt'): # pytorch format
model.load_state_dict(torch.load(weights, map_location=device)['model'])
if torch.cuda.device_count() > 1:
model = nn.DataParallel(model)
# load image
img = cv2.imread(path) # BGR
assert img is not None, 'Image Not Found ' + path
h0, w0 = img.shape[:2] # orig hw
fig = plt.figure()
plt.subplot()
interp = cv2.INTER_LINEAR # LINEAR for training, AREA for testing
img = cv2.resize(img, (img_size, img_size // 2), interpolation=interp)
plt.imshow(img)
img = np.expand_dims(np.transpose(img,[2,0,1]),axis=0).copy()
img= torch.from_numpy(img)
model.eval()
with torch.no_grad():
img = img.to(device).float()/255.0 # uint8 to float32, 0 - 255 to 0.0 - 1.0
# run the model
inference_out, training_out = model(img)
inference_out = inference_out.view(inference_out.shape[0], 6, -1)
print(inference_out.shape)
inference_out_t = torch.zeros_like(inference_out[:, 0, :])
for i in range(inference_out.shape[1]):
inference_out_t += inference_out[:, i, :]
inference_out_t = inference_out_t.view(inference_out_t.shape[0], -1, 6) / 6
print(inference_out_t.shape)
FloatTensor = torch.cuda.FloatTensor if inference_out_t.is_cuda else torch.FloatTensor
n = inference_out_t.size(0)
p_boxes = FloatTensor(n, 4)
pred_boxes = inference_out_t[...,:4]
pred_conf = inference_out_t[...,4]
for i in range(n):
_, index = pred_conf[i].max(0)  # index of the most confident prediction along dim 0
p_boxes[i] = pred_boxes[i][index]
print(p_boxes.shape)
print(p_boxes)
img = img.cpu().numpy()
p_boxes = p_boxes.cpu().numpy()
print(p_boxes)
bs, channel, h, w = img.shape
# Convert bounding box format from [x, y, w, h] to [x1, y1, x2, y2]
y = torch.zeros_like(p_boxes) if isinstance(p_boxes, torch.Tensor) else np.zeros_like(p_boxes)
y[:, 0] = p_boxes[:, 0] - p_boxes[:, 2] / 2
y[:, 1] = p_boxes[:, 1] - p_boxes[:, 3] / 2
y[:, 2] = p_boxes[:, 0] + p_boxes[:, 2] / 2
y[:, 3] = p_boxes[:, 1] + p_boxes[:, 3] / 2
y=y.T
print(y)
plt.plot(y[[0,2,2,0,0]],y[[1,1,3,3,1]],'.-')
plt.axis('off')
fig.savefig("test.png")
plt.close()
return
if __name__ == '__main__':
parser = argparse.ArgumentParser(prog='test.py')
parser.add_argument('--weights', type=str, default='weights/test_best.pt', help='weights path')
parser.add_argument('--batch-size', type=int, default=8, help='size of each image batch')
parser.add_argument('--img-size', type=int, default=416, help='inference size (pixels)')
parser.add_argument('--device', default='', help='device id (i.e. 0 or 0,1) or cpu')
parser.add_argument('--path', type=str, default="../data/data_test/boat1/000001.jpg")
opt = parser.parse_args()
print(opt)
# Test
inference(opt.weights,
opt.batch_size,
opt.img_size,
path=opt.path)
| 35.568807 | 119 | 0.58602 | 1,686 | 11,631 | 3.870107 | 0.186833 | 0.033103 | 0.033716 | 0.011034 | 0.235249 | 0.178238 | 0.154636 | 0.126437 | 0.116628 | 0.116628 | 0 | 0.041859 | 0.274955 | 11,631 | 326 | 120 | 35.677914 | 0.731887 | 0.10343 | 0 | 0.169355 | 0 | 0 | 0.044228 | 0.00636 | 0 | 0 | 0 | 0 | 0.020161 | 1 | 0.068548 | false | 0 | 0.032258 | 0 | 0.181452 | 0.040323 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b85c539a97ae71d736c294ca3cb5f24812af1a4f | 1,057 | py | Python | signer/tests/test_sign_auth.py | anandrgit/snet-marketplace-service | 22dd66e9e34a65580eaffa70928bbdb1f67061e8 | [
"MIT"
] | null | null | null | signer/tests/test_sign_auth.py | anandrgit/snet-marketplace-service | 22dd66e9e34a65580eaffa70928bbdb1f67061e8 | [
"MIT"
] | null | null | null | signer/tests/test_sign_auth.py | anandrgit/snet-marketplace-service | 22dd66e9e34a65580eaffa70928bbdb1f67061e8 | [
"MIT"
] | null | null | null | import unittest
from eth_account.messages import defunct_hash_message
from web3.auto import w3
import web3
from signer.signature_authenticator import main
class TestSignAuth(unittest.TestCase):
def test_generate_sign(self):
username = 'test-user'
org_id = 'snet'
group_id = 'cOyJHJdvvig73r+o8pijgMDcXOX+bt8LkvIeQbufP7g='
service_id = 'example-service'
block_number = 1234
signature = 'h9Ssz1bi+aT4NKERkGqJOfx2E9/4Y9czj+YNr4XzXDcnlay37v9Jfown278MFF+VrKsz1r1Ip/CeppwtjhiBtAA='
headers = {
'x-username': username,
'x-organizationid': org_id,
'x-groupid': group_id,
'x-serviceid': service_id,
'x-currentblocknumber': block_number,
'x-signature': signature
}
event = dict()
event['headers'] = headers
event['methodArn'] = 'abc'
response = main(event, None)
assert response['policyDocument']['Statement'][0]['Effect'] == 'Allow'
if __name__ == '__main__':
unittest.main()
| 29.361111 | 110 | 0.639546 | 104 | 1,057 | 6.288462 | 0.615385 | 0.013761 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.036709 | 0.252602 | 1,057 | 35 | 111 | 30.2 | 0.791139 | 0 | 0 | 0 | 0 | 0 | 0.28193 | 0.124882 | 0 | 0 | 0 | 0 | 0.035714 | 1 | 0.035714 | false | 0 | 0.178571 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b85ea0cb5114725b2704ed7b3f23ebcfa494ab11 | 22,881 | py | Python | dialogflow_v2beta1/gapic/agents_client.py | dxiao2003/dialogflow-python-client-v2 | 05a1d3f0682de2c7d8c0c4db3fa5fea8934dfe72 | [
"Apache-2.0"
] | 1 | 2019-03-31T23:25:46.000Z | 2019-03-31T23:25:46.000Z | dialogflow_v2beta1/gapic/agents_client.py | dxiao2003/dialogflow-python-client-v2 | 05a1d3f0682de2c7d8c0c4db3fa5fea8934dfe72 | [
"Apache-2.0"
] | null | null | null | dialogflow_v2beta1/gapic/agents_client.py | dxiao2003/dialogflow-python-client-v2 | 05a1d3f0682de2c7d8c0c4db3fa5fea8934dfe72 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# EDITING INSTRUCTIONS
# This file was generated from the file
# https://github.com/google/googleapis/blob/master/google/cloud/dialogflow/v2beta1/agent.proto,
# and updates to that file get reflected here through a refresh process.
# For the short term, the refresh process will only be runnable by Google engineers.
#
# The only allowed edits are to method and file documentation. A 3-way
# merge preserves those additions if the generated source changes.
"""Accesses the google.cloud.dialogflow.v2beta1 Agents API."""
import functools
import pkg_resources
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.grpc_helpers
import google.api_core.operation
import google.api_core.operations_v1
import google.api_core.page_iterator
import google.api_core.path_template
import google.api_core.protobuf_helpers
from dialogflow_v2beta1.gapic import agents_client_config
from dialogflow_v2beta1.gapic import enums
from dialogflow_v2beta1.proto import agent_pb2
from google.protobuf import empty_pb2
from google.protobuf import struct_pb2
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution('dialogflow').version
class AgentsClient(object):
"""
Manages conversational agents.
Refer to `agents documentation <https://dialogflow.com/docs/agents>`_ for
more details about agents.
Standard methods.
"""
SERVICE_ADDRESS = 'dialogflow.googleapis.com:443'
"""The default address of the service."""
# The scopes needed to make gRPC calls to all of the methods defined in
# this service
_DEFAULT_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', )
# The name of the interface for this client. This is the key used to find
# method configuration in the client_config dictionary
_INTERFACE_NAME = ('google.cloud.dialogflow.v2beta1.Agents')
@classmethod
def project_path(cls, project):
"""Returns a fully-qualified project resource name string."""
return google.api_core.path_template.expand(
'projects/{project}',
project=project, )
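# e.g. AgentsClient.project_path('my-gcp-project') returns
# 'projects/my-gcp-project' (the project id here is illustrative).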
def __init__(self,
channel=None,
credentials=None,
client_config=agents_client_config.config,
client_info=None):
"""Constructor.
Args:
channel (grpc.Channel): A ``Channel`` instance through
which to make calls. If specified, then the ``credentials``
argument is ignored.
credentials (google.auth.credentials.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If none
are specified, the client will attempt to ascertain the
credentials from the environment.
client_config (dict):
A dictionary of call options for each method. If not specified
the default configuration is used. Generally, you only need
to set this if you're developing your own client library.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
"""
if channel is not None and credentials is not None:
raise ValueError(
'channel and credentials arguments to {} are mutually '
'exclusive.'.format(self.__class__.__name__))
if channel is None:
channel = google.api_core.grpc_helpers.create_channel(
self.SERVICE_ADDRESS,
credentials=credentials,
scopes=self._DEFAULT_SCOPES)
self.agents_stub = (agent_pb2.AgentsStub(channel))
# Operations client for methods that return long-running operations
# futures.
self.operations_client = (
google.api_core.operations_v1.OperationsClient(channel))
if client_info is None:
client_info = (
google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO)
client_info.gapic_version = _GAPIC_LIBRARY_VERSION
interface_config = client_config['interfaces'][self._INTERFACE_NAME]
method_configs = google.api_core.gapic_v1.config.parse_method_configs(
interface_config)
self._get_agent = google.api_core.gapic_v1.method.wrap_method(
self.agents_stub.GetAgent,
default_retry=method_configs['GetAgent'].retry,
default_timeout=method_configs['GetAgent'].timeout,
client_info=client_info)
self._search_agents = google.api_core.gapic_v1.method.wrap_method(
self.agents_stub.SearchAgents,
default_retry=method_configs['SearchAgents'].retry,
default_timeout=method_configs['SearchAgents'].timeout,
client_info=client_info)
self._train_agent = google.api_core.gapic_v1.method.wrap_method(
self.agents_stub.TrainAgent,
default_retry=method_configs['TrainAgent'].retry,
default_timeout=method_configs['TrainAgent'].timeout,
client_info=client_info)
self._export_agent = google.api_core.gapic_v1.method.wrap_method(
self.agents_stub.ExportAgent,
default_retry=method_configs['ExportAgent'].retry,
default_timeout=method_configs['ExportAgent'].timeout,
client_info=client_info)
self._import_agent = google.api_core.gapic_v1.method.wrap_method(
self.agents_stub.ImportAgent,
default_retry=method_configs['ImportAgent'].retry,
default_timeout=method_configs['ImportAgent'].timeout,
client_info=client_info)
self._restore_agent = google.api_core.gapic_v1.method.wrap_method(
self.agents_stub.RestoreAgent,
default_retry=method_configs['RestoreAgent'].retry,
default_timeout=method_configs['RestoreAgent'].timeout,
client_info=client_info)
# Service calls
def get_agent(self,
parent,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT):
"""
Retrieves the specified agent.
Example:
>>> import dialogflow_v2beta1
>>>
>>> client = dialogflow_v2beta1.AgentsClient()
>>>
>>> parent = client.project_path('[PROJECT]')
>>>
>>> response = client.get_agent(parent)
Args:
parent (str): Required. The project that the agent to fetch is associated with.
Format: ``projects/<Project ID>``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
Returns:
A :class:`~dialogflow_v2beta1.types.Agent` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
request = agent_pb2.GetAgentRequest(parent=parent)
return self._get_agent(request, retry=retry, timeout=timeout)
def search_agents(self,
parent,
page_size=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT):
"""
Returns the list of agents.
Since there is at most one conversational agent per project, this method is
useful primarily for listing all agents across projects the caller has
access to. One can achieve that with a wildcard project collection id \"-\".
Refer to [List
Sub-Collections](https://cloud.google.com/apis/design/design_patterns#list_sub-collections).
Example:
>>> import dialogflow_v2beta1
>>>
>>> client = dialogflow_v2beta1.AgentsClient()
>>>
>>> parent = client.project_path('[PROJECT]')
>>>
>>>
>>> # Iterate over all results
>>> for element in client.search_agents(parent):
... # process element
... pass
>>>
>>> # Or iterate over results one page at a time
>>> for page in client.search_agents(parent, options=CallOptions(page_token=INITIAL_PAGE)):
... for element in page:
... # process element
... pass
Args:
parent (str): Required. The project to list agents from.
Format: ``projects/<Project ID or '-'>``.
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
Returns:
A :class:`~google.gax.PageIterator` instance. By default, this
is an iterable of :class:`~dialogflow_v2beta1.types.Agent` instances.
This object can also be configured to iterate over the pages
of the response through the `options` parameter.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
request = agent_pb2.SearchAgentsRequest(
parent=parent, page_size=page_size)
iterator = google.api_core.page_iterator.GRPCIterator(
client=None,
method=functools.partial(
self._search_agents, retry=retry, timeout=timeout),
request=request,
items_field='agents',
request_token_field='page_token',
response_token_field='next_page_token')
return iterator
def train_agent(self,
parent,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT):
"""
Trains the specified agent.
Operation<response: google.protobuf.Empty,
metadata: google.protobuf.Struct>
Example:
>>> import dialogflow_v2beta1
>>>
>>> client = dialogflow_v2beta1.AgentsClient()
>>>
>>> parent = client.project_path('[PROJECT]')
>>>
>>> response = client.train_agent(parent)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
parent (str): Required. The project that the agent to train is associated with.
Format: ``projects/<Project ID>``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
Returns:
A :class:`~dialogflow_v2beta1.types._OperationFuture` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
request = agent_pb2.TrainAgentRequest(parent=parent)
operation = self._train_agent(request, retry=retry, timeout=timeout)
return google.api_core.operation.from_gapic(
operation,
self.operations_client,
empty_pb2.Empty,
metadata_type=struct_pb2.Struct)
def export_agent(self,
parent,
agent_uri=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT):
"""
Exports the specified agent to a ZIP file.
Operation<response: ExportAgentResponse,
metadata: google.protobuf.Struct>
Example:
>>> import dialogflow_v2beta1
>>>
>>> client = dialogflow_v2beta1.AgentsClient()
>>>
>>> parent = client.project_path('[PROJECT]')
>>>
>>> response = client.export_agent(parent)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
parent (str): Required. The project that the agent to export is associated with.
Format: ``projects/<Project ID>``.
agent_uri (str): Optional. The URI to export the agent to. Note: The URI must start with
\"gs://\". If left unspecified, the serialized agent is returned inline.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
Returns:
A :class:`~dialogflow_v2beta1.types._OperationFuture` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
request = agent_pb2.ExportAgentRequest(
parent=parent, agent_uri=agent_uri)
operation = self._export_agent(request, retry=retry, timeout=timeout)
return google.api_core.operation.from_gapic(
operation,
self.operations_client,
agent_pb2.ExportAgentResponse,
metadata_type=struct_pb2.Struct)
def import_agent(self,
parent,
agent_uri=None,
agent_content=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT):
"""
Imports the specified agent from a ZIP file.
Uploads new intents and entity types without deleting the existing ones.
Intents and entity types with the same name are replaced with the new
versions from ImportAgentRequest.
Operation<response: google.protobuf.Empty,
metadata: google.protobuf.Struct>
Example:
>>> import dialogflow_v2beta1
>>>
>>> client = dialogflow_v2beta1.AgentsClient()
>>>
>>> parent = client.project_path('[PROJECT]')
>>>
>>> response = client.import_agent(parent)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
parent (str): Required. The project that the agent to import is associated with.
Format: ``projects/<Project ID>``.
agent_uri (str): The URI to a file containing the agent to import. Note: The URI must
start with \"gs://\".
agent_content (bytes): The agent to import.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
Returns:
A :class:`~dialogflow_v2beta1.types._OperationFuture` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Sanity check: We have some fields which are mutually exclusive;
# raise ValueError if more than one is sent.
google.api_core.protobuf_helpers.check_oneof(
agent_uri=agent_uri,
agent_content=agent_content, )
request = agent_pb2.ImportAgentRequest(
parent=parent, agent_uri=agent_uri, agent_content=agent_content)
operation = self._import_agent(request, retry=retry, timeout=timeout)
return google.api_core.operation.from_gapic(
operation,
self.operations_client,
empty_pb2.Empty,
metadata_type=struct_pb2.Struct)
def restore_agent(self,
parent,
agent_uri=None,
agent_content=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT):
"""
Restores the specified agent from a ZIP file.
Replaces the current agent version with a new one. All the intents and
entity types in the older version are deleted.
Operation<response: google.protobuf.Empty,
metadata: google.protobuf.Struct>
Example:
>>> import dialogflow_v2beta1
>>>
>>> client = dialogflow_v2beta1.AgentsClient()
>>>
>>> parent = client.project_path('[PROJECT]')
>>>
>>> response = client.restore_agent(parent)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
parent (str): Required. The project that the agent to restore is associated with.
Format: ``projects/<Project ID>``.
agent_uri (str): The URI to a file containing the agent to restore. Note: The URI must
start with \"gs://\".
agent_content (bytes): The agent to restore.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
Returns:
A :class:`~dialogflow_v2beta1.types._OperationFuture` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Sanity check: We have some fields which are mutually exclusive;
# raise ValueError if more than one is sent.
google.api_core.protobuf_helpers.check_oneof(
agent_uri=agent_uri,
agent_content=agent_content, )
request = agent_pb2.RestoreAgentRequest(
parent=parent, agent_uri=agent_uri, agent_content=agent_content)
operation = self._restore_agent(request, retry=retry, timeout=timeout)
return google.api_core.operation.from_gapic(
operation,
self.operations_client,
empty_pb2.Empty,
metadata_type=struct_pb2.Struct)
| 42.688433 | 103 | 0.610944 | 2,512 | 22,881 | 5.421178 | 0.154857 | 0.038332 | 0.055368 | 0.031723 | 0.633426 | 0.57659 | 0.543031 | 0.534733 | 0.534733 | 0.528859 | 0 | 0.006581 | 0.30934 | 22,881 | 535 | 104 | 42.768224 | 0.855154 | 0.556881 | 0 | 0.35 | 0 | 0 | 0.047311 | 0.008498 | 0 | 0 | 0 | 0 | 0 | 1 | 0.05 | false | 0 | 0.14375 | 0 | 0.2625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b85f0a9d03c4bf7f2534588b2a3e5bafe7f3de65 | 3,862 | py | Python | todo/api_1_0/todo.py | l769829723/todo | 7c2da38996d244709e0b7a2041e1e973f6b2743b | [
"MIT"
] | null | null | null | todo/api_1_0/todo.py | l769829723/todo | 7c2da38996d244709e0b7a2041e1e973f6b2743b | [
"MIT"
] | null | null | null | todo/api_1_0/todo.py | l769829723/todo | 7c2da38996d244709e0b7a2041e1e973f6b2743b | [
"MIT"
] | null | null | null | from flask import current_app, jsonify
from flask_restful import Resource, fields, marshal_with, marshal
from flask_restful import reqparse
from flask_restful import abort
from flask_jwt_extended import jwt_required
from todo.api_1_0 import api
from todo.models import Todo
parser = reqparse.RequestParser()
parser.add_argument('name', type=str, location='json', required=True, help="Specified a todo name.")
parser.add_argument('is_done', type=bool, location='json', required=True, help="Specified a todo is done flag.")
parser.add_argument('is_important', type=bool, location='json', required=True, help="Specified a todo is important flag.")
class ToDoMixin:
fields = dict(
id=fields.Integer,
name=fields.String,
publish_time=fields.DateTime,
is_done=fields.Boolean,
is_important=fields.Boolean
)
def get_object_or_404(self, id):
todo = Todo.query.get(id)
if todo is None:
# abort raises an HTTPException, so callers never receive None
abort(404, message="Task {} doesn't exist".format(id))
return todo
class ToDoList(Resource, ToDoMixin):
method_decorators = [jwt_required]
def get(self):
get_parser = reqparse.RequestParser()
get_parser.add_argument('page', type=int, location='args', required=False)
args = get_parser.parse_args()
page = args.get('page', 1)
page_fields = dict(
prev=fields.Boolean,
next=fields.Boolean,
total=fields.Integer,
per=fields.Integer,
current=fields.Integer
)
todos_fields = dict(
page=fields.Nested(page_fields),
todos=fields.Nested(ToDoMixin.fields)
)
pagination = Todo.query.order_by(
Todo.is_done.desc(),
# Todo.is_important.desc(),
Todo.publish_time.desc()
).paginate(
page,
per_page=current_app.config['COUNTS_OF_PER_PAGE'],
error_out=False
)
todos_data = dict(
page=dict(
prev=pagination.has_prev,
next=pagination.has_next,
total=pagination.total,
per=current_app.config['COUNTS_OF_PER_PAGE'],
current=page
),
todos=pagination.items
)
return jsonify(marshal(todos_data, todos_fields))
def post(self):
todo = Todo()
args = parser.parse_args()
todo.name = args.get('name')
todo.is_done = args.get('is_done')
todo.is_important = args.get('is_important')
todo.save()
return jsonify(marshal(todo, self.fields)), 201
class ToDo(Resource, ToDoMixin):
method_decorators = [jwt_required]
@marshal_with(ToDoMixin.fields)
def get(self, todo_id):
return self.get_object_or_404(todo_id)  # marshal_with handles serialization
def put(self, todo_id):
todo = self.get_object_or_404(todo_id)
args = parser.parse_args()
todo.name = args.get('name')
todo.is_done = args.get('is_done')
todo.is_important = args.get('is_important')
todo.save()
return jsonify(marshal(todo, self.fields))
def delete(self, todo_id):
todo = self.get_object_or_404(todo_id)
todo.delete()
return jsonify({'message': 'Task {} has been deleted.'.format(todo_id)}), 201
api.add_url_rule('/todos/', view_func=ToDoList.as_view('todolist'))
api.add_url_rule('/todos/<int:todo_id>/', view_func=ToDo.as_view('todo'))
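# Example requests against the routes above (host, port, and any blueprint
# prefix are illustrative; jwt_required also expects a valid JWT):
#
#   curl -X POST http://localhost:5000/todos/ \
#        -H 'Authorization: Bearer <token>' \
#        -H 'Content-Type: application/json' \
#        -d '{"name": "buy milk", "is_done": false, "is_important": true}'
#   curl -H 'Authorization: Bearer <token>' http://localhost:5000/todos/?page=1
#   curl -X DELETE -H 'Authorization: Bearer <token>' http://localhost:5000/todos/1/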
# def format_request_datetime(value, name):
# try:
# value = datetime.datetime.strptime(
# value,
# current_app.config['DATETIME_FORMAT_STRING']
# )
# except ValueError:
# return ValueError('Specified datetime format like {}.'.format(current_app.config['DATETIME_FORMAT_STRING']))
# return value
| 31.398374 | 122 | 0.626359 | 475 | 3,862 | 4.890526 | 0.235789 | 0.023246 | 0.029272 | 0.024107 | 0.334051 | 0.318554 | 0.249677 | 0.212656 | 0.194576 | 0.194576 | 0 | 0.008395 | 0.25971 | 3,862 | 122 | 123 | 31.655738 | 0.804127 | 0.093993 | 0 | 0.157303 | 0 | 0 | 0.088876 | 0.006021 | 0 | 0 | 0 | 0.008197 | 0 | 1 | 0.067416 | false | 0 | 0.123596 | 0.011236 | 0.325843 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b85f4419b531490a1f189c9143fd6d571b665fca | 598 | py | Python | src/encoded/tests/test_upgrade_ontology_term.py | 4dn-dcic/fourfron | 29601961706d2371b982e57ae085e8ebec3b2714 | [
"MIT"
] | 11 | 2016-11-23T02:33:13.000Z | 2021-06-18T14:21:20.000Z | src/encoded/tests/test_upgrade_ontology_term.py | 4dn-dcic/fourfron | 29601961706d2371b982e57ae085e8ebec3b2714 | [
"MIT"
] | 1,159 | 2016-11-21T15:40:24.000Z | 2022-03-29T03:18:38.000Z | src/encoded/tests/test_upgrade_ontology_term.py | 4dn-dcic/fourfron | 29601961706d2371b982e57ae085e8ebec3b2714 | [
"MIT"
] | 5 | 2017-01-27T16:36:15.000Z | 2019-06-14T14:39:54.000Z | import pytest
pytestmark = [pytest.mark.setone, pytest.mark.working]
@pytest.fixture
def ontology_term_1(so_ont, award, lab):
return{
"schema_version": '1',
"term_id": 'SO:0001111',
"term_name": 'so_term',
"source_ontology": so_ont['@id']
}
def test_ontology_term_1_2(
app, ontology_term_1, so_ont):
migrator = app.registry['upgrader']
value = migrator.upgrade('ontology_term', ontology_term_1, current_version='1', target_version='2')
assert value['schema_version'] == '2'
assert value['source_ontologies'][0] == so_ont['@id']
| 26 | 103 | 0.658863 | 79 | 598 | 4.683544 | 0.443038 | 0.162162 | 0.140541 | 0.081081 | 0.097297 | 0 | 0 | 0 | 0 | 0 | 0 | 0.035052 | 0.188963 | 598 | 22 | 104 | 27.181818 | 0.727835 | 0 | 0 | 0 | 0 | 0 | 0.207358 | 0 | 0 | 0 | 0 | 0 | 0.125 | 1 | 0.125 | false | 0 | 0.0625 | 0.0625 | 0.1875 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b8630bb57bbccbd49f4c70144ec449cc19b1d2ee | 332 | py | Python | misc.py | xyukiono/tf-image-augm | f5d14b33cc284f6310d0fac634c4b8e3391106fc | [
"MIT"
] | 3 | 2021-03-07T04:14:39.000Z | 2021-11-15T10:29:21.000Z | misc.py | xyukiono/tf-image-augm | f5d14b33cc284f6310d0fac634c4b8e3391106fc | [
"MIT"
] | null | null | null | misc.py | xyukiono/tf-image-augm | f5d14b33cc284f6310d0fac634c4b8e3391106fc | [
"MIT"
] | 2 | 2020-08-07T07:51:19.000Z | 2021-04-03T17:10:27.000Z | import tensorflow as tf
def get_rank(inputs):
return len(inputs.get_shape())
def get_xy_axis(ndim):
if ndim == 4:
yaxis = 1
xaxis = 2
elif ndim == 3:
yaxis = 0
xaxis = 1
else:
raise ValueError('Input tensor must be 4D or 3D tensor')
return xaxis, yaxis | 22.133333 | 65 | 0.551205 | 46 | 332 | 3.891304 | 0.717391 | 0.067039 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.038095 | 0.36747 | 332 | 15 | 66 | 22.133333 | 0.814286 | 0 | 0 | 0 | 0 | 0 | 0.112853 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.153846 | false | 0 | 0.076923 | 0.076923 | 0.384615 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b865e5aa5f63090f7a3d75e390fedaea4ff9c4eb | 8,747 | py | Python | hexrd/ui/indexing/run.py | bnmajor/hexrdgui | d19f7cf4a4469b0d3b6978f2f65c5e8a6bd81785 | [
"BSD-3-Clause"
] | null | null | null | hexrd/ui/indexing/run.py | bnmajor/hexrdgui | d19f7cf4a4469b0d3b6978f2f65c5e8a6bd81785 | [
"BSD-3-Clause"
] | null | null | null | hexrd/ui/indexing/run.py | bnmajor/hexrdgui | d19f7cf4a4469b0d3b6978f2f65c5e8a6bd81785 | [
"BSD-3-Clause"
] | null | null | null | import os
import numpy as np
from PySide2.QtCore import QObject, QThreadPool, Signal
from PySide2.QtWidgets import QDialog, QMessageBox, QTableView, QVBoxLayout
from hexrd import constants as const
from hexrd import fitgrains, indexer, instrument
from hexrd.findorientations import (
create_clustering_parameters, find_orientations,
generate_eta_ome_maps, generate_orientation_fibers,
run_cluster
)
from hexrd.fitgrains import fit_grains
from hexrd.transforms import xfcapi
from hexrd.xrdutil import EtaOmeMaps
from hexrd.ui.async_worker import AsyncWorker
from hexrd.ui.hexrd_config import HexrdConfig
from hexrd.ui.indexing.create_config import create_indexing_config
from hexrd.ui.indexing.fit_grains_options_dialog import FitGrainsOptionsDialog
from hexrd.ui.indexing.fit_grains_results_model import FitGrainsResultsModel
from hexrd.ui.indexing.ome_maps_select_dialog import OmeMapsSelectDialog
from hexrd.ui.indexing.ome_maps_viewer_dialog import OmeMapsViewerDialog
from hexrd.ui.progress_dialog import ProgressDialog
class IndexingRunner(QObject):
progress_text = Signal(str)
def __init__(self, parent=None):
super(IndexingRunner, self).__init__(parent)
self.parent = parent
self.ome_maps_select_dialog = None
self.ome_maps_viewer_dialog = None
self.fit_grains_dialog = None
self.fit_grains_results = None
self.thread_pool = QThreadPool(self.parent)
self.progress_dialog = ProgressDialog(self.parent)
self.ome_maps = None
self.progress_text.connect(self.progress_dialog.setLabelText)
def clear(self):
self.ome_maps_select_dialog = None
self.ome_maps_viewer_dialog = None
self.fit_grains_dialog = None
self.ome_maps = None
def run(self):
# We will go through these steps:
# 1. Have the user select/generate eta omega maps
# 2. Have the user view and threshold the eta omega maps
# 3. Run the indexing
self.select_ome_maps()
def select_ome_maps(self):
dialog = OmeMapsSelectDialog(self.parent)
dialog.accepted.connect(self.ome_maps_selected)
dialog.rejected.connect(self.clear)
dialog.show()
self.ome_maps_select_dialog = dialog
def ome_maps_selected(self):
dialog = self.ome_maps_select_dialog
if dialog is None:
return
if dialog.method_name == 'load':
self.ome_maps = EtaOmeMaps(dialog.file_name)
self.ome_maps_select_dialog = None
self.view_ome_maps()
else:
# Create a full indexing config
config = create_indexing_config()
# Setup to generate maps in background
self.progress_dialog.setWindowTitle('Generating Eta Omega Maps')
self.progress_dialog.setRange(0, 0) # no numerical updates
worker = AsyncWorker(self.run_eta_ome_maps, config)
self.thread_pool.start(worker)
worker.signals.result.connect(self.view_ome_maps)
worker.signals.finished.connect(self.progress_dialog.accept)
self.progress_dialog.exec_()
def run_eta_ome_maps(self, config):
self.ome_maps = generate_eta_ome_maps(config, save=False)
def view_ome_maps(self):
# Now, show the Ome Map viewer
dialog = OmeMapsViewerDialog(self.ome_maps, self.parent)
dialog.accepted.connect(self.ome_maps_viewed)
dialog.rejected.connect(self.clear)
dialog.show()
self.ome_maps_viewer_dialog = dialog
def ome_maps_viewed(self):
# The dialog should have automatically updated our internal config
# Let's go ahead and run the indexing!
# For now, always use all hkls from eta omega maps
hkls = list(range(len(self.ome_maps.iHKLList)))
indexing_config = HexrdConfig().indexing_config
indexing_config['find_orientations']['seed_search']['hkl_seeds'] = hkls
# Create a full indexing config
config = create_indexing_config()
# Setup to run indexing in background
self.progress_dialog.setWindowTitle('Find Orientations')
self.progress_dialog.setRange(0, 0) # no numerical updates
worker = AsyncWorker(self.run_indexer, config)
self.thread_pool.start(worker)
worker.signals.result.connect(self.view_fit_grains_options)
worker.signals.finished.connect(self.progress_dialog.accept)
self.progress_dialog.exec_()
def run_indexer(self, config):
# Generate the orientation fibers
self.update_progress_text('Generating orientation fibers')
self.qfib = generate_orientation_fibers(config, self.ome_maps)
# Find orientations
self.update_progress_text('Running indexer (paintGrid)')
ncpus = config.multiprocessing
self.completeness = indexer.paintGrid(
self.qfib,
self.ome_maps,
etaRange=np.radians(config.find_orientations.eta.range),
omeTol=np.radians(config.find_orientations.omega.tolerance),
etaTol=np.radians(config.find_orientations.eta.tolerance),
omePeriod=np.radians(config.find_orientations.omega.period),
threshold=config.find_orientations.threshold,
doMultiProc=ncpus > 1,
nCPUs=ncpus)
print('Indexing complete')
def view_fit_grains_options(self):
# Run dialog for user options
dialog = FitGrainsOptionsDialog(self.parent)
dialog.accepted.connect(self.fit_grains_options_accepted)
dialog.rejected.connect(self.clear)
self.fit_grains_dialog = dialog
dialog.show()
def fit_grains_options_accepted(self):
# Create a full indexing config
config = create_indexing_config()
# Setup to run in background
self.progress_dialog.setWindowTitle('Fit Grains')
self.progress_dialog.setRange(0, 0) # no numerical updates
worker = AsyncWorker(self.run_fit_grains, config)
self.thread_pool.start(worker)
worker.signals.result.connect(self.view_fit_grains_results)
worker.signals.finished.connect(self.progress_dialog.accept)
self.progress_dialog.exec_()
def run_fit_grains(self, config):
min_samples, mean_rpg = create_clustering_parameters(config,
self.ome_maps)
kwargs = {
'compl': self.completeness,
'qfib': self.qfib,
'qsym': config.material.plane_data.getQSym(),
'cfg': config,
'min_samples': min_samples,
'compl_thresh': config.find_orientations.clustering.completeness,
'radius': config.find_orientations.clustering.radius
}
self.update_progress_text('Running clustering')
qbar, cl = run_cluster(**kwargs)
# Generate grains table
num_grains = qbar.shape[1]
if num_grains == 0:
QMessageBox.warning(self.parent, 'No Grains', 'Clustering found no grains')
return
shape = (num_grains, 21)
grains_table = np.empty(shape)
gw = instrument.GrainDataWriter(array=grains_table)
for gid, q in enumerate(qbar.T):
phi = 2*np.arccos(q[0])
n = xfcapi.unitRowVector(q[1:])
grain_params = np.hstack([phi*n, const.zeros_3, const.identity_6x1])
gw.dump_grain(gid, 1., 0., grain_params)
gw.close()
self.update_progress_text(f'Found {num_grains} grains. Running fit optimization.')
self.fit_grains_results = fit_grains(config, grains_table, write_spots_files=False)
print('Fit Grains Complete')
def view_fit_grains_results(self):
for result in self.fit_grains_results:
print(result)
# Build grains table
num_grains = len(self.fit_grains_results)
shape = (num_grains, 21)
grains_table = np.empty(shape)
gw = instrument.GrainDataWriter(array=grains_table)
for result in self.fit_grains_results:
gw.dump_grain(*result)
gw.close()
# Display grains table in popup dialog
dialog = QDialog(self.parent)
dialog.setWindowTitle('Fit Grains Results')
model = FitGrainsResultsModel(grains_table, dialog)
view = QTableView(dialog)
view.setModel(model)
view.verticalHeader().hide()
view.resizeColumnToContents(0)
layout = QVBoxLayout(dialog)
layout.addWidget(view)
dialog.setLayout(layout)
dialog.resize(960, 320)
dialog.exec_()
def update_progress_text(self, text):
self.progress_text.emit(text)
| 36.752101 | 91 | 0.676118 | 1,041 | 8,747 | 5.465898 | 0.21806 | 0.039367 | 0.036731 | 0.020035 | 0.368541 | 0.327944 | 0.255009 | 0.239192 | 0.224429 | 0.224429 | 0 | 0.004986 | 0.243398 | 8,747 | 237 | 92 | 36.907173 | 0.85479 | 0.084943 | 0 | 0.247059 | 0 | 0 | 0.044236 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.082353 | false | 0 | 0.105882 | 0 | 0.211765 | 0.017647 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b8665efbb59680e81bde70cca98ce18deed1a1a5 | 12,345 | py | Python | polyaxon_cli/cli/build.py | vfdev-5/polyaxon-cli | 9232c3b614d3025b9e31c79fbe632cd35fcfcc64 | [
"MIT"
] | null | null | null | polyaxon_cli/cli/build.py | vfdev-5/polyaxon-cli | 9232c3b614d3025b9e31c79fbe632cd35fcfcc64 | [
"MIT"
] | null | null | null | polyaxon_cli/cli/build.py | vfdev-5/polyaxon-cli | 9232c3b614d3025b9e31c79fbe632cd35fcfcc64 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import sys
import click
from polyaxon_cli.cli.getters.build import get_build_or_local
from polyaxon_cli.client import PolyaxonClient
from polyaxon_cli.client.exceptions import PolyaxonHTTPError, PolyaxonShouldExitError
from polyaxon_cli.logger import clean_outputs
from polyaxon_cli.managers.build_job import BuildJobManager
from polyaxon_cli.utils import cache
from polyaxon_cli.utils.formatting import (
Printer,
dict_tabulate,
get_meta_response,
get_resources,
list_dicts_to_tabulate
)
from polyaxon_cli.utils.log_handler import get_logs_handler
from polyaxon_cli.utils.validation import validate_tags
from polyaxon_client.exceptions import PolyaxonClientException
def get_build_details(_build):
if _build.description:
Printer.print_header("Build description:")
click.echo('{}\n'.format(_build.description))
if _build.resources:
get_resources(_build.resources.to_dict(), header="Build resources:")
response = _build.to_light_dict(
humanize_values=True,
exclude_attrs=[
'uuid', 'config', 'project', 'description', 'resources', 'is_clone', 'build_job'
])
Printer.print_header("Build info:")
dict_tabulate(Printer.add_status_color(response))
@click.group()
@click.option('--project', '-p', type=str, help="The project name, e.g. 'mnist' or 'adam/mnist'")
@click.option('--build', '-b', type=int, help="The build id.")
@click.pass_context
@clean_outputs
def build(ctx, project, build): # pylint:disable=redefined-outer-name
"""Commands for build jobs."""
ctx.obj = ctx.obj or {}
ctx.obj['project'] = project
ctx.obj['build'] = build
@build.command()
@click.pass_context
@clean_outputs
def get(ctx):
"""Get build job.
Uses [Caching](/polyaxon_cli/introduction#Caching)
Examples:
\b
```bash
$ polyaxon build -b 1 get
```
\b
```bash
$ polyaxon build --build=1 --project=project_name get
```
"""
user, project_name, _build = get_build_or_local(ctx.obj.get('project'), ctx.obj.get('build'))
try:
response = PolyaxonClient().build_job.get_build(user, project_name, _build)
cache.cache(config_manager=BuildJobManager, response=response)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not get build job `{}`.'.format(_build))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
get_build_details(response)
@build.command()
@click.pass_context
@clean_outputs
def delete(ctx):
"""Delete build job.
Uses [Caching](/polyaxon_cli/introduction#Caching)
Example:
\b
```bash
$ polyaxon build delete
```
\b
```bash
$ polyaxon build -b 2 delete
```
"""
user, project_name, _build = get_build_or_local(ctx.obj.get('project'), ctx.obj.get('build'))
if not click.confirm("Are sure you want to delete build job `{}`".format(_build)):
click.echo('Existing without deleting build job.')
sys.exit(1)
try:
response = PolyaxonClient().build_job.delete_build(
user, project_name, _build)
# Purge caching
BuildJobManager.purge()
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not delete job `{}`.'.format(_build))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
if response.status_code == 204:
Printer.print_success("Experiment `{}` was delete successfully".format(_build))
@build.command()
@click.option('--name', type=str,
help='Name of the build, must be unique within the project, could none.')
@click.option('--description', type=str, help='Description of the build.')
@click.option('--tags', type=str, help='Tags of the build, comma separated values.')
@click.pass_context
@clean_outputs
def update(ctx, name, description, tags):
"""Update build.
Uses [Caching](/polyaxon_cli/introduction#Caching)
Example:
\b
```bash
$ polyaxon build -b 2 update --description="new description for my build"
```
"""
user, project_name, _build = get_build_or_local(ctx.obj.get('project'), ctx.obj.get('build'))
update_dict = {}
if name:
update_dict['name'] = name
if description:
update_dict['description'] = description
tags = validate_tags(tags)
if tags:
update_dict['tags'] = tags
if not update_dict:
Printer.print_warning('No argument was provided to update the build.')
sys.exit(0)
try:
response = PolyaxonClient().build_job.update_build(
user, project_name, _build, update_dict)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not update build `{}`.'.format(_build))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
Printer.print_success("Build updated.")
get_build_details(response)
@build.command()
@click.option('--yes', '-y', is_flag=True, default=False,
help="Automatic yes to prompts. "
"Assume \"yes\" as answer to all prompts and run non-interactively.")
@click.pass_context
@clean_outputs
def stop(ctx, yes):
"""Stop build job.
Uses [Caching](/polyaxon_cli/introduction#Caching)
Examples:
\b
```bash
$ polyaxon build stop
```
\b
```bash
$ polyaxon build -b 2 stop
```
"""
user, project_name, _build = get_build_or_local(ctx.obj.get('project'), ctx.obj.get('build'))
if not yes and not click.confirm("Are you sure you want to stop "
"build job `{}`".format(_build)):
click.echo('Exiting without stopping build job.')
sys.exit(0)
try:
PolyaxonClient().build_job.stop(user, project_name, _build)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not stop build job `{}`.'.format(_build))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
Printer.print_success("Build job is being stopped.")
@build.command()
@click.pass_context
@clean_outputs
def bookmark(ctx):
"""Bookmark build job.
Uses [Caching](/polyaxon_cli/introduction#Caching)
Examples:
\b
```bash
$ polyaxon build bookmark
```
\b
```bash
$ polyaxon build -b 2 bookmark
```
"""
user, project_name, _build = get_build_or_local(ctx.obj.get('project'), ctx.obj.get('build'))
try:
PolyaxonClient().build_job.bookmark(user, project_name, _build)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not bookmark build job `{}`.'.format(_build))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
Printer.print_success("Build job bookmarked.")
@build.command()
@click.pass_context
@clean_outputs
def unbookmark(ctx):
"""Unbookmark build job.
Uses [Caching](/polyaxon_cli/introduction#Caching)
Examples:
\b
```bash
$ polyaxon build unbookmark
```
\b
```bash
$ polyaxon build -b 2 unbookmark
```
"""
user, project_name, _build = get_build_or_local(ctx.obj.get('project'), ctx.obj.get('build'))
try:
PolyaxonClient().build_job.unbookmark(user, project_name, _build)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not unbookmark build job `{}`.'.format(_build))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
Printer.print_success("Build job unbookmarked.")
@build.command()
@click.option('--page', type=int, help="To paginate through the list of statuses.")
@click.pass_context
@clean_outputs
def statuses(ctx, page):
"""Get build job statuses.
Uses [Caching](/polyaxon_cli/introduction#Caching)
Examples:
\b
```bash
$ polyaxon build -b 2 statuses
```
"""
user, project_name, _build = get_build_or_local(ctx.obj.get('project'), ctx.obj.get('build'))
page = page or 1
try:
response = PolyaxonClient().build_job.get_statuses(user, project_name, _build, page=page)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not get status for build job `{}`.'.format(_build))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
meta = get_meta_response(response)
if meta:
Printer.print_header('Statuses for build job `{}`.'.format(_build))
Printer.print_header('Navigation:')
dict_tabulate(meta)
else:
Printer.print_header('No statuses found for build job `{}`.'.format(_build))
objects = list_dicts_to_tabulate(
[Printer.add_status_color(o.to_light_dict(humanize_values=True), status_key='status')
for o in response['results']])
if objects:
Printer.print_header("Statuses:")
objects.pop('job', None)
dict_tabulate(objects, is_list_dict=True)
@build.command()
@click.option('--gpu', '-g', is_flag=True, help='List build GPU resources.')
@click.pass_context
@clean_outputs
def resources(ctx, gpu):
"""Get build job resources.
Uses [Caching](/polyaxon_cli/introduction#Caching)
Examples:
\b
```bash
$ polyaxon build -b 2 resources
```
For GPU resources
\b
```bash
$ polyaxon build -b 2 resources --gpu
```
"""
user, project_name, _build = get_build_or_local(ctx.obj.get('project'), ctx.obj.get('build'))
try:
message_handler = Printer.gpu_resources if gpu else Printer.resources
PolyaxonClient().build_job.resources(user,
project_name,
_build,
message_handler=message_handler)
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not get resources for build job `{}`.'.format(_build))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
@build.command()
@click.option('--past', '-p', is_flag=True, help="Show the past logs.")
@click.option('--follow', '-f', is_flag=True, default=False,
help="Stream logs after showing past logs.")
@click.option('--hide_time', is_flag=True, default=False,
help="Whether or not to hide timestamps from the log stream.")
@click.pass_context
@clean_outputs
def logs(ctx, past, follow, hide_time):
"""Get build logs.
Uses [Caching](/polyaxon_cli/introduction#Caching)
Examples:
\b
```bash
$ polyaxon build -b 2 logs
```
\b
```bash
$ polyaxon build logs
```
"""
user, project_name, _build = get_build_or_local(ctx.obj.get('project'), ctx.obj.get('build'))
if past:
try:
response = PolyaxonClient().build_job.logs(
user, project_name, _build, stream=False)
get_logs_handler(handle_job_info=False,
show_timestamp=not hide_time,
stream=False)(response.content.decode().split('\n'))
print()
if not follow:
return
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
if not follow:
                Printer.print_error('Could not get logs for build job `{}`.'.format(_build))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
try:
PolyaxonClient().build_job.logs(
user,
project_name,
_build,
message_handler=get_logs_handler(handle_job_info=False, show_timestamp=not hide_time))
except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
Printer.print_error('Could not get logs for build job `{}`.'.format(_build))
Printer.print_error('Error message `{}`.'.format(e))
sys.exit(1)
| 30.257353 | 98 | 0.649251 | 1,471 | 12,345 | 5.272604 | 0.138001 | 0.03507 | 0.043837 | 0.048994 | 0.589608 | 0.552476 | 0.471506 | 0.444301 | 0.396596 | 0.396596 | 0 | 0.003016 | 0.221223 | 12,345 | 407 | 99 | 30.331695 | 0.803724 | 0.130822 | 0 | 0.400901 | 0 | 0 | 0.168611 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.04955 | false | 0.045045 | 0.058559 | 0 | 0.112613 | 0.153153 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b86eb08a911b1c356121c97276a70920f9c421d8 | 2,091 | py | Python | thrustrap/strided_repeated_range.py | hanswenzel/opticks | b75b5929b6cf36a5eedeffb3031af2920f75f9f0 | [
"Apache-2.0"
] | 11 | 2020-07-05T02:39:32.000Z | 2022-03-20T18:52:44.000Z | thrustrap/strided_repeated_range.py | hanswenzel/opticks | b75b5929b6cf36a5eedeffb3031af2920f75f9f0 | [
"Apache-2.0"
] | null | null | null | thrustrap/strided_repeated_range.py | hanswenzel/opticks | b75b5929b6cf36a5eedeffb3031af2920f75f9f0 | [
"Apache-2.0"
] | 4 | 2020-09-03T20:36:32.000Z | 2022-01-19T07:42:21.000Z | #!/usr/bin/env python
#
# Copyright (c) 2019 Opticks Team. All Rights Reserved.
#
# This file is part of Opticks
# (see https://bitbucket.org/simoncblyth/opticks).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def stride_repeat_0(a, stride, repeat):
o = []
    it = len(a) // stride
for item in range(0,it):
for r in range(0,repeat):
for offset in range(0,stride):
j = item*stride + offset
o.append(a[j])
return o
def stride_repeat_1(a, stride, repeat):
o = []
sr = stride*repeat
    it = len(a) // stride
n = sr*it
for _ in range(0,n):
        j = stride * (_ // sr) + (_ % stride)
o.append(a[j])
pass
return o
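# Worked example of the flattened-index arithmetic in stride_repeat_1:
# with a=[0,1,2,3], stride=2, repeat=3 we get sr=6, it=2, n=12, and
#   _ = 0..5   -> j = 2*0 + (_ % 2) -> indices 0,1,0,1,0,1
#   _ = 6..11  -> j = 2*1 + (_ % 2) -> indices 2,3,2,3,2,3
# which reproduces the `sr23` expectation asserted under __main__ below.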
def repeat_0(a, repeat):
"""
"""
o = []
for i in range(0,len(a)):
for r in range(0,repeat):
o.append(a[i])
return o
def repeat_1(a, repeat):
"""Unnest the repeat loop"""
o = []
n = len(a)*repeat
for _ in range(0,n):
        o.append(a[_ // repeat])
return o
def stride_0(a, stride, offset):
o = []
    n = len(a) // stride
    for _ in range(0, n):
        o.append(stride * a[_] + offset)
return o
if __name__ == '__main__':
a = [0,1,2,3]
s20 = [0,2]
s21 = [1,3]
sr23 = [0, 1, 0, 1, 0, 1, 2, 3, 2, 3, 2, 3]
r2 = [0,0,1,1,2,2,3,3]
assert stride_repeat_0(a, 2,3) == sr23
assert stride_repeat_1(a, 2,3) == sr23
assert repeat_0(a,2) == r2
assert repeat_1(a,2) == r2
assert stride_0(a,2,0) == s20
assert stride_0(a,2,1) == s21
| 22.728261 | 76 | 0.575323 | 335 | 2,091 | 3.501493 | 0.310448 | 0.047741 | 0.054561 | 0.028133 | 0.121057 | 0.063086 | 0.032396 | 0 | 0 | 0 | 0 | 0.055256 | 0.290292 | 2,091 | 91 | 77 | 22.978022 | 0.735175 | 0.336203 | 0 | 0.387755 | 0 | 0 | 0.005926 | 0 | 0 | 0 | 0 | 0 | 0.122449 | 1 | 0.102041 | false | 0.020408 | 0 | 0 | 0.204082 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b870c85d7add3bfb6ac4d70d1573583a1c5fe820 | 2,398 | py | Python | holmes/validators/anchor_without_any_text.py | scorphus/holmes-api | 6b3c76d4299fecf2d8799d7b5c3c6a6442cacd59 | [
"MIT"
] | null | null | null | holmes/validators/anchor_without_any_text.py | scorphus/holmes-api | 6b3c76d4299fecf2d8799d7b5c3c6a6442cacd59 | [
"MIT"
] | null | null | null | holmes/validators/anchor_without_any_text.py | scorphus/holmes-api | 6b3c76d4299fecf2d8799d7b5c3c6a6442cacd59 | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
from holmes.validators.base import Validator
from holmes.facters.links import REMOVE_HASH
from holmes.utils import _
class AnchorWithoutAnyTextValidator(Validator):
@classmethod
def get_empty_anchors_parsed_value(cls, value):
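        # e.g. value=['http://a', 'http://b'] produces:
        #   '<a href="http://a" target="_blank">#0</a>, '
        #   '<a href="http://b" target="_blank">#1</a>'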
return ', '.join([
'<a href="%s" target="_blank">#%s</a>' % (href, index)
for index, href in enumerate(value)
])
@classmethod
def get_violation_definitions(cls):
return {
'empty.anchors': {
'title': _('Empty anchor(s) found'),
'description': _(
'Empty anchors are not good for Search Engines. '
'Empty anchors were found for links to: %s.'),
'value_parser': cls.get_empty_anchors_parsed_value,
'category': _('SEO'),
'generic_description': _(
                    'Using empty anchor text won\'t prevent search '
                    'engines from indexing your pages, but you will lose a '
                    'good opportunity to add relevance to your pages. '
                    'Google uses anchor text to qualify the '
                    'resources you create a reference to. '
                    'As a consequence, if a page has links with "example" '
                    'as anchor text pointing to itself, that page\'s '
                    'relevance for the "example" query will increase. '
                    'So it is better not to leave anchor text empty, and to '
                    'choose wisely the words (or keywords) you use in it.')
}
}
def validate(self):
links = self.get_links()
links_with_empty_anchor = []
for link in links:
href = link.get('href').strip()
href = REMOVE_HASH.sub('', href)
if href and not link.text_content() and not link.findall('img'):
href = self.normalize_url(href)
if not href:
continue
links_with_empty_anchor.append(href)
if links_with_empty_anchor:
self.add_violation(
key='empty.anchors',
value=links_with_empty_anchor,
points=20 * len(links_with_empty_anchor)
)
def get_links(self):
return self.review.data.get('page.all_links', None)
| 36.892308 | 76 | 0.547957 | 267 | 2,398 | 4.76779 | 0.464419 | 0.069128 | 0.054988 | 0.078555 | 0.040848 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001961 | 0.361968 | 2,398 | 64 | 77 | 37.46875 | 0.830065 | 0.015847 | 0 | 0.038462 | 0 | 0 | 0.300254 | 0.009754 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076923 | false | 0 | 0.057692 | 0.057692 | 0.211538 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b872ff766ae1a53e3f9c7841286c2e4fd35564d7 | 14,441 | py | Python | tests/client/test_client.py | aspuru-guzik-group/molar | a3e0c337bd8a41c94b2c25831c95048cc7614f04 | [
"BSD-3-Clause"
] | 4 | 2021-07-20T18:49:44.000Z | 2021-10-15T00:58:12.000Z | tests/client/test_client.py | aspuru-guzik-group/molar | a3e0c337bd8a41c94b2c25831c95048cc7614f04 | [
"BSD-3-Clause"
] | null | null | null | tests/client/test_client.py | aspuru-guzik-group/molar | a3e0c337bd8a41c94b2c25831c95048cc7614f04 | [
"BSD-3-Clause"
] | 2 | 2022-01-07T17:57:42.000Z | 2022-01-13T21:00:20.000Z | # std
from time import sleep
# external
import pytest
# molar
from molar.exceptions import MolarBackendError
class TestClientLogin:
def test_headers(self, client):
headers = client.headers
assert "User-Agent" in headers.keys()
assert "Authorization" in headers.keys()
def test_test_token(self, client):
client.test_token()
def test_login_other_database(self, new_database_client, new_database):
new_database_client.test_token()
class TestClientDatabase:
def test_get_database_requests(self, client):
requests = client.get_database_requests()
assert len(requests) == 0
def test_database_creation_request(self, client):
client.database_creation_request("new_database", ["compchem@head"])
df = client.get_database_requests()
assert len(df) == 1
def test_approve_request(self, client, new_database_client):
out = client.approve_database("new_database")
assert "msg" in out.keys() # Check for message
new_database_client.test_token()
def test_get_database_information(self, client, new_database_client):
requests = client.get_database_information()
assert "table_name" in requests.keys()
def test_remove_request(self, client):
with pytest.raises(MolarBackendError):
client.remove_database_request("new_database")
client.database_creation_request("test", ["compchem@head"])
client.remove_database_request("test")
def test_database_removed(self, client, new_database_client):
client.remove_database("new_database")
sleep(3)
with pytest.raises(MolarBackendError):
new_database_client.test_token()
class TestClientAlembic:
def test_get_alembic_revisions(self, client):
client.get_alembic_revisions()
def test_alembic_downgrade(self, new_database_client, new_database):
new_database_client.alembic_downgrade("-1")
def test_alembic_upgrade(self, new_database_client, new_database):
new_database_client.alembic_upgrade("heads")
class TestClientUser:
def test_get_users(self, client):
pandas = client.get_users()
assert len(pandas) == 1
def test_add_user(self, client):
response = client.add_user(
email="anew@email.com",
password="blablablabla",
full_name="Bucky Tooth",
is_active=True,
is_superuser=False,
)
assert response["msg"] == "User anew@email.com created"
pandas = client.get_users()
assert len(pandas) == 2
def test_get_user_by_email(self, client):
pandas = client.get_user_by_email("anew@email.com")
assert pandas["full_name"] == "Bucky Tooth"
with pytest.raises(MolarBackendError):
message = client.get_user_by_email("fake@email.com")
def test_register_new_user(self, client):
answer = client.register_user(
email="registereduser@email.com",
password="password",
full_name="Chip Skylark",
)
assert (
answer["msg"]
== "User registereduser@email.com has been register. Ask your database admin to activate this account"
)
pandas = client.get_user_by_email("registereduser@email.com")
assert pandas["is_active"] is False
def test_activate_user(self, client):
response = client.activate_user("registereduser@email.com")
assert response["msg"] == "User registereduser@email.com is now active!"
pandas = client.get_user_by_email("registereduser@email.com")
assert pandas["is_active"] is True
def test_deactivate_user(self, client):
response = client.deactivate_user("registereduser@email.com")
assert response["msg"] == "User registereduser@email.com is now deactivated!"
pandas = client.get_user_by_email("registereduser@email.com")
assert pandas["is_active"] is False
def test_delete_user(self, client):
response = client.delete_user(email="anew@email.com")
assert response["msg"] == "User anew@email.com has been deleted!"
with pytest.raises(MolarBackendError):
pandas = client.get_user_by_email(email="anew@email.com")
response = client.delete_user(email="registereduser@email.com")
assert response["msg"] == "User registereduser@email.com has been deleted!"
class TestClientEventstore:
def test_view_entries(self, client, new_database_client, new_database):
# verifying that the database is empty and working
pandas = new_database_client.view_entries("new_database")
assert len(pandas) == 0
def test_create_entry(self, new_database_client):
# create first entry
pandas = new_database_client.create_entry(
database_name="new_database", types="molecule", data={"smiles": "abc"}
)
assert pandas["type"] == "molecule"
# check the number of eventstores and making sure it's the right one
pandas = new_database_client.view_entries("new_database")
assert len(pandas) == 1
assert pandas.iloc[0]["type"] == "molecule"
# making sure that the entry is in the database as an entry
pandas = new_database_client.query_database(
database_name="new_database", types="molecule"
)
assert pandas.iloc[0]["smiles"] == "abc"
assert len(pandas) == 1
def test_update_entry(self, new_database_client):
# get the id of the item in the database
item = new_database_client.query_database(
database_name="new_database", types="molecule"
)
# update that item
updated_resp = new_database_client.update_entry(
database_name="new_database",
uuid=item.iloc[0]["molecule_id"],
types="molecule",
data={"smiles": "hyp"},
)
assert updated_resp["type"] == "molecule"
# checking that the database stored the change and only has one item still
pandas = new_database_client.query_database(
database_name="new_database", types="molecule"
)
assert pandas.iloc[0]["smiles"] == "hyp"
assert len(pandas) == 1
# an error should be raised when it isn't a real id
with pytest.raises(MolarBackendError):
new_database_client.update_entry(
database_name="new_database",
uuid="000000000000",
types="what",
data="what",
)
def test_delete_entry(self, new_database_client):
# get the id of the item in the database
pandas = new_database_client.view_entries("new_database")
item = pandas.iloc[0]
# delete that item
deleted_entry = new_database_client.delete_entry(
database_name="new_database", types=item["type"], uuid=item["uuid"]
)
assert deleted_entry["type"] == "molecule"
# check the eventstores to note that there are 3 events that happened and check the latest for delete
pandas = new_database_client.view_entries("new_database")
assert pandas.iloc[2]["event"] == "delete"
assert len(pandas) == 3
# there should be no more entries in the database that has that id
with pytest.raises(MolarBackendError):
new_database_client.delete_entry(
database_name="new_database", uuid=item["uuid"], types=item["type"]
)
class TestClientQuery:
# @pytest.fixture(autouse=True, scope="class")
# def insert_data(client, new_database_client, new_database):
# molecule = new_database_client.create_entry(
# database_name="new_database",
# types="molecule",
# data={
# "smiles": "abc",
# "metadata": {
# "test": "test",
# "test_filters": "abc",
# "canthisbeanything": "cycle",
# }
# }
# )
# new_database_client.create_entry(
# database_name="new_database",
# types="molecule",
# data={
# "smiles": "abbae",
# "canthisbeanything": "hi",
# "metadata": {
# "name": "benzoic acid",
# "filter": "cycle",
# }
# }
# )
# conformer = new_database_client.create_entry(
# database_name="new_database",
# types="conformer",
# data={
# "x": [0],
# "y": [1],
# "z": [2],
# "atomic_numbers": [2],
# "canthisbeanything": "hi",
# "molecule_id": molecule["uuid"],
# "metadata": {
# "name": "benzene",
# "filter": "cycle",
# }
# }
# )
# software = new_database_client.create_entry(
# database_name="new_database",
# types="software",
# data={
# "name": "cp2k",
# "version": "v1.0",
# "canthisbeanything": "hi",
# }
# )
# new_database_client.create_entry(
# database_name="new_database",
# types="calculation",
# data={
# "conformer_id": conformer["uuid"],
# "software_id": software["uuid"],
# "output_conformer_id": conformer["uuid"],
# "canthisbeanything": "hi",
# }
# )
# moletype = new_database_client.create_entry(
# database_name="new_database",
# types="molecule_type",
# data={
# "name": "test_type"
# }
# )
# new_database_client.create_entry(
# database_name="new_database",
# types="molecule",
# data={
# "smiles": "def",
# "molecule_type_id": moletype["uuid"],
# }
# )
@pytest.fixture(autouse=True, scope="class")
def insert_dummy_data(self, new_database_client, new_database):
molecule = new_database_client.create_entry(
database_name="new_database",
types="molecule",
data={
"smiles": "abc",
"metadata": {
"test": "test",
"test_filters": "abc",
},
},
)
conformer = new_database_client.create_entry(
database_name="new_database",
types="conformer",
data={
"x": [0],
"y": [1],
"z": [2],
"atomic_numbers": [2],
"molecule_id": molecule["uuid"],
},
)
software = new_database_client.create_entry(
database_name="new_database",
types="software",
data={
"name": "cp2k",
"version": "v1.0",
},
)
new_database_client.create_entry(
database_name="new_database",
types="calculation",
data={
"conformer_id": conformer["uuid"],
"software_id": software["uuid"],
"output_conformer_id": conformer["uuid"],
},
)
event = new_database_client.create_entry(
database_name="new_database",
types="molecule_type",
data={
"name": "test_type",
},
)
new_database_client.create_entry(
database_name="new_database",
types="molecule",
data={
"smiles": "def",
"molecule_type_id": event["uuid"],
},
)
def test_simple_query(self, new_database_client):
pandas = new_database_client.query_database(
database_name="new_database", types="molecule"
)
assert len(pandas) == 2
pandas = new_database_client.query_database(
database_name="new_database",
types=["molecule.smiles"],
)
pandas = new_database_client.query_database(
database_name="new_database",
types=["molecule_type.name"],
)
pandas = new_database_client.query_database(
database_name="new_database",
types=["molecule.smiles", "molecule_type.name"],
)
assert len(pandas) == 2
assert "molecule.smiles" in pandas.columns
assert "molecule_type.name" in pandas.columns
def test_query_with_field(self, new_database_client):
pandas = new_database_client.query_database(
database_name="new_database", types="molecule.metadata.test"
)
assert len(pandas) == 2
assert pandas.iloc[0]["molecule.metadata.test"] == "test"
assert pandas.iloc[1]["molecule.metadata.test"] is None
pandas = new_database_client.query_database(
database_name="new_database",
types=["molecule.metadata.test", "molecule.smiles"],
)
def test_filters(self, new_database_client):
pandas = new_database_client.query_database(
database_name="new_database",
types="molecule",
filters={
"type": "molecule.smiles",
"op": "==",
"value": "abc",
},
)
assert len(pandas) == 1
assert pandas.iloc[0]["smiles"] == "abc"
pandas = new_database_client.query_database(
database_name="new_database",
types="molecule",
filters={
"type": "molecule.smiles",
"op": "==",
"value": "molecule.metadata.test_filters",
},
)
assert len(pandas) == 1
def test_bad_query(self, new_database_client):
with pytest.raises(MolarBackendError):
new_database_client.query_database(
database_name="doesntexist",
types="molecule",
)
with pytest.raises(TypeError):
new_database_client.query_database(
database_name="new_database",
)
| 35.394608 | 114 | 0.569559 | 1,465 | 14,441 | 5.357679 | 0.137201 | 0.140145 | 0.123455 | 0.087909 | 0.649 | 0.568862 | 0.534336 | 0.493566 | 0.46146 | 0.428972 | 0 | 0.005287 | 0.318953 | 14,441 | 407 | 115 | 35.481572 | 0.792781 | 0.192438 | 0 | 0.314607 | 0 | 0 | 0.161985 | 0.033019 | 0 | 0 | 0 | 0 | 0.149813 | 1 | 0.104869 | false | 0.007491 | 0.011236 | 0 | 0.138577 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b873800b8abe1f462d2216a00cdedb8b89f1ebf0 | 3,015 | py | Python | threepress/load-for-search.py | srilatha44/threepress | 263b8aee8353806bd860bc22daf019e2155b8d0f | [
"BSD-3-Clause"
] | 2 | 2020-05-03T16:54:33.000Z | 2021-11-24T21:05:05.000Z | threepress/load-for-search.py | srilatha44/threepress | 263b8aee8353806bd860bc22daf019e2155b8d0f | [
"BSD-3-Clause"
] | 1 | 2022-02-12T09:20:29.000Z | 2022-02-12T09:20:29.000Z | threepress/load-for-search.py | srilatha44/threepress | 263b8aee8353806bd860bc22daf019e2155b8d0f | [
"BSD-3-Clause"
] | 1 | 2022-02-12T09:02:02.000Z | 2022-02-12T09:02:02.000Z | #!/usr/bin/env python
import sys, os, logging
from lxml import etree
from datetime import datetime
from settings import TEI
logging.basicConfig(level=logging.WARNING)
if not len(sys.argv) == 2:
logging.error("Usage: load-for-search path-to-tei-xml")
sys.exit(2)
parser = etree.XMLParser(remove_blank_text=True)
xml = etree.parse(sys.argv[1], parser)
sys.path.append('/home/liza/threepress')
os.environ['DJANGO_SETTINGS_MODULE'] = 'threepress.settings'
from search.models import Document
logging.info("Current documents loaded: " + ', '.join([t.title for t in Document.objects.all()]))
def xpath(field, xml):
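    """Return the first match for `field`: stripped element text for
    element nodes, the raw value for attribute paths such as '@xml:id',
    or u"" when nothing matches."""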
t1 = xml.xpath(field, namespaces={'tei': TEI})
if t1:
x = t1[0]
if hasattr(x, 'text'):
return x.text.strip()
return x
return u""
def chapter(xml_root, db_obj, document, ordinal_start):
chapter_ordinal = ordinal_start
chapter_count = len(xml_root.xpath("tei:div[@type='chapter']", namespaces={'tei': TEI}))
if chapter_count == 1:
chapter_default_name = 'Complete story'
else:
chapter_default_name = 'Chapter'
for chapter in xml_root.xpath("tei:div[@type='chapter']", namespaces={'tei': TEI}):
chapter_id = xpath('@xml:id', chapter)
chapter_title = xpath('tei:head[1]', chapter) or chapter_default_name
content = etree.tostring(chapter, encoding='utf-8', pretty_print=True, xml_declaration=False)
logging.debug("Setting ordinal to %d " % chapter_ordinal)
c = db_obj.chapter_set.create(id=chapter_id,
title=chapter_title,
document=document,
ordinal=chapter_ordinal,
content=content)
chapter_ordinal += 1
return chapter_ordinal
title = xpath('//tei:title', xml)
author = xpath('//tei:author', xml)
id = xpath('/tei:TEI/@xml:id', xml)
d = Document(id=id,
title=title,
author=author,
add_date=datetime.now(),
pub_date=datetime.now()
)
d.save()
logging.info("Adding content for id %s" % d.id)
chapter_ordinal = 1
# Do we have parts?
if len(xml.xpath("//tei:div[@type='part']", namespaces={'tei': TEI})) > 0:
part_ordinal = 1
for part in xml.xpath("//tei:div[@type='part']", namespaces={'tei': TEI}):
part_id = xpath('@xml:id', part)
part_title = xpath('tei:head[1]', part)
logging.debug("Adding part", part_title.encode('utf-8'))
p = d.part_set.create(id=part_id,
title=part_title,
ordinal=part_ordinal,
label='part')
chapter_ordinal = chapter(part, p, d, chapter_ordinal)
part_ordinal += 1
else:
logging.info("Adding chapters only")
chapter_ordinal = chapter(xml.xpath("//tei:body", namespaces={'tei': TEI})[0], d, d, chapter_ordinal)
logging.debug(d.chapter_set.all())
| 30.765306 | 105 | 0.602653 | 388 | 3,015 | 4.559278 | 0.309278 | 0.079141 | 0.054268 | 0.033917 | 0.114189 | 0.093838 | 0.093838 | 0.093838 | 0.093838 | 0.050876 | 0 | 0.008036 | 0.257048 | 3,015 | 97 | 106 | 31.082474 | 0.781696 | 0.012604 | 0 | 0.028986 | 0 | 0 | 0.148485 | 0.046128 | 0 | 0 | 0 | 0 | 0 | 1 | 0.028986 | false | 0 | 0.072464 | 0 | 0.15942 | 0.014493 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b8766eb92d1fe0bf2fc7f955638c73f8ecd6478b | 2,030 | py | Python | contrib/whisper-auto-update.py | timgates42/whisper | 8d21c5694bcf907e9b7318e4c198b1a4a7b25f71 | [
"Apache-2.0"
] | 833 | 2015-01-01T00:13:28.000Z | 2022-03-29T16:10:35.000Z | contrib/whisper-auto-update.py | timgates42/whisper | 8d21c5694bcf907e9b7318e4c198b1a4a7b25f71 | [
"Apache-2.0"
] | 221 | 2015-01-06T00:51:34.000Z | 2022-01-06T18:57:05.000Z | contrib/whisper-auto-update.py | timgates42/whisper | 8d21c5694bcf907e9b7318e4c198b1a4a7b25f71 | [
"Apache-2.0"
] | 237 | 2015-01-08T03:08:09.000Z | 2022-03-31T01:55:33.000Z | #!/usr/bin/env python
import sys
import time
import signal
import optparse
try:
import whisper
except ImportError:
raise SystemExit('[ERROR] Please make sure whisper is installed properly')
# update this callback to do the logic you want.
# a future version could use a config while in which this fn is defined.
def update_value(timestamp, value):
if value is None:
return value
return value * 1024 * 1024 * 1024
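# An alternative callback sketch (hypothetical, shown only as an example of
# the note above): scale datapoints down instead of up, e.g. bytes to GiB.
# Swap it in for update_value if that is the transformation you need.
def update_value_bytes_to_gib(timestamp, value):
    if value is None:
        return value
    return value / (1024.0 * 1024.0 * 1024.0)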
# Ignore SIGPIPE
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
now = int(time.time())
yesterday = now - (60 * 60 * 24)
option_parser = optparse.OptionParser(usage='''%prog [options] path''')
option_parser.add_option(
'--from', default=yesterday, type='int', dest='_from',
help=("Unix epoch time of the beginning of "
"your requested interval (default: 24 hours ago)"))
option_parser.add_option(
'--until', default=now, type='int',
help="Unix epoch time of the end of your requested interval (default: now)")
option_parser.add_option(
'--pretty', default=False, action='store_true',
help="Show human-readable timestamps instead of unix times")
(options, args) = option_parser.parse_args()
if len(args) < 1:
option_parser.print_usage()
sys.exit(1)
path = args[0]
from_time = int(options._from)
until_time = int(options.until)
try:
data = whisper.fetch(path, from_time, until_time)
if not data:
raise SystemExit('No data in selected timerange')
(timeInfo, values_old) = data
except whisper.WhisperException as exc:
raise SystemExit('[ERROR] %s' % str(exc))
(start, end, step) = timeInfo
t = start
for value_old in values_old:
value_str_old = str(value_old)
value_new = update_value(t, value_old)
value_str_new = str(value_new)
if options.pretty:
timestr = time.ctime(t)
else:
timestr = str(t)
print("%s\t%s -> %s" % (timestr, value_str_old, value_str_new))
try:
if value_new is not None:
whisper.update(path, value_new, t)
t += step
except whisper.WhisperException as exc:
raise SystemExit('[ERROR] %s' % str(exc))
| 26.363636 | 78 | 0.709852 | 303 | 2,030 | 4.630363 | 0.40264 | 0.051319 | 0.042766 | 0.044904 | 0.161083 | 0.118318 | 0.086957 | 0.086957 | 0.086957 | 0.086957 | 0 | 0.013658 | 0.170443 | 2,030 | 76 | 79 | 26.710526 | 0.819477 | 0.075369 | 0 | 0.175439 | 0 | 0 | 0.202883 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.017544 | false | 0 | 0.105263 | 0 | 0.157895 | 0.035088 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b8779de3bf26b6a5d91aefea5b19035252b80392 | 3,865 | py | Python | src/main.py | floriansto/data-backup-tool | d8ba9019467633126cb0de3922c949686fb8cb21 | [
"MIT"
] | null | null | null | src/main.py | floriansto/data-backup-tool | d8ba9019467633126cb0de3922c949686fb8cb21 | [
"MIT"
] | null | null | null | src/main.py | floriansto/data-backup-tool | d8ba9019467633126cb0de3922c949686fb8cb21 | [
"MIT"
] | null | null | null | #!/usr/bin/python
import sys
import os
import yaml
import click
import logging
import subprocess
from logging.handlers import RotatingFileHandler
from datetime import datetime
from Init import Init
from Backup import Backup
@click.command()
@click.option('-s', '--ssh', is_flag=True, default=False, help='Use ssh connection to get backup files')
@click.option('-h', '--host', default='127.0.0.1', type=str, help='When using ssh, connect to this host. May be hostname or ip adress')
@click.option('-p', '--port', default=22, type=int, help='When using ssh, use this port')
@click.option('-u', '--user', default='root', type=str, help='When using ssh, use this user')
@click.option('-n', '--no-relatives', is_flag=True, default=False, help='Do not use relative path names. Disables the -R option of rsync.')
@click.option('-v', '--verbose', is_flag=True, default=False, help='Additional output to the logfile')
@click.argument('config', type=str)
def main(config, host, port, user, ssh, no_relatives, verbose):
now = datetime.now()
logfile = '/var/log/dbt/backup_{}.log'.format(host)
if not os.path.exists(os.path.dirname(logfile)):
os.makedirs(os.path.dirname(logfile))
rotate_logs = os.path.exists(logfile)
loglevel = logging.DEBUG if verbose else logging.INFO
logger = logging.getLogger()
logger.setLevel(loglevel)
handler = RotatingFileHandler(logfile, maxBytes=50000, backupCount=10)
handler.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)-8s %(filename)-10s %(lineno)-4d %(message)s'))
logger.addHandler(handler)
if rotate_logs:
logger.handlers[0].doRollover()
logger.info('==============================================')
# Parse configuration yaml file
if config is None or not os.path.isfile(config):
logger.error('Error: invalid config file: {}'.format(config))
raise FileNotFoundError
lockfile = config + '.lock'
if os.path.exists(lockfile):
logger.error('{} exists in the filesystem'.format(lockfile))
raise FileExistsError
open(lockfile, 'a').close()
with open(config) as f:
yml_config = yaml.safe_load(f)
yml_config['target_dir'] = yml_config['target_dir'].rstrip('/')
yml_config['user'] = user
yml_config['port'] = port
yml_config['host'] = host
yml_config['no_rels'] = no_relatives
yml_config['ssh'] = ssh
yml_config['lockfile'] = lockfile
logger.debug('Backup invoked with the following options:')
logger.info(' Configuration file: {}'.format(config))
logger.debug(' Don''t use relative paths: {}'.format(no_relatives))
if ssh:
logger.debug(' ssh: {}'.format(ssh))
logger.debug(' host: {}'.format(host))
logger.debug(' user: {}'.format(user))
logger.debug(' port: {}'.format(port))
cmd = ['nc', '-z', '-v', host, str(port)]
ret = subprocess.run(cmd, stderr=subprocess.PIPE)
if ret.returncode != 0:
logger.error('Port {} is not open on {}'.format(port, host))
logger.error(ret.stderr)
exit(ret.returncode)
# Check for doubled entries in the prio field
prios = []
for i in yml_config['intervals']:
prios.append(i['prio'])
if len(prios) != len(set(prios)):
logger.error('Double defined priorities in {} found'.format(config))
raise KeyError
# Setup base folders and if needed create a new full backup
init = Init(now, yml_config)
backup = Backup(yml_config, init.get_backup_target(), now)
os.remove(lockfile)
end = datetime.now()
seconds = (end - now).total_seconds()
hours, remainder = divmod(seconds, 3600)
minutes, seconds = divmod(remainder, 60)
logger.info('Execution time: {} hrs {} mins {} secs'.format(hours, minutes, seconds))
if __name__ == '__main__':
main()
| 36.809524 | 139 | 0.65097 | 508 | 3,865 | 4.88189 | 0.385827 | 0.043548 | 0.012097 | 0.020565 | 0.062097 | 0.062097 | 0 | 0 | 0 | 0 | 0 | 0.008601 | 0.18784 | 3,865 | 104 | 140 | 37.163462 | 0.781459 | 0.038292 | 0 | 0 | 0 | 0.0125 | 0.229526 | 0.019397 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0125 | false | 0 | 0.125 | 0 | 0.1375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b87a60185253811912f711c62dc930e470f30573 | 5,990 | py | Python | __main__.py | Lemmie101/TextAnalysis | a57f37d05dc11369a334ccae70a6236cec89b21b | [
"MIT"
] | null | null | null | __main__.py | Lemmie101/TextAnalysis | a57f37d05dc11369a334ccae70a6236cec89b21b | [
"MIT"
] | null | null | null | __main__.py | Lemmie101/TextAnalysis | a57f37d05dc11369a334ccae70a6236cec89b21b | [
"MIT"
] | null | null | null | import math
from multiprocessing.pool import ThreadPool
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, AutoModelForSequenceClassification, \
AutoModelForTokenClassification, pipeline
from view import View
# Model names
summarization_model = "sshleifer/distilbart-cnn-12-6"
classification_model = "distilbert-base-uncased-finetuned-sst-2-english"
ner_model = "dslim/bert-base-NER"
def download_summarization_model():
AutoTokenizer.from_pretrained(summarization_model)
AutoModelForSeq2SeqLM.from_pretrained(summarization_model)
def download_classification_model():
AutoTokenizer.from_pretrained(classification_model)
AutoModelForSequenceClassification.from_pretrained(classification_model)
def download_ner_model():
AutoTokenizer.from_pretrained(ner_model)
    AutoModelForTokenClassification.from_pretrained(ner_model)
def summarize(text: str, min_length: int, max_length: int) -> str:
pipe = pipeline(task="summarization", model=summarization_model)
summary = pipe(text, min_length=min_length, max_length=max_length)[0].get('summary_text').strip()
return ".".join(summary.split(" .")).strip()
def classify(text: str):
pipe = pipeline(task="text-classification", model=classification_model)
results = pipe(text)[0]
sentiment = results.get('label')
confidence_level = "{0}%".format(round(results.get('score') * 100, 1))
return sentiment, confidence_level
def named_entity_recognition(text: str):
model = AutoModelForTokenClassification.from_pretrained(ner_model)
tokenizer = AutoTokenizer.from_pretrained(ner_model)
pipe = pipeline('ner', model=model, tokenizer=tokenizer)
results = pipe(text)
person_list = []
organisation_list = []
location_list = []
misc_list = []
def append_entity_list(_entity_name, _entity_type):
if "PER" in _entity_type:
person_list.append(_entity_name)
elif "ORG" in _entity_type:
organisation_list.append(_entity_name)
elif "LOC" in _entity_type:
location_list.append(_entity_name)
elif "MIS" in _entity_type:
misc_list.append(_entity_name)
"""
Sample results:
[{'entity': 'B-LOC', 'score': 0.9996414, 'index': 113, 'word': 'Northern', 'start': 598, 'end': 606},
{'entity': 'I-LOC', 'score': 0.9991061, 'index': 114, 'word': 'Ireland', 'start': 607, 'end': 614},
{'entity': 'B-LOC', 'score': 0.99974483, 'index': 116, 'word': 'Wales', 'start': 619, 'end': 624},
{'entity': 'B-LOC', 'score': 0.9777434, 'index': 118, 'word': 'Down', 'start': 626, 'end': 630},
{'entity': 'I-LOC', 'score': 0.9698499, 'index': 119, 'word': '##ing', 'start': 630, 'end': 633},
{'entity': 'I-LOC', 'score': 0.9832339, 'index': 120, 'word': 'Street', 'start': 634, 'end': 640},
{'entity': 'B-MISC', 'score': 0.9880397, 'index': 177, 'word': 'Co', 'start': 953, 'end': 955},
{'entity': 'I-MISC', 'score': 0.7533177, 'index': 178, 'word': '##vid', 'start': 955, 'end': 958}]
As we loop through the results, we will check whether each entity is connected to the entity before. If it is, we
will combine them together.
If it is not connected, we will append the previous entity name into their respective list and save the current
entity name.
"""
entity_name = ""
for index, current_result in enumerate(results):
if index == 0:
entity_name = current_result.get("word")
continue
previous_result = results[index - 1]
if current_result.get("start") - 1 == previous_result.get("end"):
entity_name = "{0} {1}".format(entity_name, current_result.get("word"))
elif current_result.get("start") == previous_result.get("end"):
entity_name = "{0}{1}".format(entity_name, current_result.get("word").replace("#", ""))
else:
entity_type = results[index - 1].get("entity")
append_entity_list(entity_name, entity_type)
entity_name = current_result.get("word")
# Save the last entity.
if index + 1 == len(results):
entity_type = results[-1].get("entity")
append_entity_list(entity_name, entity_type)
return list(set(person_list)), list(set(organisation_list)), list(set(location_list)), list(set(misc_list))
view = View()
def summarize_callback(summary):
view.set_summary(summary)
view.set_article_word_count(str(len(view.get_article().split())) + " words")
view.set_summary_word_count(str(len(summary.split())) + " words")
view.model_completed_analysis()
def classify_callback(results):
view.set_sentiment(results[0])
view.set_confidence(results[1])
view.model_completed_analysis()
def ner_callback(results):
view.set_person(", ".join(results[0]))
view.set_organisation(", ".join(results[1]))
view.set_location(", ".join(results[2]))
view.set_misc(", ".join(results[3]))
view.model_completed_analysis()
def analyse():
view.disable_analyse_button()
article = view.get_article()
if view.get_option() == "PERCENTAGE":
word_count = len(article.split())
min_length = word_count / 100.0 * view.get_min_parameter()
min_length = int(math.ceil(min_length))
max_length = word_count / 100.0 * view.get_max_parameter()
max_length = int(max_length)
else:
min_length = view.get_min_parameter()
max_length = view.get_max_parameter()
pool = ThreadPool(processes=3)
pool.apply_async(summarize, (article, min_length, max_length), callback=summarize_callback)
pool.apply_async(classify, (article,), callback=classify_callback)
pool.apply_async(named_entity_recognition, (article,), callback=ner_callback)
if __name__ == '__main__':
download_summarization_model()
download_classification_model()
download_ner_model()
view.analyse_button.configure(command=analyse)
view.run()
| 34.825581 | 117 | 0.676795 | 735 | 5,990 | 5.284354 | 0.246259 | 0.041195 | 0.013903 | 0.022657 | 0.180484 | 0.096807 | 0.081359 | 0.058702 | 0.058702 | 0.058702 | 0 | 0.035707 | 0.181803 | 5,990 | 171 | 118 | 35.02924 | 0.756784 | 0.005509 | 0 | 0.089109 | 0 | 0 | 0.055613 | 0.01583 | 0 | 0 | 0 | 0 | 0 | 1 | 0.108911 | false | 0 | 0.039604 | 0 | 0.178218 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b87c9108b5567b69fe89a7eb022614ceeeece3be | 1,164 | py | Python | app/io_manager.py | danvf/py-challenge | 3d9b3469b7491dc2cf639a58a995a3f456da08f9 | [
"MIT"
] | null | null | null | app/io_manager.py | danvf/py-challenge | 3d9b3469b7491dc2cf639a58a995a3f456da08f9 | [
"MIT"
] | null | null | null | app/io_manager.py | danvf/py-challenge | 3d9b3469b7491dc2cf639a58a995a3f456da08f9 | [
"MIT"
] | null | null | null | from phone.phone_interface import PhoneInterface
from util import constants
def read_input(input_file, phone: PhoneInterface) -> str:
    output_text = []
    # Map each command constant to the bound phone method; the method is
    # invoked lazily, only when its command actually appears in the input.
    use_phone = {
        constants.PRESS_BUTTON_CALL: phone.press_button_call,
        constants.PRESS_BUTTON_DISMISS: phone.press_button_dismiss,
        constants.FLAG_AVATAR_DISPLAYED: phone.flag_avatar_displayed,
        constants.FLAG_POPUP_NO_NETWORK: phone.flag_popup_no_network,
        constants.FLAG_POPUP_CALL_DISMISSED: phone.flag_popup_call_dismissed,
        constants.FLAG_POPUP_ENDING_CALL: phone.flag_popup_ending_call,
    }
    with open(input_file, 'r') as i:
        input_text = i.read()
    for input_line in input_text.splitlines():
        next_entry = input_line.lower()
        if next_entry in use_phone:
            output_text.append(use_phone[next_entry]())
        else:
            output_text.append(constants.NONEXISTENT_INPUT)
        output_text.append('\n')
return ''.join(output_text)
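# Usage sketch (file names here are hypothetical; a concrete PhoneInterface
# implementation is assumed to be provided by the phone package):
#   phone = ...  # some PhoneInterface implementation
#   write_output('results.txt', read_input('commands.txt', phone))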
def write_output(output_file, output_text) -> None:
with open(output_file, 'w') as o:
o.truncate(0)
o.write(output_text)
| 31.459459 | 79 | 0.696735 | 151 | 1,164 | 5 | 0.344371 | 0.092715 | 0.071523 | 0.047682 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001085 | 0.207904 | 1,164 | 36 | 80 | 32.333333 | 0.817787 | 0 | 0 | 0 | 0 | 0 | 0.003436 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.074074 | false | 0 | 0.074074 | 0 | 0.185185 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b87ebd308bee46667aa1a5dc879a124c982c0957 | 19,950 | py | Python | application.py | marco-83/shopping | 7ddd38ba0d006846763944fbb80cb8a437023f5a | [
"MIT"
] | null | null | null | application.py | marco-83/shopping | 7ddd38ba0d006846763944fbb80cb8a437023f5a | [
"MIT"
] | null | null | null | application.py | marco-83/shopping | 7ddd38ba0d006846763944fbb80cb8a437023f5a | [
"MIT"
] | null | null | null | import os
#export FLASK_APP=application
import sqlite3
import datetime
import calendar
import itertools
from collections import defaultdict
from flask import Flask, flash, jsonify, redirect, render_template, request, session, url_for
from flask_session import Session
from tempfile import mkdtemp
from werkzeug.exceptions import default_exceptions, HTTPException, InternalServerError
from werkzeug.security import check_password_hash, generate_password_hash
from helpers import apology, login_required, lookup, usd
# Configure application
app = Flask(__name__)
# Ensure templates are auto-reloaded
app.config["TEMPLATES_AUTO_RELOAD"] = True
# Ensure responses aren't cached
@app.after_request
def after_request(response):
response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
response.headers["Expires"] = 0
response.headers["Pragma"] = "no-cache"
return response
## Custom filter
#app.jinja_env.filters["usd"] = usd
# Configure session to use filesystem (instead of signed cookies)
app.config["SESSION_FILE_DIR"] = mkdtemp()
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)
# Configure CS50 Library to use SQLite database
#db = SQL("sqlite:///shopping.db")
#conn = sqlite3.connect('shopping.db')
#db = conn.cursor()
## Make sure API key is set
#if not os.environ.get("API_KEY"):
# raise RuntimeError("API_KEY not set")
@app.route("/", methods=["GET", "POST"])
@login_required
def index():
"""Main page"""
ID = session["user_id"]
if request.method == "POST":
units = request.form.get("unit_select")
conn = sqlite3.connect('shopping.db')
db = conn.cursor()
# Update user's units
db.execute("UPDATE users SET units = ? WHERE id = ?", (units, ID))
        # TODO: optionally update pantry and menu entries to match the new units
conn.commit()
conn.close()
return render_template("index.html", units_selected=units)
else:
units = units_lookup(ID)[0]
return render_template("index.html", units_selected=units)
@app.route("/shopping", methods=["GET", "POST"])
@login_required
def shopping_list():
"""Generate shopping list"""
ID = session["user_id"]
if request.method == "POST":
date = request.form.get("week_beginning")
session["date"] = date
return redirect("shopping")
else:
if session.get("date") is None:
date = datetime.date.today().strftime("%d-%m-%Y")
session["date"] = date
else:
date = session["date"]
weekday = findDay(date)
dates = dates_days(date)
all_data = meal_plan_import(ID, dates)
# Filter on relevant dates
dates_in_all_data = list(filter(lambda i: i['date'] in dates.keys(), all_data))
# Select meals (returns a list)
meals = list(map(lambda d: d['meal'], dates_in_all_data))
# a = ingredients_lookup(ID, meals[1])[1]["ingredient"]
all_ingredients = []
all_quantity = []
all_units = []
for m in meals:
recipes = ingredients_lookup(ID, m)
for item in recipes:
all_ingredients.append(item['ingredient'])
all_quantity.append(item['quantity'])
all_units.append(item['unit'])
all_data = [{'ingredient': d, 'quantity': n, 'unit': m} for d, n, m in zip(all_ingredients, all_quantity, all_units)]
all_ingredients = []
all_quantity = []
all_units = []
pantry = pantry_lookup(ID)
for item in pantry:
all_ingredients.append(item['ingredient'])
all_quantity.append(item['quantity'])
all_units.append(item['unit'])
# Pantry quantities are negative
all_pantry = [{'ingredient': d, 'quantity': -n, 'unit': m} for d, n, m in
zip(all_ingredients, all_quantity, all_units)]
# Merge the two lists. Now all_data is the shopping list (positive quantities) and pantry (negative quantities)
all_data = all_data + all_pantry
# Sum quantity for each unique ingredient & quantity pair.
# This will net out what is required for shopping and what is in the pantry already
counts = defaultdict(lambda: [0, 0])
for line in all_data:
entry = counts[(line['ingredient'], line['unit'])]
entry[0] += line['quantity']
entry[1] += 1
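        # e.g. counts[('eggs', 'unit')] == [4, 2] means a net quantity of 4
        # from 2 contributing rows (recipe demand minus pantry stock) for
        # that ingredient/unit pair (illustrative values).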
all_data_net = [{'ingredient': k[0], 'unit': k[1], 'quantity': v[0]}
for k, v in counts.items()]
# Remove negative entries, which are things left in the pantry
shopping_list = list(filter(lambda i: i['quantity'] > 0, all_data_net))
return render_template("shopping.html", weekday=weekday, date=date, shopping_list=shopping_list)
def units_lookup(user_id):
"""Show ingredients"""
conn = sqlite3.connect('shopping.db')
db = conn.cursor()
t = (user_id, )
# Query database for user's units
c = db.execute("SELECT units FROM users WHERE id = ?", t)
output = c.fetchone()
conn.close()
return output
def findDay(date):
date_convert = datetime.datetime.strptime(date, '%d-%m-%Y').weekday()
return calendar.day_name[date_convert]
def findDate(date):
date_convert = datetime.datetime.strptime(date, '%d-%m-%Y')
return date_convert
def dates_days(date):
"""Dictionary with dates (keys) and weekdays (values)"""
keys = []
for i in range(0, 7):
keys.append((findDate(date) + datetime.timedelta(days=i)).strftime('%d-%m-%Y'))
values = []
for i in keys:
values.append(findDay(i))
dates = dict(zip(keys, values))
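    # e.g. dates_days("01-03-2021") -> {'01-03-2021': 'Monday',
    # '02-03-2021': 'Tuesday', ..., '07-03-2021': 'Sunday'}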
return dates
def meals_lookup(user_id):
"""Query database for meals specified by user"""
conn = sqlite3.connect('shopping.db')
db = conn.cursor()
c = db.execute("SELECT meal FROM meals WHERE id = ?", (user_id,))
output = c.fetchall()
conn.close()
meals = []
for i in output:
meals.append(i[0])
return meals
def meal_plan_import(user_id, dates):
"""Import meal plan (if already created)"""
conn = sqlite3.connect('shopping.db')
db = conn.cursor()
all_dates = []
all_meal_numbers = []
all_meals = []
for i in list(dates.keys()):
c = db.execute("SELECT date, meal_number, meal FROM meal_plan WHERE id = ? AND date = ?", (user_id, i))
output = c.fetchall()
for tup in output:
all_dates.append(tup[0])
all_meal_numbers.append(tup[1])
all_meals.append(tup[2])
conn.close()
# Convert to a list of dictionaries (easier to look up)
all_data = [{'date': d, 'meal_no': n, 'meal': m} for d, n, m in zip(all_dates, all_meal_numbers, all_meals)]
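    # Illustrative shape: [{'date': '01-03-2021', 'meal_no': 1, 'meal': 'pasta'}]
    # where 'pasta' stands in for whatever meal the user planned.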
return all_data
@app.route("/plan", methods=["GET", "POST"])
@login_required
def plan():
"""Define meal plan"""
ID = session["user_id"]
if session.get("meals") is None:
meals = meals_lookup(ID)
session["meals"] = meals
else:
meals = session["meals"]
if request.method == "POST":
date = request.form.get("week_beginning")
weekday = findDay(date)
dates = dates_days(date)
session["date"] = date
session["weekday"] = weekday
session["dates"] = dates
all_data = meal_plan_import(ID, dates)
dates_in_all_data = list(filter(lambda i: i['date'] in dates.keys(), all_data))
return render_template("plan.html", date=date, weekday=weekday, dates=dates, meals=meals,
dates_in_all_data=dates_in_all_data)
else:
if session.get("date") is None:
date = datetime.date.today().strftime("%d-%m-%Y")
session["date"] = date
else:
date = session["date"]
if session.get("weekday") is None:
weekday = findDay(date)
session["weekday"] = weekday
else:
weekday = session["weekday"]
if session.get("dates") is None:
dates = dates_days(date)
session["dates"] = dates
else:
dates = session["dates"]
all_data = meal_plan_import(ID, dates)
# Only return all_data for the selected dates to plan.html
dates_in_all_data = list(filter(lambda i: i['date'] in dates.keys(), all_data))
return render_template("plan.html", date=date, weekday=weekday, dates=dates, meals=meals,
dates_in_all_data=dates_in_all_data)
@app.route("/meal_plan", methods=["GET", "POST"])
@login_required
def meal_plan():
"""Update meal plan"""
ID = session["user_id"]
date = session["date"]
dates_list = []
for i in range(0, 7):
dates_list.append((findDate(date) + datetime.timedelta(days=i)).strftime('%d-%m-%Y'))
meal_number_list = [str(1), str(2), str(3)]
all_form_items = list(
map(
lambda x: "".join(x),
itertools.product(["meal["], dates_list, ["_"], meal_number_list, ["]"])
)
)
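    # e.g. for a week starting 01-03-2021 this yields 21 keys of the form
    # "meal[01-03-2021_1]", "meal[01-03-2021_2]", ..., "meal[07-03-2021_3]"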
all_meals = []
for i in all_form_items:
all_meals.append(request.form.get(i))
all_dates = []
for i in all_form_items:
date = i.split('[', 1)[1].split('_')[0]
all_dates.append(date)
all_meal_numbers = []
for i in all_form_items:
meal_number = i.split('_', 1)[1].split(']')[0]
all_meal_numbers.append(meal_number)
# Combine all data into a list of dictionaries
all_data = [{'date': d, 'meal_no': n, 'meal': m} for d, n, m in zip(all_dates, all_meal_numbers, all_meals)]
all_data = list(filter(lambda i: i['meal'] is not None, all_data))
conn = sqlite3.connect('shopping.db')
db = conn.cursor()
# Update user's meal plan in database
for i in all_data:
db.execute("INSERT OR REPLACE INTO meal_plan (id, date, meal_number, meal) VALUES (?, ?, ?, ?)",
(ID, i.get("date"), i.get("meal_no"), i.get("meal")))
conn.commit()
conn.close()
return redirect("plan")
@app.route("/pantry", methods=["GET"])
@login_required
def pantry():
"""Show pantry"""
ingredients = pantry_lookup(user_id=session["user_id"])
return render_template("pantry.html", ingredients=ingredients)
@app.route("/pantry_add", methods=["GET", "POST"])
@login_required
def pantry_add():
"""Add items to the pantry"""
ingredient = request.form.get("update_ingredients[1]")
quantity = request.form.get("update_ingredients[2]")
units = request.form.get("update_ingredients[3]")
conn = sqlite3.connect('shopping.db')
db = conn.cursor()
t = (session["user_id"], ingredient, quantity, units)
    # Add the ingredient to the user's pantry
db.execute("INSERT INTO pantry VALUES(?, ?, ?, ?)", t)
conn.commit()
conn.close()
updated_pantry = pantry_lookup(user_id=session["user_id"])
return render_template("pantry.html", ingredients=updated_pantry)
@app.route("/pantry_delete", defaults={'ingredient': ''}) # If ingredient is blank, it can still be deleted.
@app.route("/pantry_delete/<ingredient>")
@login_required
def pantry_delete(ingredient):
"""Delete an ingredient from the pantry"""
conn = sqlite3.connect('shopping.db')
db = conn.cursor()
t = (session["user_id"], ingredient)
# Delete ingredient
db.execute("DELETE FROM pantry WHERE id = ? AND ingredient = ?", t)
conn.commit()
conn.close()
updated_pantry = pantry_lookup(user_id=session["user_id"])
return render_template("pantry.html", ingredients=updated_pantry)
def pantry_lookup(user_id):
"""Show ingredients"""
conn = sqlite3.connect('shopping.db')
conn.row_factory = sqlite3.Row # To get column names returned with SQL query. Result of fetchone is now a dictionary
db = conn.cursor()
t = (user_id, )
# Query database for ingredients in pantry
c = db.execute("SELECT * FROM pantry WHERE id = ?", t)
output = c.fetchall()
conn.close()
return output
def ingredients_lookup(user_id, meal):
"""Show ingredients"""
conn = sqlite3.connect('shopping.db')
conn.row_factory = sqlite3.Row # To get column names returned with SQL query. Result of fetchone is now a dictionary
db = conn.cursor()
t = (user_id, meal)
# Query database for ingredients in meal
c = db.execute("SELECT * FROM recipes WHERE id = ? AND meal = ?", t)
output = c.fetchall()
conn.close()
return output
@app.route("/meal", methods=["GET", "POST"])
@login_required
def meal():
"""Design your meal"""
if request.method == "POST":
session["meal_select"] = request.form.get("meal_select")
ingredients = ingredients_lookup(user_id=session["user_id"], meal=session["meal_select"])
return render_template("meal.html", meals=session["meals"], meal_selected=session["meal_select"],
ingredients=ingredients)
else:
meals = meals_lookup(session["user_id"])
session["meals"] = meals
return render_template("meal.html", meals=meals, meal_selected=None)
@app.route("/add_meal", methods=["POST"])
@login_required
def add_meal():
"""Design your meal"""
new_meal = request.form.get("new_meal")
conn = sqlite3.connect('shopping.db')
db = conn.cursor()
# Query database to check if user has already created a meal with that name
t = (session["user_id"], new_meal)
c = db.execute("SELECT * FROM meals WHERE id = ? AND meal = ?", t)
rows = c.fetchone()
# Ensure meal does not already exist
if rows is not None:
conn.close()
return apology("meal already exists", 400)
db.execute("INSERT INTO meals(id, meal) VALUES(?, ?)", t)
conn.commit()
conn.close()
return redirect("meal")
@app.route("/ingredients_delete", defaults={'ingredient': ''}) # If ingredient is blank, it can still be deleted.
@app.route("/ingredients_delete/<ingredient>")
@login_required
def ingredients_delete(ingredient):
"""Delete an ingredient from a meal"""
conn = sqlite3.connect('shopping.db')
db = conn.cursor()
t = (session["user_id"], session["meal_select"], ingredient)
# Delete ingredient
db.execute("DELETE FROM recipes WHERE id = ? AND meal = ? AND ingredient = ?", t)
conn.commit()
conn.close()
ingredients = ingredients_lookup(user_id=session["user_id"], meal=session["meal_select"])
return render_template("meal.html", meals=session["meals"], meal_selected=session["meal_select"],
ingredients=ingredients)
@app.route("/ingredients_add", methods=["GET", "POST"])
@login_required
def ingredients_add():
"""Add an ingredient to a meal"""
ingredient = request.form.get("update_ingredients[1]")
quantity = request.form.get("update_ingredients[2]")
units = request.form.get("update_ingredients[3]")
conn = sqlite3.connect('shopping.db')
db = conn.cursor()
# Ensure a meal has been selected
if session.get("meal_select") is None:
return apology("must select a meal", 400)
t = (session["user_id"], session["meal_select"], ingredient, quantity, units)
# Add ingredient to database
db.execute("INSERT INTO recipes VALUES(?, ?, ?, ?, ?)", t)
conn.commit()
conn.close()
updated_ingredients = ingredients_lookup(user_id=session["user_id"], meal=session["meal_select"])
return render_template("meal.html", meals=session["meals"], meal_selected=session["meal_select"],
ingredients=updated_ingredients)
@app.route("/check", methods=["GET"])
def check():
"""Return true if username available, else false, in JSON format"""
username = request.form.get("username")
conn = sqlite3.connect('shopping.db')
db = conn.cursor()
# Query database for usernames
taken = db.execute("SELECT username FROM users").fetchone()
conn.close()
return apology(str(taken), 400)
if not len(str(username)) > 0:
return jsonify(False)
for taken_username in taken:
if username == taken_username["username"]:
return jsonify(False), 400
return jsonify(True), 200
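# A client-side script (not included in this file) would typically call this
# endpoint asynchronously while the user types, e.g. via
# fetch("/check?username=" + value), and only enable the register button when
# the JSON response is true — a hypothetical usage sketch, since the
# templates are not part of this source.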
@app.route("/login", methods=["GET", "POST"])
def login():
"""Log user in"""
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
conn = sqlite3.connect('shopping.db')
        conn.row_factory = sqlite3.Row  # Rows now support access by column name, e.g. row["hash"]
db = conn.cursor()
# Query database for username
t = (request.form.get("username"),)
c = db.execute("SELECT * FROM users WHERE username = ?", t)
rows = c.fetchone()
conn.close()
# Ensure username exists and password is correct
if rows is None or not check_password_hash(rows["hash"], request.form.get("password")):
return apology("invalid username and/or password", 403)
# Remember which user has logged in
session["user_id"] = rows["id"]
# Redirect user to home page
return redirect("/")
# User reached route via GET (as by clicking a link or via redirect)
else:
return render_template("login.html")
@app.route("/logout")
def logout():
"""Log user out"""
# Forget any user_id
session.clear()
# Redirect user to login form
return redirect("/")
@app.route("/register", methods=["GET", "POST"])
def register():
"""Register user"""
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 400)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 400)
# Ensure password matches
elif request.form.get("password") != request.form.get("confirmation"):
return apology("passwords must match", 400)
conn = sqlite3.connect('shopping.db')
db = conn.cursor()
# Query database for username
t = (request.form.get("username"),)
c = db.execute("SELECT * FROM users WHERE username = ?", t)
rows = c.fetchone()
        # Ensure username does not already exist
        if rows is not None:
            conn.close()
            return apology("username already exists", 400)
# Add username and password to database
else:
username = request.form.get("username")
            pw_hash = generate_password_hash(request.form.get("password"))
            user_hash = (username, pw_hash)
db.execute("INSERT INTO users(username, hash) VALUES(?, ?)", user_hash)
# Remember which user has logged in
t = (request.form.get("username"),)
c = db.execute("SELECT id FROM users WHERE username = ?", t)
session["user_id"] = c.fetchone()[0]
conn.commit()
conn.close()
# Redirect user to home page
return redirect("/")
# User reached route via GET (as by clicking a link or via redirect)
else:
return render_template("register.html")
def errorhandler(e):
"""Handle error"""
if not isinstance(e, HTTPException):
e = InternalServerError()
return apology(e.name, e.code)
# Listen for errors
for code in default_exceptions:
app.errorhandler(code)(errorhandler)
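# default_exceptions (from werkzeug.exceptions) maps every standard HTTP
# status code to its HTTPException subclass; registering the same handler
# for each code routes all aborted requests through apology() instead of
# werkzeug's default error pages.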
| 29.252199 | 124 | 0.632231 | 2,574 | 19,950 | 4.782828 | 0.122766 | 0.018033 | 0.02843 | 0.033791 | 0.548128 | 0.505564 | 0.449598 | 0.407684 | 0.369832 | 0.356348 | 0 | 0.006337 | 0.232682 | 19,950 | 681 | 125 | 29.295154 | 0.797883 | 0.168872 | 0 | 0.505051 | 0 | 0 | 0.165304 | 0.012566 | 0 | 0 | 0 | 0 | 0 | 1 | 0.063131 | false | 0.025253 | 0.040404 | 0 | 0.212121 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b880cffc94f39f243feb59afb5660689d62a63c9 | 3,621 | py | Python | docs/auto_examples/plot_example2.py | MachineLearningBCAM/MRCpy | e397fa0443b98d8754798a59c8f5c8b28782d5f5 | [
"MIT"
] | 28 | 2021-03-22T09:41:16.000Z | 2022-03-15T18:21:23.000Z | docs/auto_examples/plot_example2.py | MachineLearningBCAM/MRCpy | e397fa0443b98d8754798a59c8f5c8b28782d5f5 | [
"MIT"
] | 1 | 2021-08-08T14:02:30.000Z | 2021-08-09T10:11:38.000Z | examples/plot_example2.py | MachineLearningBCAM/MRCpy | e397fa0443b98d8754798a59c8f5c8b28782d5f5 | [
"MIT"
] | 1 | 2021-08-09T08:06:26.000Z | 2021-08-09T08:06:26.000Z | # -*- coding: utf-8 -*-
"""
.. _ex2:
Example: Use of CMRC with different settings
============================================
Example of using CMRC with some of the common classification datasets with
different losses and feature mappings settings. We load the different datasets
and use 10-Fold Cross-Validation to generate the partitions for train and test.
We separate 1 partition each time for testing and use the others for training.
On each iteration we calculate
the classification error. We also calculate the mean training time.
You can check a more elaborated example in :ref:`ex_comp`.
"""
import time
import numpy as np
from sklearn import preprocessing
from sklearn.model_selection import StratifiedKFold
from MRCpy import CMRC
# Import the datasets
from MRCpy.datasets import *
# Data sets
loaders = [load_mammographic, load_haberman, load_indian_liver,
load_diabetes, load_credit]
dataName = ["mammographic", "haberman", "indian_liver",
"diabetes", "credit"]
def runCMRC(phi, loss):
res_mean = np.zeros(len(dataName))
res_std = np.zeros(len(dataName))
    # We fix the random seed so that the stratified k-fold performed
    # is the same through the different executions
random_seed = 0
    # Iterate through each of the datasets and fit the CMRC classifier.
for j, load in enumerate(loaders):
# Loading the dataset
X, Y = load(return_X_y=True)
r = len(np.unique(Y))
n, d = X.shape
# Print the dataset name
print(" ############## \n " + dataName[j] + " n= " + str(n) +
" , d= " + str(d) + ", cardY= " + str(r))
        # Create the CMRC object initialized with the corresponding parameters
clf = CMRC(phi=phi, loss=loss, use_cvx=True,
solver='MOSEK', max_iters=10000, s=0.3)
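        # Note: use_cvx=True with solver='MOSEK' assumes a working MOSEK
        # installation and licence; if MOSEK is unavailable, another CVXPY
        # solver (e.g. 'SCS') should work here, though runtimes may differ.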
# Generate the partitions of the stratified cross-validation
cv = StratifiedKFold(n_splits=10, random_state=random_seed,
shuffle=True)
cvError = list()
auxTime = 0
# Paired and stratified cross-validation
for train_index, test_index in cv.split(X, Y):
X_train, X_test = X[train_index], X[test_index]
y_train, y_test = Y[train_index], Y[test_index]
# Normalizing the data
            std_scale = preprocessing.StandardScaler().fit(X_train)
X_train = std_scale.transform(X_train)
X_test = std_scale.transform(X_test)
# Save start time for computing training time
startTime = time.time()
# Train the model
clf.fit(X_train, y_train)
# Save the training time
auxTime += time.time() - startTime
# Predict the class for test instances
y_pred = clf.predict(X_test)
            # Calculate the error made by the CMRC classifier
cvError.append(np.average(y_pred != y_test))
res_mean[j] = np.average(cvError)
res_std[j] = np.std(cvError)
# Calculating the mean training time
auxTime = auxTime / 10
print(" error= " + ": " + str(res_mean[j]) + " +/- " +
str(res_std[j]) + "\n avg_train_time= " + ": " +
str(auxTime) + ' secs' + "\n ############## \n\n")
if __name__ == '__main__':
    print('*** Example (CMRC with the additional '
          'marginal constraints) *** \n\n')
print('1. Using 0-1 loss and relu feature mapping \n\n')
runCMRC(phi='relu', loss='0-1')
print('2. Using log loss and relu feature mapping \n\n')
runCMRC(phi='relu', loss='log')
| 31.763158 | 79 | 0.615023 | 477 | 3,621 | 4.545073 | 0.358491 | 0.016605 | 0.019373 | 0.017528 | 0.055351 | 0.041513 | 0.041513 | 0.041513 | 0.041513 | 0.041513 | 0 | 0.009063 | 0.26871 | 3,621 | 113 | 80 | 32.044248 | 0.809668 | 0.339133 | 0 | 0 | 0 | 0 | 0.113176 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.02 | false | 0 | 0.12 | 0 | 0.14 | 0.1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b880ed0ca805aea893535008c791331fa638834b | 4,379 | py | Python | picodaqa/mpDataGraphs.py | GuenterQuast/picoDAQ | 92138bb16b6433a51e59f90dd12a587ee941f657 | [
"BSD-2-Clause"
] | 6 | 2018-03-19T16:39:11.000Z | 2021-06-22T20:24:16.000Z | picodaqa/mpDataGraphs.py | GuenterQuast/picoDAQ | 92138bb16b6433a51e59f90dd12a587ee941f657 | [
"BSD-2-Clause"
] | null | null | null | picodaqa/mpDataGraphs.py | GuenterQuast/picoDAQ | 92138bb16b6433a51e59f90dd12a587ee941f657 | [
"BSD-2-Clause"
] | 3 | 2018-02-12T02:39:44.000Z | 2021-04-12T18:27:34.000Z | # -*- coding: utf-8 -*-
'''effective Voltage and signal history in TKinter window'''
from __future__ import print_function, division, unicode_literals
from __future__ import absolute_import
import sys, time, numpy as np
import matplotlib
matplotlib.use('TkAgg')
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
if sys.version_info[0] < 3:
import Tkinter as Tk
import tkMessageBox as mbox
from tkFileDialog import asksaveasfilename
else:
import tkinter as Tk
from tkinter import messagebox as mbox
from tkinter.filedialog import asksaveasfilename
import matplotlib.pyplot as plt, matplotlib.animation as anim
# import DataGraphs class
from .DataGraphs import *
def mpDataGraphs(Q, conf, WaitTime=500.,
name='effective Voltage', XYmode= False, cmdQ=None):
'''effective Voltage of data passed via multiprocessing.Queue
Args:
    Q: multiprocessing.Queue() delivering event data
    conf: picoConfig object
'''
# Generator to provide data to animation
def yieldEvt_fromQ():
# random consumer of Buffer Manager, receives an event copy
# via a Queue from package mutiprocessing
    interval = WaitTime/1000.  # convert WaitTime from ms to s
cnt = 0
lagging = False
while True:
T0 = time.time()
if not Q.empty():
data = Q.get()
        if not isinstance(data, np.ndarray):
break # received end event
cnt+=1
yield (cnt, data)
else:
yield None # send empty event if no new data
# guarantee correct timing
dtcor = interval - time.time() + T0
if dtcor > 0. :
time.sleep(dtcor)
if lagging:
LblStatus.config(text='')
lagging=False
else:
lagging=True
LblStatus.config(text='! lagging !', fg='red')
# print('*==* yieldEvt_fromQ: received END event')
sys.exit()
def cmdResume():
cmdQ.put('R')
buttonP.config(text='Pause', fg='blue', state=Tk.NORMAL)
buttonR.config(state=Tk.DISABLED)
def cmdPause():
cmdQ.put('P')
buttonP.config(text='paused', fg='grey', state=Tk.DISABLED)
buttonR.config(state=Tk.NORMAL)
def cmdEnd():
cmdQ.put('E')
def cmdSave():
cmdPause()
try:
filename = asksaveasfilename(initialdir='.', initialfile='DGraphs.png',
title='select file name')
figDG.savefig(filename)
    except Exception:
pass
# ------- executable part --------
# print(' -> mpDataGraph starting')
DG = DataGraphs(WaitTime, conf, name, XYmode)
figDG = DG.fig
# generate a simple window for graphics display as a tk.DrawingArea
root = Tk.Tk()
root.wm_title("Data Graphs")
# handle destruction of top-level window
def _delete_window():
if mbox.askokcancel("Quit", "Really destroy main window ?"):
print("Deleting main window")
root.destroy()
root.protocol("WM_DELETE_WINDOW", _delete_window)
# Comand buttons
frame = Tk.Frame(master=root)
frame.grid(row=0, column=8)
frame.pack(padx=5, side=Tk.BOTTOM)
buttonE = Tk.Button(frame, text='End', fg='red', command=cmdEnd)
buttonE.grid(row=0, column=8)
blank = Tk.Label(frame, width=7, text="")
blank.grid(row=0, column=7)
clock = Tk.Label(frame)
clock.grid(row=0, column=5)
buttonSv = Tk.Button(frame,width=8,text='save',fg='purple', command=cmdSave)
buttonSv.grid(row=0, column=4)
buttonP = Tk.Button(frame,width=8,text='Pause',fg='blue', command=cmdPause)
buttonP.grid(row=0, column=3)
buttonR = Tk.Button(frame,width=8,text='Resume',fg='blue', command=cmdResume)
buttonR.grid(row=0, column=2)
buttonR.config(state=Tk.DISABLED)
LblStatus = Tk.Label(frame, width=13, text="")
LblStatus.grid(row=0, column=0)
canvas = FigureCanvasTkAgg(figDG, master=root)
canvas.draw()
canvas.get_tk_widget().pack(side=Tk.TOP, fill=Tk.BOTH, expand=1)
canvas._tkcanvas.pack(side=Tk.TOP, fill=Tk.BOTH, expand=1)
# set up matplotlib animation
tw = max(WaitTime-50., 0.5) # smaller than WaitTime to allow for processing
DGAnim = anim.FuncAnimation(figDG, DG, yieldEvt_fromQ,
interval = tw, init_func = DG.init,
blit=True, fargs=None, repeat=True, save_count=None)
# save_count=None is a (temporary) work-around
# to fix memory leak in animate
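  # FuncAnimation calls the DataGraphs instance DG once per frame with the
  # next value yielded by yieldEvt_fromQ; blit=True redraws only the artists
  # returned by DG, which keeps the Tk canvas updates cheap.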
Tk.mainloop()
print('*==* mpDataGraphs: terminating')
sys.exit()
| 28.809211 | 79 | 0.654944 | 573 | 4,379 | 4.954625 | 0.411867 | 0.019725 | 0.022543 | 0.039451 | 0.075731 | 0.045439 | 0.021134 | 0.021134 | 0.021134 | 0 | 0 | 0.012896 | 0.220827 | 4,379 | 151 | 80 | 29 | 0.819168 | 0.194337 | 0 | 0.091837 | 0 | 0 | 0.066303 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.071429 | false | 0.010204 | 0.132653 | 0 | 0.204082 | 0.030612 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b88134bdc71472e6866cb763c5674cdc28d0f582 | 19,647 | py | Python | CrystFEL_Jupyter_utilities/panel.py | European-XFEL/CrystFEL-Jupyter-utilities | 0f7fabe51042fdb1951a4bf1aeb1ef3533ef1be4 | [
"BSD-3-Clause"
] | 3 | 2021-07-29T14:36:53.000Z | 2021-11-25T15:59:11.000Z | CrystFEL_Jupyter_utilities/panel.py | European-XFEL/CrystFEL-Jupyter-utilities | 0f7fabe51042fdb1951a4bf1aeb1ef3533ef1be4 | [
"BSD-3-Clause"
] | 12 | 2019-09-16T12:54:57.000Z | 2020-03-17T09:10:08.000Z | CrystFEL_Jupyter_utilities/panel.py | European-XFEL/CrystFEL-Jupyter-utilities | 0f7fabe51042fdb1951a4bf1aeb1ef3533ef1be4 | [
"BSD-3-Clause"
] | null | null | null | """Module representing detector.
Creates a detector list from a geometry file (crystfel type) and
matrix size for the image.
"""
import logging
import sys
import numpy as np
# remove all the handlers.
for handler in logging.root.handlers[:]:
logging.root.removeHandler(handler)
LOGGER = logging.getLogger(__name__)
# create console handler with a higher log level
ch = logging.StreamHandler()
# create formatter and add it to the handlers
formatter = logging.Formatter(
'%(levelname)s | %(filename)s | %(funcName)s | %(lineno)d | %(message)s\n')
ch.setFormatter(formatter)
# add the handlers to logger
LOGGER.addHandler(ch)
LOGGER.setLevel("INFO")
class Detector:
"""Representing a detector.
Attributes
----------
min_fs : int
Min index in a column.
min_ss : int
Min index in a row.
max_fs : int
Max index in a column.
max_ss : int
Max index in a row.
xfs : double
Fast scan directions, value x.
yfs : double
Fast scan directions, value y.
xss : double
Slow scan directions, value x.
yss : double
Slow scan directions, value y.
corner_x : double
Coordinates of the panel corner from geom file.
corner_y : double
Coordinates of the panel corner from geom file.
array : numpy.array
Detector data.
position : tuple
Panel coordinates on the final image.
peaks_search : list
List of peaks from the stream file.
peaks_reflection : list
Another peak list from the stream file. (peak like the
check-near-bragg script does).
"""
def __init__(self, image_size, name, min_fs, min_ss, max_fs, max_ss, xfs,
yfs, xss, yss, corner_x, corner_y, data):
"""
Parameters
----------
image_size : tuple
Image size.
name : Python unicode str (on py3)
The name of the detector.
min_fs : int
Min index in a column.
min_ss : int
Min index in a row.
max_fs : int
Max index in a column.
max_ss : int
Max index in a row.
xfs: double
Fast scan directions, value x.
yfs: double
Fast scan directions, value y.
xss: double
Slow scan directions, value x.
yss: double
Slow scan directions, value y.
corner_x : double
Coordinates of the panel corner from geom file.
corner_y : double
Coordinates of the panel corner from geom file.
data : numpy.array
Data from each panel from part of the data - dataset in h5 file.
"""
self.name = name
self.min_fs = min_fs
self.min_ss = min_ss
self.max_fs = max_fs
self.max_ss = max_ss
self.xfs = xfs
self.yfs = yfs
self.xss = xss
self.yss = yss
self.corner_x = corner_x
self.corner_y = corner_y
self.array = np.copy(data[self.min_ss: self.max_ss + 1,
self.min_fs: self.max_fs + 1])
# my position in matrix
self.position = (0, 0)
self.peaks_search = []
self.peaks_reflection = []
self.image_size = image_size
def get_peaks_search(self):
"""Returns peaks from peak search.
Returns
-------
        peaks_search : list
The peaks_search list.
"""
return self.peaks_search
def get_peaks_reflection(self):
"""Returns peaks from reflections measured after indexing
as in the script 'check-near-bragg'.
Returns
-------
        peaks_reflection : list
The peaks_reflection list.
"""
return self.peaks_reflection
def get_array_rotated(self, center_x, center_y):
"""Returns array data for each panel after rotation.
Parameters
----------
center_x : int
Displacement of centre x-axis.
center_y : int
Displacement of centre y-axis.
Returns
-------
array : numpy.array
The numpy.array for panel after rotation.
"""
self.type_rotation(center_x, center_y)
return self.array
def type_rotation(self, center_x, center_y):
"""By comparing xfs, yfs, xss and yss verifies which kind of rotation
should be applied.
Parameters
----------
center_x : int
Displacement of centre x-axis.
center_y : int
Displacement of centre y-axis.
"""
if (np.abs(self.xfs) < np.abs(self.xss) and
np.abs(self.yfs) > np.abs(self.yss)):
if self.xss > 0 and self.yfs < 0:
self.rot_y_x(center_x, center_y)
elif self.xss < 0 and self.yfs > 0:
self.rot_y_2x(center_x, center_y)
elif (np.abs(self.xfs) > np.abs(self.xss) and
np.abs(self.yfs) < np.abs(self.yss)):
if self.xfs < 0 and self.yss < 0:
self.rot_y(center_x, center_y)
elif self.xfs > 0 and self.yss > 0:
self.rot_x(center_x, center_y)
else:
LOGGER.critical("{} Unknown rotation!".format(self.name))
sys.exit(1)
def rot_x(self, center_x, center_y):
"""Rotation along x-axis, columns stay the same, rows are switched.
Parameters
----------
center_x : int
Displacement of centre x-axis.
center_y : int
Displacement of centre y-axis.
"""
# rotation x
self.array = self.array[::-1, :]
# The position of the panel
# position x
pos_x = int(np.round(self.image_size[0]/2.0 - self.corner_y -
self.array.shape[0], 0))
# position y
pos_y = int(np.round(self.image_size[1]/2.0 + self.corner_x, 0))
# position + displacement.
self.position = (pos_x + center_x, pos_y + center_y)
        # update peak coordinates in both peak lists:
for peak_search in self.peaks_search:
# for check peak detection
# setting the peak relative
# to the upper left corner of the panel
# default: upper left corner of the matrix data
peak_search['ss_px'] -= self.min_ss
peak_search['fs_px'] -= self.min_fs
# setting position after rotation
peak_search['ss_px'] = (self.array.shape[0] -
1 - peak_search['ss_px'])
posx = peak_search['fs_px'] + self.position[1]
posy = peak_search['ss_px'] + self.position[0]
# new position of the peak in the panel after rotation
peak_search['position'] = (posx, posy)
for peak_reflection in self.peaks_reflection:
# for script near bragg
# setting the peak relative
# to the upper left corner of the panel
# default: upper left corner of the matrix data
peak_reflection['ss_px'] -= self.min_ss
peak_reflection['fs_px'] -= self.min_fs
# setting position after rotation
peak_reflection['ss_px'] = (self.array.shape[0] - 1
- peak_reflection['ss_px'])
posx = peak_reflection['fs_px'] + self.position[1]
posy = peak_reflection['ss_px'] + self.position[0]
# new position of the peak in the panel after rotation
peak_reflection['position'] = (posx, posy)
def rot_y(self, center_x, center_y):
"""Rotation along y-axis, columns order is reversed,
rows stay the same.
Parameters
----------
center_x : int
Displacement of centre x-axis.
center_y : int
Displacement of centre y-axis.
"""
# rotation y
self.array = self.array[:, ::-1]
# The position of the panel
# position y
pos_y = (int(self.image_size[1]/2) + int(self.corner_x) -
int(self.array.shape[1]))
# position x
pos_x = int(self.image_size[0]/2) - int(self.corner_y)
# position + displacement.
self.position = (pos_x + center_x, pos_y + center_y)
        # update peak coordinates in both peak lists:
for peak_search in self.peaks_search:
# for check peak detection
# setting the peak relative
# to the upper left corner of the panel
# default: upper left corner of the matrix data
peak_search['ss_px'] -= self.min_ss
peak_search['fs_px'] -= self.min_fs
# setting position after rotation
peak_search['fs_px'] = (self.array.shape[1] -
1 - peak_search['fs_px'])
posx = peak_search['fs_px'] + self.position[1]
posy = peak_search['ss_px'] + self.position[0]
# new position of the peak in the panel after rotation
peak_search['position'] = (posx, posy)
for peak_reflection in self.peaks_reflection:
# for script near bragg
# setting the peak relative
# to the upper left corner of the panel
# default: upper left corner of the matrix data
peak_reflection['ss_px'] -= self.min_ss
peak_reflection['fs_px'] -= self.min_fs
# setting position after rotation
peak_reflection['fs_px'] = (self.array.shape[1] -
1 - peak_reflection['fs_px'])
posx = peak_reflection['fs_px'] + self.position[1]
posy = peak_reflection['ss_px'] + self.position[0]
# new position of the peak in the panel after rotation
peak_reflection['position'] = (posx, posy)
def rot_y_x(self, center_x, center_y):
"""Rotation along y=x diagonal.
Parameters
----------
center_x : int
Displacement of centre x-axis.
center_y : int
Displacement of centre y-axis.
"""
# rotation y=x diagonal
self.array = np.rot90(self.array)[:, ::-1]
# The position of the panel
# position y
pos_y = int(np.round(self.image_size[1]/2.0 + self.corner_x -
self.array.shape[1], 0))
# position x
pos_x = int(np.round(self.image_size[0]/2.0 - self.corner_y -
self.array.shape[0], 0))
# position + displacement.
self.position = (pos_x + center_x, pos_y + center_y)
        # update peak coordinates in both peak lists:
for peak_search in self.peaks_search:
# for check peak detection
# setting the peak relative
# to the upper left corner of the panel
# default: upper left corner of the matrix data
peak_search['ss_px'] -= self.min_ss
peak_search['fs_px'] -= self.min_fs
# setting position after rotation
old_fs_px = peak_search['fs_px']
old_ss_px = peak_search['ss_px']
peak_search['ss_px'] = self.array.shape[0] - old_fs_px - 1
peak_search['fs_px'] = self.array.shape[1] - old_ss_px - 1
posx = peak_search['fs_px'] + self.position[1]
posy = peak_search['ss_px'] + self.position[0]
# new position of the peak in the panel after rotation
peak_search['position'] = (posx, posy)
for peak_reflection in self.peaks_reflection:
# for script near bragg
# setting the peak relative
# to the upper left corner of the panel
# default: upper left corner of the matrix data
peak_reflection['ss_px'] -= self.min_ss
peak_reflection['fs_px'] -= self.min_fs
# setting position after rotation
old_fs_px = peak_reflection['fs_px']
old_ss_px = peak_reflection['ss_px']
peak_reflection['ss_px'] = self.array.shape[0] - old_fs_px - 1
peak_reflection['fs_px'] = self.array.shape[1] - old_ss_px - 1
posx = peak_reflection['fs_px'] + self.position[1]
posy = peak_reflection['ss_px'] + self.position[0]
# new position of the peak in the panel after rotation
peak_reflection['position'] = (posx, posy)
def rot_y_2x(self, center_x, center_y):
"""Rotation along y=-x transpose.
Parameters
----------
center_x : int
Displacement of centre x-axis.
center_y : int
Displacement of centre y-axis.
"""
# rotation y=-x transpose
self.array = np.transpose(self.array)
# The position of the panel
# position x
pos_x = int(np.round(self.image_size[0]/2.0 - self.corner_y, 0))
# position y
pos_y = int(np.round(self.image_size[1]/2.0 + self.corner_x, 0))
# position + displacement.
self.position = (pos_x + center_x, pos_y + center_y)
        # update peak coordinates in both peak lists:
for peak_search in self.peaks_search:
# for check peak detection
# setting the peak relative
# to the upper left corner of the panel
# default: upper left corner of the matrix data
peak_search['ss_px'] -= self.min_ss
peak_search['fs_px'] -= self.min_fs
old_ss_px = peak_search['ss_px']
peak_search['ss_px'] = peak_search['fs_px']
peak_search['fs_px'] = old_ss_px
posx = peak_search['fs_px'] + self.position[1]
posy = peak_search['ss_px'] + self.position[0]
# new position of the peak in the panel after rotation
peak_search['position'] = (posx, posy)
for peak_reflection in self.peaks_reflection:
# for script near bragg
# setting the peak relative
# to the upper left corner of the panel
# default: upper left corner of the matrix data
peak_reflection['ss_px'] -= self.min_ss
peak_reflection['fs_px'] -= self.min_fs
# setting position after rotation
old_ss_px = peak_reflection['ss_px']
peak_reflection['ss_px'] = peak_reflection['fs_px']
peak_reflection['fs_px'] = old_ss_px
posx = peak_reflection['fs_px'] + self.position[1]
posy = peak_reflection['ss_px'] + self.position[0]
# new position of the peak in the panel after rotation
peak_reflection['position'] = (posx, posy)
def get_detectors(raw_data_from_h5, image_size, geom,
peaks_search, peaks_reflections):
"""Creates a dictionary with detector class objects as items and
panel names as in the geometry file as keys. Function reads 'raw' data
for each panel from the h5 file.
Parameters
----------
raw_data_from_h5 : numpy.array
Data from h5 for all detectors.
image_size : tuple
Image size.
geom : dict
Dictionary with the geometry information loaded from the geomfile.
peaks_search : dict
Dictionary with list of Peaks detector name and value list.
peaks_reflections : dict
Dictionary with list of Peaks detector name and value list.
Returns
-------
panels : dict
Dictionary with class Detector object.
"""
panels = {panel_name: Detector(name=panel_name, image_size=image_size,
corner_x=geom["panels"][panel_name]["cnx"],
corner_y=geom["panels"][panel_name]["cny"],
min_fs=geom["panels"][panel_name]["min_fs"],
min_ss=geom["panels"][panel_name]["min_ss"],
max_fs=geom["panels"][panel_name]["max_fs"],
max_ss=geom["panels"][panel_name]["max_ss"],
xfs=geom["panels"][panel_name]["xfs"],
yfs=geom["panels"][panel_name]["yfs"],
xss=geom["panels"][panel_name]["xss"],
yss=geom["panels"][panel_name]["yss"],
data=raw_data_from_h5)
for panel_name in geom["panels"]}
# complete all panels with a list of peaks they have.
# peaks which `check peak detection` shows
# and peaks which `near bragg` shows.
for name in panels:
try:
panels[name].peaks_search = peaks_search[name]
except Exception:
pass
try:
panels[name].peaks_reflection = peaks_reflections[name]
except Exception:
pass
return panels
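# Minimal usage sketch (hypothetical file and dataset names; the geometry
# dictionary is assumed to come from this package's geometry parser):
#
#     import h5py
#     with h5py.File("run_0001.h5", "r") as f:
#         raw = f["/data/data"][()]
#     panels = get_detectors(raw, image_size=(1736, 1742), geom=geom_dict,
#                            peaks_search={}, peaks_reflections={})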
class BadRegion:
"""Class for mapping bad pixel regions on the image.
Regions are read from the geometry file.
Attributes
----------
name : str
Bad region name from geom file.
image_size : tuple
Image size.
min_x : int
Range x_min bad region.
min_y : int
Range y_min bad region.
max_x : int
Range x_max bad region.
max_y : int
Range y_max bad region.
"""
def __init__(self, image_size, name, min_x, max_x, min_y, max_y):
"""
Parameters
----------
name : str
Bad region name from geom file.
image_size : tuple
Image size.
min_x : int
Range x_min bad region.
min_y : int
Range y_min bad region.
max_x : int
Range x_max bad region.
max_y : int
Range y_max bad region.
"""
self.name = name
self.image_size = image_size
self.min_x = int(np.round(min_x + self.image_size[1]/2, 0))
self.max_x = int(np.round(max_x + self.image_size[1]/2, 0))
self.min_y = int(np.round(-min_y + self.image_size[0]/2, 0))
self.max_y = int(np.round(-max_y + self.image_size[0]/2, 0))
# check if the bad region range are not outside my image size
if self.min_x < 0:
self.min_x = 0
if self.max_x > self.image_size[0] - 1:
self.max_x = self.image_size[0] - 1
if self.min_y > self.image_size[1] - 1:
self.min_y = self.image_size[1] - 1
if self.max_y < 0:
self.max_y = 0
self.shape = (self.min_y - self.max_y, self.max_x - self.min_x)
# bad region as numpy.array zeros
self.array = np.zeros(self.shape)
def get_array(self):
"""Returns array data.
Returns
-------
array : numpy.array
The numpy.array for BadRegion.
"""
return self.array
def bad_places(image_size, geom):
"""Creates a dictionary with bad pixel regions from geom file.
Parameters
----------
image_size : tuple
Image size.
geom : dict
Dictionary with the geometry information loaded from the geomfile.
Returns
-------
bad_places : dict
dictionary with class BadRegion object
"""
bad_places = {bad_name: BadRegion(image_size, bad_name,
geom['bad'][bad_name]['min_x'],
geom['bad'][bad_name]['max_x'],
geom['bad'][bad_name]['min_y'],
geom['bad'][bad_name]['max_y'])
for bad_name in geom['bad']}
return bad_places
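# Sketch of how the bad regions can be used to mask the assembled image
# (note that min_y > max_y in image coordinates because the y-axis is
# flipped in the constructor above):
#
#     for bad in bad_places(image_size, geom).values():
#         image[bad.max_y:bad.min_y, bad.min_x:bad.max_x] = bad.get_array()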
| 33.075758 | 79 | 0.554741 | 2,534 | 19,647 | 4.124704 | 0.085635 | 0.022962 | 0.024876 | 0.026024 | 0.672981 | 0.636051 | 0.628492 | 0.61969 | 0.580367 | 0.562763 | 0 | 0.009003 | 0.349824 | 19,647 | 593 | 80 | 33.131535 | 0.809222 | 0.369013 | 0 | 0.366337 | 0 | 0.004951 | 0.052794 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.064356 | false | 0.009901 | 0.014851 | 0 | 0.118812 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
b8819290feecd723601f0d9bc71ae2146d1493c3 | 927 | py | Python | notebooks/figures/plot_svc.py | d9w/computational_intelligence | aafcb3aacad0640468bf7bc0b01d0d8cafed6ee3 | [
"MIT"
] | 2 | 2020-07-17T21:15:51.000Z | 2020-08-15T03:29:51.000Z | notebooks/figures/plot_svc.py | d9w/computational_intelligence | aafcb3aacad0640468bf7bc0b01d0d8cafed6ee3 | [
"MIT"
] | null | null | null | notebooks/figures/plot_svc.py | d9w/computational_intelligence | aafcb3aacad0640468bf7bc0b01d0d8cafed6ee3 | [
"MIT"
] | 4 | 2018-04-23T11:29:00.000Z | 2020-05-16T05:34:07.000Z | import matplotlib.pyplot as plt
import numpy as np
def plot_svc_decision_function(model, ax=None, plot_support=True):
"""Plot the decision function for a 2D SVC"""
if ax is None:
ax = plt.gca()
xlim = ax.get_xlim()
ylim = ax.get_ylim()
# create grid to evaluate model
x = np.linspace(xlim[0], xlim[1], 30)
y = np.linspace(ylim[0], ylim[1], 30)
Y, X = np.meshgrid(y, x)
xy = np.vstack([X.ravel(), Y.ravel()]).T
P = model.decision_function(xy).reshape(X.shape)
# plot decision boundary and margins
ax.contour(X, Y, P, colors='k',
levels=[-1, 0, 1], alpha=0.5,
linestyles=['--', '-', '--'])
# plot support vectors
if plot_support:
        ax.scatter(model.support_vectors_[:, 0],
                   model.support_vectors_[:, 1],
                   s=300, linewidth=1, facecolors='none')
ax.set_xlim(xlim)
ax.set_ylim(ylim)
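# Hedged usage sketch (not part of the original module): fit a linear SVC on
# a toy two-blob dataset and draw its decision boundary, margins and support
# vectors with the helper above. Dataset and parameters are illustrative.
if __name__ == '__main__':
    from sklearn.datasets import make_blobs
    from sklearn.svm import SVC

    # Two well-separated clusters so the (nearly) hard-margin SVC is easy to inspect
    X, y = make_blobs(n_samples=50, centers=2, random_state=0, cluster_std=0.60)
    model = SVC(kernel='linear', C=1e10).fit(X, y)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=50, cmap='autumn')
    plot_svc_decision_function(model)
    plt.show()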
| 30.9 | 66 | 0.580367 | 136 | 927 | 3.852941 | 0.455882 | 0.091603 | 0.015267 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.029326 | 0.264293 | 927 | 29 | 67 | 31.965517 | 0.739003 | 0.135922 | 0 | 0 | 0 | 0 | 0.01261 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.047619 | false | 0 | 0.095238 | 0 | 0.142857 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |